source: trunk/src/gcc/gcc/cse.c@1392

Last change on this file since 1392 was 1392, checked in by bird, 21 years ago

This commit was generated by cvs2svn to compensate for changes in r1391,
which included commits to RCS files with non-trunk default branches.

  • Property cvs2svn:cvs-rev set to 1.1.1.2
  • Property svn:eol-style set to native
  • Property svn:executable set to *
1/* Common subexpression elimination for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22#include "config.h"
23/* stdio.h must precede rtl.h for FFS. */
24#include "system.h"
25
26#include "rtl.h"
27#include "tm_p.h"
28#include "regs.h"
29#include "hard-reg-set.h"
30#include "basic-block.h"
31#include "flags.h"
32#include "real.h"
33#include "insn-config.h"
34#include "recog.h"
35#include "function.h"
36#include "expr.h"
37#include "toplev.h"
38#include "output.h"
39#include "ggc.h"
40#include "timevar.h"
41
42/* The basic idea of common subexpression elimination is to go
43 through the code, keeping a record of expressions that would
44 have the same value at the current scan point, and replacing
45 expressions encountered with the cheapest equivalent expression.
46
47 It is too complicated to keep track of the different possibilities
48 when control paths merge in this code; so, at each label, we forget all
49 that is known and start fresh. This can be described as processing each
50 extended basic block separately. We have a separate pass to perform
51 global CSE.
52
53 Note CSE can turn a conditional or computed jump into a nop or
54 an unconditional jump. When this occurs we arrange to run the jump
55 optimizer after CSE to delete the unreachable code.
56
57 We use two data structures to record the equivalent expressions:
58 a hash table for most expressions, and a vector of "quantity
59 numbers" to record equivalent (pseudo) registers.
60
61 The use of the special data structure for registers is desirable
62 because it is faster. It is possible because register references
63 contain a fairly small number, the register number, taken from
64 a contiguously allocated series, and two register references are
65 identical if they have the same number. General expressions
66 do not have any such thing, so the only way to retrieve the
67 information recorded on an expression other than a register
68 is to keep it in a hash table.
69
70Registers and "quantity numbers":
71
72 At the start of each basic block, all of the (hardware and pseudo)
73 registers used in the function are given distinct quantity
74 numbers to indicate their contents. During scan, when the code
75 copies one register into another, we copy the quantity number.
76 When a register is loaded in any other way, we allocate a new
77 quantity number to describe the value generated by this operation.
78 `reg_qty' records what quantity a register is currently thought
79 of as containing.
80
81 All real quantity numbers are greater than or equal to `max_reg'.
82 If register N has not been assigned a quantity, reg_qty[N] will equal N.
83
84 Quantity numbers below `max_reg' do not exist and none of the `qty_table'
85 entries should be referenced with an index below `max_reg'.
86
87 We also maintain a bidirectional chain of registers for each
88 quantity number. The `qty_table' members `first_reg' and `last_reg',
89 and `reg_eqv_table' members `next' and `prev' hold these chains.
90
91 The first register in a chain is the one whose lifespan is least local.
92 Among equals, it is the one that was seen first.
93 We replace any equivalent register with that one.
94
95 If two registers have the same quantity number, then REG expressions
96 with the qty_table `mode' must be in the hash table for both registers,
97 and they must be in the same class.
98
99 The converse is not true. Since hard registers may be referenced in
100 any mode, two REG expressions might be equivalent in the hash table
101 yet not share a quantity number, when the mode recorded for the quantity
102 of one of the registers differs from the mode of those expressions.
103
104Constants and quantity numbers
105
106 When a quantity has a known constant value, that value is stored
107 in the appropriate qty_table `const_rtx'. This is in addition to
108 putting the constant in the hash table as is usual for non-regs.
109
110 Whether a reg or a constant is preferred is determined by the configuration
111 macro CONST_COSTS and will often depend on the constant value. In any
112 event, expressions containing constants can be simplified by fold_rtx.
113
114 When a quantity has a known nearly constant value (such as an address
115 of a stack slot), that value is stored in the appropriate qty_table
116 `const_rtx'.
117
118 Integer constants don't have a machine mode. However, cse
119 determines the intended machine mode from the destination
120 of the instruction that moves the constant. The machine mode
121 is recorded in the hash table along with the actual RTL
122 constant expression so that different modes are kept separate.
123
124Other expressions:
125
126 To record known equivalences among expressions in general
127 we use a hash table called `table'. It has a fixed number of buckets
128 that contain chains of `struct table_elt' elements for expressions.
129 These chains connect the elements whose expressions have the same
130 hash codes.
131
132 Other chains through the same elements connect the elements which
133 currently have equivalent values.
134
135 Register references in an expression are canonicalized before hashing
136 the expression. This is done using `reg_qty' and qty_table `first_reg'.
137 The hash code of a register reference is computed using the quantity
138 number, not the register number.
139
140 When the value of an expression changes, it is necessary to remove from the
141 hash table not just that expression but all expressions whose values
142 could be different as a result.
143
144 1. If the value changing is in memory, then, except in special cases,
145 ANYTHING referring to memory could be changed. That is because
146 nobody knows where a pointer does not point.
147 The function `invalidate_memory' removes what is necessary.
148
149 The special cases are when the address is constant or is
150 a constant plus a fixed register such as the frame pointer
151 or a static chain pointer. When such addresses are stored in,
152 we can tell exactly which other such addresses must be invalidated
153 due to overlap. `invalidate' does this.
154 All expressions that refer to non-constant
155 memory addresses are also invalidated. `invalidate_memory' does this.
156
157 2. If the value changing is a register, all expressions
158 containing references to that register, and only those,
159 must be removed.
160
161 Because searching the entire hash table for expressions that contain
162 a register is very slow, we try to figure out when it isn't necessary.
163 Precisely, this is necessary only when expressions have been
164 entered in the hash table using this register, and then the value has
165 changed, and then another expression wants to be added to refer to
166 the register's new value. This sequence of circumstances is rare
167 within any one basic block.
168
169 The vectors `reg_tick' and `reg_in_table' are used to detect this case.
170 reg_tick[i] is incremented whenever a value is stored in register i.
171 reg_in_table[i] holds -1 if no references to register i have been
172 entered in the table; otherwise, it contains the value reg_tick[i] had
173 when the references were entered. If we want to enter a reference
174 and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
175 Until we want to enter a new entry, the mere fact that the two vectors
176 don't match causes those entries to be ignored if anyone tries to match them.
177
178 Registers themselves are entered in the hash table as well as in
179 the equivalent-register chains. However, the vectors `reg_tick'
180 and `reg_in_table' do not apply to expressions which are simple
181 register references. These expressions are removed from the table
182 immediately when they become invalid, and this can be done even if
183 we do not immediately search for all the expressions that refer to
184 the register.
185
186 A CLOBBER rtx in an instruction invalidates its operand for further
187 reuse. A CLOBBER or SET rtx whose operand is a MEM:BLK
188 invalidates everything that resides in memory.
189
190Related expressions:
191
192 Constant expressions that differ only by an additive integer
193 are called related. When a constant expression is put in
194 the table, the related expression with no constant term
195 is also entered. These are made to point at each other
196 so that it is possible to find out if there exists any
197 register equivalent to an expression related to a given expression. */
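/* Editorial sketch, not part of the original file: a toy model of the
   quantity-number scheme described above, using a hypothetical fixed-size
   register file.  Two registers are known to hold the same value exactly
   when they map to the same quantity.  Wrapped in #if 0 so it cannot
   affect compilation.  */
#if 0
#define TOY_NREGS 16

static int toy_reg_qty[TOY_NREGS];	/* analogue of reg_qty */
static int toy_next_qty;		/* analogue of next_qty */

/* Start of a new extended basic block: forget all equivalences by
   giving every register its own "invalid" quantity (reg_qty[N] == N).  */
static void
toy_new_block ()
{
  int i;

  for (i = 0; i < TOY_NREGS; i++)
    toy_reg_qty[i] = i;
  toy_next_qty = TOY_NREGS;	/* real quantities start at "max_reg" */
}

/* Any store into DST other than a register copy gives it a fresh
   quantity: whatever was known about its old contents is gone.  */
static void
toy_record_set (dst)
     int dst;
{
  toy_reg_qty[dst] = toy_next_qty++;
}

/* A register-to-register copy propagates the quantity number, which is
   how DST and SRC become known-equivalent.  */
static void
toy_record_copy (dst, src)
     int dst, src;
{
  if (toy_reg_qty[src] == src)		/* SRC had no real quantity yet */
    toy_reg_qty[src] = toy_next_qty++;
  toy_reg_qty[dst] = toy_reg_qty[src];
}
#endif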
198
199/* One plus largest register number used in this function. */
200
201static int max_reg;
202
203/* One plus largest instruction UID used in this function at time of
204 cse_main call. */
205
206static int max_insn_uid;
207
208/* Length of qty_table vector. We know in advance we will not need
209 a quantity number this big. */
210
211static int max_qty;
212
213/* Next quantity number to be allocated.
214 This is 1 + the largest number needed so far. */
215
216static int next_qty;
217
218/* Per-qty information tracking.
219
220 `first_reg' and `last_reg' track the head and tail of the
221 chain of registers which currently contain this quantity.
222
223 `mode' contains the machine mode of this quantity.
224
225 `const_rtx' holds the rtx of the constant value of this
226 quantity, if known. A sum of the frame/arg pointer
227 and a constant can also be entered here. When this holds
228 a known value, `const_insn' is the insn which stored the
229 constant value.
230
231 `comparison_{code,const,qty}' are used to track when a
232 comparison between a quantity and some constant or register has
233 been passed. In such a case, we know the results of the comparison
234 in case we see it again. These members record a comparison that
235 is known to be true. `comparison_code' holds the rtx code of such
236 a comparison, else it is set to UNKNOWN and the other two
237 comparison members are undefined. `comparison_const' holds
238 the constant being compared against, or zero if the comparison
239 is not against a constant. `comparison_qty' holds the quantity
240 being compared against when the result is known. If the comparison
241 is not with a register, `comparison_qty' is -1. */
242
243struct qty_table_elem
244{
245 rtx const_rtx;
246 rtx const_insn;
247 rtx comparison_const;
248 int comparison_qty;
249 unsigned int first_reg, last_reg;
250 enum machine_mode mode;
251 enum rtx_code comparison_code;
252};
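/* Editorial sketch, not part of the original file: one way the comparison
   members above are meant to be consulted.  Once a conditional branch has
   established that (CODE qty CONST) is true, a later occurrence of the same
   comparison can be folded without re-evaluation.  The helper name is
   hypothetical; rtx_equal_p is the real GCC predicate.  Under #if 0.  */
#if 0
static int
toy_comparison_known_true (ent, code, const_arg)
     struct qty_table_elem *ent;
     enum rtx_code code;
     rtx const_arg;
{
  return (ent->comparison_code == code
	  && ent->comparison_qty == -1	/* compared against a constant */
	  && ent->comparison_const != 0
	  && rtx_equal_p (ent->comparison_const, const_arg));
}
#endif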
253
254/* The table of all qtys, indexed by qty number. */
255static struct qty_table_elem *qty_table;
256
257#ifdef HAVE_cc0
258/* For machines that have a CC0, we do not record its value in the hash
259 table since its use is guaranteed to be the insn immediately following
260 its definition and any other insn is presumed to invalidate it.
261
262 Instead, we store below the value last assigned to CC0. If it should
263 happen to be a constant, it is stored in preference to the actual
264 assigned value. In case it is a constant, we store the mode in which
265 the constant should be interpreted. */
266
267static rtx prev_insn_cc0;
268static enum machine_mode prev_insn_cc0_mode;
269#endif
270
271/* Previous actual insn. 0 if at first insn of basic block. */
272
273static rtx prev_insn;
274
275/* Insn being scanned. */
276
277static rtx this_insn;
278
279 /* Indexed by register number, gives the number of the next (or
280 previous) register in the chain of registers sharing the same
281 value.
282
283 Or -1 if this register is at the end of the chain.
284
285 If reg_qty[N] == N, reg_eqv_table[N].next is undefined. */
286
287/* Per-register equivalence chain. */
288struct reg_eqv_elem
289{
290 int next, prev;
291};
292
293/* The table of all register equivalence chains. */
294static struct reg_eqv_elem *reg_eqv_table;
295
296struct cse_reg_info
297{
298 /* Next in hash chain. */
299 struct cse_reg_info *hash_next;
300
301 /* The next cse_reg_info structure in the free or used list. */
302 struct cse_reg_info *next;
303
304 /* Search key */
305 unsigned int regno;
306
307 /* The quantity number of the register's current contents. */
308 int reg_qty;
309
310 /* The number of times the register has been altered in the current
311 basic block. */
312 int reg_tick;
313
314 /* The REG_TICK value at which rtx's containing this register are
315 valid in the hash table. If this does not equal the current
316 reg_tick value, such expressions existing in the hash table are
317 invalid. */
318 int reg_in_table;
319
320 /* The SUBREG that was set when REG_TICK was last incremented. Set
321 to -1 if the last store was to the whole register, not a subreg. */
322 unsigned int subreg_ticked;
323};
324
325/* A free list of cse_reg_info entries. */
326static struct cse_reg_info *cse_reg_info_free_list;
327
328/* A used list of cse_reg_info entries. */
329static struct cse_reg_info *cse_reg_info_used_list;
330static struct cse_reg_info *cse_reg_info_used_list_end;
331
332/* A mapping from registers to cse_reg_info data structures. */
333#define REGHASH_SHIFT 7
334#define REGHASH_SIZE (1 << REGHASH_SHIFT)
335#define REGHASH_MASK (REGHASH_SIZE - 1)
336static struct cse_reg_info *reg_hash[REGHASH_SIZE];
337
338#define REGHASH_FN(REGNO) \
339 (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
340
341 /* The last lookup we did into the cse_reg_info hash table. This allows us
342 to cache repeated lookups. */
343static unsigned int cached_regno;
344static struct cse_reg_info *cached_cse_reg_info;
345
346/* A HARD_REG_SET containing all the hard registers for which there is
347 currently a REG expression in the hash table. Note the difference
348 from the above variables, which indicate if the REG is mentioned in some
349 expression in the table. */
350
351static HARD_REG_SET hard_regs_in_table;
352
353/* CUID of insn that starts the basic block currently being cse-processed. */
354
355static int cse_basic_block_start;
356
357/* CUID of insn that ends the basic block currently being cse-processed. */
358
359static int cse_basic_block_end;
360
361/* Vector mapping INSN_UIDs to cuids.
362 The cuids are like uids but always increase monotonically.
363 We use them to see whether a reg is used outside a given basic block. */
364
365static int *uid_cuid;
366
367/* Highest UID in UID_CUID. */
368static int max_uid;
369
370/* Get the cuid of an insn. */
371
372#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
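/* Editorial sketch, not part of the original file: cuids are assigned by a
   single in-order walk of the insn stream (cse_main does the real version),
   so comparing two cuids always tells which insn comes first, which raw
   uids do not guarantee.  The helper is hypothetical and assumes uid_cuid
   has already been allocated.  Under #if 0.  */
#if 0
static void
toy_compute_cuids (first)
     rtx first;
{
  rtx insn;
  int cuid = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    uid_cuid[INSN_UID (insn)] = ++cuid;
}
#endif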
373
374/* Nonzero if this pass has made changes, and therefore it's
375 worthwhile to run the garbage collector. */
376
377static int cse_altered;
378
379/* Nonzero if cse has altered conditional jump insns
380 in such a way that jump optimization should be redone. */
381
382static int cse_jumps_altered;
383
384 /* Nonzero if we put a LABEL_REF into the hash table for an INSN without a
385 REG_LABEL note; if so, we have to rerun jump after CSE to put in the note. */
386static int recorded_label_ref;
387
388/* canon_hash stores 1 in do_not_record
389 if it notices a reference to CC0, PC, or some other volatile
390 subexpression. */
391
392static int do_not_record;
393
394#ifdef LOAD_EXTEND_OP
395
396/* Scratch rtl used when looking for load-extended copy of a MEM. */
397static rtx memory_extend_rtx;
398#endif
399
400/* canon_hash stores 1 in hash_arg_in_memory
401 if it notices a reference to memory within the expression being hashed. */
402
403static int hash_arg_in_memory;
404
405/* The hash table contains buckets which are chains of `struct table_elt's,
406 each recording one expression's information.
407 That expression is in the `exp' field.
408
409 The canon_exp field contains a canonical (from the point of view of
410 alias analysis) version of the `exp' field.
411
412 Those elements with the same hash code are chained in both directions
413 through the `next_same_hash' and `prev_same_hash' fields.
414
415 Each set of expressions with equivalent values
416 are on a two-way chain through the `next_same_value'
417 and `prev_same_value' fields, and all point with
418 the `first_same_value' field at the first element in
419 that chain. The chain is in order of increasing cost.
420 Each element's cost value is in its `cost' field.
421
422 The `in_memory' field is nonzero for elements that
423 involve any reference to memory. These elements are removed
424 whenever a write is done to an unidentified location in memory.
425 To be safe, we assume that a memory address is unidentified unless
426 the address is either a symbol constant or a constant plus
427 the frame pointer or argument pointer.
428
429 The `related_value' field is used to connect related expressions
430 (that differ by adding an integer).
431 The related expressions are chained in a circular fashion.
432 `related_value' is zero for expressions for which this
433 chain is not useful.
434
435 The `cost' field stores the cost of this element's expression.
436 The `regcost' field stores the value returned by approx_reg_cost for
437 this element's expression.
438
439 The `is_const' flag is set if the element is a constant (including
440 a fixed address).
441
442 The `flag' field is used as a temporary during some search routines.
443
444 The `mode' field is usually the same as GET_MODE (`exp'), but
445 if `exp' is a CONST_INT and has no machine mode then the `mode'
446 field is the mode it was being used as. Each constant is
447 recorded separately for each mode it is used with. */
448
449struct table_elt
450{
451 rtx exp;
452 rtx canon_exp;
453 struct table_elt *next_same_hash;
454 struct table_elt *prev_same_hash;
455 struct table_elt *next_same_value;
456 struct table_elt *prev_same_value;
457 struct table_elt *first_same_value;
458 struct table_elt *related_value;
459 int cost;
460 int regcost;
461 enum machine_mode mode;
462 char in_memory;
463 char is_const;
464 char flag;
465};
466
467 /* We don't want a lot of buckets, because we rarely have very many
468 things stored in the hash table, and extra buckets slow down the
469 frequently executed loops that scan or clear the whole table. */
470#define HASH_SHIFT 5
471#define HASH_SIZE (1 << HASH_SHIFT)
472#define HASH_MASK (HASH_SIZE - 1)
473
474 /* Compute hash code of X in mode M. Special-case the case where X is a pseudo
475 register (hard registers may require `do_not_record' to be set). */
476
477#define HASH(X, M) \
478 ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER \
479 ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X))) \
480 : canon_hash (X, M)) & HASH_MASK)
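/* Editorial sketch, not part of the original file: the point of hashing a
   pseudo register through its quantity number rather than its register
   number is that two registers already known equivalent fall into the same
   bucket, so a lookup of one can find an entry inserted under the other.
   A toy version of the same fold, where QTY is the shared quantity
   (hypothetical helper, under #if 0):  */
#if 0
static unsigned
toy_pseudo_hash (qty)
     int qty;
{
  return (((unsigned) REG << 7) + (unsigned) qty) & HASH_MASK;
}
#endif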
481
482/* Determine whether register number N is considered a fixed register for the
483 purpose of approximating register costs.
484 It is desirable to replace other regs with fixed regs, to reduce need for
485 non-fixed hard regs.
486 A reg wins if it is either the frame pointer or designated as fixed. */
487#define FIXED_REGNO_P(N) \
488 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
489 || fixed_regs[N] || global_regs[N])
490
491/* Compute cost of X, as stored in the `cost' field of a table_elt. Fixed
492 hard registers and pointers into the frame are the cheapest with a cost
493 of 0. Next come pseudos with a cost of one and other hard registers with
494 a cost of 2. Aside from these special cases, call `rtx_cost'. */
495
496#define CHEAP_REGNO(N) \
497 ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
498 || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM \
499 || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER) \
500 || ((N) < FIRST_PSEUDO_REGISTER \
501 && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))
502
503#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
504#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))
505
506/* Get the info associated with register N. */
507
508#define GET_CSE_REG_INFO(N) \
509 (((N) == cached_regno && cached_cse_reg_info) \
510 ? cached_cse_reg_info : get_cse_reg_info ((N)))
511
512/* Get the number of times this register has been updated in this
513 basic block. */
514
515#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)
516
517/* Get the point at which REG was recorded in the table. */
518
519#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)
520
521/* Get the SUBREG set at the last increment to REG_TICK (-1 if not a
522 SUBREG). */
523
524#define SUBREG_TICKED(N) ((GET_CSE_REG_INFO (N))->subreg_ticked)
525
526/* Get the quantity number for REG. */
527
528#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)
529
530/* Determine if the quantity number for register X represents a valid index
531 into the qty_table. */
532
533#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
534
535static struct table_elt *table[HASH_SIZE];
536
537/* Chain of `struct table_elt's made so far for this function
538 but currently removed from the table. */
539
540static struct table_elt *free_element_chain;
541
542/* Number of `struct table_elt' structures made so far for this function. */
543
544static int n_elements_made;
545
546/* Maximum value `n_elements_made' has had so far in this compilation
547 for functions previously processed. */
548
549static int max_elements_made;
550
551/* Surviving equivalence class when two equivalence classes are merged
552 by recording the effects of a jump in the last insn. Zero if the
553 last insn was not a conditional jump. */
554
555static struct table_elt *last_jump_equiv_class;
556
557/* Set to the cost of a constant pool reference if one was found for a
558 symbolic constant. If this was found, it means we should try to
559 convert constants into constant pool entries if they don't fit in
560 the insn. */
561
562static int constant_pool_entries_cost;
563
564/* Define maximum length of a branch path. */
565
566#define PATHLENGTH 10
567
568/* This data describes a block that will be processed by cse_basic_block. */
569
570struct cse_basic_block_data
571{
572 /* Lowest CUID value of insns in block. */
573 int low_cuid;
574 /* Highest CUID value of insns in block. */
575 int high_cuid;
576 /* Total number of SETs in block. */
577 int nsets;
578 /* Last insn in the block. */
579 rtx last;
580 /* Size of current branch path, if any. */
581 int path_size;
582 /* Current branch path, indicating which branches will be taken. */
583 struct branch_path
584 {
585 /* The branch insn. */
586 rtx branch;
587 /* Whether it should be taken or not. AROUND is the same as taken
588 except that it is used when the destination label is not preceded
589 by a BARRIER. */
590 enum taken {TAKEN, NOT_TAKEN, AROUND} status;
591 } path[PATHLENGTH];
592};
593
594/* Nonzero if X has the form (PLUS frame-pointer integer). We check for
595 virtual regs here because the simplify_*_operation routines are called
596 by integrate.c, which is called before virtual register instantiation.
597
598 ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
599 a header file so that their definitions can be shared with the
600 simplification routines in simplify-rtx.c. Until then, do not
601 change these macros without also changing the copy in simplify-rtx.c. */
602
603#define FIXED_BASE_PLUS_P(X) \
604 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
605 || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
606 || (X) == virtual_stack_vars_rtx \
607 || (X) == virtual_incoming_args_rtx \
608 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
609 && (XEXP (X, 0) == frame_pointer_rtx \
610 || XEXP (X, 0) == hard_frame_pointer_rtx \
611 || (XEXP (X, 0) == arg_pointer_rtx \
612 && fixed_regs[ARG_POINTER_REGNUM]) \
613 || XEXP (X, 0) == virtual_stack_vars_rtx \
614 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
615 || GET_CODE (X) == ADDRESSOF)
616
617/* Similar, but also allows reference to the stack pointer.
618
619 This used to include FIXED_BASE_PLUS_P; however, we can't assume that
620 arg_pointer_rtx by itself is nonzero, because on at least one machine,
621 the i960, the arg pointer is zero when it is unused. */
622
623#define NONZERO_BASE_PLUS_P(X) \
624 ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx \
625 || (X) == virtual_stack_vars_rtx \
626 || (X) == virtual_incoming_args_rtx \
627 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
628 && (XEXP (X, 0) == frame_pointer_rtx \
629 || XEXP (X, 0) == hard_frame_pointer_rtx \
630 || (XEXP (X, 0) == arg_pointer_rtx \
631 && fixed_regs[ARG_POINTER_REGNUM]) \
632 || XEXP (X, 0) == virtual_stack_vars_rtx \
633 || XEXP (X, 0) == virtual_incoming_args_rtx)) \
634 || (X) == stack_pointer_rtx \
635 || (X) == virtual_stack_dynamic_rtx \
636 || (X) == virtual_outgoing_args_rtx \
637 || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
638 && (XEXP (X, 0) == stack_pointer_rtx \
639 || XEXP (X, 0) == virtual_stack_dynamic_rtx \
640 || XEXP (X, 0) == virtual_outgoing_args_rtx)) \
641 || GET_CODE (X) == ADDRESSOF)
642
643static int notreg_cost PARAMS ((rtx, enum rtx_code));
644static int approx_reg_cost_1 PARAMS ((rtx *, void *));
645static int approx_reg_cost PARAMS ((rtx));
646static int preferrable PARAMS ((int, int, int, int));
647static void new_basic_block PARAMS ((void));
648static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
649static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
650static void delete_reg_equiv PARAMS ((unsigned int));
651static int mention_regs PARAMS ((rtx));
652static int insert_regs PARAMS ((rtx, struct table_elt *, int));
653static void remove_from_table PARAMS ((struct table_elt *, unsigned));
654static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
655 *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
656static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
657static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
658 enum machine_mode));
659static void merge_equiv_classes PARAMS ((struct table_elt *,
660 struct table_elt *));
661static void invalidate PARAMS ((rtx, enum machine_mode));
662static int cse_rtx_varies_p PARAMS ((rtx, int));
663static void remove_invalid_refs PARAMS ((unsigned int));
664static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
665 enum machine_mode));
666static void rehash_using_reg PARAMS ((rtx));
667static void invalidate_memory PARAMS ((void));
668static void invalidate_for_call PARAMS ((void));
669static rtx use_related_value PARAMS ((rtx, struct table_elt *));
670static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
671static unsigned canon_hash_string PARAMS ((const char *));
672static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
673static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
674static rtx canon_reg PARAMS ((rtx, rtx));
675static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
676static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
677 enum machine_mode *,
678 enum machine_mode *));
679static rtx fold_rtx PARAMS ((rtx, rtx));
680static rtx equiv_constant PARAMS ((rtx));
681static void record_jump_equiv PARAMS ((rtx, int));
682static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
683 rtx, rtx, int));
684static void cse_insn PARAMS ((rtx, rtx));
685static int addr_affects_sp_p PARAMS ((rtx));
686static void invalidate_from_clobbers PARAMS ((rtx));
687static rtx cse_process_notes PARAMS ((rtx, rtx));
688static void cse_around_loop PARAMS ((rtx));
689static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
690static void invalidate_skipped_block PARAMS ((rtx));
691static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
692static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
693static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
694static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
695static int check_for_label_ref PARAMS ((rtx *, void *));
696extern void dump_class PARAMS ((struct table_elt*));
697static struct cse_reg_info * get_cse_reg_info PARAMS ((unsigned int));
698static int check_dependence PARAMS ((rtx *, void *));
699
700static void flush_hash_table PARAMS ((void));
701static bool insn_live_p PARAMS ((rtx, int *));
702static bool set_live_p PARAMS ((rtx, rtx, int *));
703static bool dead_libcall_p PARAMS ((rtx, int *));
704
705
706/* Dump the expressions in the equivalence class indicated by CLASSP.
707 This function is used only for debugging. */
708void
709dump_class (classp)
710 struct table_elt *classp;
711{
712 struct table_elt *elt;
713
714 fprintf (stderr, "Equivalence chain for ");
715 print_rtl (stderr, classp->exp);
716 fprintf (stderr, ": \n");
717
718 for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
719 {
720 print_rtl (stderr, elt->exp);
721 fprintf (stderr, "\n");
722 }
723}
724
725/* Subroutine of approx_reg_cost; called through for_each_rtx. */
726
727static int
728approx_reg_cost_1 (xp, data)
729 rtx *xp;
730 void *data;
731{
732 rtx x = *xp;
733 int *cost_p = data;
734
735 if (x && GET_CODE (x) == REG)
736 {
737 unsigned int regno = REGNO (x);
738
739 if (! CHEAP_REGNO (regno))
740 {
741 if (regno < FIRST_PSEUDO_REGISTER)
742 {
743 if (SMALL_REGISTER_CLASSES)
744 return 1;
745 *cost_p += 2;
746 }
747 else
748 *cost_p += 1;
749 }
750 }
751
752 return 0;
753}
754
755/* Return an estimate of the cost of the registers used in an rtx.
756 This is mostly the number of different REG expressions in the rtx;
757 however, for some exceptions like fixed registers we use a cost of
758 0. If any other hard register reference occurs, return MAX_COST. */
759
760static int
761approx_reg_cost (x)
762 rtx x;
763{
764 int cost = 0;
765
766 if (for_each_rtx (&x, approx_reg_cost_1, (void *) &cost))
767 return MAX_COST;
768
769 return cost;
770}
771
772 /* Return a negative value if an rtx A, whose costs are given by COST_A
773 and REGCOST_A, is more desirable than an rtx B with costs COST_B and
774 REGCOST_B. Return a positive value if A is less desirable, or 0 if the two are
775 equally good. */
776static int
777preferrable (cost_a, regcost_a, cost_b, regcost_b)
778 int cost_a, regcost_a, cost_b, regcost_b;
779{
780 /* First, get rid of cases involving expressions that are entirely
781 unwanted. */
782 if (cost_a != cost_b)
783 {
784 if (cost_a == MAX_COST)
785 return 1;
786 if (cost_b == MAX_COST)
787 return -1;
788 }
789
790 /* Avoid extending lifetimes of hardregs. */
791 if (regcost_a != regcost_b)
792 {
793 if (regcost_a == MAX_COST)
794 return 1;
795 if (regcost_b == MAX_COST)
796 return -1;
797 }
798
799 /* Normal operation costs take precedence. */
800 if (cost_a != cost_b)
801 return cost_a - cost_b;
802 /* Only if these are identical consider effects on register pressure. */
803 if (regcost_a != regcost_b)
804 return regcost_a - regcost_b;
805 return 0;
806}
807
808/* Internal function, to compute cost when X is not a register; called
809 from COST macro to keep it simple. */
810
811static int
812notreg_cost (x, outer)
813 rtx x;
814 enum rtx_code outer;
815{
816 return ((GET_CODE (x) == SUBREG
817 && GET_CODE (SUBREG_REG (x)) == REG
818 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
819 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
820 && (GET_MODE_SIZE (GET_MODE (x))
821 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
822 && subreg_lowpart_p (x)
823 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
824 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
825 ? 0
826 : rtx_cost (x, outer) * 2);
827}
828
829/* Return an estimate of the cost of computing rtx X.
830 One use is in cse, to decide which expression to keep in the hash table.
831 Another is in rtl generation, to pick the cheapest way to multiply.
832 Other uses like the latter are expected in the future. */
833
834int
835rtx_cost (x, outer_code)
836 rtx x;
837 enum rtx_code outer_code ATTRIBUTE_UNUSED;
838{
839 int i, j;
840 enum rtx_code code;
841 const char *fmt;
842 int total;
843
844 if (x == 0)
845 return 0;
846
847 /* Compute the default costs of certain things.
848 Note that RTX_COSTS can override the defaults. */
849
850 code = GET_CODE (x);
851 switch (code)
852 {
853 case MULT:
854 total = COSTS_N_INSNS (5);
855 break;
856 case DIV:
857 case UDIV:
858 case MOD:
859 case UMOD:
860 total = COSTS_N_INSNS (7);
861 break;
862 case USE:
863 /* Used in loop.c and combine.c as a marker. */
864 total = 0;
865 break;
866 default:
867 total = COSTS_N_INSNS (1);
868 }
869
870 switch (code)
871 {
872 case REG:
873 return 0;
874
875 case SUBREG:
876 /* If we can't tie these modes, make this expensive. The larger
877 the mode, the more expensive it is. */
878 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
879 return COSTS_N_INSNS (2
880 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
881 break;
882
883#ifdef RTX_COSTS
884 RTX_COSTS (x, code, outer_code);
885#endif
886#ifdef CONST_COSTS
887 CONST_COSTS (x, code, outer_code);
888#endif
889
890 default:
891#ifdef DEFAULT_RTX_COSTS
892 DEFAULT_RTX_COSTS (x, code, outer_code);
893#endif
894 break;
895 }
896
897 /* Sum the costs of the sub-rtx's, plus cost of this operation,
898 which is already in total. */
899
900 fmt = GET_RTX_FORMAT (code);
901 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
902 if (fmt[i] == 'e')
903 total += rtx_cost (XEXP (x, i), code);
904 else if (fmt[i] == 'E')
905 for (j = 0; j < XVECLEN (x, i); j++)
906 total += rtx_cost (XVECEXP (x, i, j), code);
907
908 return total;
909}
910
911
912/* Return cost of address expression X.
913 Expect that X is a properly formed address reference. */
914
915int
916address_cost (x, mode)
917 rtx x;
918 enum machine_mode mode;
919{
920 /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes. But,
921 during CSE, such nodes are present. Using an ADDRESSOF node which
922 refers to the address of a REG is a good thing because we can then
923 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
924
925 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
926 return -1;
927
928 /* We may be asked for the cost of various unusual addresses, such as the
929 operands of a push instruction. It is not worthwhile to complicate the
930 ADDRESS_COST macro to handle such cases. */
931
932 if (!memory_address_p (mode, x))
933 return 1000;
934#ifdef ADDRESS_COST
935 return ADDRESS_COST (x);
936#else
937 return rtx_cost (x, MEM);
938#endif
939}
940
941
942 /* Find the cse_reg_info entry for REGNO, creating and initializing it if none exists yet; the most recent lookup is cached in cached_cse_reg_info. */
943static struct cse_reg_info *
944get_cse_reg_info (regno)
945 unsigned int regno;
946{
947 struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
948 struct cse_reg_info *p;
949
950 for (p = *hash_head; p != NULL; p = p->hash_next)
951 if (p->regno == regno)
952 break;
953
954 if (p == NULL)
955 {
956 /* Get a new cse_reg_info structure. */
957 if (cse_reg_info_free_list)
958 {
959 p = cse_reg_info_free_list;
960 cse_reg_info_free_list = p->next;
961 }
962 else
963 p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));
964
965 /* Insert into hash table. */
966 p->hash_next = *hash_head;
967 *hash_head = p;
968
969 /* Initialize it. */
970 p->reg_tick = 1;
971 p->reg_in_table = -1;
972 p->subreg_ticked = -1;
973 p->reg_qty = regno;
974 p->regno = regno;
975 p->next = cse_reg_info_used_list;
976 cse_reg_info_used_list = p;
977 if (!cse_reg_info_used_list_end)
978 cse_reg_info_used_list_end = p;
979 }
980
981 /* Cache this lookup; we tend to be looking up information about the
982 same register several times in a row. */
983 cached_regno = regno;
984 cached_cse_reg_info = p;
985
986 return p;
987}
988
989/* Clear the hash table and initialize each register with its own quantity,
990 for a new basic block. */
991
992static void
993new_basic_block ()
994{
995 int i;
996
997 next_qty = max_reg;
998
999 /* Clear out hash table state for this pass. */
1000
1001 memset ((char *) reg_hash, 0, sizeof reg_hash);
1002
1003 if (cse_reg_info_used_list)
1004 {
1005 cse_reg_info_used_list_end->next = cse_reg_info_free_list;
1006 cse_reg_info_free_list = cse_reg_info_used_list;
1007 cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
1008 }
1009 cached_cse_reg_info = 0;
1010
1011 CLEAR_HARD_REG_SET (hard_regs_in_table);
1012
1013 /* The per-quantity values used to be initialized here, but it is
1014 much faster to initialize each as it is made in `make_new_qty'. */
1015
1016 for (i = 0; i < HASH_SIZE; i++)
1017 {
1018 struct table_elt *first;
1019
1020 first = table[i];
1021 if (first != NULL)
1022 {
1023 struct table_elt *last = first;
1024
1025 table[i] = NULL;
1026
1027 while (last->next_same_hash != NULL)
1028 last = last->next_same_hash;
1029
1030 /* Now relink this entire hash chain into
1031 the free element list. */
1032
1033 last->next_same_hash = free_element_chain;
1034 free_element_chain = first;
1035 }
1036 }
1037
1038 prev_insn = 0;
1039
1040#ifdef HAVE_cc0
1041 prev_insn_cc0 = 0;
1042#endif
1043}
1044
1045 /* Say that register REG contains a quantity in mode MODE that was not
1046 held in any register before, and initialize that quantity. */
1047
1048static void
1049make_new_qty (reg, mode)
1050 unsigned int reg;
1051 enum machine_mode mode;
1052{
1053 int q;
1054 struct qty_table_elem *ent;
1055 struct reg_eqv_elem *eqv;
1056
1057 if (next_qty >= max_qty)
1058 abort ();
1059
1060 q = REG_QTY (reg) = next_qty++;
1061 ent = &qty_table[q];
1062 ent->first_reg = reg;
1063 ent->last_reg = reg;
1064 ent->mode = mode;
1065 ent->const_rtx = ent->const_insn = NULL_RTX;
1066 ent->comparison_code = UNKNOWN;
1067
1068 eqv = &reg_eqv_table[reg];
1069 eqv->next = eqv->prev = -1;
1070}
1071
1072/* Make reg NEW equivalent to reg OLD.
1073 OLD is not changing; NEW is. */
1074
1075static void
1076make_regs_eqv (new, old)
1077 unsigned int new, old;
1078{
1079 unsigned int lastr, firstr;
1080 int q = REG_QTY (old);
1081 struct qty_table_elem *ent;
1082
1083 ent = &qty_table[q];
1084
1085 /* Nothing should become eqv until it has a "non-invalid" qty number. */
1086 if (! REGNO_QTY_VALID_P (old))
1087 abort ();
1088
1089 REG_QTY (new) = q;
1090 firstr = ent->first_reg;
1091 lastr = ent->last_reg;
1092
1093 /* Prefer fixed hard registers to anything. Prefer pseudo regs to other
1094 hard regs. Among pseudos, if NEW will live longer than any other reg
1095 of the same qty, and that is beyond the current basic block,
1096 make it the new canonical replacement for this qty. */
1097 if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
1098 /* Certain fixed registers might be of the class NO_REGS. This means
1099 that not only can they not be allocated by the compiler, but
1100 they cannot be used in substitutions or canonicalizations
1101 either. */
1102 && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
1103 && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
1104 || (new >= FIRST_PSEUDO_REGISTER
1105 && (firstr < FIRST_PSEUDO_REGISTER
1106 || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
1107 || (uid_cuid[REGNO_FIRST_UID (new)]
1108 < cse_basic_block_start))
1109 && (uid_cuid[REGNO_LAST_UID (new)]
1110 > uid_cuid[REGNO_LAST_UID (firstr)]))))))
1111 {
1112 reg_eqv_table[firstr].prev = new;
1113 reg_eqv_table[new].next = firstr;
1114 reg_eqv_table[new].prev = -1;
1115 ent->first_reg = new;
1116 }
1117 else
1118 {
1119 /* If NEW is a hard reg (known to be non-fixed), insert at end.
1120 Otherwise, insert before any non-fixed hard regs that are at the
1121 end. Registers of class NO_REGS cannot be used as an
1122 equivalent for anything. */
1123 while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
1124 && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
1125 && new >= FIRST_PSEUDO_REGISTER)
1126 lastr = reg_eqv_table[lastr].prev;
1127 reg_eqv_table[new].next = reg_eqv_table[lastr].next;
1128 if (reg_eqv_table[lastr].next >= 0)
1129 reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
1130 else
1131 qty_table[q].last_reg = new;
1132 reg_eqv_table[lastr].next = new;
1133 reg_eqv_table[new].prev = lastr;
1134 }
1135}
1136
1137/* Remove REG from its equivalence class. */
1138
1139static void
1140delete_reg_equiv (reg)
1141 unsigned int reg;
1142{
1143 struct qty_table_elem *ent;
1144 int q = REG_QTY (reg);
1145 int p, n;
1146
1147 /* If invalid, do nothing. */
1148 if (q == (int) reg)
1149 return;
1150
1151 ent = &qty_table[q];
1152
1153 p = reg_eqv_table[reg].prev;
1154 n = reg_eqv_table[reg].next;
1155
1156 if (n != -1)
1157 reg_eqv_table[n].prev = p;
1158 else
1159 ent->last_reg = p;
1160 if (p != -1)
1161 reg_eqv_table[p].next = n;
1162 else
1163 ent->first_reg = n;
1164
1165 REG_QTY (reg) = reg;
1166}
1167
1168/* Remove any invalid expressions from the hash table
1169 that refer to any of the registers contained in expression X.
1170
1171 Make sure that newly inserted references to those registers
1172 as subexpressions will be considered valid.
1173
1174 mention_regs is not called when a register itself
1175 is being stored in the table.
1176
1177 Return 1 if we have done something that may have changed the hash code
1178 of X. */
1179
1180static int
1181mention_regs (x)
1182 rtx x;
1183{
1184 enum rtx_code code;
1185 int i, j;
1186 const char *fmt;
1187 int changed = 0;
1188
1189 if (x == 0)
1190 return 0;
1191
1192 code = GET_CODE (x);
1193 if (code == REG)
1194 {
1195 unsigned int regno = REGNO (x);
1196 unsigned int endregno
1197 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
1198 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
1199 unsigned int i;
1200
1201 for (i = regno; i < endregno; i++)
1202 {
1203 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1204 remove_invalid_refs (i);
1205
1206 REG_IN_TABLE (i) = REG_TICK (i);
1207 SUBREG_TICKED (i) = -1;
1208 }
1209
1210 return 0;
1211 }
1212
1213 /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
1214 pseudo if they don't use overlapping words. We handle only pseudos
1215 here for simplicity. */
1216 if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1217 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1218 {
1219 unsigned int i = REGNO (SUBREG_REG (x));
1220
1221 if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
1222 {
1223 /* If REG_IN_TABLE (i) differs from REG_TICK (i) by one, and
1224 the last store to this register really stored into this
1225 subreg, then remove the memory of this subreg.
1226 Otherwise, remove any memory of the entire register and
1227 all its subregs from the table. */
1228 if (REG_TICK (i) - REG_IN_TABLE (i) > 1
1229 || SUBREG_TICKED (i) != REGNO (SUBREG_REG (x)))
1230 remove_invalid_refs (i);
1231 else
1232 remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
1233 }
1234
1235 REG_IN_TABLE (i) = REG_TICK (i);
1236 SUBREG_TICKED (i) = REGNO (SUBREG_REG (x));
1237 return 0;
1238 }
1239
1240 /* If X is a comparison or a COMPARE and either operand is a register
1241 that does not have a quantity, give it one. This is so that a later
1242 call to record_jump_equiv won't cause X to be assigned a different
1243 hash code and not found in the table after that call.
1244
1245 It is not necessary to do this here, since rehash_using_reg can
1246 fix up the table later, but doing this here eliminates the need to
1247 call that expensive function in the most common case where the only
1248 use of the register is in the comparison. */
1249
1250 if (code == COMPARE || GET_RTX_CLASS (code) == '<')
1251 {
1252 if (GET_CODE (XEXP (x, 0)) == REG
1253 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
1254 if (insert_regs (XEXP (x, 0), NULL, 0))
1255 {
1256 rehash_using_reg (XEXP (x, 0));
1257 changed = 1;
1258 }
1259
1260 if (GET_CODE (XEXP (x, 1)) == REG
1261 && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
1262 if (insert_regs (XEXP (x, 1), NULL, 0))
1263 {
1264 rehash_using_reg (XEXP (x, 1));
1265 changed = 1;
1266 }
1267 }
1268
1269 fmt = GET_RTX_FORMAT (code);
1270 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1271 if (fmt[i] == 'e')
1272 changed |= mention_regs (XEXP (x, i));
1273 else if (fmt[i] == 'E')
1274 for (j = 0; j < XVECLEN (x, i); j++)
1275 changed |= mention_regs (XVECEXP (x, i, j));
1276
1277 return changed;
1278}
1279
1280/* Update the register quantities for inserting X into the hash table
1281 with a value equivalent to CLASSP.
1282 (If the class does not contain a REG, it is irrelevant.)
1283 If MODIFIED is nonzero, X is a destination; it is being modified.
1284 Note that delete_reg_equiv should be called on a register
1285 before insert_regs is done on that register with MODIFIED != 0.
1286
1287 Nonzero value means that elements of reg_qty have changed
1288 so X's hash code may be different. */
1289
1290static int
1291insert_regs (x, classp, modified)
1292 rtx x;
1293 struct table_elt *classp;
1294 int modified;
1295{
1296 if (GET_CODE (x) == REG)
1297 {
1298 unsigned int regno = REGNO (x);
1299 int qty_valid;
1300
1301 /* If REGNO is in the equivalence table already but is of the
1302 wrong mode for that equivalence, don't do anything here. */
1303
1304 qty_valid = REGNO_QTY_VALID_P (regno);
1305 if (qty_valid)
1306 {
1307 struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];
1308
1309 if (ent->mode != GET_MODE (x))
1310 return 0;
1311 }
1312
1313 if (modified || ! qty_valid)
1314 {
1315 if (classp)
1316 for (classp = classp->first_same_value;
1317 classp != 0;
1318 classp = classp->next_same_value)
1319 if (GET_CODE (classp->exp) == REG
1320 && GET_MODE (classp->exp) == GET_MODE (x))
1321 {
1322 make_regs_eqv (regno, REGNO (classp->exp));
1323 return 1;
1324 }
1325
1326 /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
1327 than REG_IN_TABLE to find out if there was only a single preceding
1328 invalidation - for the SUBREG - or another one, which would be
1329 for the full register. However, if we find here that REG_TICK
1330 indicates that the register is invalid, it means that it has
1331 been invalidated in a separate operation. The SUBREG might be used
1332 now (then this is a recursive call), or we might use the full REG
1333 now and a SUBREG of it later. So bump up REG_TICK so that
1334 mention_regs will do the right thing. */
1335 if (! modified
1336 && REG_IN_TABLE (regno) >= 0
1337 && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
1338 REG_TICK (regno)++;
1339 make_new_qty (regno, GET_MODE (x));
1340 return 1;
1341 }
1342
1343 return 0;
1344 }
1345
1346 /* If X is a SUBREG, we will likely be inserting the inner register in the
1347 table. If that register doesn't have an assigned quantity number at
1348 this point but does later, the insertion that we will be doing now will
1349 not be accessible because its hash code will have changed. So assign
1350 a quantity number now. */
1351
1352 else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
1353 && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
1354 {
1355 insert_regs (SUBREG_REG (x), NULL, 0);
1356 mention_regs (x);
1357 return 1;
1358 }
1359 else
1360 return mention_regs (x);
1361}
1362
1363
1364/* Look in or update the hash table. */
1365
1366/* Remove table element ELT from use in the table.
1367 HASH is its hash code, made using the HASH macro.
1368 It's an argument because often that is known in advance
1369 and we save much time not recomputing it. */
1370
1371static void
1372remove_from_table (elt, hash)
1373 struct table_elt *elt;
1374 unsigned hash;
1375{
1376 if (elt == 0)
1377 return;
1378
1379 /* Mark this element as removed. See cse_insn. */
1380 elt->first_same_value = 0;
1381
1382 /* Remove the table element from its equivalence class. */
1383
1384 {
1385 struct table_elt *prev = elt->prev_same_value;
1386 struct table_elt *next = elt->next_same_value;
1387
1388 if (next)
1389 next->prev_same_value = prev;
1390
1391 if (prev)
1392 prev->next_same_value = next;
1393 else
1394 {
1395 struct table_elt *newfirst = next;
1396 while (next)
1397 {
1398 next->first_same_value = newfirst;
1399 next = next->next_same_value;
1400 }
1401 }
1402 }
1403
1404 /* Remove the table element from its hash bucket. */
1405
1406 {
1407 struct table_elt *prev = elt->prev_same_hash;
1408 struct table_elt *next = elt->next_same_hash;
1409
1410 if (next)
1411 next->prev_same_hash = prev;
1412
1413 if (prev)
1414 prev->next_same_hash = next;
1415 else if (table[hash] == elt)
1416 table[hash] = next;
1417 else
1418 {
1419 /* This entry is not in the proper hash bucket. This can happen
1420 when two classes were merged by `merge_equiv_classes'. Search
1421 for the hash bucket that it heads. This happens only very
1422 rarely, so the cost is acceptable. */
1423 for (hash = 0; hash < HASH_SIZE; hash++)
1424 if (table[hash] == elt)
1425 table[hash] = next;
1426 }
1427 }
1428
1429 /* Remove the table element from its related-value circular chain. */
1430
1431 if (elt->related_value != 0 && elt->related_value != elt)
1432 {
1433 struct table_elt *p = elt->related_value;
1434
1435 while (p->related_value != elt)
1436 p = p->related_value;
1437 p->related_value = elt->related_value;
1438 if (p->related_value == p)
1439 p->related_value = 0;
1440 }
1441
1442 /* Now add it to the free element chain. */
1443 elt->next_same_hash = free_element_chain;
1444 free_element_chain = elt;
1445}
1446
1447/* Look up X in the hash table and return its table element,
1448 or 0 if X is not in the table.
1449
1450 MODE is the machine-mode of X, or if X is an integer constant
1451 with VOIDmode then MODE is the mode with which X will be used.
1452
1453 Here we are satisfied to find an expression whose tree structure
1454 looks like X. */
1455
1456static struct table_elt *
1457lookup (x, hash, mode)
1458 rtx x;
1459 unsigned hash;
1460 enum machine_mode mode;
1461{
1462 struct table_elt *p;
1463
1464 for (p = table[hash]; p; p = p->next_same_hash)
1465 if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
1466 || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
1467 return p;
1468
1469 return 0;
1470}
1471
1472/* Like `lookup' but don't care whether the table element uses invalid regs.
1473 Also ignore discrepancies in the machine mode of a register. */
1474
1475static struct table_elt *
1476lookup_for_remove (x, hash, mode)
1477 rtx x;
1478 unsigned hash;
1479 enum machine_mode mode;
1480{
1481 struct table_elt *p;
1482
1483 if (GET_CODE (x) == REG)
1484 {
1485 unsigned int regno = REGNO (x);
1486
1487 /* Don't check the machine mode when comparing registers;
1488 invalidating (REG:SI 0) also invalidates (REG:DF 0). */
1489 for (p = table[hash]; p; p = p->next_same_hash)
1490 if (GET_CODE (p->exp) == REG
1491 && REGNO (p->exp) == regno)
1492 return p;
1493 }
1494 else
1495 {
1496 for (p = table[hash]; p; p = p->next_same_hash)
1497 if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
1498 return p;
1499 }
1500
1501 return 0;
1502}
1503
1504/* Look for an expression equivalent to X and with code CODE.
1505 If one is found, return that expression. */
1506
1507static rtx
1508lookup_as_function (x, code)
1509 rtx x;
1510 enum rtx_code code;
1511{
1512 struct table_elt *p
1513 = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));
1514
1515 /* If we are looking for a CONST_INT, the mode doesn't really matter, as
1516 long as we are narrowing. So if we looked in vain for a mode narrower
1517 than word_mode before, look for word_mode now. */
1518 if (p == 0 && code == CONST_INT
1519 && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
1520 {
1521 x = copy_rtx (x);
1522 PUT_MODE (x, word_mode);
1523 p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
1524 }
1525
1526 if (p == 0)
1527 return 0;
1528
1529 for (p = p->first_same_value; p; p = p->next_same_value)
1530 if (GET_CODE (p->exp) == code
1531 /* Make sure this is a valid entry in the table. */
1532 && exp_equiv_p (p->exp, p->exp, 1, 0))
1533 return p->exp;
1534
1535 return 0;
1536}
1537
1538/* Insert X in the hash table, assuming HASH is its hash code
1539 and CLASSP is an element of the class it should go in
1540 (or 0 if a new class should be made).
1541 It is inserted at the proper position to keep the class in
1542 the order cheapest first.
1543
1544 MODE is the machine-mode of X, or if X is an integer constant
1545 with VOIDmode then MODE is the mode with which X will be used.
1546
1547 For elements of equal cheapness, the most recent one
1548 goes in front, except that the first element in the list
1549 remains first unless a cheaper element is added. The order of
1550 pseudo-registers does not matter, as canon_reg will be called to
1551 find the cheapest when a register is retrieved from the table.
1552
1553 The in_memory field in the hash table element is set to 0.
1554 The caller must set it nonzero if appropriate.
1555
1556 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1557 and if insert_regs returns a nonzero value
1558 you must then recompute its hash code before calling here.
1559
1560 If necessary, update table showing constant values of quantities. */
1561
1562#define CHEAPER(X, Y) \
1563 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1564
1565static struct table_elt *
1566insert (x, classp, hash, mode)
1567 rtx x;
1568 struct table_elt *classp;
1569 unsigned hash;
1570 enum machine_mode mode;
1571{
1572 struct table_elt *elt;
1573
1574 /* If X is a register and we haven't made a quantity for it,
1575 something is wrong. */
1576 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1577 abort ();
1578
1579 /* If X is a hard register, show it is being put in the table. */
1580 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1581 {
1582 unsigned int regno = REGNO (x);
1583 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1584 unsigned int i;
1585
1586 for (i = regno; i < endregno; i++)
1587 SET_HARD_REG_BIT (hard_regs_in_table, i);
1588 }
1589
1590 /* Put an element for X into the right hash bucket. */
1591
1592 elt = free_element_chain;
1593 if (elt)
1594 free_element_chain = elt->next_same_hash;
1595 else
1596 {
1597 n_elements_made++;
1598 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1599 }
1600
1601 elt->exp = x;
1602 elt->canon_exp = NULL_RTX;
1603 elt->cost = COST (x);
1604 elt->regcost = approx_reg_cost (x);
1605 elt->next_same_value = 0;
1606 elt->prev_same_value = 0;
1607 elt->next_same_hash = table[hash];
1608 elt->prev_same_hash = 0;
1609 elt->related_value = 0;
1610 elt->in_memory = 0;
1611 elt->mode = mode;
1612 elt->is_const = (CONSTANT_P (x)
1613 /* GNU C++ takes advantage of this for `this'
1614 (and other const values). */
1615 || (GET_CODE (x) == REG
1616 && RTX_UNCHANGING_P (x)
1617 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1618 || FIXED_BASE_PLUS_P (x));
1619
1620 if (table[hash])
1621 table[hash]->prev_same_hash = elt;
1622 table[hash] = elt;
1623
1624 /* Put it into the proper value-class. */
1625 if (classp)
1626 {
1627 classp = classp->first_same_value;
1628 if (CHEAPER (elt, classp))
1629 /* Insert at the head of the class */
1630 {
1631 struct table_elt *p;
1632 elt->next_same_value = classp;
1633 classp->prev_same_value = elt;
1634 elt->first_same_value = elt;
1635
1636 for (p = classp; p; p = p->next_same_value)
1637 p->first_same_value = elt;
1638 }
1639 else
1640 {
1641 /* Insert not at head of the class. */
1642 /* Put it after the last element cheaper than X. */
1643 struct table_elt *p, *next;
1644
1645 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1646 p = next);
1647
1648 /* Put it after P and before NEXT. */
1649 elt->next_same_value = next;
1650 if (next)
1651 next->prev_same_value = elt;
1652
1653 elt->prev_same_value = p;
1654 p->next_same_value = elt;
1655 elt->first_same_value = classp;
1656 }
1657 }
1658 else
1659 elt->first_same_value = elt;
1660
1661 /* If this is a constant being set equivalent to a register or a register
1662 being set equivalent to a constant, note the constant equivalence.
1663
1664 If this is a constant, it cannot be equivalent to a different constant,
1665 and a constant is the only thing that can be cheaper than a register. So
1666 we know the register is the head of the class (before the constant was
1667 inserted).
1668
1669 If this is a register that is not already known equivalent to a
1670 constant, we must check the entire class.
1671
1672 If this is a register that is already known equivalent to a constant,
1673 update the qtys `const_insn' to show that `this_insn' is the latest
1674 insn making that quantity equivalent to the constant. */
1675
1676 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1677 && GET_CODE (x) != REG)
1678 {
1679 int exp_q = REG_QTY (REGNO (classp->exp));
1680 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1681
1682 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1683 exp_ent->const_insn = this_insn;
1684 }
1685
1686 else if (GET_CODE (x) == REG
1687 && classp
1688 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1689 && ! elt->is_const)
1690 {
1691 struct table_elt *p;
1692
1693 for (p = classp; p != 0; p = p->next_same_value)
1694 {
1695 if (p->is_const && GET_CODE (p->exp) != REG)
1696 {
1697 int x_q = REG_QTY (REGNO (x));
1698 struct qty_table_elem *x_ent = &qty_table[x_q];
1699
1700 x_ent->const_rtx
1701 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1702 x_ent->const_insn = this_insn;
1703 break;
1704 }
1705 }
1706 }
1707
1708 else if (GET_CODE (x) == REG
1709 && qty_table[REG_QTY (REGNO (x))].const_rtx
1710 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1711 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1712
1713 /* If this is a constant with symbolic value,
1714 and it has a term with an explicit integer value,
1715 link it up with related expressions. */
1716 if (GET_CODE (x) == CONST)
1717 {
1718 rtx subexp = get_related_value (x);
1719 unsigned subhash;
1720 struct table_elt *subelt, *subelt_prev;
1721
1722 if (subexp != 0)
1723 {
1724 /* Get the integer-free subexpression in the hash table. */
1725 subhash = safe_hash (subexp, mode) & HASH_MASK;
1726 subelt = lookup (subexp, subhash, mode);
1727 if (subelt == 0)
1728 subelt = insert (subexp, NULL, subhash, mode);
1729 /* Initialize SUBELT's circular chain if it has none. */
1730 if (subelt->related_value == 0)
1731 subelt->related_value = subelt;
1732 /* Find the element in the circular chain that precedes SUBELT. */
1733 subelt_prev = subelt;
1734 while (subelt_prev->related_value != subelt)
1735 subelt_prev = subelt_prev->related_value;
1736 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1737 This way the element that follows SUBELT is the oldest one. */
1738 elt->related_value = subelt_prev->related_value;
1739 subelt_prev->related_value = elt;
1740 }
1741 }
1742
1743 return elt;
1744}
1745
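/* A sketch of the related-value machinery above (hypothetical RTL):
   inserting

	(const (plus (symbol_ref "tbl") (const_int 4)))
	(const (plus (symbol_ref "tbl") (const_int 8)))

   links both elements into the circular related_value chain rooted at
   (symbol_ref "tbl"), so that use_related_value can later express one
   of them as a register known to hold the other, plus a constant
   offset.  */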
1746
1747/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1748 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1749 the two classes equivalent.
1750
1751 CLASS1 will be the surviving class; CLASS2 should not be used after this
1752 call.
1753
1754 Any invalid entries in CLASS2 will not be copied. */
1755
1756static void
1757merge_equiv_classes (class1, class2)
1758 struct table_elt *class1, *class2;
1759{
1760 struct table_elt *elt, *next, *new;
1761
1762 /* Ensure we start with the head of the classes. */
1763 class1 = class1->first_same_value;
1764 class2 = class2->first_same_value;
1765
1766 /* If they were already equal, forget it. */
1767 if (class1 == class2)
1768 return;
1769
1770 for (elt = class2; elt; elt = next)
1771 {
1772 unsigned int hash;
1773 rtx exp = elt->exp;
1774 enum machine_mode mode = elt->mode;
1775
1776 next = elt->next_same_value;
1777
1778 /* Remove old entry, make a new one in CLASS1's class.
1779 Don't do this for invalid entries as we cannot find their
1780 hash code (it also isn't necessary). */
1781 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1782 {
1783 hash_arg_in_memory = 0;
1784 hash = HASH (exp, mode);
1785
1786 if (GET_CODE (exp) == REG)
1787 delete_reg_equiv (REGNO (exp));
1788
1789 remove_from_table (elt, hash);
1790
1791 if (insert_regs (exp, class1, 0))
1792 {
1793 rehash_using_reg (exp);
1794 hash = HASH (exp, mode);
1795 }
1796 new = insert (exp, class1, hash, mode);
1797 new->in_memory = hash_arg_in_memory;
1798 }
1799 }
1800}
1801
1802
1803/* Flush the entire hash table. */
1804
1805static void
1806flush_hash_table ()
1807{
1808 int i;
1809 struct table_elt *p;
1810
1811 for (i = 0; i < HASH_SIZE; i++)
1812 for (p = table[i]; p; p = table[i])
1813 {
1814 /* Note that invalidate can remove elements
1815 after P in the current hash chain. */
1816 if (GET_CODE (p->exp) == REG)
1817 invalidate (p->exp, p->mode);
1818 else
1819 remove_from_table (p, i);
1820 }
1821}
1822
1823
1824/* Function called for each rtx to check whether a true dependence exists. */
1825struct check_dependence_data
1826{
1827 enum machine_mode mode;
1828 rtx exp;
1829};
1830
1831static int
1832check_dependence (x, data)
1833 rtx *x;
1834 void *data;
1835{
1836 struct check_dependence_data *d = (struct check_dependence_data *) data;
1837 if (*x && GET_CODE (*x) == MEM)
1838 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1839 else
1840 return 0;
1841}
1842
1843
1844/* Remove from the hash table, or mark as invalid, all expressions whose
1845 values could be altered by storing in X. X is a register, a subreg, or
1846 a memory reference with nonvarying address (because, when a memory
1847 reference with a varying address is stored in, all memory references are
1848 removed by invalidate_memory so specific invalidation is superfluous).
1849 FULL_MODE, if not VOIDmode, indicates that this much should be
1850 invalidated instead of just the amount indicated by the mode of X. This
1851 is only used for bitfield stores into memory.
1852
1853 A nonvarying address may be just a register or just a symbol reference,
1854 or it may be either of those plus a numeric offset. */
1855
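/* For illustration (hypothetical RTL): after a store through
   (mem:SI (plus:SI (reg:SI 65) (const_int 4))), invalidate removes
   every table entry whose cached memory value may overlap those bytes,
   using true_dependence via check_dependence above; a store into
   (reg:SI 65) itself instead takes the REG case below, which cuts the
   register's quantity equivalence and bumps REG_TICK so that stale
   entries mentioning it no longer validate.  */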
1856static void
1857invalidate (x, full_mode)
1858 rtx x;
1859 enum machine_mode full_mode;
1860{
1861 int i;
1862 struct table_elt *p;
1863
1864 switch (GET_CODE (x))
1865 {
1866 case REG:
1867 {
1868 /* If X is a register, dependencies on its contents are recorded
1869 through the qty number mechanism. Just change the qty number of
1870 the register, mark it as invalid for expressions that refer to it,
1871 and remove it itself. */
1872 unsigned int regno = REGNO (x);
1873 unsigned int hash = HASH (x, GET_MODE (x));
1874
1875 /* Remove REGNO from any quantity list it might be on and indicate
1876 that its value might have changed. If it is a pseudo, remove its
1877 entry from the hash table.
1878
1879 For a hard register, we do the first two actions above for any
1880 additional hard registers corresponding to X. Then, if any of these
1881 registers are in the table, we must remove any REG entries that
1882 overlap these registers. */
1883
1884 delete_reg_equiv (regno);
1885 REG_TICK (regno)++;
1886 SUBREG_TICKED (regno) = -1;
1887
1888 if (regno >= FIRST_PSEUDO_REGISTER)
1889 {
1890 /* Because a register can be referenced in more than one mode,
1891 we might have to remove more than one table entry. */
1892 struct table_elt *elt;
1893
1894 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1895 remove_from_table (elt, hash);
1896 }
1897 else
1898 {
1899 HOST_WIDE_INT in_table
1900 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1901 unsigned int endregno
1902 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1903 unsigned int tregno, tendregno, rn;
1904 struct table_elt *p, *next;
1905
1906 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1907
1908 for (rn = regno + 1; rn < endregno; rn++)
1909 {
1910 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1911 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1912 delete_reg_equiv (rn);
1913 REG_TICK (rn)++;
1914 SUBREG_TICKED (rn) = -1;
1915 }
1916
1917 if (in_table)
1918 for (hash = 0; hash < HASH_SIZE; hash++)
1919 for (p = table[hash]; p; p = next)
1920 {
1921 next = p->next_same_hash;
1922
1923 if (GET_CODE (p->exp) != REG
1924 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1925 continue;
1926
1927 tregno = REGNO (p->exp);
1928 tendregno
1929 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1930 if (tendregno > regno && tregno < endregno)
1931 remove_from_table (p, hash);
1932 }
1933 }
1934 }
1935 return;
1936
1937 case SUBREG:
1938 invalidate (SUBREG_REG (x), VOIDmode);
1939 return;
1940
1941 case PARALLEL:
1942 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1943 invalidate (XVECEXP (x, 0, i), VOIDmode);
1944 return;
1945
1946 case EXPR_LIST:
1947 /* This is part of a disjoint return value; extract the location in
1948 question ignoring the offset. */
1949 invalidate (XEXP (x, 0), VOIDmode);
1950 return;
1951
1952 case MEM:
1953 /* Calculate the canonical version of X here so that
1954 true_dependence doesn't generate new RTL for X on each call. */
1955 x = canon_rtx (x);
1956
1957 /* Remove all hash table elements that refer to overlapping pieces of
1958 memory. */
1959 if (full_mode == VOIDmode)
1960 full_mode = GET_MODE (x);
1961
1962 for (i = 0; i < HASH_SIZE; i++)
1963 {
1964 struct table_elt *next;
1965
1966 for (p = table[i]; p; p = next)
1967 {
1968 next = p->next_same_hash;
1969 if (p->in_memory)
1970 {
1971 struct check_dependence_data d;
1972
1973 /* Just canonicalize the expression once;
1974 otherwise each time we call invalidate
1975 true_dependence will canonicalize the
1976 expression again. */
1977 if (!p->canon_exp)
1978 p->canon_exp = canon_rtx (p->exp);
1979 d.exp = x;
1980 d.mode = full_mode;
1981 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1982 remove_from_table (p, i);
1983 }
1984 }
1985 }
1986 return;
1987
1988 default:
1989 abort ();
1990 }
1991}
1992
1993
1994/* Remove all expressions that refer to register REGNO,
1995 since they are already invalid, and we are about to
1996 mark that register valid again and don't want the old
1997 expressions to reappear as valid. */
1998
1999static void
2000remove_invalid_refs (regno)
2001 unsigned int regno;
2002{
2003 unsigned int i;
2004 struct table_elt *p, *next;
2005
2006 for (i = 0; i < HASH_SIZE; i++)
2007 for (p = table[i]; p; p = next)
2008 {
2009 next = p->next_same_hash;
2010 if (GET_CODE (p->exp) != REG
2011 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2012 remove_from_table (p, i);
2013 }
2014}
2015
2016/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2017 and mode MODE. */
2018static void
2019remove_invalid_subreg_refs (regno, offset, mode)
2020 unsigned int regno;
2021 unsigned int offset;
2022 enum machine_mode mode;
2023{
2024 unsigned int i;
2025 struct table_elt *p, *next;
2026 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2027
2028 for (i = 0; i < HASH_SIZE; i++)
2029 for (p = table[i]; p; p = next)
2030 {
2031 rtx exp = p->exp;
2032 next = p->next_same_hash;
2033
2034 if (GET_CODE (exp) != REG
2035 && (GET_CODE (exp) != SUBREG
2036 || GET_CODE (SUBREG_REG (exp)) != REG
2037 || REGNO (SUBREG_REG (exp)) != regno
2038 || (((SUBREG_BYTE (exp)
2039 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2040 && SUBREG_BYTE (exp) <= end))
2041 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx *) 0))
2042 remove_from_table (p, i);
2043 }
2044}
2045
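/* Overlap example for the test above (hypothetical, assuming 4-byte
   SImode and 8-byte DImode): after remove_invalid_subreg_refs (70, 4,
   SImode), bytes 4..7 of (reg 70) are dead.  An entry mentioning
   (subreg:SI (reg:DI 70) 0) covers bytes 0..3 only and is kept; one
   mentioning (subreg:SI (reg:DI 70) 4) covers bytes 4..7 and is
   removed, as is any non-SUBREG expression that refers to reg 70.  */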
2046
2047/* Recompute the hash codes of any valid entries in the hash table that
2048 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2049
2050 This is called when we make a jump equivalence. */
2051
2052static void
2053rehash_using_reg (x)
2054 rtx x;
2055{
2056 unsigned int i;
2057 struct table_elt *p, *next;
2058 unsigned hash;
2059
2060 if (GET_CODE (x) == SUBREG)
2061 x = SUBREG_REG (x);
2062
2063 /* If X is not a register or if the register is known not to be in any
2064 valid entries in the table, we have no work to do. */
2065
2066 if (GET_CODE (x) != REG
2067 || REG_IN_TABLE (REGNO (x)) < 0
2068 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2069 return;
2070
2071 /* Scan all hash chains looking for valid entries that mention X.
2072 If we find one and it is in the wrong hash chain, move it. We can skip
2073 objects that are registers, since they are handled specially. */
2074
2075 for (i = 0; i < HASH_SIZE; i++)
2076 for (p = table[i]; p; p = next)
2077 {
2078 next = p->next_same_hash;
2079 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2080 && exp_equiv_p (p->exp, p->exp, 1, 0)
2081 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2082 {
2083 if (p->next_same_hash)
2084 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2085
2086 if (p->prev_same_hash)
2087 p->prev_same_hash->next_same_hash = p->next_same_hash;
2088 else
2089 table[i] = p->next_same_hash;
2090
2091 p->next_same_hash = table[hash];
2092 p->prev_same_hash = 0;
2093 if (table[hash])
2094 table[hash]->prev_same_hash = p;
2095 table[hash] = p;
2096 }
2097 }
2098}
2099
2100
2101/* Remove from the hash table any expression that is a call-clobbered
2102 register. Also update their TICK values. */
2103
2104static void
2105invalidate_for_call ()
2106{
2107 unsigned int regno, endregno;
2108 unsigned int i;
2109 unsigned hash;
2110 struct table_elt *p, *next;
2111 int in_table = 0;
2112
2113 /* Go through all the hard registers. For each that is clobbered in
2114 a CALL_INSN, remove the register from quantity chains and update
2115 reg_tick if defined. Also see if any of these registers is currently
2116 in the table. */
2117
2118 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2119 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2120 {
2121 delete_reg_equiv (regno);
2122 if (REG_TICK (regno) >= 0)
2123 {
2124 REG_TICK (regno)++;
2125 SUBREG_TICKED (regno) = -1;
2126 }
2127
2128 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2129 }
2130
2131 /* In the case where we have no call-clobbered hard registers in the
2132 table, we are done. Otherwise, scan the table and remove any
2133 entry that overlaps a call-clobbered register. */
2134
2135 if (in_table)
2136 for (hash = 0; hash < HASH_SIZE; hash++)
2137 for (p = table[hash]; p; p = next)
2138 {
2139 next = p->next_same_hash;
2140
2141 if (GET_CODE (p->exp) != REG
2142 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2143 continue;
2144
2145 regno = REGNO (p->exp);
2146 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2147
2148 for (i = regno; i < endregno; i++)
2149 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2150 {
2151 remove_from_table (p, hash);
2152 break;
2153 }
2154 }
2155}
2156
2157
2158/* Given an expression X of type CONST,
2159 and ELT which is its table entry (or 0 if it
2160 is not in the hash table),
2161 return an alternate expression for X as a register plus integer.
2162 If none can be found, return 0. */
2163
2164static rtx
2165use_related_value (x, elt)
2166 rtx x;
2167 struct table_elt *elt;
2168{
2169 struct table_elt *relt = 0;
2170 struct table_elt *p, *q;
2171 HOST_WIDE_INT offset;
2172
2173 /* First, is there anything related known?
2174 If we have a table element, we can tell from that.
2175 Otherwise, must look it up. */
2176
2177 if (elt != 0 && elt->related_value != 0)
2178 relt = elt;
2179 else if (elt == 0 && GET_CODE (x) == CONST)
2180 {
2181 rtx subexp = get_related_value (x);
2182 if (subexp != 0)
2183 relt = lookup (subexp,
2184 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2185 GET_MODE (subexp));
2186 }
2187
2188 if (relt == 0)
2189 return 0;
2190
2191 /* Search all related table entries for one that has an
2192 equivalent register. */
2193
2194 p = relt;
2195 while (1)
2196 {
2197 /* This loop is strange in that it is executed in two different cases.
2198 The first is when X is already in the table. Then it is searching
2199 the RELATED_VALUE list of X's class (RELT). The second case is when
2200 X is not in the table. Then RELT points to a class for the related
2201 value.
2202
2203 Ensure that, whatever case we are in, we ignore classes that have
2204 the same value as X. */
2205
2206 if (rtx_equal_p (x, p->exp))
2207 q = 0;
2208 else
2209 for (q = p->first_same_value; q; q = q->next_same_value)
2210 if (GET_CODE (q->exp) == REG)
2211 break;
2212
2213 if (q)
2214 break;
2215
2216 p = p->related_value;
2217
2218 /* We went all the way around, so there is nothing to be found.
2219 Alternatively, perhaps RELT was in the table for some other reason
2220 and it has no related values recorded. */
2221 if (p == relt || p == 0)
2222 break;
2223 }
2224
2225 if (q == 0)
2226 return 0;
2227
2228 offset = (get_integer_term (x) - get_integer_term (p->exp));
2229 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2230 return plus_constant (q->exp, offset);
2231}
2232
2233
2234/* Hash a string. Just add its bytes up. */
2235static inline unsigned
2236canon_hash_string (ps)
2237 const char *ps;
2238{
2239 unsigned hash = 0;
2240 const unsigned char *p = (const unsigned char *) ps;
2241
2242 if (p)
2243 while (*p)
2244 hash += *p++;
2245
2246 return hash;
2247}
2248
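/* For illustration: since the hash is just a byte sum,
   canon_hash_string ("ab") == canon_hash_string ("ba") == 195.  Such
   collisions are harmless; the hash only picks a bucket, and real
   equality is decided by exp_equiv_p.  */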
2249/* Hash an rtx. We are careful to make sure the value is never negative.
2250 Equivalent registers hash identically.
2251 MODE is used in hashing for CONST_INTs only;
2252 otherwise the mode of X is used.
2253
2254 Store 1 in do_not_record if any subexpression is volatile.
2255
2256 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2257 which does not have the RTX_UNCHANGING_P bit set.
2258
2259 Note that cse_insn knows that the hash code of a MEM expression
2260 is just (int) MEM plus the hash code of the address. */
2261
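/* An example of why MODE is mixed in for CONST_INTs (hypothetical):
   a CONST_INT carries no mode of its own, so (const_int 1) standing
   for an SImode value and (const_int 1) standing for a DImode value
   would otherwise hash identically; including the caller's MODE keeps
   those uses from being confused with each other.  */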
2262static unsigned
2263canon_hash (x, mode)
2264 rtx x;
2265 enum machine_mode mode;
2266{
2267 int i, j;
2268 unsigned hash = 0;
2269 enum rtx_code code;
2270 const char *fmt;
2271
2272 /* repeat is used to turn tail-recursion into iteration. */
2273 repeat:
2274 if (x == 0)
2275 return hash;
2276
2277 code = GET_CODE (x);
2278 switch (code)
2279 {
2280 case REG:
2281 {
2282 unsigned int regno = REGNO (x);
2283 bool record;
2284
2285 /* On some machines, we can't record any non-fixed hard register,
2286 because extending its life will cause reload problems. We
2287 consider ap, fp, sp, gp to be fixed for this purpose.
2288
2289 We also consider CCmode registers to be fixed for this purpose;
2290 failure to do so leads to failure to simplify 0<100 type of
2291 conditionals.
2292
2293 On all machines, we can't record any global registers.
2294 Nor should we record any register that is in a small
2295 class, as defined by CLASS_LIKELY_SPILLED_P. */
2296
2297 if (regno >= FIRST_PSEUDO_REGISTER)
2298 record = true;
2299 else if (x == frame_pointer_rtx
2300 || x == hard_frame_pointer_rtx
2301 || x == arg_pointer_rtx
2302 || x == stack_pointer_rtx
2303 || x == pic_offset_table_rtx)
2304 record = true;
2305 else if (global_regs[regno])
2306 record = false;
2307 else if (fixed_regs[regno])
2308 record = true;
2309 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2310 record = true;
2311 else if (SMALL_REGISTER_CLASSES)
2312 record = false;
2313 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2314 record = false;
2315 else
2316 record = true;
2317
2318 if (!record)
2319 {
2320 do_not_record = 1;
2321 return 0;
2322 }
2323
2324 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2325 return hash;
2326 }
2327
2328 /* We handle SUBREG of a REG specially because the underlying
2329 reg changes its hash value with every value change; we don't
2330 want to have to forget unrelated subregs when one subreg changes. */
2331 case SUBREG:
2332 {
2333 if (GET_CODE (SUBREG_REG (x)) == REG)
2334 {
2335 hash += (((unsigned) SUBREG << 7)
2336 + REGNO (SUBREG_REG (x))
2337 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2338 return hash;
2339 }
2340 break;
2341 }
2342
2343 case CONST_INT:
2344 {
2345 unsigned HOST_WIDE_INT tem = INTVAL (x);
2346 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2347 return hash;
2348 }
2349
2350 case CONST_DOUBLE:
2351 /* This is like the general case, except that it only counts
2352 the integers representing the constant. */
2353 hash += (unsigned) code + (unsigned) GET_MODE (x);
2354 if (GET_MODE (x) != VOIDmode)
2355 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
2356 else
2357 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2358 + (unsigned) CONST_DOUBLE_HIGH (x));
2359 return hash;
2360
2361 case CONST_VECTOR:
2362 {
2363 int units;
2364 rtx elt;
2365
2366 units = CONST_VECTOR_NUNITS (x);
2367
2368 for (i = 0; i < units; ++i)
2369 {
2370 elt = CONST_VECTOR_ELT (x, i);
2371 hash += canon_hash (elt, GET_MODE (elt));
2372 }
2373
2374 return hash;
2375 }
2376
2377 /* Assume there is only one rtx object for any given label. */
2378 case LABEL_REF:
2379 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2380 return hash;
2381
2382 case SYMBOL_REF:
2383 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2384 return hash;
2385
2386 case MEM:
2387 /* We don't record if marked volatile or if BLKmode since we don't
2388 know the size of the move. */
2389 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2390 {
2391 do_not_record = 1;
2392 return 0;
2393 }
2394 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2395 {
2396 hash_arg_in_memory = 1;
2397 }
2398 /* Now that we have already found this special case,
2399 might as well speed it up as much as possible. */
2400 hash += (unsigned) MEM;
2401 x = XEXP (x, 0);
2402 goto repeat;
2403
2404 case USE:
2405 /* A USE that mentions non-volatile memory needs special
2406 handling since the MEM may be BLKmode which normally
2407 prevents an entry from being made. Pure calls are
2408 marked by a USE which mentions BLKmode memory. */
2409 if (GET_CODE (XEXP (x, 0)) == MEM
2410 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2411 {
2412 hash += (unsigned) USE;
2413 x = XEXP (x, 0);
2414
2415 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2416 hash_arg_in_memory = 1;
2417
2418 /* Now that we have already found this special case,
2419 might as well speed it up as much as possible. */
2420 hash += (unsigned) MEM;
2421 x = XEXP (x, 0);
2422 goto repeat;
2423 }
2424 break;
2425
2426 case PRE_DEC:
2427 case PRE_INC:
2428 case POST_DEC:
2429 case POST_INC:
2430 case PRE_MODIFY:
2431 case POST_MODIFY:
2432 case PC:
2433 case CC0:
2434 case CALL:
2435 case UNSPEC_VOLATILE:
2436 do_not_record = 1;
2437 return 0;
2438
2439 case ASM_OPERANDS:
2440 if (MEM_VOLATILE_P (x))
2441 {
2442 do_not_record = 1;
2443 return 0;
2444 }
2445 else
2446 {
2447 /* We don't want to take the filename and line into account. */
2448 hash += (unsigned) code + (unsigned) GET_MODE (x)
2449 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2450 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2451 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2452
2453 if (ASM_OPERANDS_INPUT_LENGTH (x))
2454 {
2455 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2456 {
2457 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2458 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2459 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2460 (x, i)));
2461 }
2462
2463 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2464 x = ASM_OPERANDS_INPUT (x, 0);
2465 mode = GET_MODE (x);
2466 goto repeat;
2467 }
2468
2469 return hash;
2470 }
2471 break;
2472
2473 default:
2474 break;
2475 }
2476
2477 i = GET_RTX_LENGTH (code) - 1;
2478 hash += (unsigned) code + (unsigned) GET_MODE (x);
2479 fmt = GET_RTX_FORMAT (code);
2480 for (; i >= 0; i--)
2481 {
2482 if (fmt[i] == 'e')
2483 {
2484 rtx tem = XEXP (x, i);
2485
2486 /* If we are about to do the last recursive call
2487 needed at this level, change it into iteration.
2488 This function is called enough to be worth it. */
2489 if (i == 0)
2490 {
2491 x = tem;
2492 goto repeat;
2493 }
2494 hash += canon_hash (tem, 0);
2495 }
2496 else if (fmt[i] == 'E')
2497 for (j = 0; j < XVECLEN (x, i); j++)
2498 hash += canon_hash (XVECEXP (x, i, j), 0);
2499 else if (fmt[i] == 's')
2500 hash += canon_hash_string (XSTR (x, i));
2501 else if (fmt[i] == 'i')
2502 {
2503 unsigned tem = XINT (x, i);
2504 hash += tem;
2505 }
2506 else if (fmt[i] == '0' || fmt[i] == 't')
2507 /* Unused. */
2508 ;
2509 else
2510 abort ();
2511 }
2512 return hash;
2513}
2514
2515/* Like canon_hash but with no side effects. */
2516
2517static unsigned
2518safe_hash (x, mode)
2519 rtx x;
2520 enum machine_mode mode;
2521{
2522 int save_do_not_record = do_not_record;
2523 int save_hash_arg_in_memory = hash_arg_in_memory;
2524 unsigned hash = canon_hash (x, mode);
2525 hash_arg_in_memory = save_hash_arg_in_memory;
2526 do_not_record = save_do_not_record;
2527 return hash;
2528}
2529
2530
2531/* Return 1 iff X and Y would canonicalize into the same thing,
2532 without actually constructing the canonicalization of either one.
2533 If VALIDATE is nonzero,
2534 we assume X is an expression being processed from the rtl
2535 and Y was found in the hash table. We check register refs
2536 in Y for being marked as valid.
2537
2538 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2539 that is known to be in the register. Ordinarily, we don't allow them
2540 to match, because letting them match would cause unpredictable results
2541 in all the places that search a hash table chain for an equivalent
2542 for a given value. A possible equivalent that has different structure
2543 has its hash code computed from different data. Whether the hash code
2544 is the same as that of the given value is pure luck. */
2545
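/* For illustration (hypothetical RTL): exp_equiv_p considers
   (plus:SI (reg 65) (reg 66)) and (plus:SI (reg 66) (reg 65))
   equivalent via the commutative case below, and with EQUAL_VALUES
   nonzero it will also match (reg 65) against (const_int 4) when the
   register's quantity is known to hold that constant.  */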
2546static int
2547exp_equiv_p (x, y, validate, equal_values)
2548 rtx x, y;
2549 int validate;
2550 int equal_values;
2551{
2552 int i, j;
2553 enum rtx_code code;
2554 const char *fmt;
2555
2556 /* Note: it is incorrect to assume an expression is equivalent to itself
2557 if VALIDATE is nonzero. */
2558 if (x == y && !validate)
2559 return 1;
2560 if (x == 0 || y == 0)
2561 return x == y;
2562
2563 code = GET_CODE (x);
2564 if (code != GET_CODE (y))
2565 {
2566 if (!equal_values)
2567 return 0;
2568
2569 /* If X is a constant and Y is a register or vice versa, they may be
2570 equivalent. We only have to validate if Y is a register. */
2571 if (CONSTANT_P (x) && GET_CODE (y) == REG
2572 && REGNO_QTY_VALID_P (REGNO (y)))
2573 {
2574 int y_q = REG_QTY (REGNO (y));
2575 struct qty_table_elem *y_ent = &qty_table[y_q];
2576
2577 if (GET_MODE (y) == y_ent->mode
2578 && rtx_equal_p (x, y_ent->const_rtx)
2579 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2580 return 1;
2581 }
2582
2583 if (CONSTANT_P (y) && code == REG
2584 && REGNO_QTY_VALID_P (REGNO (x)))
2585 {
2586 int x_q = REG_QTY (REGNO (x));
2587 struct qty_table_elem *x_ent = &qty_table[x_q];
2588
2589 if (GET_MODE (x) == x_ent->mode
2590 && rtx_equal_p (y, x_ent->const_rtx))
2591 return 1;
2592 }
2593
2594 return 0;
2595 }
2596
2597 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2598 if (GET_MODE (x) != GET_MODE (y))
2599 return 0;
2600
2601 switch (code)
2602 {
2603 case PC:
2604 case CC0:
2605 case CONST_INT:
2606 return x == y;
2607
2608 case LABEL_REF:
2609 return XEXP (x, 0) == XEXP (y, 0);
2610
2611 case SYMBOL_REF:
2612 return XSTR (x, 0) == XSTR (y, 0);
2613
2614 case REG:
2615 {
2616 unsigned int regno = REGNO (y);
2617 unsigned int endregno
2618 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2619 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2620 unsigned int i;
2621
2622 /* If the quantities are not the same, the expressions are not
2623 equivalent. If they are and we are not to validate, they
2624 are equivalent. Otherwise, ensure all regs are up-to-date. */
2625
2626 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2627 return 0;
2628
2629 if (! validate)
2630 return 1;
2631
2632 for (i = regno; i < endregno; i++)
2633 if (REG_IN_TABLE (i) != REG_TICK (i))
2634 return 0;
2635
2636 return 1;
2637 }
2638
2639 /* For commutative operations, check both orders. */
2640 case PLUS:
2641 case MULT:
2642 case AND:
2643 case IOR:
2644 case XOR:
2645 case NE:
2646 case EQ:
2647 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2648 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2649 validate, equal_values))
2650 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2651 validate, equal_values)
2652 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2653 validate, equal_values)));
2654
2655 case ASM_OPERANDS:
2656 /* We don't use the generic code below because we want to
2657 disregard filename and line numbers. */
2658
2659 /* A volatile asm isn't equivalent to any other. */
2660 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2661 return 0;
2662
2663 if (GET_MODE (x) != GET_MODE (y)
2664 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2665 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2666 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2667 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2668 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2669 return 0;
2670
2671 if (ASM_OPERANDS_INPUT_LENGTH (x))
2672 {
2673 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2674 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2675 ASM_OPERANDS_INPUT (y, i),
2676 validate, equal_values)
2677 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2678 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2679 return 0;
2680 }
2681
2682 return 1;
2683
2684 default:
2685 break;
2686 }
2687
2688 /* Compare the elements. If any pair of corresponding elements
2689 fail to match, return 0 for the whole thing. */
2690
2691 fmt = GET_RTX_FORMAT (code);
2692 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2693 {
2694 switch (fmt[i])
2695 {
2696 case 'e':
2697 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2698 return 0;
2699 break;
2700
2701 case 'E':
2702 if (XVECLEN (x, i) != XVECLEN (y, i))
2703 return 0;
2704 for (j = 0; j < XVECLEN (x, i); j++)
2705 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2706 validate, equal_values))
2707 return 0;
2708 break;
2709
2710 case 's':
2711 if (strcmp (XSTR (x, i), XSTR (y, i)))
2712 return 0;
2713 break;
2714
2715 case 'i':
2716 if (XINT (x, i) != XINT (y, i))
2717 return 0;
2718 break;
2719
2720 case 'w':
2721 if (XWINT (x, i) != XWINT (y, i))
2722 return 0;
2723 break;
2724
2725 case '0':
2726 case 't':
2727 break;
2728
2729 default:
2730 abort ();
2731 }
2732 }
2733
2734 return 1;
2735}
2736
2737
2738/* Return 1 if X has a value that can vary even between two
2739 executions of the program. 0 means X can be compared reliably
2740 against certain constants or near-constants. */
2741
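/* For illustration (hypothetical): if (reg:SI 65) is known equal to
   (symbol_ref "buf") and (reg:SI 66) to (const_int 40), then
   (plus:SI (reg:SI 65) (reg:SI 66)) is reported as nonvarying by the
   REG+REG case below, which exists for the virtual register
   instantiation sequence described there.  */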
2742static int
2743cse_rtx_varies_p (x, from_alias)
2744 rtx x;
2745 int from_alias;
2746{
2747 /* We need not check for X and the equivalence class being of the same
2748 mode because if X is equivalent to a constant in some mode, it
2749 doesn't vary in any mode. */
2750
2751 if (GET_CODE (x) == REG
2752 && REGNO_QTY_VALID_P (REGNO (x)))
2753 {
2754 int x_q = REG_QTY (REGNO (x));
2755 struct qty_table_elem *x_ent = &qty_table[x_q];
2756
2757 if (GET_MODE (x) == x_ent->mode
2758 && x_ent->const_rtx != NULL_RTX)
2759 return 0;
2760 }
2761
2762 if (GET_CODE (x) == PLUS
2763 && GET_CODE (XEXP (x, 1)) == CONST_INT
2764 && GET_CODE (XEXP (x, 0)) == REG
2765 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2766 {
2767 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2768 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2769
2770 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2771 && x0_ent->const_rtx != NULL_RTX)
2772 return 0;
2773 }
2774
2775 /* This can happen as the result of virtual register instantiation, if
2776 the initial constant is too large to be a valid address. This gives
2777 us a three instruction sequence, load large offset into a register,
2778 load fp minus a constant into a register, then a MEM which is the
2779 sum of the two `constant' registers. */
2780 if (GET_CODE (x) == PLUS
2781 && GET_CODE (XEXP (x, 0)) == REG
2782 && GET_CODE (XEXP (x, 1)) == REG
2783 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2784 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2785 {
2786 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2787 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2788 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2789 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2790
2791 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2792 && x0_ent->const_rtx != NULL_RTX
2793 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2794 && x1_ent->const_rtx != NULL_RTX)
2795 return 0;
2796 }
2797
2798 return rtx_varies_p (x, from_alias);
2799}
2800
2801
2802/* Canonicalize an expression:
2803 replace each register reference inside it
2804 with the "oldest" equivalent register.
2805
2806 If INSN is nonzero and we are replacing a pseudo with a hard register
2807 or vice versa, validate_change is used to ensure that INSN remains valid
2808 after we make our substitution. The calls are made with IN_GROUP nonzero
2809 so apply_change_group must be called upon the outermost return from this
2810 function (unless INSN is zero). The result of apply_change_group can
2811 generally be discarded since the changes we are making are optional. */
2812
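/* For illustration (hypothetical): if (reg 80) and (reg 65) share a
   quantity and 65 is the oldest register in it, canon_reg rewrites
   (plus:SI (reg 80) (const_int 4)) into (plus:SI (reg 65) (const_int 4)),
   so that equal values hash and compare identically no matter which
   copy the source mentioned.  */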
2813static rtx
2814canon_reg (x, insn)
2815 rtx x;
2816 rtx insn;
2817{
2818 int i;
2819 enum rtx_code code;
2820 const char *fmt;
2821
2822 if (x == 0)
2823 return x;
2824
2825 code = GET_CODE (x);
2826 switch (code)
2827 {
2828 case PC:
2829 case CC0:
2830 case CONST:
2831 case CONST_INT:
2832 case CONST_DOUBLE:
2833 case CONST_VECTOR:
2834 case SYMBOL_REF:
2835 case LABEL_REF:
2836 case ADDR_VEC:
2837 case ADDR_DIFF_VEC:
2838 return x;
2839
2840 case REG:
2841 {
2842 int first;
2843 int q;
2844 struct qty_table_elem *ent;
2845
2846 /* Never replace a hard reg, because hard regs can appear
2847 in more than one machine mode, and we must preserve the mode
2848 of each occurrence. Also, some hard regs appear in
2849 MEMs that are shared and mustn't be altered. Don't try to
2850 replace any reg that maps to a reg of class NO_REGS. */
2851 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2852 || ! REGNO_QTY_VALID_P (REGNO (x)))
2853 return x;
2854
2855 q = REG_QTY (REGNO (x));
2856 ent = &qty_table[q];
2857 first = ent->first_reg;
2858 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2859 : REGNO_REG_CLASS (first) == NO_REGS ? x
2860 : gen_rtx_REG (ent->mode, first));
2861 }
2862
2863 default:
2864 break;
2865 }
2866
2867 fmt = GET_RTX_FORMAT (code);
2868 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2869 {
2870 int j;
2871
2872 if (fmt[i] == 'e')
2873 {
2874 rtx new = canon_reg (XEXP (x, i), insn);
2875 int insn_code;
2876
2877 /* If replacing pseudo with hard reg or vice versa, ensure the
2878 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2879 if (insn != 0 && new != 0
2880 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2881 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2882 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2883 || (insn_code = recog_memoized (insn)) < 0
2884 || insn_data[insn_code].n_dups > 0))
2885 validate_change (insn, &XEXP (x, i), new, 1);
2886 else
2887 XEXP (x, i) = new;
2888 }
2889 else if (fmt[i] == 'E')
2890 for (j = 0; j < XVECLEN (x, i); j++)
2891 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2892 }
2893
2894 return x;
2895}
2896
2897
2898/* LOC is a location within INSN that is an operand address (the contents of
2899 a MEM). Find the best equivalent address to use that is valid for this
2900 insn.
2901
2902 On most CISC machines, complicated address modes are costly, and rtx_cost
2903 is a good approximation for that cost. However, most RISC machines have
2904 only a few (usually only one) memory reference formats. If an address is
2905 valid at all, it is often just as cheap as any other address. Hence, for
2906 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2907 costs of various addresses. For two addresses of equal cost, choose the one
2908 with the highest `rtx_cost' value as that has the potential of eliminating
2909 the most insns. When both costs tie, we choose the first in the equivalence
2910 class. Note that we ignore the fact that pseudo registers are cheaper
2911 than hard registers here because we would also prefer the pseudo registers.
2912 */
2913
2914static void
2915find_best_addr (insn, loc, mode)
2916 rtx insn;
2917 rtx *loc;
2918 enum machine_mode mode;
2919{
2920 struct table_elt *elt;
2921 rtx addr = *loc;
2922#ifdef ADDRESS_COST
2923 struct table_elt *p;
2924 int found_better = 1;
2925#endif
2926 int save_do_not_record = do_not_record;
2927 int save_hash_arg_in_memory = hash_arg_in_memory;
2928 int addr_volatile;
2929 int regno;
2930 unsigned hash;
2931
2932 /* Do not try to replace constant addresses or addresses of local and
2933 argument slots. These MEM expressions are made only once and inserted
2934 in many instructions, as well as being used to control symbol table
2935 output. It is not safe to clobber them.
2936
2937 There are some uncommon cases where the address is already in a register
2938 for some reason, but we cannot take advantage of that because we have
2939 no easy way to unshare the MEM. In addition, looking up all stack
2940 addresses is costly. */
2941 if ((GET_CODE (addr) == PLUS
2942 && GET_CODE (XEXP (addr, 0)) == REG
2943 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2944 && (regno = REGNO (XEXP (addr, 0)),
2945 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2946 || regno == ARG_POINTER_REGNUM))
2947 || (GET_CODE (addr) == REG
2948 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2949 || regno == HARD_FRAME_POINTER_REGNUM
2950 || regno == ARG_POINTER_REGNUM))
2951 || GET_CODE (addr) == ADDRESSOF
2952 || CONSTANT_ADDRESS_P (addr))
2953 return;
2954
2955 /* If this address is not simply a register, try to fold it. This will
2956 sometimes simplify the expression. Many simplifications
2957 will not be valid, but some, usually applying the associative rule, will
2958 be valid and produce better code. */
2959 if (GET_CODE (addr) != REG)
2960 {
2961 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2962 int addr_folded_cost = address_cost (folded, mode);
2963 int addr_cost = address_cost (addr, mode);
2964
2965 if ((addr_folded_cost < addr_cost
2966 || (addr_folded_cost == addr_cost
2967 /* ??? The rtx_cost comparison is left over from an older
2968 version of this code. It is probably no longer helpful. */
2969 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2970 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2971 && validate_change (insn, loc, folded, 0))
2972 addr = folded;
2973 }
2974
2975 /* If this address is not in the hash table, we can't look for equivalences
2976 of the whole address. Also, ignore if volatile. */
2977
2978 do_not_record = 0;
2979 hash = HASH (addr, Pmode);
2980 addr_volatile = do_not_record;
2981 do_not_record = save_do_not_record;
2982 hash_arg_in_memory = save_hash_arg_in_memory;
2983
2984 if (addr_volatile)
2985 return;
2986
2987 elt = lookup (addr, hash, Pmode);
2988
2989#ifndef ADDRESS_COST
2990 if (elt)
2991 {
2992 int our_cost = elt->cost;
2993
2994 /* Find the lowest cost below ours that works. */
2995 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2996 if (elt->cost < our_cost
2997 && (GET_CODE (elt->exp) == REG
2998 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2999 && validate_change (insn, loc,
3000 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
3001 return;
3002 }
3003#else
3004
3005 if (elt)
3006 {
3007 /* We need to find the best (under the criteria documented above) entry
3008 in the class that is valid. We use the `flag' field to indicate
3009 choices that were invalid and iterate until we can't find a better
3010 one that hasn't already been tried. */
3011
3012 for (p = elt->first_same_value; p; p = p->next_same_value)
3013 p->flag = 0;
3014
3015 while (found_better)
3016 {
3017 int best_addr_cost = address_cost (*loc, mode);
3018 int best_rtx_cost = (elt->cost + 1) >> 1;
3019 int exp_cost;
3020 struct table_elt *best_elt = elt;
3021
3022 found_better = 0;
3023 for (p = elt->first_same_value; p; p = p->next_same_value)
3024 if (! p->flag)
3025 {
3026 if ((GET_CODE (p->exp) == REG
3027 || exp_equiv_p (p->exp, p->exp, 1, 0))
3028 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
3029 || (exp_cost == best_addr_cost
3030 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3031 {
3032 found_better = 1;
3033 best_addr_cost = exp_cost;
3034 best_rtx_cost = (p->cost + 1) >> 1;
3035 best_elt = p;
3036 }
3037 }
3038
3039 if (found_better)
3040 {
3041 if (validate_change (insn, loc,
3042 canon_reg (copy_rtx (best_elt->exp),
3043 NULL_RTX), 0))
3044 return;
3045 else
3046 best_elt->flag = 1;
3047 }
3048 }
3049 }
3050
3051 /* If the address is a binary operation with the first operand a register
3052 and the second a constant, do the same as above, but looking for
3053 equivalences of the register. Then try to simplify before checking for
3054 the best address to use. This catches a few cases: First is when we
3055 have REG+const and the register is another REG+const. We can often merge
3056 the constants and eliminate one insn and one register. It may also be
3057 that a machine has a cheap REG+REG+const. Finally, this improves the
3058 code on the Alpha for unaligned byte stores. */
3059
3060 if (flag_expensive_optimizations
3061 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3062 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3063 && GET_CODE (XEXP (*loc, 0)) == REG
3064 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3065 {
3066 rtx c = XEXP (*loc, 1);
3067
3068 do_not_record = 0;
3069 hash = HASH (XEXP (*loc, 0), Pmode);
3070 do_not_record = save_do_not_record;
3071 hash_arg_in_memory = save_hash_arg_in_memory;
3072
3073 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3074 if (elt == 0)
3075 return;
3076
3077 /* We need to find the best (under the criteria documented above) entry
3078 in the class that is valid. We use the `flag' field to indicate
3079 choices that were invalid and iterate until we can't find a better
3080 one that hasn't already been tried. */
3081
3082 for (p = elt->first_same_value; p; p = p->next_same_value)
3083 p->flag = 0;
3084
3085 while (found_better)
3086 {
3087 int best_addr_cost = address_cost (*loc, mode);
3088 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3089 struct table_elt *best_elt = elt;
3090 rtx best_rtx = *loc;
3091 int count;
3092
3093 /* In the worst case this is an O(n^2) algorithm, so limit our search
3094 to the first 32 elements on the list. This avoids trouble
3095 compiling code with very long basic blocks that can easily
3096 call simplify_gen_binary so many times that we run out of
3097 memory. */
3098
3099 found_better = 0;
3100 for (p = elt->first_same_value, count = 0;
3101 p && count < 32;
3102 p = p->next_same_value, count++)
3103 if (! p->flag
3104 && (GET_CODE (p->exp) == REG
3105 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3106 {
3107 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3108 p->exp, c);
3109 int new_cost;
3110 new_cost = address_cost (new, mode);
3111
3112 if (new_cost < best_addr_cost
3113 || (new_cost == best_addr_cost
3114 && (COST (new) + 1) >> 1 > best_rtx_cost))
3115 {
3116 found_better = 1;
3117 best_addr_cost = new_cost;
3118 best_rtx_cost = (COST (new) + 1) >> 1;
3119 best_elt = p;
3120 best_rtx = new;
3121 }
3122 }
3123
3124 if (found_better)
3125 {
3126 if (validate_change (insn, loc,
3127 canon_reg (copy_rtx (best_rtx),
3128 NULL_RTX), 0))
3129 return;
3130 else
3131 best_elt->flag = 1;
3132 }
3133 }
3134 }
3135#endif
3136}
3137
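/* A sketch of the REG+const case above (hypothetical RTL): for the
   address (plus:SI (reg:SI 80) (const_int 4)), if the table knows
   (reg:SI 80) == (plus:SI (reg:SI 65) (const_int 8)),
   simplify_gen_binary can produce (plus:SI (reg:SI 65) (const_int 12));
   when ADDRESS_COST rates that no worse and validate_change accepts
   it, the merged constant may let the insn computing (reg:SI 80) be
   deleted later as dead.  */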
3138
3139/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3140 operation (EQ, NE, GT, etc.), follow it back through the hash table to
3141 find what values are actually being compared.
3142
3143 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3144 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3145 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3146 compared to produce cc0.
3147
3148 The return value is the comparison operator and is either CODE itself
3149 or the code corresponding to the inverse of the comparison. */
3150
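/* For illustration (hypothetical): called with CODE == NE,
   *PARG1 == (reg 100), *PARG2 == (const_int 0), where the table knows
   (reg 100) == (compare (reg 101) (reg 102)), this returns NE with
   *PARG1 = (reg 101) and *PARG2 = (reg 102).  If (reg 100) were
   instead known equal to (eq (reg 101) (reg 102)), a STORE_FLAG_VALUE
   result, testing it against zero with NE yields EQ of the inner
   operands, and testing with EQ yields the reversed code NE.  */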
3151static enum rtx_code
3152find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3153 enum rtx_code code;
3154 rtx *parg1, *parg2;
3155 enum machine_mode *pmode1, *pmode2;
3156{
3157 rtx arg1, arg2;
3158
3159 arg1 = *parg1, arg2 = *parg2;
3160
3161 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3162
3163 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3164 {
3165 /* Set nonzero when we find something of interest. */
3166 rtx x = 0;
3167 int reverse_code = 0;
3168 struct table_elt *p = 0;
3169
3170 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3171 On machines with CC0, this is the only case that can occur, since
3172 fold_rtx will return the COMPARE or item being compared with zero
3173 when given CC0. */
3174
3175 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3176 x = arg1;
3177
3178 /* If ARG1 is a comparison operator and CODE is testing for
3179 STORE_FLAG_VALUE, get the inner arguments. */
3180
3181 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3182 {
3183#ifdef FLOAT_STORE_FLAG_VALUE
3184 REAL_VALUE_TYPE fsfv;
3185#endif
3186
3187 if (code == NE
3188 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3189 && code == LT && STORE_FLAG_VALUE == -1)
3190#ifdef FLOAT_STORE_FLAG_VALUE
3191 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3192 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3193 REAL_VALUE_NEGATIVE (fsfv)))
3194#endif
3195 )
3196 x = arg1;
3197 else if (code == EQ
3198 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3199 && code == GE && STORE_FLAG_VALUE == -1)
3200#ifdef FLOAT_STORE_FLAG_VALUE
3201 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3202 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3203 REAL_VALUE_NEGATIVE (fsfv)))
3204#endif
3205 )
3206 x = arg1, reverse_code = 1;
3207 }
3208
3209 /* ??? We could also check for
3210
3211 (ne (and (eq (...) (const_int 1))) (const_int 0))
3212
3213 and related forms, but let's wait until we see them occurring. */
3214
3215 if (x == 0)
3216 /* Look up ARG1 in the hash table and see if it has an equivalence
3217 that lets us see what is being compared. */
3218 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3219 GET_MODE (arg1));
3220 if (p)
3221 {
3222 p = p->first_same_value;
3223
3224 /* If what we compare is already known to be constant, that is as
3225 good as it gets.
3226 We need to break the loop in this case, because otherwise we
3227 can have an infinite loop when looking at a reg that is known
3228 to be a constant which is the same as a comparison of a reg
3229 against zero which appears later in the insn stream, which in
3230 turn is constant and the same as the comparison of the first reg
3231 against zero... */
3232 if (p->is_const)
3233 break;
3234 }
3235
3236 for (; p; p = p->next_same_value)
3237 {
3238 enum machine_mode inner_mode = GET_MODE (p->exp);
3239#ifdef FLOAT_STORE_FLAG_VALUE
3240 REAL_VALUE_TYPE fsfv;
3241#endif
3242
3243 /* If the entry isn't valid, skip it. */
3244 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3245 continue;
3246
3247 if (GET_CODE (p->exp) == COMPARE
3248 /* Another possibility is that this machine has a compare insn
3249 that includes the comparison code. In that case, ARG1 would
3250 be equivalent to a comparison operation that would set ARG1 to
3251 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3252 ORIG_CODE is the actual comparison being done; if it is an EQ,
3253 we must reverse ORIG_CODE. On machines with a negative value
3254 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3255 || ((code == NE
3256 || (code == LT
3257 && GET_MODE_CLASS (inner_mode) == MODE_INT
3258 && (GET_MODE_BITSIZE (inner_mode)
3259 <= HOST_BITS_PER_WIDE_INT)
3260 && (STORE_FLAG_VALUE
3261 & ((HOST_WIDE_INT) 1
3262 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3263#ifdef FLOAT_STORE_FLAG_VALUE
3264 || (code == LT
3265 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3266 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3267 REAL_VALUE_NEGATIVE (fsfv)))
3268#endif
3269 )
3270 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3271 {
3272 x = p->exp;
3273 break;
3274 }
3275 else if ((code == EQ
3276 || (code == GE
3277 && GET_MODE_CLASS (inner_mode) == MODE_INT
3278 && (GET_MODE_BITSIZE (inner_mode)
3279 <= HOST_BITS_PER_WIDE_INT)
3280 && (STORE_FLAG_VALUE
3281 & ((HOST_WIDE_INT) 1
3282 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3283#ifdef FLOAT_STORE_FLAG_VALUE
3284 || (code == GE
3285 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3286 && (fsfv = FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)),
3287 REAL_VALUE_NEGATIVE (fsfv)))
3288#endif
3289 )
3290 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3291 {
3292 reverse_code = 1;
3293 x = p->exp;
3294 break;
3295 }
3296
3297 /* If this is fp + constant, the equivalent is a better operand since
3298 it may let us predict the value of the comparison. */
3299 else if (NONZERO_BASE_PLUS_P (p->exp))
3300 {
3301 arg1 = p->exp;
3302 continue;
3303 }
3304 }
3305
3306 /* If we didn't find a useful equivalence for ARG1, we are done.
3307 Otherwise, set up for the next iteration. */
3308 if (x == 0)
3309 break;
3310
3311 /* If we need to reverse the comparison, make sure that is
3312 possible -- we can't necessarily infer the value of GE from LT
3313 with floating-point operands. */
3314 if (reverse_code)
3315 {
3316 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3317 if (reversed == UNKNOWN)
3318 break;
3319 else
3320 code = reversed;
3321 }
3322 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3323 code = GET_CODE (x);
3324 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3325 }
3326
3327 /* Return our results. Return the modes from before fold_rtx
3328 because fold_rtx might produce const_int, and then it's too late. */
3329 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3330 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3331
3332 return code;
3333}
3334
3335
3336/* If X is a nontrivial arithmetic operation on an argument
3337 for which a constant value can be determined, return
3338 the result of operating on that value, as a constant.
3339 Otherwise, return X, possibly with one or more operands
3340 modified by recursive calls to this function.
3341
3342 If X is a register whose contents are known, we do NOT
3343 return those contents here. equiv_constant is called to
3344 perform that task.
3345
3346 INSN is the insn that we may be modifying. If it is 0, make a copy
3347 of X before modifying it. */
3348
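/* For illustration (hypothetical): folding
   (plus:SI (reg:SI 65) (const_int 4)) when equiv_constant knows
   (reg:SI 65) == (const_int 8) yields (const_int 12), because the
   operands are folded and the PLUS simplified; folding a bare
   (reg:SI 65) returns it unchanged, as the comment above says.  */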
3349static rtx
3350fold_rtx (x, insn)
3351 rtx x;
3352 rtx insn;
3353{
3354 enum rtx_code code;
3355 enum machine_mode mode;
3356 const char *fmt;
3357 int i;
3358 rtx new = 0;
3359 int copied = 0;
3360 int must_swap = 0;
3361
3362 /* Folded equivalents of first two operands of X. */
3363 rtx folded_arg0;
3364 rtx folded_arg1;
3365
3366 /* Constant equivalents of first three operands of X;
3367 0 when no such equivalent is known. */
3368 rtx const_arg0;
3369 rtx const_arg1;
3370 rtx const_arg2;
3371
3372 /* The mode of the first operand of X. We need this for sign and zero
3373 extends. */
3374 enum machine_mode mode_arg0;
3375
3376 if (x == 0)
3377 return x;
3378
3379 mode = GET_MODE (x);
3380 code = GET_CODE (x);
3381 switch (code)
3382 {
3383 case CONST:
3384 case CONST_INT:
3385 case CONST_DOUBLE:
3386 case CONST_VECTOR:
3387 case SYMBOL_REF:
3388 case LABEL_REF:
3389 case REG:
3390 /* No use simplifying an EXPR_LIST
3391 since they are used only for lists of args
3392 in a function call's REG_EQUAL note. */
3393 case EXPR_LIST:
3394 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3395 want to (e.g.,) make (addressof (const_int 0)) just because
3396 the location is known to be zero. */
3397 case ADDRESSOF:
3398 return x;
3399
3400#ifdef HAVE_cc0
3401 case CC0:
3402 return prev_insn_cc0;
3403#endif
3404
3405 case PC:
3406 /* If the next insn is a CODE_LABEL followed by a jump table,
3407 PC's value is a LABEL_REF pointing to that label. That
3408 lets us fold switch statements on the VAX. */
3409 if (insn && GET_CODE (insn) == JUMP_INSN)
3410 {
3411 rtx next = next_nonnote_insn (insn);
3412
3413 if (next && GET_CODE (next) == CODE_LABEL
3414 && NEXT_INSN (next) != 0
3415 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3416 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3417 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3418 return gen_rtx_LABEL_REF (Pmode, next);
3419 }
3420 break;
3421
3422 case SUBREG:
3423 /* See if we previously assigned a constant value to this SUBREG. */
3424 if ((new = lookup_as_function (x, CONST_INT)) != 0
3425 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3426 return new;
3427
3428 /* If this is a paradoxical SUBREG, we have no idea what value the
3429 extra bits would have. However, if the operand is equivalent
3430 to a SUBREG whose operand is the same as our mode, and all the
3431 modes are within a word, we can just use the inner operand
3432 because these SUBREGs just say how to treat the register.
3433
3434 Similarly if we find an integer constant. */
3435
3436 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3437 {
3438 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3439 struct table_elt *elt;
3440
3441 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3442 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3443 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3444 imode)) != 0)
3445 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3446 {
3447 if (CONSTANT_P (elt->exp)
3448 && GET_MODE (elt->exp) == VOIDmode)
3449 return elt->exp;
3450
3451 if (GET_CODE (elt->exp) == SUBREG
3452 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3453 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3454 return copy_rtx (SUBREG_REG (elt->exp));
3455 }
3456
3457 return x;
3458 }
3459
3460 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3461 We might be able to if the SUBREG is extracting a single word in an
3462 integral mode or extracting the low part. */
3463
3464 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3465 const_arg0 = equiv_constant (folded_arg0);
3466 if (const_arg0)
3467 folded_arg0 = const_arg0;
3468
3469 if (folded_arg0 != SUBREG_REG (x))
3470 {
3471 new = simplify_subreg (mode, folded_arg0,
3472 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3473 if (new)
3474 return new;
3475 }
3476
3477 /* If this is a narrowing SUBREG and our operand is a REG, see if
3478 we can find an equivalence for REG that is an arithmetic operation
3479 in a wider mode where both operands are paradoxical SUBREGs
3480 from objects of our result mode. In that case, we couldn't report
3481 an equivalent value for that operation, since we don't know what the
3482 extra bits will be. But we can find an equivalence for this SUBREG
3483 by folding that operation in the narrow mode. This allows us to
3484 fold arithmetic in narrow modes when the machine only supports
3485 word-sized arithmetic.
3486
3487 Also look for a case where we have a SUBREG whose operand is the
3488 same as our result. If both modes are smaller than a word, we
3489 are simply interpreting a register in different modes and we
3490 can use the inner value. */
3491
3492 if (GET_CODE (folded_arg0) == REG
3493 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3494 && subreg_lowpart_p (x))
3495 {
3496 struct table_elt *elt;
3497
3498 /* We can use HASH here since we know that canon_hash won't be
3499 called. */
3500 elt = lookup (folded_arg0,
3501 HASH (folded_arg0, GET_MODE (folded_arg0)),
3502 GET_MODE (folded_arg0));
3503
3504 if (elt)
3505 elt = elt->first_same_value;
3506
3507 for (; elt; elt = elt->next_same_value)
3508 {
3509 enum rtx_code eltcode = GET_CODE (elt->exp);
3510
3511 /* Just check for unary and binary operations. */
3512 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3513 && GET_CODE (elt->exp) != SIGN_EXTEND
3514 && GET_CODE (elt->exp) != ZERO_EXTEND
3515 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3516 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3517 && (GET_MODE_CLASS (mode)
3518 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3519 {
3520 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3521
3522 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3523 op0 = fold_rtx (op0, NULL_RTX);
3524
3525 op0 = equiv_constant (op0);
3526 if (op0)
3527 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3528 op0, mode);
3529 }
3530 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3531 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3532 && eltcode != DIV && eltcode != MOD
3533 && eltcode != UDIV && eltcode != UMOD
3534 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3535 && eltcode != ROTATE && eltcode != ROTATERT
3536 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3537 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3538 == mode))
3539 || CONSTANT_P (XEXP (elt->exp, 0)))
3540 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3541 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3542 == mode))
3543 || CONSTANT_P (XEXP (elt->exp, 1))))
3544 {
3545 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3546 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3547
3548 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3549 op0 = fold_rtx (op0, NULL_RTX);
3550
3551 if (op0)
3552 op0 = equiv_constant (op0);
3553
3554 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3555 op1 = fold_rtx (op1, NULL_RTX);
3556
3557 if (op1)
3558 op1 = equiv_constant (op1);
3559
3560 /* If we are looking for the low SImode part of
3561 (ashift:DI c (const_int 32)), it doesn't work
3562 to compute that in SImode, because a 32-bit shift
3563 in SImode is unpredictable. We know the value is 0. */
3564 if (op0 && op1
3565 && GET_CODE (elt->exp) == ASHIFT
3566 && GET_CODE (op1) == CONST_INT
3567 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3568 {
3569 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3570
3571 /* If the count fits in the inner mode's width,
3572 but exceeds the outer mode's width,
3573 the value will get truncated to 0
3574 by the subreg. */
3575 new = const0_rtx;
3576 else
3577 /* If the count exceeds even the inner mode's width,
3578 don't fold this expression. */
3579 new = 0;
3580 }
3581 else if (op0 && op1)
3582 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3583 op0, op1);
3584 }
3585
3586 else if (GET_CODE (elt->exp) == SUBREG
3587 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3588 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3589 <= UNITS_PER_WORD)
3590 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3591 new = copy_rtx (SUBREG_REG (elt->exp));
3592
3593 if (new)
3594 return new;
3595 }
3596 }
3597
3598 return x;
3599
3600 case NOT:
3601 case NEG:
3602 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3603 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3604 new = lookup_as_function (XEXP (x, 0), code);
3605 if (new)
3606 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3607 break;
3608
3609 case MEM:
3610 /* If we are not actually processing an insn, don't try to find the
3611 best address. Not only don't we care, but we could modify the
3612 MEM in an invalid way since we have no insn to validate against. */
3613 if (insn != 0)
3614 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3615
3616 {
3617 /* Even if we don't fold in the insn itself,
3618 we can safely do so here, in hopes of getting a constant. */
3619 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3620 rtx base = 0;
3621 HOST_WIDE_INT offset = 0;
3622
3623 if (GET_CODE (addr) == REG
3624 && REGNO_QTY_VALID_P (REGNO (addr)))
3625 {
3626 int addr_q = REG_QTY (REGNO (addr));
3627 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3628
3629 if (GET_MODE (addr) == addr_ent->mode
3630 && addr_ent->const_rtx != NULL_RTX)
3631 addr = addr_ent->const_rtx;
3632 }
3633
3634 /* If address is constant, split it into a base and integer offset. */
3635 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3636 base = addr;
3637 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3638 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3639 {
3640 base = XEXP (XEXP (addr, 0), 0);
3641 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3642 }
3643 else if (GET_CODE (addr) == LO_SUM
3644 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3645 base = XEXP (addr, 1);
3646 else if (GET_CODE (addr) == ADDRESSOF)
3647 return change_address (x, VOIDmode, addr);
3648
3649 /* If this is a constant pool reference, we can fold it into its
3650 constant to allow better value tracking. */
3651 if (base && GET_CODE (base) == SYMBOL_REF
3652 && CONSTANT_POOL_ADDRESS_P (base))
3653 {
3654 rtx constant = get_pool_constant (base);
3655 enum machine_mode const_mode = get_pool_mode (base);
3656 rtx new;
3657
3658 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3659 constant_pool_entries_cost = COST (constant);
3660
3661 /* If we are loading the full constant, we have an equivalence. */
3662 if (offset == 0 && mode == const_mode)
3663 return constant;
3664
3665 /* If this actually isn't a constant (weird!), we can't do
3666 anything. Otherwise, handle the two most common cases:
3667 extracting a word from a multi-word constant, and extracting
3668 the low-order bits. Other cases don't seem common enough to
3669 worry about. */
3670 if (! CONSTANT_P (constant))
3671 return x;
3672
3673 if (GET_MODE_CLASS (mode) == MODE_INT
3674 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3675 && offset % UNITS_PER_WORD == 0
3676 && (new = operand_subword (constant,
3677 offset / UNITS_PER_WORD,
3678 0, const_mode)) != 0)
3679 return new;
3680
3681 if (((BYTES_BIG_ENDIAN
3682 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3683 || (! BYTES_BIG_ENDIAN && offset == 0))
3684 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3685 return new;
3686 }
3687
3688 /* If this is a reference to a label at a known position in a jump
3689 table, we also know its value. */
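	/* For example (hypothetical labels): with 4-byte SImode entries,
	   a load from (plus (label_ref L) (const_int 8)) out of the
	   ADDR_VEC following L yields the vector's third label_ref.  */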
3690 if (base && GET_CODE (base) == LABEL_REF)
3691 {
3692 rtx label = XEXP (base, 0);
3693 rtx table_insn = NEXT_INSN (label);
3694
3695 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3696 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3697 {
3698 rtx table = PATTERN (table_insn);
3699
3700 if (offset >= 0
3701 && (offset / GET_MODE_SIZE (GET_MODE (table))
3702 < XVECLEN (table, 0)))
3703 return XVECEXP (table, 0,
3704 offset / GET_MODE_SIZE (GET_MODE (table)));
3705 }
3706 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3707 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3708 {
3709 rtx table = PATTERN (table_insn);
3710
3711 if (offset >= 0
3712 && (offset / GET_MODE_SIZE (GET_MODE (table))
3713 < XVECLEN (table, 1)))
3714 {
3715 offset /= GET_MODE_SIZE (GET_MODE (table));
3716 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3717 XEXP (table, 0));
3718
3719 if (GET_MODE (table) != Pmode)
3720 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3721
3722 /* Indicate this is a constant. This isn't a
3723 valid form of CONST, but it will only be used
3724 to fold the next insns and then discarded, so
3725 it should be safe.
3726
3727 Note this expression must be explicitly discarded,
3728 by cse_insn, else it may end up in a REG_EQUAL note
3729 and "escape" to cause problems elsewhere. */
3730 return gen_rtx_CONST (GET_MODE (new), new);
3731 }
3732 }
3733 }
3734
3735 return x;
3736 }
3737
3738#ifdef NO_FUNCTION_CSE
3739 case CALL:
3740 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3741 return x;
3742 break;
3743#endif
3744
3745 case ASM_OPERANDS:
3746 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3747 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3748 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3749 break;
3750
3751 default:
3752 break;
3753 }
3754
3755 const_arg0 = 0;
3756 const_arg1 = 0;
3757 const_arg2 = 0;
3758 mode_arg0 = VOIDmode;
3759
3760 /* Try folding our operands.
3761 Then see which ones have constant values known. */
3762
3763 fmt = GET_RTX_FORMAT (code);
3764 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3765 if (fmt[i] == 'e')
3766 {
3767 rtx arg = XEXP (x, i);
3768 rtx folded_arg = arg, const_arg = 0;
3769 enum machine_mode mode_arg = GET_MODE (arg);
3770 rtx cheap_arg, expensive_arg;
3771 rtx replacements[2];
3772 int j;
3773 int old_cost = COST_IN (XEXP (x, i), code);
3774
3775 /* Most arguments are cheap, so handle them specially. */
3776 switch (GET_CODE (arg))
3777 {
3778 case REG:
3779 /* This is the same as calling equiv_constant; it is duplicated
3780 here for speed. */
3781 if (REGNO_QTY_VALID_P (REGNO (arg)))
3782 {
3783 int arg_q = REG_QTY (REGNO (arg));
3784 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3785
3786 if (arg_ent->const_rtx != NULL_RTX
3787 && GET_CODE (arg_ent->const_rtx) != REG
3788 && GET_CODE (arg_ent->const_rtx) != PLUS)
3789 const_arg
3790 = gen_lowpart_if_possible (GET_MODE (arg),
3791 arg_ent->const_rtx);
3792 }
3793 break;
3794
3795 case CONST:
3796 case CONST_INT:
3797 case SYMBOL_REF:
3798 case LABEL_REF:
3799 case CONST_DOUBLE:
3800 case CONST_VECTOR:
3801 const_arg = arg;
3802 break;
3803
3804#ifdef HAVE_cc0
3805 case CC0:
3806 folded_arg = prev_insn_cc0;
3807 mode_arg = prev_insn_cc0_mode;
3808 const_arg = equiv_constant (folded_arg);
3809 break;
3810#endif
3811
3812 default:
3813 folded_arg = fold_rtx (arg, insn);
3814 const_arg = equiv_constant (folded_arg);
3815 }
3816
3817 /* For the first three operands, see if the operand
3818 is constant or equivalent to a constant. */
3819 switch (i)
3820 {
3821 case 0:
3822 folded_arg0 = folded_arg;
3823 const_arg0 = const_arg;
3824 mode_arg0 = mode_arg;
3825 break;
3826 case 1:
3827 folded_arg1 = folded_arg;
3828 const_arg1 = const_arg;
3829 break;
3830 case 2:
3831 const_arg2 = const_arg;
3832 break;
3833 }
3834
3835 /* Pick the least expensive of the folded argument and an
3836 equivalent constant argument. */
3837 if (const_arg == 0 || const_arg == folded_arg
3838 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3839 cheap_arg = folded_arg, expensive_arg = const_arg;
3840 else
3841 cheap_arg = const_arg, expensive_arg = folded_arg;
3842
3843 /* Try to replace the operand with the cheapest of the two
3844 possibilities. If it doesn't work and this is either of the first
3845 two operands of a commutative operation, try swapping them.
3846 If THAT fails, try the more expensive, provided it is cheaper
3847 than what is already there. */
3848
3849 if (cheap_arg == XEXP (x, i))
3850 continue;
3851
3852 if (insn == 0 && ! copied)
3853 {
3854 x = copy_rtx (x);
3855 copied = 1;
3856 }
3857
3858 /* Order the replacements from cheapest to most expensive. */
3859 replacements[0] = cheap_arg;
3860 replacements[1] = expensive_arg;
3861
3862 for (j = 0; j < 2 && replacements[j]; j++)
3863 {
3864 int new_cost = COST_IN (replacements[j], code);
3865
3866 /* Stop if what existed before was cheaper. Prefer constants
3867 in the case of a tie. */
3868 if (new_cost > old_cost
3869 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3870 break;
3871
3872 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3873 break;
3874
3875 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3876 || code == LTGT || code == UNEQ || code == ORDERED
3877 || code == UNORDERED)
3878 {
3879 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3880 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3881
3882 if (apply_change_group ())
3883 {
3884 /* Swap them back to be invalid so that this loop can
3885 continue and flag them to be swapped back later. */
3886 rtx tem;
3887
3888 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3889 XEXP (x, 1) = tem;
3890 must_swap = 1;
3891 break;
3892 }
3893 }
3894 }
3895 }
3896
3897 else
3898 {
3899 if (fmt[i] == 'E')
3900 /* Don't try to fold inside of a vector of expressions.
3901 Doing nothing is harmless. */
3902 {;}
3903 }
3904
3905 /* If a commutative operation, place a constant integer as the second
3906 operand unless the first operand is also a constant integer. Otherwise,
3907 place any constant second unless the first operand is also a constant. */
3908
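  /* E.g. (plus:SI (const_int 4) (reg:SI 60)) becomes
     (plus:SI (reg:SI 60) (const_int 4)), and (eq (const_int 0) (reg:SI 60))
     becomes (eq (reg:SI 60) (const_int 0)); the codes handled here are
     symmetric, so the operation code itself need not change.  */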
3909 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3910 || code == LTGT || code == UNEQ || code == ORDERED
3911 || code == UNORDERED)
3912 {
3913 if (must_swap || (const_arg0
3914 && (const_arg1 == 0
3915 || (GET_CODE (const_arg0) == CONST_INT
3916 && GET_CODE (const_arg1) != CONST_INT))))
3917 {
3918 rtx tem = XEXP (x, 0);
3919
3920 if (insn == 0 && ! copied)
3921 {
3922 x = copy_rtx (x);
3923 copied = 1;
3924 }
3925
3926 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3927 validate_change (insn, &XEXP (x, 1), tem, 1);
3928 if (apply_change_group ())
3929 {
3930 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3931 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3932 }
3933 }
3934 }
3935
3936 /* If X is an arithmetic operation, see if we can simplify it. */
3937
3938 switch (GET_RTX_CLASS (code))
3939 {
3940 case '1':
3941 {
3942 int is_const = 0;
3943
3944 /* We can't simplify extension ops unless we know the
3945 original mode. */
3946 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3947 && mode_arg0 == VOIDmode)
3948 break;
3949
3950 /* If we had a CONST, strip it off and put it back later if we
3951 fold. */
3952 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3953 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3954
3955 new = simplify_unary_operation (code, mode,
3956 const_arg0 ? const_arg0 : folded_arg0,
3957 mode_arg0);
3958 if (new != 0 && is_const)
3959 new = gen_rtx_CONST (mode, new);
3960 }
3961 break;
3962
3963 case '<':
3964 /* See what items are actually being compared and set FOLDED_ARG[01]
3965 to those values and CODE to the actual comparison code. If any are
3966 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3967 do anything if both operands are already known to be constant. */
3968
3969 if (const_arg0 == 0 || const_arg1 == 0)
3970 {
3971 struct table_elt *p0, *p1;
3972 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3973 enum machine_mode mode_arg1;
3974
3975#ifdef FLOAT_STORE_FLAG_VALUE
3976 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3977 {
3978 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3979 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3980 false_rtx = CONST0_RTX (mode);
3981 }
3982#endif
3983
3984 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3985 &mode_arg0, &mode_arg1);
3986 const_arg0 = equiv_constant (folded_arg0);
3987 const_arg1 = equiv_constant (folded_arg1);
3988
3989 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3990 what kinds of things are being compared, so we can't do
3991 anything with this comparison. */
3992
3993 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3994 break;
3995
3996 /* If we do not now have two constants being compared, see
3997 if we can nevertheless deduce some things about the
3998 comparison. */
3999 if (const_arg0 == 0 || const_arg1 == 0)
4000 {
4001 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
4002 non-explicit constant? These aren't zero, but we
4003 don't know their sign. */
4004 if (const_arg1 == const0_rtx
4005 && (NONZERO_BASE_PLUS_P (folded_arg0)
4006#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
4007 come out as 0. */
4008 || GET_CODE (folded_arg0) == SYMBOL_REF
4009#endif
4010 || GET_CODE (folded_arg0) == LABEL_REF
4011 || GET_CODE (folded_arg0) == CONST))
4012 {
4013 if (code == EQ)
4014 return false_rtx;
4015 else if (code == NE)
4016 return true_rtx;
4017 }
4018
4019 /* See if the two operands are the same. */
4020
4021 if (folded_arg0 == folded_arg1
4022 || (GET_CODE (folded_arg0) == REG
4023 && GET_CODE (folded_arg1) == REG
4024 && (REG_QTY (REGNO (folded_arg0))
4025 == REG_QTY (REGNO (folded_arg1))))
4026 || ((p0 = lookup (folded_arg0,
4027 (safe_hash (folded_arg0, mode_arg0)
4028 & HASH_MASK), mode_arg0))
4029 && (p1 = lookup (folded_arg1,
4030 (safe_hash (folded_arg1, mode_arg0)
4031 & HASH_MASK), mode_arg0))
4032 && p0->first_same_value == p1->first_same_value))
4033 {
4034 /* Sadly two equal NaNs are not equivalent. */
4035 if (!HONOR_NANS (mode_arg0))
4036 return ((code == EQ || code == LE || code == GE
4037 || code == LEU || code == GEU || code == UNEQ
4038 || code == UNLE || code == UNGE
4039 || code == ORDERED)
4040 ? true_rtx : false_rtx);
4041 /* Take care of the FP compares we can resolve. */
4042 if (code == UNEQ || code == UNLE || code == UNGE)
4043 return true_rtx;
4044 if (code == LTGT || code == LT || code == GT)
4045 return false_rtx;
4046 }
4047
4048 /* If FOLDED_ARG0 is a register, see if the comparison we are
4049 doing now is either the same as we did before or the reverse
4050 (we only check the reverse if not floating-point). */
4051 else if (GET_CODE (folded_arg0) == REG)
4052 {
4053 int qty = REG_QTY (REGNO (folded_arg0));
4054
4055 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4056 {
4057 struct qty_table_elem *ent = &qty_table[qty];
4058
4059 if ((comparison_dominates_p (ent->comparison_code, code)
4060 || (! FLOAT_MODE_P (mode_arg0)
4061 && comparison_dominates_p (ent->comparison_code,
4062 reverse_condition (code))))
4063 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4064 || (const_arg1
4065 && rtx_equal_p (ent->comparison_const,
4066 const_arg1))
4067 || (GET_CODE (folded_arg1) == REG
4068 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4069 return (comparison_dominates_p (ent->comparison_code, code)
4070 ? true_rtx : false_rtx);
4071 }
4072 }
4073 }
4074 }
4075
4076 /* If we are comparing against zero, see if the first operand is
4077 equivalent to an IOR with a constant. If so, we may be able to
4078 determine the result of this comparison. */
4079
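	  /* E.g. if FOLDED_ARG0 is known equal to (ior:SI y (const_int 4)),
	     it cannot be zero, so EQ folds to false and NE to true; and if
	     the IOR constant has the sign bit set, the value is negative,
	     resolving LT/LE and GT/GE as well.  */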
4080 if (const_arg1 == const0_rtx)
4081 {
4082 rtx y = lookup_as_function (folded_arg0, IOR);
4083 rtx inner_const;
4084
4085 if (y != 0
4086 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4087 && GET_CODE (inner_const) == CONST_INT
4088 && INTVAL (inner_const) != 0)
4089 {
4090 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4091 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4092 && (INTVAL (inner_const)
4093 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4094 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4095
4096#ifdef FLOAT_STORE_FLAG_VALUE
4097 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4098 {
4099 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4100 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4101 false_rtx = CONST0_RTX (mode);
4102 }
4103#endif
4104
4105 switch (code)
4106 {
4107 case EQ:
4108 return false_rtx;
4109 case NE:
4110 return true_rtx;
4111 case LT: case LE:
4112 if (has_sign)
4113 return true_rtx;
4114 break;
4115 case GT: case GE:
4116 if (has_sign)
4117 return false_rtx;
4118 break;
4119 default:
4120 break;
4121 }
4122 }
4123 }
4124
4125 new = simplify_relational_operation (code,
4126 (mode_arg0 != VOIDmode
4127 ? mode_arg0
4128 : (GET_MODE (const_arg0
4129 ? const_arg0
4130 : folded_arg0)
4131 != VOIDmode)
4132 ? GET_MODE (const_arg0
4133 ? const_arg0
4134 : folded_arg0)
4135 : GET_MODE (const_arg1
4136 ? const_arg1
4137 : folded_arg1)),
4138 const_arg0 ? const_arg0 : folded_arg0,
4139 const_arg1 ? const_arg1 : folded_arg1);
4140#ifdef FLOAT_STORE_FLAG_VALUE
4141 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4142 {
4143 if (new == const0_rtx)
4144 new = CONST0_RTX (mode);
4145 else
4146 new = (CONST_DOUBLE_FROM_REAL_VALUE
4147 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4148 }
4149#endif
4150 break;
4151
4152 case '2':
4153 case 'c':
4154 switch (code)
4155 {
4156 case PLUS:
4157 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4158 with that LABEL_REF as its second operand. If so, the result is
4159 the first operand of that MINUS. This handles switches with an
4160 ADDR_DIFF_VEC table. */
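	  /* E.g. a dispatch sequence may compute
	     (plus (minus (label_ref L2) (label_ref L1)) (label_ref L1)),
	     which folds to just (label_ref L2).  */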
4161 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4162 {
4163 rtx y
4164 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4165 : lookup_as_function (folded_arg0, MINUS);
4166
4167 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4168 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4169 return XEXP (y, 0);
4170
4171 /* Now try for a CONST of a MINUS like the above. */
4172 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4173 : lookup_as_function (folded_arg0, CONST))) != 0
4174 && GET_CODE (XEXP (y, 0)) == MINUS
4175 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4176 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4177 return XEXP (XEXP (y, 0), 0);
4178 }
4179
4180 /* Likewise if the operands are in the other order. */
4181 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4182 {
4183 rtx y
4184 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4185 : lookup_as_function (folded_arg1, MINUS);
4186
4187 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4188 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4189 return XEXP (y, 0);
4190
4191 /* Now try for a CONST of a MINUS like the above. */
4192 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4193 : lookup_as_function (folded_arg1, CONST))) != 0
4194 && GET_CODE (XEXP (y, 0)) == MINUS
4195 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4196 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4197 return XEXP (XEXP (y, 0), 0);
4198 }
4199
4200 /* If second operand is a register equivalent to a negative
4201 CONST_INT, see if we can find a register equivalent to the
4202 positive constant. Make a MINUS if so. Don't do this for
4203 a non-negative constant since we might then alternate between
4204 choosing positive and negative constants. Having the positive
4205 constant previously-used is the more common case. Be sure
4206 the resulting constant is non-negative; if const_arg1 were
4207 the smallest negative number this would overflow: depending
4208 on the mode, this would either just be the same value (and
4209 hence not save anything) or be incorrect. */
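	  /* Illustration (hypothetical registers): if (reg:SI 101) is
	     known to hold (const_int -4) and some (reg:SI 102) is known
	     to hold (const_int 4), then (plus:SI x (reg:SI 101)) can be
	     rewritten as (minus:SI x (reg:SI 102)).  */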
4210 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4211 && INTVAL (const_arg1) < 0
4212 /* This used to test
4213
4214 -INTVAL (const_arg1) >= 0
4215
4216 But the Sun V5.0 compilers mis-compiled that test. So
4217 instead we test for the problematic value in a more direct
4218 manner and hope the Sun compilers get it correct. */
4219 && INTVAL (const_arg1) !=
4220 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4221 && GET_CODE (folded_arg1) == REG)
4222 {
4223 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4224 struct table_elt *p
4225 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4226 mode);
4227
4228 if (p)
4229 for (p = p->first_same_value; p; p = p->next_same_value)
4230 if (GET_CODE (p->exp) == REG)
4231 return simplify_gen_binary (MINUS, mode, folded_arg0,
4232 canon_reg (p->exp, NULL_RTX));
4233 }
4234 goto from_plus;
4235
4236 case MINUS:
4237 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4238 If so, produce (PLUS Z C2-C). */
4239 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4240 {
4241 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4242 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4243 return fold_rtx (plus_constant (copy_rtx (y),
4244 -INTVAL (const_arg1)),
4245 NULL_RTX);
4246 }
4247
4248 /* Fall through. */
4249
4250 from_plus:
4251 case SMIN: case SMAX: case UMIN: case UMAX:
4252 case IOR: case AND: case XOR:
4253 case MULT:
4254 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4255 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4256 is known to be of similar form, we may be able to replace the
4257 operation with a combined operation. This may eliminate the
4258 intermediate operation if every use is simplified in this way.
4259 Note that the similar optimization done by combine.c only works
4260 if the intermediate operation's result has only one reference. */
4261
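	  /* Illustration (hypothetical registers): if (reg:SI 100) is
	     known to be (plus:SI (reg:SI 99) (const_int 4)), then
	     (plus:SI (reg:SI 100) (const_int 12)) folds to
	     (plus:SI (reg:SI 99) (const_int 16)); for shifts the two
	     counts are likewise added.  */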
4262 if (GET_CODE (folded_arg0) == REG
4263 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4264 {
4265 int is_shift
4266 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4267 rtx y = lookup_as_function (folded_arg0, code);
4268 rtx inner_const;
4269 enum rtx_code associate_code;
4270 rtx new_const;
4271
4272 if (y == 0
4273 || 0 == (inner_const
4274 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4275 || GET_CODE (inner_const) != CONST_INT
4276 /* If we have compiled a statement like
4277 "if (x == (x & mask1))", and now are looking at
4278 "x & mask2", we will have a case where the first operand
4279 of Y is the same as our first operand. Unless we detect
4280 this case, an infinite loop will result. */
4281 || XEXP (y, 0) == folded_arg0)
4282 break;
4283
4284 /* Don't associate these operations if they are a PLUS with the
4285 same constant and it is a power of two. These might be doable
4286 with a pre- or post-increment. Similarly for two subtracts of
4287 identical powers of two with post-decrement. */
4288
4289 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4290 && ((HAVE_PRE_INCREMENT
4291 && exact_log2 (INTVAL (const_arg1)) >= 0)
4292 || (HAVE_POST_INCREMENT
4293 && exact_log2 (INTVAL (const_arg1)) >= 0)
4294 || (HAVE_PRE_DECREMENT
4295 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4296 || (HAVE_POST_DECREMENT
4297 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4298 break;
4299
4300 /* Compute the code used to compose the constants. For example,
4301 A-C1-C2 is A-(C1 + C2), so if CODE == MINUS, we want PLUS. */
4302
4303 associate_code = (is_shift || code == MINUS ? PLUS : code);
4304
4305 new_const = simplify_binary_operation (associate_code, mode,
4306 const_arg1, inner_const);
4307
4308 if (new_const == 0)
4309 break;
4310
4311 /* If we are associating shift operations, don't let this
4312 produce a shift of the size of the object or larger.
4313 This could occur when we follow a sign-extend by a right
4314 shift on a machine that does a sign-extend as a pair
4315 of shifts. */
4316
4317 if (is_shift && GET_CODE (new_const) == CONST_INT
4318 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4319 {
4320 /* As an exception, we can turn an ASHIFTRT of this
4321 form into a shift of the number of bits - 1. */
4322 if (code == ASHIFTRT)
4323 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4324 else
4325 break;
4326 }
4327
4328 y = copy_rtx (XEXP (y, 0));
4329
4330 /* If Y contains our first operand (the most common way this
4331 can happen is if Y is a MEM), we would go into an infinite
4332 loop if we tried to fold it. So don't in that case. */
4333
4334 if (! reg_mentioned_p (folded_arg0, y))
4335 y = fold_rtx (y, insn);
4336
4337 return simplify_gen_binary (code, mode, y, new_const);
4338 }
4339 break;
4340
4341 case DIV: case UDIV:
4342 /* ??? The associative optimization performed immediately above is
4343 also possible for DIV and UDIV using associate_code of MULT.
4344 However, we would need extra code to verify that the
4345 multiplication does not overflow, that is, there is no overflow
4346 in the calculation of new_const. */
4347 break;
4348
4349 default:
4350 break;
4351 }
4352
4353 new = simplify_binary_operation (code, mode,
4354 const_arg0 ? const_arg0 : folded_arg0,
4355 const_arg1 ? const_arg1 : folded_arg1);
4356 break;
4357
4358 case 'o':
4359 /* (lo_sum (high X) X) is simply X. */
4360 if (code == LO_SUM && const_arg0 != 0
4361 && GET_CODE (const_arg0) == HIGH
4362 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4363 return const_arg1;
4364 break;
4365
4366 case '3':
4367 case 'b':
4368 new = simplify_ternary_operation (code, mode, mode_arg0,
4369 const_arg0 ? const_arg0 : folded_arg0,
4370 const_arg1 ? const_arg1 : folded_arg1,
4371 const_arg2 ? const_arg2 : XEXP (x, 2));
4372 break;
4373
4374 case 'x':
4375 /* Always eliminate CONSTANT_P_RTX at this stage. */
4376 if (code == CONSTANT_P_RTX)
4377 return (const_arg0 ? const1_rtx : const0_rtx);
4378 break;
4379 }
4380
4381 return new ? new : x;
4382}
4383
4384
4385/* Return a constant value currently equivalent to X.
4386 Return 0 if we don't know one. */
4387
4388static rtx
4389equiv_constant (x)
4390 rtx x;
4391{
4392 if (GET_CODE (x) == REG
4393 && REGNO_QTY_VALID_P (REGNO (x)))
4394 {
4395 int x_q = REG_QTY (REGNO (x));
4396 struct qty_table_elem *x_ent = &qty_table[x_q];
4397
4398 if (x_ent->const_rtx)
4399 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4400 }
4401
4402 if (x == 0 || CONSTANT_P (x))
4403 return x;
4404
4405 /* If X is a MEM, try to fold it outside the context of any insn to see if
4406 it might be equivalent to a constant. That handles the case where it
4407 is a constant-pool reference. Then try to look it up in the hash table
4408 in case it is something whose value we have seen before. */
4409
4410 if (GET_CODE (x) == MEM)
4411 {
4412 struct table_elt *elt;
4413
4414 x = fold_rtx (x, NULL_RTX);
4415 if (CONSTANT_P (x))
4416 return x;
4417
4418 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4419 if (elt == 0)
4420 return 0;
4421
4422 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4423 if (elt->is_const && CONSTANT_P (elt->exp))
4424 return elt->exp;
4425 }
4426
4427 return 0;
4428}
4429
4430
4431/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4432 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4433 least-significant part of X.
4434 MODE specifies how big a part of X to return.
4435
4436 If the requested operation cannot be done, 0 is returned.
4437
4438 This is similar to gen_lowpart in emit-rtl.c. */
4439
4440rtx
4441gen_lowpart_if_possible (mode, x)
4442 enum machine_mode mode;
4443 rtx x;
4444{
4445 rtx result = gen_lowpart_common (mode, x);
4446
4447 if (result)
4448 return result;
4449 else if (GET_CODE (x) == MEM)
4450 {
4451 /* This is the only other case we handle. */
4452 int offset = 0;
4453 rtx new;
4454
4455 if (WORDS_BIG_ENDIAN)
4456 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4457 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4458 if (BYTES_BIG_ENDIAN)
4459 /* Adjust the address so that the address-after-the-data is
4460 unchanged. */
4461 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4462 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
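      /* Worked example: on a fully big-endian 32-bit target, the low
	 HImode part of (mem:SI a) gets offset 0 - (2 - 4) = 2, i.e.
	 (mem:HI (plus a 2)), so the address just past the data is
	 unchanged.  */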
4463
4464 new = adjust_address_nv (x, mode, offset);
4465 if (! memory_address_p (mode, XEXP (new, 0)))
4466 return 0;
4467
4468 return new;
4469 }
4470 else
4471 return 0;
4472}
4473
4474
4475/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4476 branch. It will be zero if not.
4477
4478 In certain cases, this can cause us to add an equivalence. For example,
4479 if we are following the taken case of
4480 if (i == 2)
4481 we can add the fact that `i' and `2' are now equivalent.
4482
4483 In any case, we can record that this comparison was passed. If the same
4484 comparison is seen later, we will know its value. */
4485
4486static void
4487record_jump_equiv (insn, taken)
4488 rtx insn;
4489 int taken;
4490{
4491 int cond_known_true;
4492 rtx op0, op1;
4493 rtx set;
4494 enum machine_mode mode, mode0, mode1;
4495 int reversed_nonequality = 0;
4496 enum rtx_code code;
4497
4498 /* Ensure this is the right kind of insn. */
4499 if (! any_condjump_p (insn))
4500 return;
4501 set = pc_set (insn);
4502
4503 /* See if this jump condition is known true or false. */
4504 if (taken)
4505 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4506 else
4507 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4508
4509 /* Get the type of comparison being done and the operands being compared.
4510 If we had to reverse a non-equality condition, record that fact so we
4511 know that it isn't valid for floating-point. */
4512 code = GET_CODE (XEXP (SET_SRC (set), 0));
4513 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4514 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4515
4516 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4517 if (! cond_known_true)
4518 {
4519 code = reversed_comparison_code_parts (code, op0, op1, insn);
4520
4521 /* Don't remember if we can't find the inverse. */
4522 if (code == UNKNOWN)
4523 return;
4524 }
4525
4526 /* The mode is the mode of the non-constant. */
4527 mode = mode0;
4528 if (mode1 != VOIDmode)
4529 mode = mode1;
4530
4531 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4532}
4533
4534/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4535 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4536 Make any useful entries we can with that information. Called from
4537 above function and called recursively. */
4538
4539static void
4540record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4541 enum rtx_code code;
4542 enum machine_mode mode;
4543 rtx op0, op1;
4544 int reversed_nonequality;
4545{
4546 unsigned op0_hash, op1_hash;
4547 int op0_in_memory, op1_in_memory;
4548 struct table_elt *op0_elt, *op1_elt;
4549
4550 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4551 we know that they are also equal in the smaller mode (this is also
4552 true for all smaller modes whether or not there is a SUBREG, but
4553 is not worth testing for with no SUBREG). */
4554
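  /* E.g. learning that (subreg:DI (reg:SI 100) 0) == (reg:DI 101)
     also tells us that (reg:SI 100) equals the low SImode part of
     (reg:DI 101), and that fact is recorded here (register numbers
     hypothetical).  */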
4555 /* Note that GET_MODE (op0) may not equal MODE. */
4556 if (code == EQ && GET_CODE (op0) == SUBREG
4557 && (GET_MODE_SIZE (GET_MODE (op0))
4558 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4559 {
4560 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4561 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4562
4563 record_jump_cond (code, mode, SUBREG_REG (op0),
4564 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4565 reversed_nonequality);
4566 }
4567
4568 if (code == EQ && GET_CODE (op1) == SUBREG
4569 && (GET_MODE_SIZE (GET_MODE (op1))
4570 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4571 {
4572 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4573 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4574
4575 record_jump_cond (code, mode, SUBREG_REG (op1),
4576 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4577 reversed_nonequality);
4578 }
4579
4580 /* Similarly, if this is an NE comparison, and either is a SUBREG
4581 making a smaller mode, we know the whole thing is also NE. */
4582
4583 /* Note that GET_MODE (op0) may not equal MODE;
4584 if we test MODE instead, we can get an infinite recursion
4585 alternating between two modes each wider than MODE. */
4586
4587 if (code == NE && GET_CODE (op0) == SUBREG
4588 && subreg_lowpart_p (op0)
4589 && (GET_MODE_SIZE (GET_MODE (op0))
4590 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4591 {
4592 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4593 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4594
4595 record_jump_cond (code, mode, SUBREG_REG (op0),
4596 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4597 reversed_nonequality);
4598 }
4599
4600 if (code == NE && GET_CODE (op1) == SUBREG
4601 && subreg_lowpart_p (op1)
4602 && (GET_MODE_SIZE (GET_MODE (op1))
4603 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4604 {
4605 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4606 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4607
4608 record_jump_cond (code, mode, SUBREG_REG (op1),
4609 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4610 reversed_nonequality);
4611 }
4612
4613 /* Hash both operands. */
4614
4615 do_not_record = 0;
4616 hash_arg_in_memory = 0;
4617 op0_hash = HASH (op0, mode);
4618 op0_in_memory = hash_arg_in_memory;
4619
4620 if (do_not_record)
4621 return;
4622
4623 do_not_record = 0;
4624 hash_arg_in_memory = 0;
4625 op1_hash = HASH (op1, mode);
4626 op1_in_memory = hash_arg_in_memory;
4627
4628 if (do_not_record)
4629 return;
4630
4631 /* Look up both operands. */
4632 op0_elt = lookup (op0, op0_hash, mode);
4633 op1_elt = lookup (op1, op1_hash, mode);
4634
4635 /* If both operands are already equivalent or if they are not in the
4636 table but are identical, do nothing. */
4637 if ((op0_elt != 0 && op1_elt != 0
4638 && op0_elt->first_same_value == op1_elt->first_same_value)
4639 || op0 == op1 || rtx_equal_p (op0, op1))
4640 return;
4641
4642 /* If we aren't setting two things equal all we can do is save this
4643 comparison. Similarly if this is floating-point. In the latter
4644 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4645 If we record the equality, we might inadvertently delete code
4646 whose intent was to change -0 to +0. */
4647
4648 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4649 {
4650 struct qty_table_elem *ent;
4651 int qty;
4652
4653 /* If we reversed a floating-point comparison, if OP0 is not a
4654 register, or if OP1 is neither a register nor a constant, we can't
4655 do anything. */
4656
4657 if (GET_CODE (op1) != REG)
4658 op1 = equiv_constant (op1);
4659
4660 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4661 || GET_CODE (op0) != REG || op1 == 0)
4662 return;
4663
4664 /* Put OP0 in the hash table if it isn't already. This gives it a
4665 new quantity number. */
4666 if (op0_elt == 0)
4667 {
4668 if (insert_regs (op0, NULL, 0))
4669 {
4670 rehash_using_reg (op0);
4671 op0_hash = HASH (op0, mode);
4672
4673 /* If OP0 is contained in OP1, this changes its hash code
4674 as well. Faster to rehash than to check, except
4675 for the simple case of a constant. */
4676 if (! CONSTANT_P (op1))
4677 op1_hash = HASH (op1, mode);
4678 }
4679
4680 op0_elt = insert (op0, NULL, op0_hash, mode);
4681 op0_elt->in_memory = op0_in_memory;
4682 }
4683
4684 qty = REG_QTY (REGNO (op0));
4685 ent = &qty_table[qty];
4686
4687 ent->comparison_code = code;
4688 if (GET_CODE (op1) == REG)
4689 {
4690 /* Look it up again--in case op0 and op1 are the same. */
4691 op1_elt = lookup (op1, op1_hash, mode);
4692
4693 /* Put OP1 in the hash table so it gets a new quantity number. */
4694 if (op1_elt == 0)
4695 {
4696 if (insert_regs (op1, NULL, 0))
4697 {
4698 rehash_using_reg (op1);
4699 op1_hash = HASH (op1, mode);
4700 }
4701
4702 op1_elt = insert (op1, NULL, op1_hash, mode);
4703 op1_elt->in_memory = op1_in_memory;
4704 }
4705
4706 ent->comparison_const = NULL_RTX;
4707 ent->comparison_qty = REG_QTY (REGNO (op1));
4708 }
4709 else
4710 {
4711 ent->comparison_const = op1;
4712 ent->comparison_qty = -1;
4713 }
4714
4715 return;
4716 }
4717
4718 /* If either side is still missing an equivalence, make it now,
4719 then merge the equivalences. */
4720
4721 if (op0_elt == 0)
4722 {
4723 if (insert_regs (op0, NULL, 0))
4724 {
4725 rehash_using_reg (op0);
4726 op0_hash = HASH (op0, mode);
4727 }
4728
4729 op0_elt = insert (op0, NULL, op0_hash, mode);
4730 op0_elt->in_memory = op0_in_memory;
4731 }
4732
4733 if (op1_elt == 0)
4734 {
4735 if (insert_regs (op1, NULL, 0))
4736 {
4737 rehash_using_reg (op1);
4738 op1_hash = HASH (op1, mode);
4739 }
4740
4741 op1_elt = insert (op1, NULL, op1_hash, mode);
4742 op1_elt->in_memory = op1_in_memory;
4743 }
4744
4745 merge_equiv_classes (op0_elt, op1_elt);
4746 last_jump_equiv_class = op0_elt;
4747}
4748
4749
4750/* CSE processing for one instruction.
4751 First simplify sources and addresses of all assignments
4752 in the instruction, using previously-computed equivalent values.
4753 Then install the new sources and destinations in the table
4754 of available values.
4755
4756 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4757 the insn. It means that INSN is inside a libcall block. In this
4758 case LIBCALL_INSN is the corresponding insn with REG_LIBCALL. */
4759
4760/* Data on one SET contained in the instruction. */
4761
4762struct set
4763{
4764 /* The SET rtx itself. */
4765 rtx rtl;
4766 /* The SET_SRC of the rtx (the original value, if it is changing). */
4767 rtx src;
4768 /* The hash-table element for the SET_SRC of the SET. */
4769 struct table_elt *src_elt;
4770 /* Hash value for the SET_SRC. */
4771 unsigned src_hash;
4772 /* Hash value for the SET_DEST. */
4773 unsigned dest_hash;
4774 /* The SET_DEST, with SUBREG, etc., stripped. */
4775 rtx inner_dest;
4776 /* Nonzero if the SET_SRC is in memory. */
4777 char src_in_memory;
4778 /* Nonzero if the SET_SRC contains something
4779 whose value cannot be predicted and understood. */
4780 char src_volatile;
4781 /* Original machine mode, in case it becomes a CONST_INT. */
4782 enum machine_mode mode;
4783 /* A constant equivalent for SET_SRC, if any. */
4784 rtx src_const;
4785 /* Original SET_SRC value used for libcall notes. */
4786 rtx orig_src;
4787 /* Hash value of constant equivalent for SET_SRC. */
4788 unsigned src_const_hash;
4789 /* Table entry for constant equivalent for SET_SRC, if any. */
4790 struct table_elt *src_const_elt;
4791};
4792
4793static void
4794cse_insn (insn, libcall_insn)
4795 rtx insn;
4796 rtx libcall_insn;
4797{
4798 rtx x = PATTERN (insn);
4799 int i;
4800 rtx tem;
4801 int n_sets = 0;
4802
4803#ifdef HAVE_cc0
4804 /* Records what this insn does to set CC0. */
4805 rtx this_insn_cc0 = 0;
4806 enum machine_mode this_insn_cc0_mode = VOIDmode;
4807#endif
4808
4809 rtx src_eqv = 0;
4810 struct table_elt *src_eqv_elt = 0;
4811 int src_eqv_volatile = 0;
4812 int src_eqv_in_memory = 0;
4813 unsigned src_eqv_hash = 0;
4814
4815 struct set *sets = (struct set *) 0;
4816
4817 this_insn = insn;
4818
4819 /* Find all the SETs and CLOBBERs in this instruction.
4820 Record all the SETs in the array `sets' and count them.
4821 Also determine whether there is a CLOBBER that invalidates
4822 all memory references, or all references at varying addresses. */
4823
4824 if (GET_CODE (insn) == CALL_INSN)
4825 {
4826 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4827 {
4828 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4829 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4830 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4831 }
4832 }
4833
4834 if (GET_CODE (x) == SET)
4835 {
4836 sets = (struct set *) alloca (sizeof (struct set));
4837 sets[0].rtl = x;
4838
4839 /* Ignore SETs that are unconditional jumps.
4840 They never need cse processing, so this does not hurt.
4841 The reason is not efficiency but rather
4842 so that we can test at the end for instructions
4843 that have been simplified to unconditional jumps
4844 and not be misled by unchanged instructions
4845 that were unconditional jumps to begin with. */
4846 if (SET_DEST (x) == pc_rtx
4847 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4848 ;
4849
4850 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4851 The hard function value register is used only once, to copy to
4852 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4853 Ensure we invalidate the destination register. On the 80386 no
4854 other code would invalidate it since it is a fixed_reg.
4855 We need not check the return of apply_change_group; see canon_reg. */
4856
4857 else if (GET_CODE (SET_SRC (x)) == CALL)
4858 {
4859 canon_reg (SET_SRC (x), insn);
4860 apply_change_group ();
4861 fold_rtx (SET_SRC (x), insn);
4862 invalidate (SET_DEST (x), VOIDmode);
4863 }
4864 else
4865 n_sets = 1;
4866 }
4867 else if (GET_CODE (x) == PARALLEL)
4868 {
4869 int lim = XVECLEN (x, 0);
4870
4871 sets = (struct set *) alloca (lim * sizeof (struct set));
4872
4873 /* Find all regs explicitly clobbered in this insn,
4874 and ensure they are not replaced with any other regs
4875 elsewhere in this insn.
4876 When a reg that is clobbered is also used for input,
4877 we should presume that that is for a reason,
4878 and we should not substitute some other register
4879 which is not supposed to be clobbered.
4880 Therefore, this loop cannot be merged into the one below
4881 because a CALL may precede a CLOBBER and refer to the
4882 value clobbered. We must not let a canonicalization do
4883 anything in that case. */
4884 for (i = 0; i < lim; i++)
4885 {
4886 rtx y = XVECEXP (x, 0, i);
4887 if (GET_CODE (y) == CLOBBER)
4888 {
4889 rtx clobbered = XEXP (y, 0);
4890
4891 if (GET_CODE (clobbered) == REG
4892 || GET_CODE (clobbered) == SUBREG)
4893 invalidate (clobbered, VOIDmode);
4894 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4895 || GET_CODE (clobbered) == ZERO_EXTRACT)
4896 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4897 }
4898 }
4899
4900 for (i = 0; i < lim; i++)
4901 {
4902 rtx y = XVECEXP (x, 0, i);
4903 if (GET_CODE (y) == SET)
4904 {
4905 /* As above, we ignore unconditional jumps and call-insns and
4906 ignore the result of apply_change_group. */
4907 if (GET_CODE (SET_SRC (y)) == CALL)
4908 {
4909 canon_reg (SET_SRC (y), insn);
4910 apply_change_group ();
4911 fold_rtx (SET_SRC (y), insn);
4912 invalidate (SET_DEST (y), VOIDmode);
4913 }
4914 else if (SET_DEST (y) == pc_rtx
4915 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4916 ;
4917 else
4918 sets[n_sets++].rtl = y;
4919 }
4920 else if (GET_CODE (y) == CLOBBER)
4921 {
4922 /* If we clobber memory, canon the address.
4923 This does nothing when a register is clobbered
4924 because we have already invalidated the reg. */
4925 if (GET_CODE (XEXP (y, 0)) == MEM)
4926 canon_reg (XEXP (y, 0), NULL_RTX);
4927 }
4928 else if (GET_CODE (y) == USE
4929 && ! (GET_CODE (XEXP (y, 0)) == REG
4930 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4931 canon_reg (y, NULL_RTX);
4932 else if (GET_CODE (y) == CALL)
4933 {
4934 /* The result of apply_change_group can be ignored; see
4935 canon_reg. */
4936 canon_reg (y, insn);
4937 apply_change_group ();
4938 fold_rtx (y, insn);
4939 }
4940 }
4941 }
4942 else if (GET_CODE (x) == CLOBBER)
4943 {
4944 if (GET_CODE (XEXP (x, 0)) == MEM)
4945 canon_reg (XEXP (x, 0), NULL_RTX);
4946 }
4947
4948 /* Canonicalize a USE of a pseudo register or memory location. */
4949 else if (GET_CODE (x) == USE
4950 && ! (GET_CODE (XEXP (x, 0)) == REG
4951 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4952 canon_reg (XEXP (x, 0), NULL_RTX);
4953 else if (GET_CODE (x) == CALL)
4954 {
4955 /* The result of apply_change_group can be ignored; see canon_reg. */
4956 canon_reg (x, insn);
4957 apply_change_group ();
4958 fold_rtx (x, insn);
4959 }
4960
4961 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4962 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4963 is handled specially for this case, and if it isn't set, then there will
4964 be no equivalence for the destination. */
4965 if (n_sets == 1 && REG_NOTES (insn) != 0
4966 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4967 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4968 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4969 {
4970 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4971 XEXP (tem, 0) = src_eqv;
4972 }
4973
4974 /* Canonicalize sources and addresses of destinations.
4975 We do this in a separate pass to avoid problems when a MATCH_DUP is
4976 present in the insn pattern. In that case, we want to ensure that
4977 we don't break the duplicate nature of the pattern. So we will replace
4978 both operands at the same time. Otherwise, we would fail to find an
4979 equivalent substitution in the loop calling validate_change below.
4980
4981 We used to suppress canonicalization of DEST if it appears in SRC,
4982 but we don't do this any more. */
4983
4984 for (i = 0; i < n_sets; i++)
4985 {
4986 rtx dest = SET_DEST (sets[i].rtl);
4987 rtx src = SET_SRC (sets[i].rtl);
4988 rtx new = canon_reg (src, insn);
4989 int insn_code;
4990
4991 sets[i].orig_src = src;
4992 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4993 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4994 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4995 || (insn_code = recog_memoized (insn)) < 0
4996 || insn_data[insn_code].n_dups > 0)
4997 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4998 else
4999 SET_SRC (sets[i].rtl) = new;
5000
5001 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
5002 {
5003 validate_change (insn, &XEXP (dest, 1),
5004 canon_reg (XEXP (dest, 1), insn), 1);
5005 validate_change (insn, &XEXP (dest, 2),
5006 canon_reg (XEXP (dest, 2), insn), 1);
5007 }
5008
5009 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
5010 || GET_CODE (dest) == ZERO_EXTRACT
5011 || GET_CODE (dest) == SIGN_EXTRACT)
5012 dest = XEXP (dest, 0);
5013
5014 if (GET_CODE (dest) == MEM)
5015 canon_reg (dest, insn);
5016 }
5017
5018 /* Now that we have done all the replacements, we can apply the change
5019 group and see if they all work. Note that this will cause some
5020 canonicalizations that would have worked individually not to be applied
5021 because some other canonicalization didn't work, but this should not
5022 occur often.
5023
5024 The result of apply_change_group can be ignored; see canon_reg. */
5025
5026 apply_change_group ();
5027
5028 /* Set sets[i].src_elt to the class each source belongs to.
5029 Detect assignments from or to volatile things
5030 and set sets[i] to zero so they will be ignored
5031 in the rest of this function.
5032
5033 Nothing in this loop changes the hash table or the register chains. */
5034
5035 for (i = 0; i < n_sets; i++)
5036 {
5037 rtx src, dest;
5038 rtx src_folded;
5039 struct table_elt *elt = 0, *p;
5040 enum machine_mode mode;
5041 rtx src_eqv_here;
5042 rtx src_const = 0;
5043 rtx src_related = 0;
5044 struct table_elt *src_const_elt = 0;
5045 int src_cost = MAX_COST;
5046 int src_eqv_cost = MAX_COST;
5047 int src_folded_cost = MAX_COST;
5048 int src_related_cost = MAX_COST;
5049 int src_elt_cost = MAX_COST;
5050 int src_regcost = MAX_COST;
5051 int src_eqv_regcost = MAX_COST;
5052 int src_folded_regcost = MAX_COST;
5053 int src_related_regcost = MAX_COST;
5054 int src_elt_regcost = MAX_COST;
5055 /* Set nonzero if we need to call force_const_mem with the
5056 contents of src_folded before using it. */
5057 int src_folded_force_flag = 0;
5058
5059 dest = SET_DEST (sets[i].rtl);
5060 src = SET_SRC (sets[i].rtl);
5061
5062 /* If SRC is a constant that has no machine mode,
5063 hash it with the destination's machine mode.
5064 This way we can keep different modes separate. */
5065
5066 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5067 sets[i].mode = mode;
5068
5069 if (src_eqv)
5070 {
5071 enum machine_mode eqvmode = mode;
5072 if (GET_CODE (dest) == STRICT_LOW_PART)
5073 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5074 do_not_record = 0;
5075 hash_arg_in_memory = 0;
5076 src_eqv_hash = HASH (src_eqv, eqvmode);
5077
5078 /* Find the equivalence class for the equivalent expression. */
5079
5080 if (!do_not_record)
5081 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5082
5083 src_eqv_volatile = do_not_record;
5084 src_eqv_in_memory = hash_arg_in_memory;
5085 }
5086
5087 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5088 value of the INNER register, not the destination. So it is not
5089 a valid substitution for the source. But save it for later. */
5090 if (GET_CODE (dest) == STRICT_LOW_PART)
5091 src_eqv_here = 0;
5092 else
5093 src_eqv_here = src_eqv;
5094
5095 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5096 simplified result, which may not necessarily be valid. */
5097 src_folded = fold_rtx (src, insn);
5098
5099#if 0
5100 /* ??? This caused bad code to be generated for the m68k port with -O2.
5101 Suppose src is (CONST_INT -1), and that after truncation src_folded
5102 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5103 At the end we will add src and src_const to the same equivalence
5104 class. We now have 3 and -1 on the same equivalence class. This
5105 causes later instructions to be mis-optimized. */
5106 /* If storing a constant in a bitfield, pre-truncate the constant
5107 so we will be able to record it later. */
5108 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5109 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5110 {
5111 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5112
5113 if (GET_CODE (src) == CONST_INT
5114 && GET_CODE (width) == CONST_INT
5115 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5116 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5117 src_folded
5118 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5119 << INTVAL (width)) - 1));
5120 }
5121#endif
5122
5123 /* Compute SRC's hash code, and also notice if it
5124 should not be recorded at all. In that case,
5125 prevent any further processing of this assignment. */
5126 do_not_record = 0;
5127 hash_arg_in_memory = 0;
5128
5129 sets[i].src = src;
5130 sets[i].src_hash = HASH (src, mode);
5131 sets[i].src_volatile = do_not_record;
5132 sets[i].src_in_memory = hash_arg_in_memory;
5133
5134 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5135 a pseudo, do not record SRC. Using SRC as a replacement for
5136 anything else will be incorrect in that situation. Note that
5137 this usually occurs only for stack slots, in which case all the
5138 RTL would be referring to SRC, so we don't lose any optimization
5139 opportunities by not having SRC in the hash table. */
5140
5141 if (GET_CODE (src) == MEM
5142 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5143 && GET_CODE (dest) == REG
5144 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5145 sets[i].src_volatile = 1;
5146
5147#if 0
5148 /* It is no longer clear why we used to do this, but it doesn't
5149 appear to still be needed. So let's try without it since this
5150 code hurts cse'ing widened ops. */
5151 /* If source is a perverse subreg (such as QI treated as an SI),
5152 treat it as volatile. It may do the work of an SI in one context
5153 where the extra bits are not being used, but cannot replace an SI
5154 in general. */
5155 if (GET_CODE (src) == SUBREG
5156 && (GET_MODE_SIZE (GET_MODE (src))
5157 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5158 sets[i].src_volatile = 1;
5159#endif
5160
5161 /* Locate all possible equivalent forms for SRC. Try to replace
5162 SRC in the insn with each cheaper equivalent.
5163
5164 We have the following types of equivalents: SRC itself, a folded
5165 version, a value given in a REG_EQUAL note, or a value related
5166 to a constant.
5167
5168 Each of these equivalents may be part of an additional class
5169 of equivalents (if more than one is in the table, they must be in
5170 the same class; we check for this).
5171
5172 If the source is volatile, we don't do any table lookups.
5173
5174 We note any constant equivalent for possible later use in a
5175 REG_NOTE. */
5176
5177 if (!sets[i].src_volatile)
5178 elt = lookup (src, sets[i].src_hash, mode);
5179
5180 sets[i].src_elt = elt;
5181
5182 if (elt && src_eqv_here && src_eqv_elt)
5183 {
5184 if (elt->first_same_value != src_eqv_elt->first_same_value)
5185 {
5186 /* The REG_EQUAL is indicating that two formerly distinct
5187 classes are now equivalent. So merge them. */
5188 merge_equiv_classes (elt, src_eqv_elt);
5189 src_eqv_hash = HASH (src_eqv, elt->mode);
5190 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5191 }
5192
5193 src_eqv_here = 0;
5194 }
5195
5196 else if (src_eqv_elt)
5197 elt = src_eqv_elt;
5198
5199 /* Try to find a constant somewhere and record it in `src_const'.
5200 Record its table element, if any, in `src_const_elt'. Look in
5201 any known equivalences first. (If the constant is not in the
5202 table, also set `sets[i].src_const_hash'). */
5203 if (elt)
5204 for (p = elt->first_same_value; p; p = p->next_same_value)
5205 if (p->is_const)
5206 {
5207 src_const = p->exp;
5208 src_const_elt = elt;
5209 break;
5210 }
5211
5212 if (src_const == 0
5213 && (CONSTANT_P (src_folded)
5214 /* Consider (minus (label_ref L1) (label_ref L2)) as
5215 "constant" here so we will record it. This allows us
5216 to fold switch statements when an ADDR_DIFF_VEC is used. */
5217 || (GET_CODE (src_folded) == MINUS
5218 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5219 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5220 src_const = src_folded, src_const_elt = elt;
5221 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5222 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5223
5224 /* If we don't know if the constant is in the table, get its
5225 hash code and look it up. */
5226 if (src_const && src_const_elt == 0)
5227 {
5228 sets[i].src_const_hash = HASH (src_const, mode);
5229 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5230 }
5231
5232 sets[i].src_const = src_const;
5233 sets[i].src_const_elt = src_const_elt;
5234
5235 /* If the constant and our source are both in the table, mark them as
5236 equivalent. Otherwise, if a constant is in the table but the source
5237 isn't, set ELT to it. */
5238 if (src_const_elt && elt
5239 && src_const_elt->first_same_value != elt->first_same_value)
5240 merge_equiv_classes (elt, src_const_elt);
5241 else if (src_const_elt && elt == 0)
5242 elt = src_const_elt;
5243
5244 /* See if there is a register linearly related to a constant
5245 equivalent of SRC. */
5246 if (src_const
5247 && (GET_CODE (src_const) == CONST
5248 || (src_const_elt && src_const_elt->related_value != 0)))
5249 {
5250 src_related = use_related_value (src_const, src_const_elt);
5251 if (src_related)
5252 {
5253 struct table_elt *src_related_elt
5254 = lookup (src_related, HASH (src_related, mode), mode);
5255 if (src_related_elt && elt)
5256 {
5257 if (elt->first_same_value
5258 != src_related_elt->first_same_value)
5259 /* This can occur when we previously saw a CONST
5260 involving a SYMBOL_REF and then see the SYMBOL_REF
5261 twice. Merge the involved classes. */
5262 merge_equiv_classes (elt, src_related_elt);
5263
5264 src_related = 0;
5265 src_related_elt = 0;
5266 }
5267 else if (src_related_elt && elt == 0)
5268 elt = src_related_elt;
5269 }
5270 }
5271
5272 /* See if we have a CONST_INT that is already in a register in a
5273 wider mode. */
5274
5275 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5276 && GET_MODE_CLASS (mode) == MODE_INT
5277 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5278 {
5279 enum machine_mode wider_mode;
5280
5281 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5282 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5283 && src_related == 0;
5284 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5285 {
5286 struct table_elt *const_elt
5287 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5288
5289 if (const_elt == 0)
5290 continue;
5291
5292 for (const_elt = const_elt->first_same_value;
5293 const_elt; const_elt = const_elt->next_same_value)
5294 if (GET_CODE (const_elt->exp) == REG)
5295 {
5296 src_related = gen_lowpart_if_possible (mode,
5297 const_elt->exp);
5298 break;
5299 }
5300 }
5301 }
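      /* Illustrative sketch of the case above (register number and modes
	 invented): if an earlier insn was (set (reg:SI 70) (const_int 37))
	 and we now need (const_int 37) in HImode, the SImode lookup finds
	 (reg:SI 70) in the constant's class, and SRC_RELATED becomes
	 (subreg:HI (reg:SI 70) 0), a cheap reference to the wider
	 register.  */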
5302
5303 /* Another possibility is that we have an AND with a constant in
5304 a mode narrower than a word. If so, it might have been generated
5305 as part of an "if" which would narrow the AND. If we already
5306 have done the AND in a wider mode, we can use a SUBREG of that
5307 value. */
5308
5309 if (flag_expensive_optimizations && ! src_related
5310 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5311 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5312 {
5313 enum machine_mode tmode;
5314 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5315
5316 for (tmode = GET_MODE_WIDER_MODE (mode);
5317 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5318 tmode = GET_MODE_WIDER_MODE (tmode))
5319 {
5320 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5321 struct table_elt *larger_elt;
5322
5323 if (inner)
5324 {
5325 PUT_MODE (new_and, tmode);
5326 XEXP (new_and, 0) = inner;
5327 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5328 if (larger_elt == 0)
5329 continue;
5330
5331 for (larger_elt = larger_elt->first_same_value;
5332 larger_elt; larger_elt = larger_elt->next_same_value)
5333 if (GET_CODE (larger_elt->exp) == REG)
5334 {
5335 src_related
5336 = gen_lowpart_if_possible (mode, larger_elt->exp);
5337 break;
5338 }
5339
5340 if (src_related)
5341 break;
5342 }
5343 }
5344 }
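      /* A hedged illustration of the AND case (register numbers invented):
	 suppose we earlier recorded
	 (set (reg:SI 80) (and:SI (reg:SI 81) (const_int 255))) and SRC is
	 (and:HI (subreg:HI (reg:SI 81) 0) (const_int 255)).  Widening the
	 operand back to SImode and looking up NEW_AND finds the class
	 containing (reg:SI 80), so SRC_RELATED becomes
	 (subreg:HI (reg:SI 80) 0).  */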
5345
5346#ifdef LOAD_EXTEND_OP
5347 /* See if a MEM has already been loaded with a widening operation;
5348 if it has, we can use a subreg of that. Many CISC machines
5349 also have such operations, but this is only likely to be
 5350 beneficial on these machines. */
5351
5352 if (flag_expensive_optimizations && src_related == 0
5353 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5354 && GET_MODE_CLASS (mode) == MODE_INT
5355 && GET_CODE (src) == MEM && ! do_not_record
5356 && LOAD_EXTEND_OP (mode) != NIL)
5357 {
5358 enum machine_mode tmode;
5359
5360 /* Set what we are trying to extend and the operation it might
5361 have been extended with. */
5362 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5363 XEXP (memory_extend_rtx, 0) = src;
5364
5365 for (tmode = GET_MODE_WIDER_MODE (mode);
5366 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5367 tmode = GET_MODE_WIDER_MODE (tmode))
5368 {
5369 struct table_elt *larger_elt;
5370
5371 PUT_MODE (memory_extend_rtx, tmode);
5372 larger_elt = lookup (memory_extend_rtx,
5373 HASH (memory_extend_rtx, tmode), tmode);
5374 if (larger_elt == 0)
5375 continue;
5376
5377 for (larger_elt = larger_elt->first_same_value;
5378 larger_elt; larger_elt = larger_elt->next_same_value)
5379 if (GET_CODE (larger_elt->exp) == REG)
5380 {
5381 src_related = gen_lowpart_if_possible (mode,
5382 larger_elt->exp);
5383 break;
5384 }
5385
5386 if (src_related)
5387 break;
5388 }
5389 }
5390#endif /* LOAD_EXTEND_OP */
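      /* A hedged example of the LOAD_EXTEND_OP case: on a target where
	 LOAD_EXTEND_OP (QImode) is ZERO_EXTEND, a prior
	 (set (reg:SI 90) (zero_extend:SI (mem:QI addr))) leaves the
	 extended load in the table, so a later QImode read of the same
	 memory can use (subreg:QI (reg:SI 90) 0) instead of reloading.
	 (Register number invented.)  */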
5391
5392 if (src == src_folded)
5393 src_folded = 0;
5394
5395 /* At this point, ELT, if nonzero, points to a class of expressions
5396 equivalent to the source of this SET and SRC, SRC_EQV, SRC_FOLDED,
5397 and SRC_RELATED, if nonzero, each contain additional equivalent
5398 expressions. Prune these latter expressions by deleting expressions
5399 already in the equivalence class.
5400
5401 Check for an equivalent identical to the destination. If found,
5402 this is the preferred equivalent since it will likely lead to
5403 elimination of the insn. Indicate this by placing it in
5404 `src_related'. */
5405
5406 if (elt)
5407 elt = elt->first_same_value;
5408 for (p = elt; p; p = p->next_same_value)
5409 {
5410 enum rtx_code code = GET_CODE (p->exp);
5411
5412 /* If the expression is not valid, ignore it. Then we do not
5413 have to check for validity below. In most cases, we can use
5414 `rtx_equal_p', since canonicalization has already been done. */
5415 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5416 continue;
5417
5418 /* Also skip paradoxical subregs, unless that's what we're
5419 looking for. */
5420 if (code == SUBREG
5421 && (GET_MODE_SIZE (GET_MODE (p->exp))
5422 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5423 && ! (src != 0
5424 && GET_CODE (src) == SUBREG
5425 && GET_MODE (src) == GET_MODE (p->exp)
5426 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5427 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5428 continue;
5429
5430 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5431 src = 0;
5432 else if (src_folded && GET_CODE (src_folded) == code
5433 && rtx_equal_p (src_folded, p->exp))
5434 src_folded = 0;
5435 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5436 && rtx_equal_p (src_eqv_here, p->exp))
5437 src_eqv_here = 0;
5438 else if (src_related && GET_CODE (src_related) == code
5439 && rtx_equal_p (src_related, p->exp))
5440 src_related = 0;
5441
 5442 /* If this is the same as the destination of the insn, we want
5443 to prefer it. Copy it to src_related. The code below will
5444 then give it a negative cost. */
5445 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5446 src_related = dest;
5447 }
5448
5449 /* Find the cheapest valid equivalent, trying all the available
5450 possibilities. Prefer items not in the hash table to ones
5451 that are when they are equal cost. Note that we can never
5452 worsen an insn as the current contents will also succeed.
5453 If we find an equivalent identical to the destination, use it as best,
5454 since this insn will probably be eliminated in that case. */
5455 if (src)
5456 {
5457 if (rtx_equal_p (src, dest))
5458 src_cost = src_regcost = -1;
5459 else
5460 {
5461 src_cost = COST (src);
5462 src_regcost = approx_reg_cost (src);
5463 }
5464 }
5465
5466 if (src_eqv_here)
5467 {
5468 if (rtx_equal_p (src_eqv_here, dest))
5469 src_eqv_cost = src_eqv_regcost = -1;
5470 else
5471 {
5472 src_eqv_cost = COST (src_eqv_here);
5473 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5474 }
5475 }
5476
5477 if (src_folded)
5478 {
5479 if (rtx_equal_p (src_folded, dest))
5480 src_folded_cost = src_folded_regcost = -1;
5481 else
5482 {
5483 src_folded_cost = COST (src_folded);
5484 src_folded_regcost = approx_reg_cost (src_folded);
5485 }
5486 }
5487
5488 if (src_related)
5489 {
5490 if (rtx_equal_p (src_related, dest))
5491 src_related_cost = src_related_regcost = -1;
5492 else
5493 {
5494 src_related_cost = COST (src_related);
5495 src_related_regcost = approx_reg_cost (src_related);
5496 }
5497 }
5498
5499 /* If this was an indirect jump insn, a known label will really be
5500 cheaper even though it looks more expensive. */
5501 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5502 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5503
 5504 /* Terminate the loop when a replacement is made. The loop must terminate since
5505 the current contents will be tested and will always be valid. */
5506 while (1)
5507 {
5508 rtx trial;
5509
5510 /* Skip invalid entries. */
5511 while (elt && GET_CODE (elt->exp) != REG
5512 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5513 elt = elt->next_same_value;
5514
5515 /* A paradoxical subreg would be bad here: it'll be the right
5516 size, but later may be adjusted so that the upper bits aren't
5517 what we want. So reject it. */
5518 if (elt != 0
5519 && GET_CODE (elt->exp) == SUBREG
5520 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5521 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5522 /* It is okay, though, if the rtx we're trying to match
5523 will ignore any of the bits we can't predict. */
5524 && ! (src != 0
5525 && GET_CODE (src) == SUBREG
5526 && GET_MODE (src) == GET_MODE (elt->exp)
5527 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5528 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5529 {
5530 elt = elt->next_same_value;
5531 continue;
5532 }
5533
5534 if (elt)
5535 {
5536 src_elt_cost = elt->cost;
5537 src_elt_regcost = elt->regcost;
5538 }
5539
5540 /* Find cheapest and skip it for the next time. For items
5541 of equal cost, use this order:
5542 src_folded, src, src_eqv, src_related and hash table entry. */
5543 if (src_folded
5544 && preferrable (src_folded_cost, src_folded_regcost,
5545 src_cost, src_regcost) <= 0
5546 && preferrable (src_folded_cost, src_folded_regcost,
5547 src_eqv_cost, src_eqv_regcost) <= 0
5548 && preferrable (src_folded_cost, src_folded_regcost,
5549 src_related_cost, src_related_regcost) <= 0
5550 && preferrable (src_folded_cost, src_folded_regcost,
5551 src_elt_cost, src_elt_regcost) <= 0)
5552 {
5553 trial = src_folded, src_folded_cost = MAX_COST;
5554 if (src_folded_force_flag)
5555 trial = force_const_mem (mode, trial);
5556 }
5557 else if (src
5558 && preferrable (src_cost, src_regcost,
5559 src_eqv_cost, src_eqv_regcost) <= 0
5560 && preferrable (src_cost, src_regcost,
5561 src_related_cost, src_related_regcost) <= 0
5562 && preferrable (src_cost, src_regcost,
5563 src_elt_cost, src_elt_regcost) <= 0)
5564 trial = src, src_cost = MAX_COST;
5565 else if (src_eqv_here
5566 && preferrable (src_eqv_cost, src_eqv_regcost,
5567 src_related_cost, src_related_regcost) <= 0
5568 && preferrable (src_eqv_cost, src_eqv_regcost,
5569 src_elt_cost, src_elt_regcost) <= 0)
5570 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5571 else if (src_related
5572 && preferrable (src_related_cost, src_related_regcost,
5573 src_elt_cost, src_elt_regcost) <= 0)
5574 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5575 else
5576 {
5577 trial = copy_rtx (elt->exp);
5578 elt = elt->next_same_value;
5579 src_elt_cost = MAX_COST;
5580 }
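	  /* Whichever candidate was chosen above had its cost bumped to
	     MAX_COST, so if the substitution below fails to validate, the
	     next iteration falls through to the next-cheapest alternative
	     rather than retrying this one.  */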
5581
5582 /* We don't normally have an insn matching (set (pc) (pc)), so
5583 check for this separately here. We will delete such an
5584 insn below.
5585
5586 For other cases such as a table jump or conditional jump
5587 where we know the ultimate target, go ahead and replace the
5588 operand. While that may not make a valid insn, we will
5589 reemit the jump below (and also insert any necessary
5590 barriers). */
5591 if (n_sets == 1 && dest == pc_rtx
5592 && (trial == pc_rtx
5593 || (GET_CODE (trial) == LABEL_REF
5594 && ! condjump_p (insn))))
5595 {
5596 SET_SRC (sets[i].rtl) = trial;
5597 cse_jumps_altered = 1;
5598 break;
5599 }
5600
5601 /* Look for a substitution that makes a valid insn. */
5602 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5603 {
5604 /* If we just made a substitution inside a libcall, then we
5605 need to make the same substitution in any notes attached
5606 to the RETVAL insn. */
5607 if (libcall_insn
5608 && (GET_CODE (sets[i].orig_src) == REG
5609 || GET_CODE (sets[i].orig_src) == SUBREG
5610 || GET_CODE (sets[i].orig_src) == MEM))
5611 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5612 canon_reg (SET_SRC (sets[i].rtl), insn));
5613
5614 /* The result of apply_change_group can be ignored; see
5615 canon_reg. */
5616
5617 validate_change (insn, &SET_SRC (sets[i].rtl),
5618 canon_reg (SET_SRC (sets[i].rtl), insn),
5619 1);
5620 apply_change_group ();
5621 break;
5622 }
5623
5624 /* If we previously found constant pool entries for
5625 constants and this is a constant, try making a
 5626 pool entry. Put it in src_folded unless we have already
 5627 done so, since that is where it likely came from. */
5628
5629 else if (constant_pool_entries_cost
5630 && CONSTANT_P (trial)
5631 /* Reject cases that will abort in decode_rtx_const.
5632 On the alpha when simplifying a switch, we get
5633 (const (truncate (minus (label_ref) (label_ref)))). */
5634 && ! (GET_CODE (trial) == CONST
5635 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5636 /* Likewise on IA-64, except without the truncate. */
5637 && ! (GET_CODE (trial) == CONST
5638 && GET_CODE (XEXP (trial, 0)) == MINUS
5639 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5640 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5641 && (src_folded == 0
5642 || (GET_CODE (src_folded) != MEM
5643 && ! src_folded_force_flag))
5644 && GET_MODE_CLASS (mode) != MODE_CC
5645 && mode != VOIDmode)
5646 {
5647 src_folded_force_flag = 1;
5648 src_folded = trial;
5649 src_folded_cost = constant_pool_entries_cost;
5650 }
5651 }
5652
5653 src = SET_SRC (sets[i].rtl);
5654
5655 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5656 However, there is an important exception: If both are registers
5657 that are not the head of their equivalence class, replace SET_SRC
5658 with the head of the class. If we do not do this, we will have
5659 both registers live over a portion of the basic block. This way,
5660 their lifetimes will likely abut instead of overlapping. */
5661 if (GET_CODE (dest) == REG
5662 && REGNO_QTY_VALID_P (REGNO (dest)))
5663 {
5664 int dest_q = REG_QTY (REGNO (dest));
5665 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5666
5667 if (dest_ent->mode == GET_MODE (dest)
5668 && dest_ent->first_reg != REGNO (dest)
5669 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5670 /* Don't do this if the original insn had a hard reg as
5671 SET_SRC or SET_DEST. */
5672 && (GET_CODE (sets[i].src) != REG
5673 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5674 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5675 /* We can't call canon_reg here because it won't do anything if
5676 SRC is a hard register. */
5677 {
5678 int src_q = REG_QTY (REGNO (src));
5679 struct qty_table_elem *src_ent = &qty_table[src_q];
5680 int first = src_ent->first_reg;
5681 rtx new_src
5682 = (first >= FIRST_PSEUDO_REGISTER
5683 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5684
 5685 /* We must use validate_change even for this, because this
5686 might be a special no-op instruction, suitable only to
5687 tag notes onto. */
5688 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5689 {
5690 src = new_src;
5691 /* If we had a constant that is cheaper than what we are now
5692 setting SRC to, use that constant. We ignored it when we
5693 thought we could make this into a no-op. */
5694 if (src_const && COST (src_const) < COST (src)
5695 && validate_change (insn, &SET_SRC (sets[i].rtl),
5696 src_const, 0))
5697 src = src_const;
5698 }
5699 }
5700 }
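      /* For illustration (register numbers invented): if pseudos 65, 66
	 and 67 form one equivalence class whose first_reg is 65, and this
	 insn has become (set (reg 67) (reg 67)), the code above rewrites
	 it as (set (reg 67) (reg 65)) so that reg 67's lifetime abuts
	 reg 65's instead of overlapping it.  */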
5701
5702 /* If we made a change, recompute SRC values. */
5703 if (src != sets[i].src)
5704 {
5705 cse_altered = 1;
5706 do_not_record = 0;
5707 hash_arg_in_memory = 0;
5708 sets[i].src = src;
5709 sets[i].src_hash = HASH (src, mode);
5710 sets[i].src_volatile = do_not_record;
5711 sets[i].src_in_memory = hash_arg_in_memory;
5712 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5713 }
5714
5715 /* If this is a single SET, we are setting a register, and we have an
5716 equivalent constant, we want to add a REG_NOTE. We don't want
5717 to write a REG_EQUAL note for a constant pseudo since verifying that
5718 that pseudo hasn't been eliminated is a pain. Such a note also
5719 won't help anything.
5720
5721 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5722 which can be created for a reference to a compile time computable
5723 entry in a jump table. */
5724
5725 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5726 && GET_CODE (src_const) != REG
5727 && ! (GET_CODE (src_const) == CONST
5728 && GET_CODE (XEXP (src_const, 0)) == MINUS
5729 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5730 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5731 {
5732 /* Make sure that the rtx is not shared with any other insn. */
5733 src_const = copy_rtx (src_const);
5734
5735 /* Record the actual constant value in a REG_EQUAL note, making
5736 a new one if one does not already exist. */
5737 set_unique_reg_note (insn, REG_EQUAL, src_const);
5738
5739 /* If storing a constant value in a register that
5740 previously held the constant value 0,
5741 record this fact with a REG_WAS_0 note on this insn.
5742
5743 Note that the *register* is required to have previously held 0,
5744 not just any register in the quantity and we must point to the
5745 insn that set that register to zero.
5746
5747 Rather than track each register individually, we just see if
5748 the last set for this quantity was for this register. */
5749
5750 if (REGNO_QTY_VALID_P (REGNO (dest)))
5751 {
5752 int dest_q = REG_QTY (REGNO (dest));
5753 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5754
5755 if (dest_ent->const_rtx == const0_rtx)
5756 {
5757 /* See if we previously had a REG_WAS_0 note. */
5758 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5759 rtx const_insn = dest_ent->const_insn;
5760
5761 if ((tem = single_set (const_insn)) != 0
5762 && rtx_equal_p (SET_DEST (tem), dest))
5763 {
5764 if (note)
5765 XEXP (note, 0) = const_insn;
5766 else
5767 REG_NOTES (insn)
5768 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5769 REG_NOTES (insn));
5770 }
5771 }
5772 }
5773 }
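  /* Invented example of the REG_WAS_0 bookkeeping above: if I1 was
     (set (reg 70) (const_int 0)) and this insn is
     (set (reg 70) (const_int 5)) with no intervening set of reg 70,
     this insn gets a REG_WAS_0 note pointing at I1, recording that the
     register held zero immediately before this store.  */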
5774
5775 /* Now deal with the destination. */
5776 do_not_record = 0;
5777
5778 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5779 to the MEM or REG within it. */
5780 while (GET_CODE (dest) == SIGN_EXTRACT
5781 || GET_CODE (dest) == ZERO_EXTRACT
5782 || GET_CODE (dest) == SUBREG
5783 || GET_CODE (dest) == STRICT_LOW_PART)
5784 dest = XEXP (dest, 0);
5785
5786 sets[i].inner_dest = dest;
5787
5788 if (GET_CODE (dest) == MEM)
5789 {
5790#ifdef PUSH_ROUNDING
5791 /* Stack pushes invalidate the stack pointer. */
5792 rtx addr = XEXP (dest, 0);
5793 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5794 && XEXP (addr, 0) == stack_pointer_rtx)
5795 invalidate (stack_pointer_rtx, Pmode);
5796#endif
5797 dest = fold_rtx (dest, insn);
5798 }
5799
5800 /* Compute the hash code of the destination now,
5801 before the effects of this instruction are recorded,
5802 since the register values used in the address computation
5803 are those before this instruction. */
5804 sets[i].dest_hash = HASH (dest, mode);
5805
5806 /* Don't enter a bit-field in the hash table
5807 because the value in it after the store
5808 may not equal what was stored, due to truncation. */
5809
5810 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5811 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5812 {
5813 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5814
5815 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5816 && GET_CODE (width) == CONST_INT
5817 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5818 && ! (INTVAL (src_const)
5819 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5820 /* Exception: if the value is constant,
5821 and it won't be truncated, record it. */
5822 ;
5823 else
5824 {
5825 /* This is chosen so that the destination will be invalidated
5826 but no new value will be recorded.
5827 We must invalidate because sometimes constant
5828 values can be recorded for bitfields. */
5829 sets[i].src_elt = 0;
5830 sets[i].src_volatile = 1;
5831 src_eqv = 0;
5832 src_eqv_elt = 0;
5833 }
5834 }
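      /* Numeric illustration of the truncation test above: storing
	 (const_int 300) in an 8-bit ZERO_EXTRACT leaves 44 in the field,
	 and 300 & ((HOST_WIDE_INT) (-1) << 8) is nonzero, so nothing is
	 recorded; storing (const_int 5) passes the test and is safe to
	 record.  */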
5835
5836 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5837 the insn. */
5838 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5839 {
5840 /* One less use of the label this insn used to jump to. */
5841 delete_insn (insn);
5842 cse_jumps_altered = 1;
5843 /* No more processing for this set. */
5844 sets[i].rtl = 0;
5845 }
5846
5847 /* If this SET is now setting PC to a label, we know it used to
5848 be a conditional or computed branch. */
5849 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5850 {
5851 /* Now emit a BARRIER after the unconditional jump. */
5852 if (NEXT_INSN (insn) == 0
5853 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5854 emit_barrier_after (insn);
5855
5856 /* We reemit the jump in as many cases as possible just in
5857 case the form of an unconditional jump is significantly
 5858 different from that of a computed jump or conditional jump.
5859
5860 If this insn has multiple sets, then reemitting the
5861 jump is nontrivial. So instead we just force rerecognition
5862 and hope for the best. */
5863 if (n_sets == 1)
5864 {
5865 rtx new = emit_jump_insn_after (gen_jump (XEXP (src, 0)), insn);
5866
5867 JUMP_LABEL (new) = XEXP (src, 0);
5868 LABEL_NUSES (XEXP (src, 0))++;
5869 delete_insn (insn);
5870 insn = new;
5871
5872 /* Now emit a BARRIER after the unconditional jump. */
5873 if (NEXT_INSN (insn) == 0
5874 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5875 emit_barrier_after (insn);
5876 }
5877 else
5878 INSN_CODE (insn) = -1;
5879
5880 never_reached_warning (insn, NULL);
5881
 5882 /* Do not bother deleting any unreachable code;
 5883 let jump/flow do that. */
5884
5885 cse_jumps_altered = 1;
5886 sets[i].rtl = 0;
5887 }
5888
5889 /* If destination is volatile, invalidate it and then do no further
5890 processing for this assignment. */
5891
5892 else if (do_not_record)
5893 {
5894 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5895 invalidate (dest, VOIDmode);
5896 else if (GET_CODE (dest) == MEM)
5897 {
5898 /* Outgoing arguments for a libcall don't
5899 affect any recorded expressions. */
5900 if (! libcall_insn || insn == libcall_insn)
5901 invalidate (dest, VOIDmode);
5902 }
5903 else if (GET_CODE (dest) == STRICT_LOW_PART
5904 || GET_CODE (dest) == ZERO_EXTRACT)
5905 invalidate (XEXP (dest, 0), GET_MODE (dest));
5906 sets[i].rtl = 0;
5907 }
5908
5909 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5910 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5911
5912#ifdef HAVE_cc0
5913 /* If setting CC0, record what it was set to, or a constant, if it
5914 is equivalent to a constant. If it is being set to a floating-point
5915 value, make a COMPARE with the appropriate constant of 0. If we
5916 don't do this, later code can interpret this as a test against
5917 const0_rtx, which can cause problems if we try to put it into an
5918 insn as a floating-point operand. */
5919 if (dest == cc0_rtx)
5920 {
5921 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5922 this_insn_cc0_mode = mode;
5923 if (FLOAT_MODE_P (mode))
5924 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5925 CONST0_RTX (mode));
5926 }
5927#endif
5928 }
5929
5930 /* Now enter all non-volatile source expressions in the hash table
5931 if they are not already present.
5932 Record their equivalence classes in src_elt.
5933 This way we can insert the corresponding destinations into
5934 the same classes even if the actual sources are no longer in them
5935 (having been invalidated). */
5936
5937 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5938 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5939 {
5940 struct table_elt *elt;
5941 struct table_elt *classp = sets[0].src_elt;
5942 rtx dest = SET_DEST (sets[0].rtl);
5943 enum machine_mode eqvmode = GET_MODE (dest);
5944
5945 if (GET_CODE (dest) == STRICT_LOW_PART)
5946 {
5947 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5948 classp = 0;
5949 }
5950 if (insert_regs (src_eqv, classp, 0))
5951 {
5952 rehash_using_reg (src_eqv);
5953 src_eqv_hash = HASH (src_eqv, eqvmode);
5954 }
5955 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5956 elt->in_memory = src_eqv_in_memory;
5957 src_eqv_elt = elt;
5958
5959 /* Check to see if src_eqv_elt is the same as a set source which
5960 does not yet have an elt, and if so set the elt of the set source
5961 to src_eqv_elt. */
5962 for (i = 0; i < n_sets; i++)
5963 if (sets[i].rtl && sets[i].src_elt == 0
5964 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5965 sets[i].src_elt = src_eqv_elt;
5966 }
5967
5968 for (i = 0; i < n_sets; i++)
5969 if (sets[i].rtl && ! sets[i].src_volatile
5970 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5971 {
5972 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5973 {
5974 /* REG_EQUAL in setting a STRICT_LOW_PART
5975 gives an equivalent for the entire destination register,
5976 not just for the subreg being stored in now.
5977 This is a more interesting equivalence, so we arrange later
5978 to treat the entire reg as the destination. */
5979 sets[i].src_elt = src_eqv_elt;
5980 sets[i].src_hash = src_eqv_hash;
5981 }
5982 else
5983 {
5984 /* Insert source and constant equivalent into hash table, if not
5985 already present. */
5986 struct table_elt *classp = src_eqv_elt;
5987 rtx src = sets[i].src;
5988 rtx dest = SET_DEST (sets[i].rtl);
5989 enum machine_mode mode
5990 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5991
5992 if (sets[i].src_elt == 0)
5993 {
5994 /* Don't put a hard register source into the table if this is
5995 the last insn of a libcall. In this case, we only need
5996 to put src_eqv_elt in src_elt. */
5997 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5998 {
5999 struct table_elt *elt;
6000
6001 /* Note that these insert_regs calls cannot remove
6002 any of the src_elt's, because they would have failed to
6003 match if not still valid. */
6004 if (insert_regs (src, classp, 0))
6005 {
6006 rehash_using_reg (src);
6007 sets[i].src_hash = HASH (src, mode);
6008 }
6009 elt = insert (src, classp, sets[i].src_hash, mode);
6010 elt->in_memory = sets[i].src_in_memory;
6011 sets[i].src_elt = classp = elt;
6012 }
6013 else
6014 sets[i].src_elt = classp;
6015 }
6016 if (sets[i].src_const && sets[i].src_const_elt == 0
6017 && src != sets[i].src_const
6018 && ! rtx_equal_p (sets[i].src_const, src))
6019 sets[i].src_elt = insert (sets[i].src_const, classp,
6020 sets[i].src_const_hash, mode);
6021 }
6022 }
6023 else if (sets[i].src_elt == 0)
6024 /* If we did not insert the source into the hash table (e.g., it was
6025 volatile), note the equivalence class for the REG_EQUAL value, if any,
6026 so that the destination goes into that class. */
6027 sets[i].src_elt = src_eqv_elt;
6028
6029 invalidate_from_clobbers (x);
6030
6031 /* Some registers are invalidated by subroutine calls. Memory is
6032 invalidated by non-constant calls. */
6033
6034 if (GET_CODE (insn) == CALL_INSN)
6035 {
6036 if (! CONST_OR_PURE_CALL_P (insn))
6037 invalidate_memory ();
6038 invalidate_for_call ();
6039 }
6040
6041 /* Now invalidate everything set by this instruction.
6042 If a SUBREG or other funny destination is being set,
6043 sets[i].rtl is still nonzero, so here we invalidate the reg
6044 a part of which is being set. */
6045
6046 for (i = 0; i < n_sets; i++)
6047 if (sets[i].rtl)
6048 {
6049 /* We can't use the inner dest, because the mode associated with
6050 a ZERO_EXTRACT is significant. */
6051 rtx dest = SET_DEST (sets[i].rtl);
6052
6053 /* Needed for registers to remove the register from its
6054 previous quantity's chain.
6055 Needed for memory if this is a nonvarying address, unless
6056 we have just done an invalidate_memory that covers even those. */
6057 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6058 invalidate (dest, VOIDmode);
6059 else if (GET_CODE (dest) == MEM)
6060 {
6061 /* Outgoing arguments for a libcall don't
6062 affect any recorded expressions. */
6063 if (! libcall_insn || insn == libcall_insn)
6064 invalidate (dest, VOIDmode);
6065 }
6066 else if (GET_CODE (dest) == STRICT_LOW_PART
6067 || GET_CODE (dest) == ZERO_EXTRACT)
6068 invalidate (XEXP (dest, 0), GET_MODE (dest));
6069 }
6070
6071 /* A volatile ASM invalidates everything. */
6072 if (GET_CODE (insn) == INSN
6073 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6074 && MEM_VOLATILE_P (PATTERN (insn)))
6075 flush_hash_table ();
6076
6077 /* Make sure registers mentioned in destinations
6078 are safe for use in an expression to be inserted.
6079 This removes from the hash table
6080 any invalid entry that refers to one of these registers.
6081
6082 We don't care about the return value from mention_regs because
6083 we are going to hash the SET_DEST values unconditionally. */
6084
6085 for (i = 0; i < n_sets; i++)
6086 {
6087 if (sets[i].rtl)
6088 {
6089 rtx x = SET_DEST (sets[i].rtl);
6090
6091 if (GET_CODE (x) != REG)
6092 mention_regs (x);
6093 else
6094 {
6095 /* We used to rely on all references to a register becoming
6096 inaccessible when a register changes to a new quantity,
6097 since that changes the hash code. However, that is not
6098 safe, since after HASH_SIZE new quantities we get a
6099 hash 'collision' of a register with its own invalid
6100 entries. And since SUBREGs have been changed not to
6101 change their hash code with the hash code of the register,
6102 it wouldn't work any longer at all. So we have to check
6103 for any invalid references lying around now.
6104 This code is similar to the REG case in mention_regs,
6105 but it knows that reg_tick has been incremented, and
 6106 it leaves reg_in_table as -1. */
6107 unsigned int regno = REGNO (x);
6108 unsigned int endregno
6109 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6110 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6111 unsigned int i;
6112
6113 for (i = regno; i < endregno; i++)
6114 {
6115 if (REG_IN_TABLE (i) >= 0)
6116 {
6117 remove_invalid_refs (i);
6118 REG_IN_TABLE (i) = -1;
6119 }
6120 }
6121 }
6122 }
6123 }
6124
6125 /* We may have just removed some of the src_elt's from the hash table.
6126 So replace each one with the current head of the same class. */
6127
6128 for (i = 0; i < n_sets; i++)
6129 if (sets[i].rtl)
6130 {
6131 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6132 /* If elt was removed, find current head of same class,
6133 or 0 if nothing remains of that class. */
6134 {
6135 struct table_elt *elt = sets[i].src_elt;
6136
6137 while (elt && elt->prev_same_value)
6138 elt = elt->prev_same_value;
6139
6140 while (elt && elt->first_same_value == 0)
6141 elt = elt->next_same_value;
6142 sets[i].src_elt = elt ? elt->first_same_value : 0;
6143 }
6144 }
6145
6146 /* Now insert the destinations into their equivalence classes. */
6147
6148 for (i = 0; i < n_sets; i++)
6149 if (sets[i].rtl)
6150 {
6151 rtx dest = SET_DEST (sets[i].rtl);
6152 rtx inner_dest = sets[i].inner_dest;
6153 struct table_elt *elt;
6154
6155 /* Don't record value if we are not supposed to risk allocating
6156 floating-point values in registers that might be wider than
6157 memory. */
6158 if ((flag_float_store
6159 && GET_CODE (dest) == MEM
6160 && FLOAT_MODE_P (GET_MODE (dest)))
6161 /* Don't record BLKmode values, because we don't know the
6162 size of it, and can't be sure that other BLKmode values
6163 have the same or smaller size. */
6164 || GET_MODE (dest) == BLKmode
6165 /* Don't record values of destinations set inside a libcall block
6166 since we might delete the libcall. Things should have been set
6167 up so we won't want to reuse such a value, but we play it safe
6168 here. */
6169 || libcall_insn
6170 /* If we didn't put a REG_EQUAL value or a source into the hash
 6171 table, there is no point in recording DEST. */
6172 || sets[i].src_elt == 0
6173 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6174 or SIGN_EXTEND, don't record DEST since it can cause
6175 some tracking to be wrong.
6176
6177 ??? Think about this more later. */
6178 || (GET_CODE (dest) == SUBREG
6179 && (GET_MODE_SIZE (GET_MODE (dest))
6180 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6181 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6182 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6183 continue;
6184
6185 /* STRICT_LOW_PART isn't part of the value BEING set,
6186 and neither is the SUBREG inside it.
6187 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6188 if (GET_CODE (dest) == STRICT_LOW_PART)
6189 dest = SUBREG_REG (XEXP (dest, 0));
6190
6191 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6192 /* Registers must also be inserted into chains for quantities. */
6193 if (insert_regs (dest, sets[i].src_elt, 1))
6194 {
6195 /* If `insert_regs' changes something, the hash code must be
6196 recalculated. */
6197 rehash_using_reg (dest);
6198 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6199 }
6200
6201 if (GET_CODE (inner_dest) == MEM
6202 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6203 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6204 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6205 Consider the case in which the address of the MEM is
6206 passed to a function, which alters the MEM. Then, if we
6207 later use Y instead of the MEM we'll miss the update. */
6208 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6209 else
6210 elt = insert (dest, sets[i].src_elt,
6211 sets[i].dest_hash, GET_MODE (dest));
6212
6213 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6214 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6215 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6216 0))));
6217
6218 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6219 narrower than M2, and both M1 and M2 are the same number of words,
6220 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6221 make that equivalence as well.
6222
6223 However, BAR may have equivalences for which gen_lowpart_if_possible
6224 will produce a simpler value than gen_lowpart_if_possible applied to
6225 BAR (e.g., if BAR was ZERO_EXTENDed from M2), so we will scan all
6226 BAR's equivalences. If we don't get a simplified form, make
6227 the SUBREG. It will not be used in an equivalence, but will
6228 cause two similar assignments to be detected.
6229
6230 Note the loop below will find SUBREG_REG (DEST) since we have
6231 already entered SRC and DEST of the SET in the table. */
6232
6233 if (GET_CODE (dest) == SUBREG
6234 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6235 / UNITS_PER_WORD)
6236 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6237 && (GET_MODE_SIZE (GET_MODE (dest))
6238 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6239 && sets[i].src_elt != 0)
6240 {
6241 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6242 struct table_elt *elt, *classp = 0;
6243
6244 for (elt = sets[i].src_elt->first_same_value; elt;
6245 elt = elt->next_same_value)
6246 {
6247 rtx new_src = 0;
6248 unsigned src_hash;
6249 struct table_elt *src_elt;
6250 int byte = 0;
6251
6252 /* Ignore invalid entries. */
6253 if (GET_CODE (elt->exp) != REG
6254 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6255 continue;
6256
6257 /* We may have already been playing subreg games. If the
6258 mode is already correct for the destination, use it. */
6259 if (GET_MODE (elt->exp) == new_mode)
6260 new_src = elt->exp;
6261 else
6262 {
6263 /* Calculate big endian correction for the SUBREG_BYTE.
6264 We have already checked that M1 (GET_MODE (dest))
6265 is not narrower than M2 (new_mode). */
6266 if (BYTES_BIG_ENDIAN)
6267 byte = (GET_MODE_SIZE (GET_MODE (dest))
6268 - GET_MODE_SIZE (new_mode));
6269
6270 new_src = simplify_gen_subreg (new_mode, elt->exp,
6271 GET_MODE (dest), byte);
6272 }
6273
6274 /* The call to simplify_gen_subreg fails if the value
6275 is VOIDmode, yet we can't do any simplification, e.g.
6276 for EXPR_LISTs denoting function call results.
6277 It is invalid to construct a SUBREG with a VOIDmode
6278 SUBREG_REG, hence a zero new_src means we can't do
6279 this substitution. */
6280 if (! new_src)
6281 continue;
6282
6283 src_hash = HASH (new_src, new_mode);
6284 src_elt = lookup (new_src, src_hash, new_mode);
6285
 6286 /* Put the new source in the hash table if it isn't
 6287 already there. */
6288 if (src_elt == 0)
6289 {
6290 if (insert_regs (new_src, classp, 0))
6291 {
6292 rehash_using_reg (new_src);
6293 src_hash = HASH (new_src, new_mode);
6294 }
6295 src_elt = insert (new_src, classp, src_hash, new_mode);
6296 src_elt->in_memory = elt->in_memory;
6297 }
6298 else if (classp && classp != src_elt->first_same_value)
6299 /* Show that two things that we've seen before are
6300 actually the same. */
6301 merge_equiv_classes (src_elt, classp);
6302
6303 classp = src_elt->first_same_value;
6304 /* Ignore invalid entries. */
6305 while (classp
6306 && GET_CODE (classp->exp) != REG
6307 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6308 classp = classp->next_same_value;
6309 }
6310 }
6311 }
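	/* Invented example (little-endian, 32-bit words assumed): given
	   (set (subreg:SI (reg:HI 68) 0) (reg:SI 69)), the loop above also
	   records (reg:HI 68) as equivalent to (subreg:HI (reg:SI 69) 0),
	   since the paradoxical store necessarily sets all of reg 68.  */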
6312
6313 /* Special handling for (set REG0 REG1) where REG0 is the
6314 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6315 be used in the sequel, so (if easily done) change this insn to
6316 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6317 that computed their value. Then REG1 will become a dead store
6318 and won't cloud the situation for later optimizations.
6319
6320 Do not make this change if REG1 is a hard register, because it will
6321 then be used in the sequel and we may be changing a two-operand insn
6322 into a three-operand insn.
6323
6324 Also do not do this if we are operating on a copy of INSN.
6325
6326 Also don't do this if INSN ends a libcall; this would cause an unrelated
6327 register to be set in the middle of a libcall, and we then get bad code
6328 if the libcall is deleted. */
6329
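  /* A sketch of the swap (register numbers invented): given
	PREV: (set (reg 66) (plus (reg 60) (reg 61)))
	INSN: (set (reg 65) (reg 66))
     where reg 65 is the cheapest register in reg 66's quantity, we produce
	PREV: (set (reg 65) (plus (reg 60) (reg 61)))
	INSN: (set (reg 66) (reg 65))
     and the new INSN is likely to become a dead store.  */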
6330 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6331 && NEXT_INSN (PREV_INSN (insn)) == insn
6332 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6333 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6334 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6335 {
6336 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6337 struct qty_table_elem *src_ent = &qty_table[src_q];
6338
6339 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6340 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6341 {
6342 rtx prev = insn;
6343 /* Scan for the previous nonnote insn, but stop at a basic
6344 block boundary. */
6345 do
6346 {
6347 prev = PREV_INSN (prev);
6348 }
6349 while (prev && GET_CODE (prev) == NOTE
6350 && NOTE_LINE_NUMBER (prev) != NOTE_INSN_BASIC_BLOCK);
6351
6352 /* Do not swap the registers around if the previous instruction
6353 attaches a REG_EQUIV note to REG1.
6354
6355 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6356 from the pseudo that originally shadowed an incoming argument
6357 to another register. Some uses of REG_EQUIV might rely on it
6358 being attached to REG1 rather than REG2.
6359
6360 This section previously turned the REG_EQUIV into a REG_EQUAL
6361 note. We cannot do that because REG_EQUIV may provide an
6362 uninitialized stack slot when REG_PARM_STACK_SPACE is used. */
6363
6364 if (prev != 0 && GET_CODE (prev) == INSN
6365 && GET_CODE (PATTERN (prev)) == SET
6366 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6367 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6368 {
6369 rtx dest = SET_DEST (sets[0].rtl);
6370 rtx src = SET_SRC (sets[0].rtl);
6371 rtx note;
6372
6373 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6374 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6375 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6376 apply_change_group ();
6377
6378 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6379 any REG_WAS_0 note on INSN to PREV. */
6380 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6381 if (note)
6382 remove_note (prev, note);
6383
6384 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6385 if (note)
6386 {
6387 remove_note (insn, note);
6388 XEXP (note, 1) = REG_NOTES (prev);
6389 REG_NOTES (prev) = note;
6390 }
6391
6392 /* If INSN has a REG_EQUAL note, and this note mentions
6393 REG0, then we must delete it, because the value in
6394 REG0 has changed. If the note's value is REG1, we must
6395 also delete it because that is now this insn's dest. */
6396 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6397 if (note != 0
6398 && (reg_mentioned_p (dest, XEXP (note, 0))
6399 || rtx_equal_p (src, XEXP (note, 0))))
6400 remove_note (insn, note);
6401 }
6402 }
6403 }
6404
6405 /* If this is a conditional jump insn, record any known equivalences due to
6406 the condition being tested. */
6407
6408 last_jump_equiv_class = 0;
6409 if (GET_CODE (insn) == JUMP_INSN
6410 && n_sets == 1 && GET_CODE (x) == SET
6411 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6412 record_jump_equiv (insn, 0);
6413
6414#ifdef HAVE_cc0
6415 /* If the previous insn set CC0 and this insn no longer references CC0,
6416 delete the previous insn. Here we use the fact that nothing expects CC0
6417 to be valid over an insn, which is true until the final pass. */
6418 if (prev_insn && GET_CODE (prev_insn) == INSN
6419 && (tem = single_set (prev_insn)) != 0
6420 && SET_DEST (tem) == cc0_rtx
6421 && ! reg_mentioned_p (cc0_rtx, x))
6422 delete_insn (prev_insn);
6423
6424 prev_insn_cc0 = this_insn_cc0;
6425 prev_insn_cc0_mode = this_insn_cc0_mode;
6426#endif
6427
6428 prev_insn = insn;
6429}
6430
6431
6432/* Remove from the hash table all expressions that reference memory. */
6433
6434static void
6435invalidate_memory ()
6436{
6437 int i;
6438 struct table_elt *p, *next;
6439
6440 for (i = 0; i < HASH_SIZE; i++)
6441 for (p = table[i]; p; p = next)
6442 {
6443 next = p->next_same_hash;
6444 if (p->in_memory)
6445 remove_from_table (p, i);
6446 }
6447}
6448
6449/* If ADDR is an address that implicitly affects the stack pointer, return
6450 1 and update the register tables to show the effect. Else, return 0. */
6451
6452static int
6453addr_affects_sp_p (addr)
6454 rtx addr;
6455{
6456 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6457 && GET_CODE (XEXP (addr, 0)) == REG
6458 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6459 {
6460 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6461 {
6462 REG_TICK (STACK_POINTER_REGNUM)++;
6463 /* Is it possible to use a subreg of SP? */
6464 SUBREG_TICKED (STACK_POINTER_REGNUM) = -1;
6465 }
6466
6467 /* This should be *very* rare. */
6468 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6469 invalidate (stack_pointer_rtx, VOIDmode);
6470
6471 return 1;
6472 }
6473
6474 return 0;
6475}
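
/* For example, a push such as (set (mem:SI (pre_dec:SI (reg sp))) (reg 70))
   (register number invented) has an address of RTX class 'a' whose operand
   is the stack pointer, so the function above bumps REG_TICK for the stack
   pointer and thereby invalidates stale equivalences that mention it.  */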
6476
6477/* Perform invalidation on the basis of everything about an insn
6478 except for invalidating the actual places that are SET in it.
6479 This includes the places CLOBBERed, and anything that might
6480 alias with something that is SET or CLOBBERed.
6481
6482 X is the pattern of the insn. */
6483
6484static void
6485invalidate_from_clobbers (x)
6486 rtx x;
6487{
6488 if (GET_CODE (x) == CLOBBER)
6489 {
6490 rtx ref = XEXP (x, 0);
6491 if (ref)
6492 {
6493 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6494 || GET_CODE (ref) == MEM)
6495 invalidate (ref, VOIDmode);
6496 else if (GET_CODE (ref) == STRICT_LOW_PART
6497 || GET_CODE (ref) == ZERO_EXTRACT)
6498 invalidate (XEXP (ref, 0), GET_MODE (ref));
6499 }
6500 }
6501 else if (GET_CODE (x) == PARALLEL)
6502 {
6503 int i;
6504 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6505 {
6506 rtx y = XVECEXP (x, 0, i);
6507 if (GET_CODE (y) == CLOBBER)
6508 {
6509 rtx ref = XEXP (y, 0);
6510 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6511 || GET_CODE (ref) == MEM)
6512 invalidate (ref, VOIDmode);
6513 else if (GET_CODE (ref) == STRICT_LOW_PART
6514 || GET_CODE (ref) == ZERO_EXTRACT)
6515 invalidate (XEXP (ref, 0), GET_MODE (ref));
6516 }
6517 }
6518 }
6519}
6520
6521
6522/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6523 and replace any registers in them with either an equivalent constant
6524 or the canonical form of the register. If we are inside an address,
6525 only do this if the address remains valid.
6526
6527 OBJECT is 0 except when within a MEM in which case it is the MEM.
6528
6529 Return the replacement for X. */
6530
6531static rtx
6532cse_process_notes (x, object)
6533 rtx x;
6534 rtx object;
6535{
6536 enum rtx_code code = GET_CODE (x);
6537 const char *fmt = GET_RTX_FORMAT (code);
6538 int i;
6539
6540 switch (code)
6541 {
6542 case CONST_INT:
6543 case CONST:
6544 case SYMBOL_REF:
6545 case LABEL_REF:
6546 case CONST_DOUBLE:
6547 case CONST_VECTOR:
6548 case PC:
6549 case CC0:
6550 case LO_SUM:
6551 return x;
6552
6553 case MEM:
6554 validate_change (x, &XEXP (x, 0),
6555 cse_process_notes (XEXP (x, 0), x), 0);
6556 return x;
6557
6558 case EXPR_LIST:
6559 case INSN_LIST:
6560 if (REG_NOTE_KIND (x) == REG_EQUAL)
6561 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6562 if (XEXP (x, 1))
6563 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6564 return x;
6565
6566 case SIGN_EXTEND:
6567 case ZERO_EXTEND:
6568 case SUBREG:
6569 {
6570 rtx new = cse_process_notes (XEXP (x, 0), object);
6571 /* We don't substitute VOIDmode constants into these rtx,
6572 since they would impede folding. */
6573 if (GET_MODE (new) != VOIDmode)
6574 validate_change (object, &XEXP (x, 0), new, 0);
6575 return x;
6576 }
6577
6578 case REG:
6579 i = REG_QTY (REGNO (x));
6580
6581 /* Return a constant or a constant register. */
6582 if (REGNO_QTY_VALID_P (REGNO (x)))
6583 {
6584 struct qty_table_elem *ent = &qty_table[i];
6585
6586 if (ent->const_rtx != NULL_RTX
6587 && (CONSTANT_P (ent->const_rtx)
6588 || GET_CODE (ent->const_rtx) == REG))
6589 {
6590 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6591 if (new)
6592 return new;
6593 }
6594 }
6595
6596 /* Otherwise, canonicalize this register. */
6597 return canon_reg (x, NULL_RTX);
6598
6599 default:
6600 break;
6601 }
6602
6603 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6604 if (fmt[i] == 'e')
6605 validate_change (object, &XEXP (x, i),
6606 cse_process_notes (XEXP (x, i), object), 0);
6607
6608 return x;
6609}
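
/* A hedged example of the REG case above (register numbers invented): if
   the quantity for (reg:SI 66) has const_rtx (const_int 4), a REG_EQUAL
   note reading (plus:SI (reg:SI 66) (reg:SI 67)) is rewritten as
   (plus:SI (const_int 4) (reg:SI 67)), and reg 67 is canonicalized if its
   class contains a cheaper register.  */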
6610
6611
6612/* Find common subexpressions between the end test of a loop and the beginning
6613 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6614
6615 Often we have a loop where an expression in the exit test is used
6616 in the body of the loop. For example "while (*p) *q++ = *p++;".
6617 Because of the way we duplicate the loop exit test in front of the loop,
6618 however, we don't detect that common subexpression. This will be caught
 6619 when global cse is implemented, but this is quite a common case.
6620
6621 This function handles the most common cases of these common expressions.
6622 It is called after we have processed the basic block ending with the
6623 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6624 jumps to a label used only once. */
6625
6626static void
6627cse_around_loop (loop_start)
6628 rtx loop_start;
6629{
6630 rtx insn;
6631 int i;
6632 struct table_elt *p;
6633
6634 /* If the jump at the end of the loop doesn't go to the start, we don't
6635 do anything. */
6636 for (insn = PREV_INSN (loop_start);
6637 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6638 insn = PREV_INSN (insn))
6639 ;
6640
6641 if (insn == 0
6642 || GET_CODE (insn) != NOTE
6643 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6644 return;
6645
6646 /* If the last insn of the loop (the end test) was an NE comparison,
6647 we will interpret it as an EQ comparison, since we fell through
6648 the loop. Any equivalences resulting from that comparison are
6649 therefore not valid and must be invalidated. */
6650 if (last_jump_equiv_class)
6651 for (p = last_jump_equiv_class->first_same_value; p;
6652 p = p->next_same_value)
6653 {
6654 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6655 || (GET_CODE (p->exp) == SUBREG
6656 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6657 invalidate (p->exp, VOIDmode);
6658 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6659 || GET_CODE (p->exp) == ZERO_EXTRACT)
6660 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6661 }
6662
6663 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6664 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6665
6666 The only thing we do with SET_DEST is invalidate entries, so we
6667 can safely process each SET in order. It is slightly less efficient
6668 to do so, but we only want to handle the most common cases.
6669
6670 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6671 These pseudos won't have valid entries in any of the tables indexed
6672 by register number, such as reg_qty. We avoid out-of-range array
6673 accesses by not processing any instructions created after cse started. */
6674
6675 for (insn = NEXT_INSN (loop_start);
6676 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6677 && INSN_UID (insn) < max_insn_uid
6678 && ! (GET_CODE (insn) == NOTE
6679 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6680 insn = NEXT_INSN (insn))
6681 {
6682 if (INSN_P (insn)
6683 && (GET_CODE (PATTERN (insn)) == SET
6684 || GET_CODE (PATTERN (insn)) == CLOBBER))
6685 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6686 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6687 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6688 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6689 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6690 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6691 loop_start);
6692 }
6693}
6694
6695
6696/* Process one SET of an insn that was skipped. We ignore CLOBBERs
6697 since they are done elsewhere. This function is called via note_stores. */
6698
6699static void
6700invalidate_skipped_set (dest, set, data)
6701 rtx set;
6702 rtx dest;
6703 void *data ATTRIBUTE_UNUSED;
6704{
6705 enum rtx_code code = GET_CODE (dest);
6706
6707 if (code == MEM
6708 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6709 /* There are times when an address can appear varying and be a PLUS
6710 during this scan when it would be a fixed address were we to know
6711 the proper equivalences. So invalidate all memory if there is
6712 a BLKmode or nonscalar memory reference or a reference to a
6713 variable address. */
6714 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6715 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6716 {
6717 invalidate_memory ();
6718 return;
6719 }
6720
6721 if (GET_CODE (set) == CLOBBER
6722#ifdef HAVE_cc0
6723 || dest == cc0_rtx
6724#endif
6725 || dest == pc_rtx)
6726 return;
6727
6728 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6729 invalidate (XEXP (dest, 0), GET_MODE (dest));
6730 else if (code == REG || code == SUBREG || code == MEM)
6731 invalidate (dest, VOIDmode);
6732}
6733
6734/* Invalidate all insns from START up to the end of the function or the
 6735 next label. This is called when we wish to CSE around a block that is
6736 conditionally executed. */
6737
6738static void
6739invalidate_skipped_block (start)
6740 rtx start;
6741{
6742 rtx insn;
6743
6744 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6745 insn = NEXT_INSN (insn))
6746 {
6747 if (! INSN_P (insn))
6748 continue;
6749
6750 if (GET_CODE (insn) == CALL_INSN)
6751 {
6752 if (! CONST_OR_PURE_CALL_P (insn))
6753 invalidate_memory ();
6754 invalidate_for_call ();
6755 }
6756
6757 invalidate_from_clobbers (PATTERN (insn));
6758 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6759 }
6760}
6761
6762
6763/* If modifying X will modify the value in *DATA (which is really an
6764 `rtx *'), indicate that fact by setting the pointed to value to
6765 NULL_RTX. */
6766
6767static void
6768cse_check_loop_start (x, set, data)
6769 rtx x;
6770 rtx set ATTRIBUTE_UNUSED;
6771 void *data;
6772{
6773 rtx *cse_check_loop_start_value = (rtx *) data;
6774
6775 if (*cse_check_loop_start_value == NULL_RTX
6776 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6777 return;
6778
6779 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6780 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6781 *cse_check_loop_start_value = NULL_RTX;
6782}
6783
6784/* X is a SET or CLOBBER contained in INSN that was found near the start of
6785 a loop that starts with the label at LOOP_START.
6786
6787 If X is a SET, we see if its SET_SRC is currently in our hash table.
6788 If so, we see if it has a value equal to some register used only in the
6789 loop exit code (as marked by jump.c).
6790
6791 If those two conditions are true, we search backwards from the start of
6792 the loop to see if that same value was loaded into a register that still
6793 retains its value at the start of the loop.
6794
6795 If so, we insert an insn after the load to copy the destination of that
6796 load into the equivalent register and (try to) replace our SET_SRC with that
6797 register.
6798
6799 In any event, we invalidate whatever this SET or CLOBBER modifies. */
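
/* An invented instance of the transformation described above: the loop
   exit test uses (reg 75), which jump.c has marked REG_LOOP_TEST_P, and
   an insn near the loop start computes the same value as a pre-loop
   insn P: (set (reg 72) <expr>).  We then emit (set (reg 75) (reg 72))
   after P and replace our SET_SRC with (reg 75).  */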
6800
6801static void
6802cse_set_around_loop (x, insn, loop_start)
6803 rtx x;
6804 rtx insn;
6805 rtx loop_start;
6806{
6807 struct table_elt *src_elt;
6808
6809 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6810 are setting PC or CC0 or whose SET_SRC is already a register. */
6811 if (GET_CODE (x) == SET
6812 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6813 && GET_CODE (SET_SRC (x)) != REG)
6814 {
6815 src_elt = lookup (SET_SRC (x),
6816 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6817 GET_MODE (SET_DEST (x)));
6818
6819 if (src_elt)
6820 for (src_elt = src_elt->first_same_value; src_elt;
6821 src_elt = src_elt->next_same_value)
6822 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6823 && COST (src_elt->exp) < COST (SET_SRC (x)))
6824 {
6825 rtx p, set;
6826
6827 /* Look for an insn in front of LOOP_START that sets
6828 something in the desired mode to SET_SRC (x) before we hit
6829 a label or CALL_INSN. */
6830
6831 for (p = prev_nonnote_insn (loop_start);
6832 p && GET_CODE (p) != CALL_INSN
6833 && GET_CODE (p) != CODE_LABEL;
6834 p = prev_nonnote_insn (p))
6835 if ((set = single_set (p)) != 0
6836 && GET_CODE (SET_DEST (set)) == REG
6837 && GET_MODE (SET_DEST (set)) == src_elt->mode
6838 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6839 {
6840 /* We now have to ensure that nothing between P
6841 and LOOP_START modified anything referenced in
6842 SET_SRC (x). We know that nothing within the loop
6843 can modify it, or we would have invalidated it in
6844 the hash table. */
6845 rtx q;
6846 rtx cse_check_loop_start_value = SET_SRC (x);
6847 for (q = p; q != loop_start; q = NEXT_INSN (q))
6848 if (INSN_P (q))
6849 note_stores (PATTERN (q),
6850 cse_check_loop_start,
6851 &cse_check_loop_start_value);
6852
6853 /* If nothing was changed and we can replace our
6854 SET_SRC, add an insn after P to copy its destination
6855 to what we will be replacing SET_SRC with. */
6856 if (cse_check_loop_start_value
6857 && validate_change (insn, &SET_SRC (x),
6858 src_elt->exp, 0))
6859 {
6860 /* If this creates new pseudos, this is unsafe,
6861 because the regno of new pseudo is unsuitable
6862 to index into reg_qty when cse_insn processes
6863 the new insn. Therefore, if a new pseudo was
6864 created, discard this optimization. */
6865 int nregs = max_reg_num ();
6866 rtx move
6867 = gen_move_insn (src_elt->exp, SET_DEST (set));
6868 if (nregs != max_reg_num ())
6869 {
6870 if (! validate_change (insn, &SET_SRC (x),
6871 SET_SRC (set), 0))
6872 abort ();
6873 }
6874 else
6875 {
6876 if (control_flow_insn_p (p))
6877 /* p can cause a control flow transfer so it
 6878 is the last insn of a basic block. We therefore
 6879 can't use emit_insn_after. */
6880 emit_insn_before (move, next_nonnote_insn (p));
6881 else
6882 emit_insn_after (move, p);
6883 }
6884 }
6885 break;
6886 }
6887 }
6888 }
6889
6890 /* Deal with the destination of X affecting the stack pointer. */
6891 addr_affects_sp_p (SET_DEST (x));
6892
6893 /* See comment on similar code in cse_insn for explanation of these
6894 tests. */
6895 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6896 || GET_CODE (SET_DEST (x)) == MEM)
6897 invalidate (SET_DEST (x), VOIDmode);
6898 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6899 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6900 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6901}
6902
6903
6904/* Find the end of INSN's basic block and return its range,
6905 the total number of SETs in all the insns of the block, the last insn of the
6906 block, and the branch path.
6907
6908 The branch path indicates which branches should be followed. If a nonzero
6909 path size is specified, the block should be rescanned and a different set
6910 of branches will be taken. The branch path is only used if
6911 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is nonzero.
6912
6913 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6914 used to describe the block. It is filled in with the information about
6915 the current block. The incoming structure's branch path, if any, is used
6916 to construct the output branch path. */
6917
6918void
6919cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6920 rtx insn;
6921 struct cse_basic_block_data *data;
6922 int follow_jumps;
6923 int after_loop;
6924 int skip_blocks;
6925{
6926 rtx p = insn, q;
6927 int nsets = 0;
6928 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6929 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6930 int path_size = data->path_size;
6931 int path_entry = 0;
6932 int i;
6933
6934 /* Update the previous branch path, if any. If the last branch was
6935 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6936 shorten the path by one and look at the previous branch. We know that
6937 at least one branch must have been taken if PATH_SIZE is nonzero. */
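  /* For instance (purely illustrative), a previous path whose statuses
     were TAKEN, TAKEN, NOT_TAKEN backtracks to TAKEN, NOT_TAKEN: the
     trailing NOT_TAKEN entry is dropped and the last branch actually
     followed is flipped, much like decrementing a binary counter.  */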
6938 while (path_size > 0)
6939 {
6940 if (data->path[path_size - 1].status != NOT_TAKEN)
6941 {
6942 data->path[path_size - 1].status = NOT_TAKEN;
6943 break;
6944 }
6945 else
6946 path_size--;
6947 }
6948
6949 /* If the first instruction is marked with QImode, that means we've
6950 already processed this block. Our caller will look at DATA->LAST
6951 to figure out where to go next. We want to return the next block
6952 in the instruction stream, not some branched-to block somewhere
6953 else. We accomplish this by pretending our caller forbade us to
6954 follow jumps or skip blocks. */
6955 if (GET_MODE (insn) == QImode)
6956 follow_jumps = skip_blocks = 0;
6957
6958 /* Scan to end of this basic block. */
6959 while (p && GET_CODE (p) != CODE_LABEL)
6960 {
6961 /* Don't cse out the end of a loop. This makes a difference
6962 only for the unusual loops that always execute at least once;
6963 all other loops have labels there so we will stop in any case.
6964 Cse'ing out the end of the loop is dangerous because it
6965 might cause an invariant expression inside the loop
6966 to be reused after the end of the loop. This would make it
6967 hard to move the expression out of the loop in loop.c,
6968 especially if it is one of several equivalent expressions
6969 and loop.c would like to eliminate it.
6970
6971 If we are running after loop.c has finished, we can ignore
6972 the NOTE_INSN_LOOP_END. */
6973
6974 if (! after_loop && GET_CODE (p) == NOTE
6975 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6976 break;
6977
6978 /* Don't cse over a call to setjmp; on some machines (e.g. the VAX)
6979 the regs restored by the longjmp come from
6980 a later time than the setjmp. */
6981 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6982 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6983 break;
6984
6985 /* A PARALLEL can have lots of SETs in it,
6986 especially if it is really an ASM_OPERANDS. */
6987 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6988 nsets += XVECLEN (PATTERN (p), 0);
6989 else if (GET_CODE (p) != NOTE)
6990 nsets += 1;
6991
6992 /* Ignore insns made by CSE; they cannot affect the boundaries of
6993 the basic block. */
6994
6995 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6996 high_cuid = INSN_CUID (p);
6997 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6998 low_cuid = INSN_CUID (p);
6999
7000 /* See if this insn is in our branch path. If it is and we are to
7001 take it, do so. */
7002 if (path_entry < path_size && data->path[path_entry].branch == p)
7003 {
7004 if (data->path[path_entry].status != NOT_TAKEN)
7005 p = JUMP_LABEL (p);
7006
7007 /* Point to next entry in path, if any. */
7008 path_entry++;
7009 }
7010
7011 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
7012 was specified, we haven't reached our maximum path length, there are
7013 insns following the target of the jump, this is the only use of the
7014 jump label, and the target label is preceded by a BARRIER.
7015
7016 Alternatively, we can follow the jump if it branches around a
7017 block of code and there are no other branches into the block.
7018 In this case invalidate_skipped_block will be called to invalidate any
7019 registers set in the block when following the jump. */
7020
7021 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
7022 && GET_CODE (p) == JUMP_INSN
7023 && GET_CODE (PATTERN (p)) == SET
7024 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
7025 && JUMP_LABEL (p) != 0
7026 && LABEL_NUSES (JUMP_LABEL (p)) == 1
7027 && NEXT_INSN (JUMP_LABEL (p)) != 0)
7028 {
7029 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
7030 if ((GET_CODE (q) != NOTE
7031 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
7032 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
7033 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
7034 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
7035 break;
7036
7037 /* If we ran into a BARRIER, this code is an extension of the
7038 basic block when the branch is taken. */
7039 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
7040 {
7041 /* Don't allow ourselves to keep walking around an
7042 always-executed loop. */
7043 if (next_real_insn (q) == next)
7044 {
7045 p = NEXT_INSN (p);
7046 continue;
7047 }
7048
7049 /* Similarly, don't put a branch in our path more than once. */
7050 for (i = 0; i < path_entry; i++)
7051 if (data->path[i].branch == p)
7052 break;
7053
7054 if (i != path_entry)
7055 break;
7056
7057 data->path[path_entry].branch = p;
7058 data->path[path_entry++].status = TAKEN;
7059
7060 /* This branch now ends our path. It was possible that we
7061 didn't see this branch the last time around (when the
7062 insn in front of the target was a JUMP_INSN that was
7063 turned into a no-op). */
7064 path_size = path_entry;
7065
7066 p = JUMP_LABEL (p);
7067 /* Mark block so we won't scan it again later. */
7068 PUT_MODE (NEXT_INSN (p), QImode);
7069 }
7070 /* Detect a branch around a block of code. */
7071 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7072 {
7073 rtx tmp;
7074
7075 if (next_real_insn (q) == next)
7076 {
7077 p = NEXT_INSN (p);
7078 continue;
7079 }
7080
7081 for (i = 0; i < path_entry; i++)
7082 if (data->path[i].branch == p)
7083 break;
7084
7085 if (i != path_entry)
7086 break;
7087
7088 /* This is no_labels_between_p (p, q) with an added check for
7089 reaching the end of a function (in case Q precedes P). */
7090 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7091 if (GET_CODE (tmp) == CODE_LABEL)
7092 break;
7093
7094 if (tmp == q)
7095 {
7096 data->path[path_entry].branch = p;
7097 data->path[path_entry++].status = AROUND;
7098
7099 path_size = path_entry;
7100
7101 p = JUMP_LABEL (p);
7102 /* Mark block so we won't scan it again later. */
7103 PUT_MODE (NEXT_INSN (p), QImode);
7104 }
7105 }
7106 }
7107 p = NEXT_INSN (p);
7108 }
7109
7110 data->low_cuid = low_cuid;
7111 data->high_cuid = high_cuid;
7112 data->nsets = nsets;
7113 data->last = p;
7114
7115 /* If none of the jumps in the path were taken, set our path length to
7116 zero so a rescan won't be done. */
7117 for (i = path_size - 1; i >= 0; i--)
7118 if (data->path[i].status != NOT_TAKEN)
7119 break;
7120
7121 if (i == -1)
7122 data->path_size = 0;
7123 else
7124 data->path_size = path_size;
7125
7126 /* End the current branch path. */
7127 data->path[path_size].branch = 0;
7128}
7129
7130
7131/* Perform cse on the instructions of a function.
7132 F is the first instruction.
7133 NREGS is one plus the highest pseudo-reg number used in the function.
7134
7135 AFTER_LOOP is 1 if this is the cse call done after loop optimization
7136 (only if -frerun-cse-after-loop).
7137
7138 Returns 1 if jump_optimize should be redone due to simplifications
7139 in conditional jump instructions. */
7140
7141int
7142cse_main (f, nregs, after_loop, file)
7143 rtx f;
7144 int nregs;
7145 int after_loop;
7146 FILE *file;
7147{
7148 struct cse_basic_block_data val;
7149 rtx insn = f;
7150 int i;
7151
7152 cse_jumps_altered = 0;
7153 recorded_label_ref = 0;
7154 constant_pool_entries_cost = 0;
7155 val.path_size = 0;
7156
7157 init_recog ();
7158 init_alias_analysis ();
7159
7160 max_reg = nregs;
7161
7162 max_insn_uid = get_max_uid ();
7163
7164 reg_eqv_table = (struct reg_eqv_elem *)
7165 xmalloc (nregs * sizeof (struct reg_eqv_elem));
7166
7167#ifdef LOAD_EXTEND_OP
7168
7169 /* Allocate scratch rtl here. cse_insn will fill in the memory reference
7170 and change the code and mode as appropriate. */
7171 memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
7172#endif
7173
7174 /* Reset the counter indicating how many elements have been made
7175 thus far. */
7176 n_elements_made = 0;
7177
7178 /* Find the largest uid. */
7179
7180 max_uid = get_max_uid ();
7181 uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));
7182
7183 /* Compute the mapping from uids to cuids.
7184 CUIDs are numbers assigned to insns, like uids,
7185 except that cuids increase monotonically through the code.
7186 Don't assign cuids to line-number NOTEs, so that the distance in cuids
7187 between two insns is not affected by -g. */
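  /* For example (illustrative only): an insn, a line-number note, and a
     second insn receive cuids 1, 1, 2. The note is present with -g and
     absent without it, but the cuid distance between the two real insns
     is 1 either way.  */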
7188
7189 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
7190 {
7191 if (GET_CODE (insn) != NOTE
7192 || NOTE_LINE_NUMBER (insn) < 0)
7193 INSN_CUID (insn) = ++i;
7194 else
7195 /* Give a line number note the same cuid as the preceding insn. */
7196 INSN_CUID (insn) = i;
7197 }
7198
7199 ggc_push_context ();
7200
7201 /* Loop over basic blocks.
7202 Compute the maximum number of qty's needed for each basic block
7203 (which is 2 for each SET). */
7204 insn = f;
7205 while (insn)
7206 {
7207 cse_altered = 0;
7208 cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
7209 flag_cse_skip_blocks);
7210
7211 /* If this basic block was already processed or has no sets, skip it. */
7212 if (val.nsets == 0 || GET_MODE (insn) == QImode)
7213 {
7214 PUT_MODE (insn, VOIDmode);
7215 insn = (val.last ? NEXT_INSN (val.last) : 0);
7216 val.path_size = 0;
7217 continue;
7218 }
7219
7220 cse_basic_block_start = val.low_cuid;
7221 cse_basic_block_end = val.high_cuid;
7222 max_qty = val.nsets * 2;
7223
7224 if (file)
7225 fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
7226 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
7227 val.nsets);
7228
7229 /* Make MAX_QTY bigger to give us room to optimize
7230 past the end of this basic block, if that should prove useful. */
7231 if (max_qty < 500)
7232 max_qty = 500;
7233
7234 max_qty += max_reg;
7235
7236 /* If this basic block is being extended by following certain jumps,
7237 (see `cse_end_of_basic_block'), we reprocess the code from the start.
7238 Otherwise, we start after this basic block. */
7239 if (val.path_size > 0)
7240 cse_basic_block (insn, val.last, val.path, 0);
7241 else
7242 {
7243 int old_cse_jumps_altered = cse_jumps_altered;
7244 rtx temp;
7245
7246 /* When cse changes a conditional jump to an unconditional
7247 jump, we want to reprocess the block, since it will give
7248 us a new branch path to investigate. */
7249 cse_jumps_altered = 0;
7250 temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
7251 if (cse_jumps_altered == 0
7252 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7253 insn = temp;
7254
7255 cse_jumps_altered |= old_cse_jumps_altered;
7256 }
7257
7258 if (cse_altered)
7259 ggc_collect ();
7260
7261#ifdef USE_C_ALLOCA
7262 alloca (0);
7263#endif
7264 }
7265
7266 ggc_pop_context ();
7267
7268 if (max_elements_made < n_elements_made)
7269 max_elements_made = n_elements_made;
7270
7271 /* Clean up. */
7272 end_alias_analysis ();
7273 free (uid_cuid);
7274 free (reg_eqv_table);
7275
7276 return cse_jumps_altered || recorded_label_ref;
7277}
7278
7279/* Process a single basic block. FROM and TO are the limits of the basic
7280 block. NEXT_BRANCH points to the branch path when following jumps or
7281 a null path when not following jumps.
7282
7283 AROUND_LOOP is nonzero if we are to try to cse around to the start of a
7284 loop. This is true when we are being called for the last time on a
7285 block and this CSE pass is before loop.c. */
7286
7287static rtx
7288cse_basic_block (from, to, next_branch, around_loop)
7289 rtx from, to;
7290 struct branch_path *next_branch;
7291 int around_loop;
7292{
7293 rtx insn;
7294 int to_usage = 0;
7295 rtx libcall_insn = NULL_RTX;
7296 int num_insns = 0;
7297
7298 /* This array is undefined before max_reg, so only allocate
7299 the space actually needed and adjust the start. */
7300
7301 qty_table
7302 = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
7303 * sizeof (struct qty_table_elem));
7304 qty_table -= max_reg;
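  /* After this adjustment, qty_table[q] is valid exactly for
     max_reg <= q < max_qty, and each exit path below undoes the
     offset with free (qty_table + max_reg).  */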
7305
7306 new_basic_block ();
7307
7308 /* TO might be a label. If so, protect it from being deleted. */
7309 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7310 ++LABEL_NUSES (to);
7311
7312 for (insn = from; insn != to; insn = NEXT_INSN (insn))
7313 {
7314 enum rtx_code code = GET_CODE (insn);
7315
7316 /* If we have processed 1,000 insns, flush the hash table to
7317 avoid extreme quadratic behavior. We must not include NOTEs
7318 in the count since there may be more of them when generating
7319 debugging information. If we clear the table at different
7320 times, code generated with -g -O might be different from code
7321 generated with -O but not -g.
7322
7323 ??? This is a real kludge and needs to be done some other way.
7324 Perhaps for 2.9. */
7325 if (code != NOTE && num_insns++ > 1000)
7326 {
7327 flush_hash_table ();
7328 num_insns = 0;
7329 }
7330
7331 /* See if this is a branch that is part of the path. If so, and it is
7332 to be taken, do so. */
7333 if (next_branch->branch == insn)
7334 {
7335 enum taken status = next_branch++->status;
7336 if (status != NOT_TAKEN)
7337 {
7338 if (status == TAKEN)
7339 record_jump_equiv (insn, 1);
7340 else
7341 invalidate_skipped_block (NEXT_INSN (insn));
7342
7343 /* Set the last insn as the jump insn; it doesn't affect cc0.
7344 Then follow this branch. */
7345#ifdef HAVE_cc0
7346 prev_insn_cc0 = 0;
7347#endif
7348 prev_insn = insn;
7349 insn = JUMP_LABEL (insn);
7350 continue;
7351 }
7352 }
7353
7354 if (GET_MODE (insn) == QImode)
7355 PUT_MODE (insn, VOIDmode);
7356
7357 if (GET_RTX_CLASS (code) == 'i')
7358 {
7359 rtx p;
7360
7361 /* Process notes first so we have all notes in canonical forms when
7362 looking for duplicate operations. */
7363
7364 if (REG_NOTES (insn))
7365 REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);
7366
7367 /* Track when we are inside a LIBCALL block. Inside such a block,
7368 we do not want to record destinations. The last insn of a
7369 LIBCALL block is not considered to be part of the block, since
7370 its destination is the result of the block and hence should be
7371 recorded. */
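	  /* Illustrative shape of such a block (not actual RTL dump output):

		insn A  carries a REG_LIBCALL note pointing at insn C
		insn B  intermediate computation
		insn C  sets the final result, REG_RETVAL note pointing at A

	     LIBCALL_INSN is C while A and B are processed, and is reset
	     to zero at C itself, so only C's destination is recorded.  */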
7372
7373 if (REG_NOTES (insn) != 0)
7374 {
7375 if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
7376 libcall_insn = XEXP (p, 0);
7377 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7378 libcall_insn = 0;
7379 }
7380
7381 cse_insn (insn, libcall_insn);
7382
7383 /* If we haven't already found an insn where we added a LABEL_REF,
7384 check this one. */
7385 if (GET_CODE (insn) == INSN && ! recorded_label_ref
7386 && for_each_rtx (&PATTERN (insn), check_for_label_ref,
7387 (void *) insn))
7388 recorded_label_ref = 1;
7389 }
7390
7391 /* If INSN is now an unconditional jump, skip to the end of our
7392 basic block by pretending that we just did the last insn in the
7393 basic block. If we are jumping to the end of our block, show
7394 that we can have one usage of TO. */
7395
7396 if (any_uncondjump_p (insn))
7397 {
7398 if (to == 0)
7399 {
7400 free (qty_table + max_reg);
7401 return 0;
7402 }
7403
7404 if (JUMP_LABEL (insn) == to)
7405 to_usage = 1;
7406
7407 /* Maybe TO was deleted because the jump is unconditional.
7408 If so, there is nothing left in this basic block. */
7409 /* ??? Perhaps it would be smarter to set TO
7410 to whatever follows this insn,
7411 and pretend the basic block had always ended here. */
7412 if (INSN_DELETED_P (to))
7413 break;
7414
7415 insn = PREV_INSN (to);
7416 }
7417
7418 /* See if it is ok to keep on going past the label
7419 which used to end our basic block. Remember that we incremented
7420 the count of that label, so we decrement it here. If we made
7421 a jump unconditional, TO_USAGE will be one; in that case, we don't
7422 want to count the use in that jump. */
7423
7424 if (to != 0 && NEXT_INSN (insn) == to
7425 && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
7426 {
7427 struct cse_basic_block_data val;
7428 rtx prev;
7429
7430 insn = NEXT_INSN (to);
7431
7432 /* If TO was the last insn in the function, we are done. */
7433 if (insn == 0)
7434 {
7435 free (qty_table + max_reg);
7436 return 0;
7437 }
7438
7439 /* If TO was preceded by a BARRIER we are done with this block
7440 because it has no continuation. */
7441 prev = prev_nonnote_insn (to);
7442 if (prev && GET_CODE (prev) == BARRIER)
7443 {
7444 free (qty_table + max_reg);
7445 return insn;
7446 }
7447
7448 /* Find the end of the following block. Note that we won't be
7449 following branches in this case. */
7450 to_usage = 0;
7451 val.path_size = 0;
7452 cse_end_of_basic_block (insn, &val, 0, 0, 0);
7453
7454 /* If the tables we allocated have enough space left
7455 to handle all the SETs in the next basic block,
7456 continue through it. Otherwise, return,
7457 and that block will be scanned individually. */
7458 if (val.nsets * 2 + next_qty > max_qty)
7459 break;
7460
7461 cse_basic_block_start = val.low_cuid;
7462 cse_basic_block_end = val.high_cuid;
7463 to = val.last;
7464
7465 /* Prevent TO from being deleted if it is a label. */
7466 if (to != 0 && GET_CODE (to) == CODE_LABEL)
7467 ++LABEL_NUSES (to);
7468
7469 /* Back up so we process the first insn in the extension. */
7470 insn = PREV_INSN (insn);
7471 }
7472 }
7473
7474 if (next_qty > max_qty)
7475 abort ();
7476
7477 /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
7478 the previous insn is the only insn that branches to the head of a loop,
7479 we can cse into the loop. Don't do this if we changed the jump
7480 structure of a loop unless we aren't going to be following jumps. */
7481
7482 insn = prev_nonnote_insn (to);
7483 if ((cse_jumps_altered == 0
7484 || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
7485 && around_loop && to != 0
7486 && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
7487 && GET_CODE (insn) == JUMP_INSN
7488 && JUMP_LABEL (insn) != 0
7489 && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
7490 cse_around_loop (JUMP_LABEL (insn));
7491
7492 free (qty_table + max_reg);
7493
7494 return to ? NEXT_INSN (to) : 0;
7495}
7496
7497
7498/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
7499 there isn't a REG_LABEL note. Return one if so. DATA is the insn. */
7500
7501static int
7502check_for_label_ref (rtl, data)
7503 rtx *rtl;
7504 void *data;
7505{
7506 rtx insn = (rtx) data;
7507
7508 /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
7509 we must rerun jump since it needs to place the note. If this is a
7510 LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
7511 since no REG_LABEL will be added. */
7512 return (GET_CODE (*rtl) == LABEL_REF
7513 && ! LABEL_REF_NONLOCAL_P (*rtl)
7514 && LABEL_P (XEXP (*rtl, 0))
7515 && INSN_UID (XEXP (*rtl, 0)) != 0
7516 && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
7517}
7518
7519
7520/* Count the number of times registers are used (not set) in X.
7521 COUNTS is an array in which we accumulate the count; INCR is how much
7522 we count each register usage.
7523
7524 Don't count a usage of DEST, which is the SET_DEST of a SET which
7525 contains X in its SET_SRC. This is because such a SET does not
7526 modify the liveness of DEST. */
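/* For example (a sketch): given the insn

	(set (reg 100) (plus (reg 100) (const_int 1)))

   the use of (reg 100) inside the PLUS is not counted, because deleting
   the insn would not change whether (reg 100) is used elsewhere; counting
   it would make the register look live and keep a dead insn around.  */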
7527
7528static void
7529count_reg_usage (x, counts, dest, incr)
7530 rtx x;
7531 int *counts;
7532 rtx dest;
7533 int incr;
7534{
7535 enum rtx_code code;
7536 rtx note;
7537 const char *fmt;
7538 int i, j;
7539
7540 if (x == 0)
7541 return;
7542
7543 switch (code = GET_CODE (x))
7544 {
7545 case REG:
7546 if (x != dest)
7547 counts[REGNO (x)] += incr;
7548 return;
7549
7550 case PC:
7551 case CC0:
7552 case CONST:
7553 case CONST_INT:
7554 case CONST_DOUBLE:
7555 case CONST_VECTOR:
7556 case SYMBOL_REF:
7557 case LABEL_REF:
7558 return;
7559
7560 case CLOBBER:
7561 /* If we are clobbering a MEM, mark any registers inside the address
7562 as being used. */
7563 if (GET_CODE (XEXP (x, 0)) == MEM)
7564 count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
7565 return;
7566
7567 case SET:
7568 /* Unless we are setting a REG, count everything in SET_DEST. */
7569 if (GET_CODE (SET_DEST (x)) != REG)
7570 count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);
7571
7572 /* If SRC has side-effects, then we can't delete this insn, so the
7573 usage of SET_DEST inside SRC counts.
7574
7575 ??? Strictly speaking, we might be preserving this insn
7576 because some other SET has side-effects, but that's hard
7577 to do and can't happen now. */
7578 count_reg_usage (SET_SRC (x), counts,
7579 side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
7580 incr);
7581 return;
7582
7583 case CALL_INSN:
7584 count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
7585 /* Fall through. */
7586
7587 case INSN:
7588 case JUMP_INSN:
7589 count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);
7590
7591 /* Things used in a REG_EQUAL note aren't dead since loop.c may try to
7592 use them. */
7593
7594 note = find_reg_equal_equiv_note (x);
7595 if (note)
7596 count_reg_usage (XEXP (note, 0), counts, NULL_RTX, incr);
7597 return;
7598
7599 case INSN_LIST:
7600 abort ();
7601
7602 default:
7603 break;
7604 }
7605
7606 fmt = GET_RTX_FORMAT (code);
7607 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7608 {
7609 if (fmt[i] == 'e')
7610 count_reg_usage (XEXP (x, i), counts, dest, incr);
7611 else if (fmt[i] == 'E')
7612 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7613 count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
7614 }
7615}
7616
7617
7618/* Return true if SET is live. */
7619static bool
7620set_live_p (set, insn, counts)
7621 rtx set;
7622 rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0. */
7623 int *counts;
7624{
7625#ifdef HAVE_cc0
7626 rtx tem;
7627#endif
7628
7629 if (set_noop_p (set))
7630 ;
7631
7632#ifdef HAVE_cc0
7633 else if (GET_CODE (SET_DEST (set)) == CC0
7634 && !side_effects_p (SET_SRC (set))
7635 && ((tem = next_nonnote_insn (insn)) == 0
7636 || !INSN_P (tem)
7637 || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
7638 return false;
7639#endif
7640 else if (GET_CODE (SET_DEST (set)) != REG
7641 || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
7642 || counts[REGNO (SET_DEST (set))] != 0
7643 || side_effects_p (SET_SRC (set))
7644 /* An ADDRESSOF expression can turn into a use of the
7645 internal arg pointer, so always consider the
7646 internal arg pointer live. If it is truly dead,
7647 flow will delete the initializing insn. */
7648 || (SET_DEST (set) == current_function_internal_arg_pointer))
7649 return true;
7650 return false;
7651}
7652
7653/* Return true if INSN is live. */
7654
7655static bool
7656insn_live_p (insn, counts)
7657 rtx insn;
7658 int *counts;
7659{
7660 int i;
7661 if (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
7662 return true;
7663 else if (GET_CODE (PATTERN (insn)) == SET)
7664 return set_live_p (PATTERN (insn), insn, counts);
7665 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
7666 {
7667 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7668 {
7669 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7670
7671 if (GET_CODE (elt) == SET)
7672 {
7673 if (set_live_p (elt, insn, counts))
7674 return true;
7675 }
7676 else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
7677 return true;
7678 }
7679 return false;
7680 }
7681 else
7682 return true;
7683}
7684
7685/* Return true if the libcall block containing INSN is dead as a whole. */
7686
7687static bool
7688dead_libcall_p (insn, counts)
7689 rtx insn;
7690 int *counts;
7691{
7692 rtx note;
7693 /* See if there's a REG_EQUAL note on this insn and try to
7694 replace the source with the REG_EQUAL expression.
7695
7696 We assume that insns with REG_RETVALs can only be reg->reg
7697 copies at this point. */
7698 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
7699 if (note)
7700 {
7701 rtx set = single_set (insn);
7702 rtx new = simplify_rtx (XEXP (note, 0));
7703
7704 if (!new)
7705 new = XEXP (note, 0);
7706
7707 /* While changing insn, we must update the counts accordingly. */
7708 count_reg_usage (insn, counts, NULL_RTX, -1);
7709
7710 if (set && validate_change (insn, &SET_SRC (set), new, 0))
7711 {
7712 count_reg_usage (insn, counts, NULL_RTX, 1);
7713 remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
7714 remove_note (insn, note);
7715 return true;
7716 }
7717 count_reg_usage (insn, counts, NULL_RTX, 1);
7718 }
7719 return false;
7720}
7721
7722/* Scan all the insns and delete any that are dead; i.e., they store a register
7723 that is never used or they copy a register to itself.
7724
7725 This is used to remove insns made obviously dead by cse, loop or other
7726 optimizations. It improves the heuristics in loop since it won't try to
7727 move dead invariants out of loops or make givs for dead quantities. The
7728 remaining passes of the compilation are also sped up. */
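/* For instance (illustrative), both of the following would be deleted:

	(set (reg 123) (reg 123))	no-op self-copy
	(set (reg 124) (const_int 0))	dead if (reg 124) is never used

   The outer loop below repeats until an iteration deletes nothing, since
   deleting one insn can make the insns computing its inputs dead too.  */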
7729
7730int
7731delete_trivially_dead_insns (insns, nreg)
7732 rtx insns;
7733 int nreg;
7734{
7735 int *counts;
7736 rtx insn, prev;
7737 int in_libcall = 0, dead_libcall = 0;
7738 int ndead = 0, nlastdead, niterations = 0;
7739
7740 timevar_push (TV_DELETE_TRIVIALLY_DEAD);
7741 /* First count the number of times each register is used. */
7742 counts = (int *) xcalloc (nreg, sizeof (int));
7743 for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
7744 count_reg_usage (insn, counts, NULL_RTX, 1);
7745
7746 do
7747 {
7748 nlastdead = ndead;
7749 niterations++;
7750 /* Go from the last insn to the first and delete insns that only set unused
7751 registers or copy a register to itself. As we delete an insn, remove
7752 usage counts for registers it uses.
7753
7754 The first jump optimization pass may leave a real insn as the last
7755 insn in the function. We must not skip that insn or we may end
7756 up deleting code that is not really dead. */
7757 insn = get_last_insn ();
7758 if (! INSN_P (insn))
7759 insn = prev_real_insn (insn);
7760
7761 for (; insn; insn = prev)
7762 {
7763 int live_insn = 0;
7764
7765 prev = prev_real_insn (insn);
7766
7767 /* Don't delete any insns that are part of a libcall block unless
7768 we can delete the whole libcall block.
7769
7770 Flow or loop might get confused if we did that. Remember
7771 that we are scanning backwards. */
7772 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
7773 {
7774 in_libcall = 1;
7775 live_insn = 1;
7776 dead_libcall = dead_libcall_p (insn, counts);
7777 }
7778 else if (in_libcall)
7779 live_insn = ! dead_libcall;
7780 else
7781 live_insn = insn_live_p (insn, counts);
7782
7783 /* If this is a dead insn, delete it and show that the registers
7784 in it are no longer being used. */
7785
7786 if (! live_insn)
7787 {
7788 count_reg_usage (insn, counts, NULL_RTX, -1);
7789 delete_insn_and_edges (insn);
7790 ndead++;
7791 }
7792
7793 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
7794 {
7795 in_libcall = 0;
7796 dead_libcall = 0;
7797 }
7798 }
7799 }
7800 while (ndead != nlastdead);
7801
7802 if (rtl_dump_file && ndead)
7803 fprintf (rtl_dump_file, "Deleted %i trivially dead insns; %i iterations\n",
7804 ndead, niterations);
7805 /* Clean up. */
7806 free (counts);
7807 timevar_pop (TV_DELETE_TRIVIALLY_DEAD);
7808 return ndead;
7809}