/* Common subexpression elimination for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

/* The basic idea of common subexpression elimination is to go
   through the code, keeping a record of expressions that would
   have the same value at the current scan point, and replacing
   expressions encountered with the cheapest equivalent expression.

   It is too complicated to keep track of the different possibilities
   when control paths merge in this code; so, at each label, we forget all
   that is known and start fresh.  This can be described as processing each
   extended basic block separately.  We have a separate pass to perform
   global CSE.

   Note CSE can turn a conditional or computed jump into a nop or
   an unconditional jump.  When this occurs we arrange to run the jump
   optimizer after CSE to delete the unreachable code.

   We use two data structures to record the equivalent expressions:
   a hash table for most expressions, and a vector of "quantity
   numbers" to record equivalent (pseudo) registers.

   The use of the special data structure for registers is desirable
   because it is faster.  It is possible because register references
   contain a fairly small number, the register number, taken from
   a contiguously allocated series, and two register references are
   identical if they have the same number.  General expressions
   do not have any such thing, so the only way to retrieve the
   information recorded on an expression other than a register
   is to keep it in a hash table.

Registers and "quantity numbers":

   At the start of each basic block, all of the (hardware and pseudo)
   registers used in the function are given distinct quantity
   numbers to indicate their contents.  During scan, when the code
   copies one register into another, we copy the quantity number.
   When a register is loaded in any other way, we allocate a new
   quantity number to describe the value generated by this operation.
   `reg_qty' records what quantity a register is currently thought
   of as containing.

   All real quantity numbers are greater than or equal to `max_reg'.
   If register N has not been assigned a quantity, reg_qty[N] will equal N.

   Quantity numbers below `max_reg' do not exist and none of the `qty_table'
   entries should be referenced with an index below `max_reg'.

   We also maintain a bidirectional chain of registers for each
   quantity number.  The `qty_table' members `first_reg' and `last_reg',
   and `reg_eqv_table' members `next' and `prev' hold these chains.

   The first register in a chain is the one whose lifespan is least local.
   Among equals, it is the one that was seen first.
   We replace any equivalent register with that one.

   If two registers have the same quantity number, REG expressions with
   the quantity's qty_table `mode' must be in the hash table for both
   registers and must be in the same class.

   The converse is not true.  Since hard registers may be referenced in
   any mode, two REG expressions might be equivalent in the hash table
   but not have the same quantity number if the mode recorded for the
   quantity of one of the registers is not the same as the mode of those
   expressions.

Constants and quantity numbers:

   When a quantity has a known constant value, that value is stored
   in the appropriate qty_table `const_rtx'.  This is in addition to
   putting the constant in the hash table as is usual for non-regs.

   Whether a reg or a constant is preferred is determined by the configuration
   macro CONST_COSTS and will often depend on the constant value.  In any
   event, expressions containing constants can be simplified by fold_rtx.

   When a quantity has a known nearly constant value (such as an address
   of a stack slot), that value is stored in the appropriate qty_table
   `const_rtx'.

   Integer constants don't have a machine mode.  However, cse
   determines the intended machine mode from the destination
   of the instruction that moves the constant.  The machine mode
   is recorded in the hash table along with the actual RTL
   constant expression so that different modes are kept separate.

Other expressions:

   To record known equivalences among expressions in general
   we use a hash table called `table'.  It has a fixed number of buckets
   that contain chains of `struct table_elt' elements for expressions.
   These chains connect the elements whose expressions have the same
   hash codes.

   Other chains through the same elements connect the elements which
   currently have equivalent values.

   Register references in an expression are canonicalized before hashing
   the expression.  This is done using `reg_qty' and qty_table `first_reg'.
   The hash code of a register reference is computed using the quantity
   number, not the register number.

   When the value of an expression changes, it is necessary to remove from the
   hash table not just that expression but all expressions whose values
   could be different as a result.

     1. If the value changing is in memory, except in special cases
     ANYTHING referring to memory could be changed.  That is because
     nobody knows where a pointer does not point.
     The function `invalidate_memory' removes what is necessary.

     The special cases are when the address is constant or is
     a constant plus a fixed register such as the frame pointer
     or a static chain pointer.  When such addresses are stored in,
     we can tell exactly which other such addresses must be invalidated
     due to overlap.  `invalidate' does this.
     All expressions that refer to non-constant
     memory addresses are also invalidated.  `invalidate_memory' does this.

     2. If the value changing is a register, all expressions
     containing references to that register, and only those,
     must be removed.

   Because searching the entire hash table for expressions that contain
   a register is very slow, we try to figure out when it isn't necessary.
   Precisely, this is necessary only when expressions have been
   entered in the hash table using this register, and then the value has
   changed, and then another expression wants to be added to refer to
   the register's new value.  This sequence of circumstances is rare
   within any one basic block.

   The vectors `reg_tick' and `reg_in_table' are used to detect this case.
   reg_tick[i] is incremented whenever a value is stored in register i.
   reg_in_table[i] holds -1 if no references to register i have been
   entered in the table; otherwise, it contains the value reg_tick[i] had
   when the references were entered.  If we want to enter a reference
   and reg_in_table[i] != reg_tick[i], we must scan and remove old references.
   Until we want to enter a new entry, the mere fact that the two vectors
   don't match causes the entries to be ignored if anyone tries to match
   them.

   Registers themselves are entered in the hash table as well as in
   the equivalent-register chains.  However, the vectors `reg_tick'
   and `reg_in_table' do not apply to expressions which are simple
   register references.  These expressions are removed from the table
   immediately when they become invalid, and this can be done even if
   we do not immediately search for all the expressions that refer to
   the register.

   A CLOBBER rtx in an instruction invalidates its operand for further
   reuse.  A CLOBBER or SET rtx whose operand is a MEM:BLK
   invalidates everything that resides in memory.

Related expressions:

   Constant expressions that differ only by an additive integer
   are called related.  When a constant expression is put in
   the table, the related expression with no constant term
   is also entered.  These are made to point at each other
   so that it is possible to find out if there exists any
   register equivalent to an expression related to a given expression.  */
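
/* As a small illustration of quantity numbering (the register numbers
   here are made up), suppose max_reg is 100 and an extended basic block
   contains

	(set (reg 67) (plus (reg 66) (const_int 4)))
	(set (reg 68) (reg 67))

   The first insn loads reg 67 in a way other than a register copy, so a
   new quantity, say 100, is allocated and reg_qty[67] becomes 100.  The
   second insn is a copy, so reg_qty[68] becomes 100 as well and reg 68
   is linked onto quantity 100's register chain.  reg_qty[66] is still
   66, meaning reg 66 has not been assigned a real quantity yet.  */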
/* One plus largest register number used in this function.  */

static int max_reg;

/* One plus largest instruction UID used in this function at time of
   cse_main call.  */

static int max_insn_uid;

/* Length of qty_table vector.  We know in advance we will not need
   a quantity number this big.  */

static int max_qty;

/* Next quantity number to be allocated.
   This is 1 + the largest number needed so far.  */

static int next_qty;

/* Per-qty information tracking.

   `first_reg' and `last_reg' track the head and tail of the
   chain of registers which currently contain this quantity.

   `mode' contains the machine mode of this quantity.

   `const_rtx' holds the rtx of the constant value of this
   quantity, if known.  A sum of the frame/arg pointer
   and a constant can also be entered here.  When this holds
   a known value, `const_insn' is the insn which stored the
   constant value.

   `comparison_{code,const,qty}' are used to track when a
   comparison between a quantity and some constant or register has
   been passed.  In such a case, we know the results of the comparison
   in case we see it again.  These members record a comparison that
   is known to be true.  `comparison_code' holds the rtx code of such
   a comparison, else it is set to UNKNOWN and the other two
   comparison members are undefined.  `comparison_const' holds
   the constant being compared against, or zero if the comparison
   is not against a constant.  `comparison_qty' holds the quantity
   being compared against when the result is known.  If the comparison
   is not with a register, `comparison_qty' is -1.  */

struct qty_table_elem
{
  rtx const_rtx;
  rtx const_insn;
  rtx comparison_const;
  int comparison_qty;
  unsigned int first_reg, last_reg;
  enum machine_mode mode;
  enum rtx_code comparison_code;
};
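
/* For example (illustrative values): once the branch condition
   (gt (reg 66) (const_int 10)) is known to hold, the quantity of reg 66
   can record comparison_code == GT, comparison_const == (const_int 10)
   and comparison_qty == -1, so a later identical test against the same
   quantity is known to be true without re-evaluating it.  */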
/* The table of all qtys, indexed by qty number.  */
static struct qty_table_elem *qty_table;

#ifdef HAVE_cc0
/* For machines that have a CC0, we do not record its value in the hash
   table since its use is guaranteed to be the insn immediately following
   its definition and any other insn is presumed to invalidate it.

   Instead, we store below the value last assigned to CC0.  If it should
   happen to be a constant, it is stored in preference to the actual
   assigned value.  In case it is a constant, we store the mode in which
   the constant should be interpreted.  */

static rtx prev_insn_cc0;
static enum machine_mode prev_insn_cc0_mode;
#endif

/* Previous actual insn.  0 if at first insn of basic block.  */

static rtx prev_insn;

/* Insn being scanned.  */

static rtx this_insn;

/* Indexed by register number, gives the number of the next (or
   previous) register in the chain of registers sharing the same
   value.

   Or -1 if this register is at the end of the chain.

   If reg_qty[N] == N, reg_eqv_table[N].next is undefined.  */

/* Per-register equivalence chain.  */
struct reg_eqv_elem
{
  int next, prev;
};

/* The table of all register equivalence chains.  */
static struct reg_eqv_elem *reg_eqv_table;

struct cse_reg_info
{
  /* Next in hash chain.  */
  struct cse_reg_info *hash_next;

  /* The next cse_reg_info structure in the free or used list.  */
  struct cse_reg_info *next;

  /* Search key.  */
  unsigned int regno;

  /* The quantity number of the register's current contents.  */
  int reg_qty;

  /* The number of times the register has been altered in the current
     basic block.  */
  int reg_tick;

  /* The REG_TICK value at which rtx's containing this register are
     valid in the hash table.  If this does not equal the current
     reg_tick value, such expressions existing in the hash table are
     invalid.  */
  int reg_in_table;
};

/* A free list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_free_list;

/* A used list of cse_reg_info entries.  */
static struct cse_reg_info *cse_reg_info_used_list;
static struct cse_reg_info *cse_reg_info_used_list_end;

/* A mapping from registers to cse_reg_info data structures.  */
#define REGHASH_SHIFT 7
#define REGHASH_SIZE (1 << REGHASH_SHIFT)
#define REGHASH_MASK (REGHASH_SIZE - 1)
static struct cse_reg_info *reg_hash[REGHASH_SIZE];

#define REGHASH_FN(REGNO) \
  (((REGNO) ^ ((REGNO) >> REGHASH_SHIFT)) & REGHASH_MASK)
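
/* For instance, register number 200 hashes to
   REGHASH_FN (200) == ((200 ^ (200 >> 7)) & 127) == (201 & 127) == 73,
   so its cse_reg_info entry lives on the chain rooted at reg_hash[73].  */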
/* The last lookup we did into the cse_reg_info hash table.  This allows
   us to cache repeated lookups.  */
static unsigned int cached_regno;
static struct cse_reg_info *cached_cse_reg_info;

/* A HARD_REG_SET containing all the hard registers for which there is
   currently a REG expression in the hash table.  Note the difference
   from the above variables, which indicate if the REG is mentioned in some
   expression in the table.  */

static HARD_REG_SET hard_regs_in_table;

/* CUID of insn that starts the basic block currently being cse-processed.  */

static int cse_basic_block_start;

/* CUID of insn that ends the basic block currently being cse-processed.  */

static int cse_basic_block_end;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   We use them to see whether a reg is used outside a given basic block.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Nonzero if this pass has made changes, and therefore it's
   worthwhile to run the garbage collector.  */

static int cse_altered;

/* Nonzero if cse has altered conditional jump insns
   in such a way that jump optimization should be redone.  */

static int cse_jumps_altered;

/* Nonzero if we have put a LABEL_REF into the hash table for an INSN
   without a REG_LABEL; if so, we have to rerun jump after CSE to put
   in the note.  */
static int recorded_label_ref;

/* canon_hash stores 1 in do_not_record
   if it notices a reference to CC0, PC, or some other volatile
   subexpression.  */

static int do_not_record;

#ifdef LOAD_EXTEND_OP

/* Scratch rtl used when looking for load-extended copy of a MEM.  */
static rtx memory_extend_rtx;
#endif

/* canon_hash stores 1 in hash_arg_in_memory
   if it notices a reference to memory within the expression being hashed.  */

static int hash_arg_in_memory;

/* The hash table contains buckets which are chains of `struct table_elt's,
   each recording one expression's information.
   That expression is in the `exp' field.

   The `canon_exp' field contains a canonical (from the point of view of
   alias analysis) version of the `exp' field.

   Those elements with the same hash code are chained in both directions
   through the `next_same_hash' and `prev_same_hash' fields.

   Each set of expressions with equivalent values
   are on a two-way chain through the `next_same_value'
   and `prev_same_value' fields, and all point with
   the `first_same_value' field at the first element in
   that chain.  The chain is in order of increasing cost.
   Each element's cost value is in its `cost' field.

   The `in_memory' field is nonzero for elements that
   involve any reference to memory.  These elements are removed
   whenever a write is done to an unidentified location in memory.
   To be safe, we assume that a memory address is unidentified unless
   the address is either a symbol constant or a constant plus
   the frame pointer or argument pointer.

   The `related_value' field is used to connect related expressions
   (that differ by adding an integer).
   The related expressions are chained in a circular fashion.
   `related_value' is zero for expressions for which this
   chain is not useful.

   The `cost' field stores the cost of this element's expression.
   The `regcost' field stores the value returned by approx_reg_cost for
   this element's expression.

   The `is_const' flag is set if the element is a constant (including
   a fixed address).

   The `flag' field is used as a temporary during some search routines.

   The `mode' field is usually the same as GET_MODE (`exp'), but
   if `exp' is a CONST_INT and has no machine mode then the `mode'
   field is the mode it was being used as.  Each constant is
   recorded separately for each mode it is used with.  */

struct table_elt
{
  rtx exp;
  rtx canon_exp;
  struct table_elt *next_same_hash;
  struct table_elt *prev_same_hash;
  struct table_elt *next_same_value;
  struct table_elt *prev_same_value;
  struct table_elt *first_same_value;
  struct table_elt *related_value;
  int cost;
  int regcost;
  enum machine_mode mode;
  char in_memory;
  char is_const;
  char flag;
};

/* We don't want a lot of buckets, because we rarely have very many
   things stored in the hash table, and a lot of buckets slows
   down a lot of loops that happen frequently.  */
#define HASH_SHIFT 5
#define HASH_SIZE (1 << HASH_SHIFT)
#define HASH_MASK (HASH_SIZE - 1)

/* Compute hash code of X in mode M.  Special-case the case where X is
   a pseudo register (hard registers may require `do_not_record' to be
   set).  */

#define HASH(X, M) \
  ((GET_CODE (X) == REG && REGNO (X) >= FIRST_PSEUDO_REGISTER	\
    ? (((unsigned) REG << 7) + (unsigned) REG_QTY (REGNO (X)))	\
    : canon_hash (X, M)) & HASH_MASK)
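
/* For example, a pseudo such as (reg:SI 70) bypasses canon_hash
   entirely: its hash is (((unsigned) REG << 7) + REG_QTY (70))
   & HASH_MASK.  Because the quantity number rather than the register
   number is hashed, registers known to hold the same value land in the
   same bucket.  */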
/* Determine whether register number N is considered a fixed register for the
   purpose of approximating register costs.
   It is desirable to replace other regs with fixed regs, to reduce need for
   non-fixed hard regs.
   A reg wins if it is either the frame pointer or designated as fixed.  */
#define FIXED_REGNO_P(N)  \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM \
   || fixed_regs[N] || global_regs[N])

/* Compute cost of X, as stored in the `cost' field of a table_elt.  Fixed
   hard registers and pointers into the frame are the cheapest with a cost
   of 0.  Next come pseudos with a cost of one and other hard registers with
   a cost of 2.  Aside from these special cases, call `rtx_cost'.  */

#define CHEAP_REGNO(N) \
  ((N) == FRAME_POINTER_REGNUM || (N) == HARD_FRAME_POINTER_REGNUM	\
   || (N) == STACK_POINTER_REGNUM || (N) == ARG_POINTER_REGNUM		\
   || ((N) >= FIRST_VIRTUAL_REGISTER && (N) <= LAST_VIRTUAL_REGISTER)	\
   || ((N) < FIRST_PSEUDO_REGISTER					\
       && FIXED_REGNO_P (N) && REGNO_REG_CLASS (N) != NO_REGS))

#define COST(X) (GET_CODE (X) == REG ? 0 : notreg_cost (X, SET))
#define COST_IN(X,OUTER) (GET_CODE (X) == REG ? 0 : notreg_cost (X, OUTER))

/* Get the info associated with register N.  */

#define GET_CSE_REG_INFO(N)			\
  (((N) == cached_regno && cached_cse_reg_info)	\
   ? cached_cse_reg_info : get_cse_reg_info ((N)))

/* Get the number of times this register has been updated in this
   basic block.  */

#define REG_TICK(N) ((GET_CSE_REG_INFO (N))->reg_tick)

/* Get the point at which REG was recorded in the table.  */

#define REG_IN_TABLE(N) ((GET_CSE_REG_INFO (N))->reg_in_table)

/* Get the quantity number for REG.  */

#define REG_QTY(N) ((GET_CSE_REG_INFO (N))->reg_qty)

/* Determine if the quantity number for register N represents a valid index
   into the qty_table.  */

#define REGNO_QTY_VALID_P(N) (REG_QTY (N) != (int) (N))
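
/* Note that get_cse_reg_info initializes reg_qty to the register number
   itself, so REGNO_QTY_VALID_P (N) is false for every register at the
   start of a basic block and becomes true only once make_new_qty or
   make_regs_eqv assigns N a real quantity (which is always >= max_reg).  */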
static struct table_elt *table[HASH_SIZE];

/* Chain of `struct table_elt's made so far for this function
   but currently removed from the table.  */

static struct table_elt *free_element_chain;

/* Number of `struct table_elt' structures made so far for this function.  */

static int n_elements_made;

/* Maximum value `n_elements_made' has had so far in this compilation
   for functions previously processed.  */

static int max_elements_made;

/* Surviving equivalence class when two equivalence classes are merged
   by recording the effects of a jump in the last insn.  Zero if the
   last insn was not a conditional jump.  */

static struct table_elt *last_jump_equiv_class;

/* Set to the cost of a constant pool reference if one was found for a
   symbolic constant.  If this was found, it means we should try to
   convert constants into constant pool entries if they don't fit in
   the insn.  */

static int constant_pool_entries_cost;

/* Define maximum length of a branch path.  */

#define PATHLENGTH 10

/* This data describes a block that will be processed by cse_basic_block.  */

struct cse_basic_block_data
{
  /* Lowest CUID value of insns in block.  */
  int low_cuid;
  /* Highest CUID value of insns in block.  */
  int high_cuid;
  /* Total number of SETs in block.  */
  int nsets;
  /* Last insn in the block.  */
  rtx last;
  /* Size of current branch path, if any.  */
  int path_size;
  /* Current branch path, indicating which branches will be taken.  */
  struct branch_path
    {
      /* The branch insn.  */
      rtx branch;
      /* Whether it should be taken or not.  AROUND is the same as taken
	 except that it is used when the destination label is not preceded
	 by a BARRIER.  */
      enum taken {TAKEN, NOT_TAKEN, AROUND} status;
    } path[PATHLENGTH];
};

/* Nonzero if X has the form (PLUS frame-pointer integer).  We check for
   virtual regs here because the simplify_*_operation routines are called
   by integrate.c, which is called before virtual register instantiation.

   ?!? FIXED_BASE_PLUS_P and NONZERO_BASE_PLUS_P need to move into
   a header file so that their definitions can be shared with the
   simplification routines in simplify-rtx.c.  Until then, do not
   change these macros without also changing the copy in simplify-rtx.c.  */

#define FIXED_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || ((X) == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

/* Similar, but also allows reference to the stack pointer.

   This used to include FIXED_BASE_PLUS_P; however, we can't assume that
   arg_pointer_rtx by itself is nonzero, because on at least one machine,
   the i960, the arg pointer is zero when it is unused.  */

#define NONZERO_BASE_PLUS_P(X)					\
  ((X) == frame_pointer_rtx || (X) == hard_frame_pointer_rtx	\
   || (X) == virtual_stack_vars_rtx				\
   || (X) == virtual_incoming_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == frame_pointer_rtx			\
	   || XEXP (X, 0) == hard_frame_pointer_rtx		\
	   || (XEXP (X, 0) == arg_pointer_rtx			\
	       && fixed_regs[ARG_POINTER_REGNUM])		\
	   || XEXP (X, 0) == virtual_stack_vars_rtx		\
	   || XEXP (X, 0) == virtual_incoming_args_rtx))	\
   || (X) == stack_pointer_rtx					\
   || (X) == virtual_stack_dynamic_rtx				\
   || (X) == virtual_outgoing_args_rtx				\
   || (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
       && (XEXP (X, 0) == stack_pointer_rtx			\
	   || XEXP (X, 0) == virtual_stack_dynamic_rtx		\
	   || XEXP (X, 0) == virtual_outgoing_args_rtx))	\
   || GET_CODE (X) == ADDRESSOF)

static int notreg_cost PARAMS ((rtx, enum rtx_code));
static int approx_reg_cost_1 PARAMS ((rtx *, void *));
static int approx_reg_cost PARAMS ((rtx));
static int preferrable PARAMS ((int, int, int, int));
static void new_basic_block PARAMS ((void));
static void make_new_qty PARAMS ((unsigned int, enum machine_mode));
static void make_regs_eqv PARAMS ((unsigned int, unsigned int));
static void delete_reg_equiv PARAMS ((unsigned int));
static int mention_regs PARAMS ((rtx));
static int insert_regs PARAMS ((rtx, struct table_elt *, int));
static void remove_from_table PARAMS ((struct table_elt *, unsigned));
static struct table_elt *lookup PARAMS ((rtx, unsigned, enum machine_mode)),
       *lookup_for_remove PARAMS ((rtx, unsigned, enum machine_mode));
static rtx lookup_as_function PARAMS ((rtx, enum rtx_code));
static struct table_elt *insert PARAMS ((rtx, struct table_elt *, unsigned,
					 enum machine_mode));
static void merge_equiv_classes PARAMS ((struct table_elt *,
					 struct table_elt *));
static void invalidate PARAMS ((rtx, enum machine_mode));
static int cse_rtx_varies_p PARAMS ((rtx, int));
static void remove_invalid_refs PARAMS ((unsigned int));
static void remove_invalid_subreg_refs PARAMS ((unsigned int, unsigned int,
						enum machine_mode));
static void rehash_using_reg PARAMS ((rtx));
static void invalidate_memory PARAMS ((void));
static void invalidate_for_call PARAMS ((void));
static rtx use_related_value PARAMS ((rtx, struct table_elt *));
static unsigned canon_hash PARAMS ((rtx, enum machine_mode));
static unsigned canon_hash_string PARAMS ((const char *));
static unsigned safe_hash PARAMS ((rtx, enum machine_mode));
static int exp_equiv_p PARAMS ((rtx, rtx, int, int));
static rtx canon_reg PARAMS ((rtx, rtx));
static void find_best_addr PARAMS ((rtx, rtx *, enum machine_mode));
static enum rtx_code find_comparison_args PARAMS ((enum rtx_code, rtx *, rtx *,
						   enum machine_mode *,
						   enum machine_mode *));
static rtx fold_rtx PARAMS ((rtx, rtx));
static rtx equiv_constant PARAMS ((rtx));
static void record_jump_equiv PARAMS ((rtx, int));
static void record_jump_cond PARAMS ((enum rtx_code, enum machine_mode,
				      rtx, rtx, int));
static void cse_insn PARAMS ((rtx, rtx));
static int addr_affects_sp_p PARAMS ((rtx));
static void invalidate_from_clobbers PARAMS ((rtx));
static rtx cse_process_notes PARAMS ((rtx, rtx));
static void cse_around_loop PARAMS ((rtx));
static void invalidate_skipped_set PARAMS ((rtx, rtx, void *));
static void invalidate_skipped_block PARAMS ((rtx));
static void cse_check_loop_start PARAMS ((rtx, rtx, void *));
static void cse_set_around_loop PARAMS ((rtx, rtx, rtx));
static rtx cse_basic_block PARAMS ((rtx, rtx, struct branch_path *, int));
static void count_reg_usage PARAMS ((rtx, int *, rtx, int));
static int check_for_label_ref PARAMS ((rtx *, void *));
extern void dump_class PARAMS ((struct table_elt *));
static struct cse_reg_info *get_cse_reg_info PARAMS ((unsigned int));
static int check_dependence PARAMS ((rtx *, void *));

static void flush_hash_table PARAMS ((void));
static bool insn_live_p PARAMS ((rtx, int *));
static bool set_live_p PARAMS ((rtx, rtx, int *));
static bool dead_libcall_p PARAMS ((rtx));

/* Dump the expressions in the equivalence class indicated by CLASSP.
   This function is used only for debugging.  */
void
dump_class (classp)
     struct table_elt *classp;
{
  struct table_elt *elt;

  fprintf (stderr, "Equivalence chain for ");
  print_rtl (stderr, classp->exp);
  fprintf (stderr, ": \n");

  for (elt = classp->first_same_value; elt; elt = elt->next_same_value)
    {
      print_rtl (stderr, elt->exp);
      fprintf (stderr, "\n");
    }
}

/* Subroutine of approx_reg_cost; called through for_each_rtx.  */

static int
approx_reg_cost_1 (xp, data)
     rtx *xp;
     void *data;
{
  rtx x = *xp;
  regset set = (regset) data;

  if (x && GET_CODE (x) == REG)
    SET_REGNO_REG_SET (set, REGNO (x));
  return 0;
}

/* Return an estimate of the cost of the registers used in an rtx.
   This is mostly the number of different REG expressions in the rtx;
   however for some exceptions like fixed registers we use a cost of
   0.  If any other hard register reference occurs and
   SMALL_REGISTER_CLASSES is set, return MAX_COST.  */

static int
approx_reg_cost (x)
     rtx x;
{
  regset_head set;
  int i;
  int cost = 0;
  int hardregs = 0;

  INIT_REG_SET (&set);
  for_each_rtx (&x, approx_reg_cost_1, (void *) &set);

  EXECUTE_IF_SET_IN_REG_SET
    (&set, 0, i,
     {
       if (! CHEAP_REGNO (i))
	 {
	   if (i < FIRST_PSEUDO_REGISTER)
	     hardregs++;

	   cost += i < FIRST_PSEUDO_REGISTER ? 2 : 1;
	 }
     });

  CLEAR_REG_SET (&set);
  return hardregs && SMALL_REGISTER_CLASSES ? MAX_COST : cost;
}

/* Return a negative value if an rtx A, whose costs are given by COST_A
   and REGCOST_A, is more desirable than an rtx B.
   Return a positive value if A is less desirable, or 0 if the two are
   equally good.  */
static int
preferrable (cost_a, regcost_a, cost_b, regcost_b)
     int cost_a, regcost_a, cost_b, regcost_b;
{
  /* First, get rid of cases involving expressions that are entirely
     unwanted.  */
  if (cost_a != cost_b)
    {
      if (cost_a == MAX_COST)
	return 1;
      if (cost_b == MAX_COST)
	return -1;
    }

  /* Avoid extending lifetimes of hardregs.  */
  if (regcost_a != regcost_b)
    {
      if (regcost_a == MAX_COST)
	return 1;
      if (regcost_b == MAX_COST)
	return -1;
    }

  /* Normal operation costs take precedence.  */
  if (cost_a != cost_b)
    return cost_a - cost_b;
  /* Only if these are identical consider effects on register pressure.  */
  if (regcost_a != regcost_b)
    return regcost_a - regcost_b;
  return 0;
}
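
/* For instance (made-up costs), preferrable (MAX_COST, 0, 5, MAX_COST)
   returns 1: an expression whose operation cost is MAX_COST is rejected
   outright, before register costs are even consulted, so B wins despite
   its MAX_COST register cost.  Only when operation costs tie does
   register pressure break the tie.  */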
/* Internal function, to compute cost when X is not a register; called
   from COST macro to keep it simple.  */

static int
notreg_cost (x, outer)
     rtx x;
     enum rtx_code outer;
{
  return ((GET_CODE (x) == SUBREG
	   && GET_CODE (SUBREG_REG (x)) == REG
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && (GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	   && subreg_lowpart_p (x)
	   && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (x)),
				     GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))))
	  ? 0
	  : rtx_cost (x, outer) * 2);
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (x, outer_code)
     rtx x;
     enum rtx_code outer_code ATTRIBUTE_UNUSED;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that RTX_COSTS can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      /* Count multiplication by 2**n as a shift,
	 because if we are considering it, we would output it as a shift.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
	total = 2;
      else
	total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

#ifdef RTX_COSTS
      RTX_COSTS (x, code, outer_code);
#endif
#ifdef CONST_COSTS
      CONST_COSTS (x, code, outer_code);
#endif

    default:
#ifdef DEFAULT_RTX_COSTS
      DEFAULT_RTX_COSTS (x, code, outer_code);
#endif
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
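
/* As a concrete trace (assuming no RTX_COSTS/CONST_COSTS target
   overrides): rtx_cost ((mult (reg:SI 70) (const_int 8)), SET) treats
   the power-of-two multiply as a shift, so total starts at 2; the REG
   operand returns 0 and the CONST_INT operand contributes its default
   COSTS_N_INSNS (1), giving 2 + COSTS_N_INSNS (1) overall.  */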
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (x, mode)
     rtx x;
     enum machine_mode mode;
{
  /* The ADDRESS_COST macro does not deal with ADDRESSOF nodes.  But,
     during CSE, such nodes are present.  Using an ADDRESSOF node which
     refers to the address of a REG is a good thing because we can then
     turn (MEM (ADDRESSOF (REG))) into just plain REG.  */

  if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
    return -1;

  /* We may be asked for the cost of various unusual addresses, such as
     the operands of a push instruction.  It is not worthwhile to
     complicate the ADDRESS_COST macro to handle such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;
#ifdef ADDRESS_COST
  return ADDRESS_COST (x);
#else
  return rtx_cost (x, MEM);
#endif
}

/* Return the cse_reg_info structure for register REGNO, allocating and
   initializing a new one if none exists yet.  */

static struct cse_reg_info *
get_cse_reg_info (regno)
     unsigned int regno;
{
  struct cse_reg_info **hash_head = &reg_hash[REGHASH_FN (regno)];
  struct cse_reg_info *p;

  for (p = *hash_head; p != NULL; p = p->hash_next)
    if (p->regno == regno)
      break;

  if (p == NULL)
    {
      /* Get a new cse_reg_info structure.  */
      if (cse_reg_info_free_list)
	{
	  p = cse_reg_info_free_list;
	  cse_reg_info_free_list = p->next;
	}
      else
	p = (struct cse_reg_info *) xmalloc (sizeof (struct cse_reg_info));

      /* Insert into hash table.  */
      p->hash_next = *hash_head;
      *hash_head = p;

      /* Initialize it.  */
      p->reg_tick = 1;
      p->reg_in_table = -1;
      p->reg_qty = regno;
      p->regno = regno;
      p->next = cse_reg_info_used_list;
      cse_reg_info_used_list = p;
      if (!cse_reg_info_used_list_end)
	cse_reg_info_used_list_end = p;
    }

  /* Cache this lookup; we tend to be looking up information about the
     same register several times in a row.  */
  cached_regno = regno;
  cached_cse_reg_info = p;

  return p;
}

/* Clear the hash table and initialize each register with its own quantity,
   for a new basic block.  */

static void
new_basic_block ()
{
  int i;

  next_qty = max_reg;

  /* Clear out hash table state for this pass.  */

  memset ((char *) reg_hash, 0, sizeof reg_hash);

  if (cse_reg_info_used_list)
    {
      cse_reg_info_used_list_end->next = cse_reg_info_free_list;
      cse_reg_info_free_list = cse_reg_info_used_list;
      cse_reg_info_used_list = cse_reg_info_used_list_end = 0;
    }
  cached_cse_reg_info = 0;

  CLEAR_HARD_REG_SET (hard_regs_in_table);

  /* The per-quantity values used to be initialized here, but it is
     much faster to initialize each as it is made in `make_new_qty'.  */

  for (i = 0; i < HASH_SIZE; i++)
    {
      struct table_elt *first;

      first = table[i];
      if (first != NULL)
	{
	  struct table_elt *last = first;

	  table[i] = NULL;

	  while (last->next_same_hash != NULL)
	    last = last->next_same_hash;

	  /* Now relink this entire hash chain into
	     the free element list.  */

	  last->next_same_hash = free_element_chain;
	  free_element_chain = first;
	}
    }

  prev_insn = 0;

#ifdef HAVE_cc0
  prev_insn_cc0 = 0;
#endif
}
/* Say that register REG contains a quantity in mode MODE that is not in
   any other register before, and initialize that quantity.  */

static void
make_new_qty (reg, mode)
     unsigned int reg;
     enum machine_mode mode;
{
  int q;
  struct qty_table_elem *ent;
  struct reg_eqv_elem *eqv;

  if (next_qty >= max_qty)
    abort ();

  q = REG_QTY (reg) = next_qty++;
  ent = &qty_table[q];
  ent->first_reg = reg;
  ent->last_reg = reg;
  ent->mode = mode;
  ent->const_rtx = ent->const_insn = NULL_RTX;
  ent->comparison_code = UNKNOWN;

  eqv = &reg_eqv_table[reg];
  eqv->next = eqv->prev = -1;
}

/* Make reg NEW equivalent to reg OLD.
   OLD is not changing; NEW is.  */

static void
make_regs_eqv (new, old)
     unsigned int new, old;
{
  unsigned int lastr, firstr;
  int q = REG_QTY (old);
  struct qty_table_elem *ent;

  ent = &qty_table[q];

  /* Nothing should become eqv until it has a "non-invalid" qty number.  */
  if (! REGNO_QTY_VALID_P (old))
    abort ();

  REG_QTY (new) = q;
  firstr = ent->first_reg;
  lastr = ent->last_reg;

  /* Prefer fixed hard registers to anything.  Prefer pseudo regs to other
     hard regs.  Among pseudos, if NEW will live longer than any other reg
     of the same qty, and that is beyond the current basic block,
     make it the new canonical replacement for this qty.  */
  if (! (firstr < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (firstr))
      /* Certain fixed registers might be of the class NO_REGS.  This means
	 that not only can they not be allocated by the compiler, but
	 they cannot be used in substitutions or canonicalizations
	 either.  */
      && (new >= FIRST_PSEUDO_REGISTER || REGNO_REG_CLASS (new) != NO_REGS)
      && ((new < FIRST_PSEUDO_REGISTER && FIXED_REGNO_P (new))
	  || (new >= FIRST_PSEUDO_REGISTER
	      && (firstr < FIRST_PSEUDO_REGISTER
		  || ((uid_cuid[REGNO_LAST_UID (new)] > cse_basic_block_end
		       || (uid_cuid[REGNO_FIRST_UID (new)]
			   < cse_basic_block_start))
		      && (uid_cuid[REGNO_LAST_UID (new)]
			  > uid_cuid[REGNO_LAST_UID (firstr)]))))))
    {
      reg_eqv_table[firstr].prev = new;
      reg_eqv_table[new].next = firstr;
      reg_eqv_table[new].prev = -1;
      ent->first_reg = new;
    }
  else
    {
      /* If NEW is a hard reg (known to be non-fixed), insert at end.
	 Otherwise, insert before any non-fixed hard regs that are at the
	 end.  Registers of class NO_REGS cannot be used as an
	 equivalent for anything.  */
      while (lastr < FIRST_PSEUDO_REGISTER && reg_eqv_table[lastr].prev >= 0
	     && (REGNO_REG_CLASS (lastr) == NO_REGS || ! FIXED_REGNO_P (lastr))
	     && new >= FIRST_PSEUDO_REGISTER)
	lastr = reg_eqv_table[lastr].prev;
      reg_eqv_table[new].next = reg_eqv_table[lastr].next;
      if (reg_eqv_table[lastr].next >= 0)
	reg_eqv_table[reg_eqv_table[lastr].next].prev = new;
      else
	qty_table[q].last_reg = new;
      reg_eqv_table[lastr].next = new;
      reg_eqv_table[new].prev = lastr;
    }
}
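
/* To illustrate the canonical-register choice (register numbers are
   hypothetical): after (set (reg 70) (reg 69)), make_regs_eqv (70, 69)
   gives reg 70 the quantity of reg 69.  If reg 70's last use lies
   beyond the current basic block while reg 69's does not, reg 70
   becomes the head of the chain and later references to reg 69 can be
   replaced by reg 70; otherwise reg 70 is linked further down the
   chain.  */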
/* Remove REG from its equivalence class.  */

static void
delete_reg_equiv (reg)
     unsigned int reg;
{
  struct qty_table_elem *ent;
  int q = REG_QTY (reg);
  int p, n;

  /* If invalid, do nothing.  */
  if (q == (int) reg)
    return;

  ent = &qty_table[q];

  p = reg_eqv_table[reg].prev;
  n = reg_eqv_table[reg].next;

  if (n != -1)
    reg_eqv_table[n].prev = p;
  else
    ent->last_reg = p;
  if (p != -1)
    reg_eqv_table[p].next = n;
  else
    ent->first_reg = n;

  REG_QTY (reg) = reg;
}

/* Remove any invalid expressions from the hash table
   that refer to any of the registers contained in expression X.

   Make sure that newly inserted references to those registers
   as subexpressions will be considered valid.

   mention_regs is not called when a register itself
   is being stored in the table.

   Return 1 if we have done something that may have changed the hash code
   of X.  */

static int
mention_regs (x)
     rtx x;
{
  enum rtx_code code;
  int i, j;
  const char *fmt;
  int changed = 0;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);
      unsigned int endregno
	= regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
		   : HARD_REGNO_NREGS (regno, GET_MODE (x)));
      unsigned int i;

      for (i = regno; i < endregno; i++)
	{
	  if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	    remove_invalid_refs (i);

	  REG_IN_TABLE (i) = REG_TICK (i);
	}

      return 0;
    }

  /* If this is a SUBREG, we don't want to discard other SUBREGs of the same
     pseudo if they don't use overlapping words.  We handle only pseudos
     here for simplicity.  */
  if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
    {
      unsigned int i = REGNO (SUBREG_REG (x));

      if (REG_IN_TABLE (i) >= 0 && REG_IN_TABLE (i) != REG_TICK (i))
	{
	  /* If reg_tick has been incremented more than once since
	     reg_in_table was last set, that means that the entire
	     register has been set before, so discard anything memorized
	     for the entire register, including all SUBREG expressions.  */
	  if (REG_IN_TABLE (i) != REG_TICK (i) - 1)
	    remove_invalid_refs (i);
	  else
	    remove_invalid_subreg_refs (i, SUBREG_BYTE (x), GET_MODE (x));
	}

      REG_IN_TABLE (i) = REG_TICK (i);
      return 0;
    }

  /* If X is a comparison or a COMPARE and either operand is a register
     that does not have a quantity, give it one.  This is so that a later
     call to record_jump_equiv won't cause X to be assigned a different
     hash code and not found in the table after that call.

     It is not necessary to do this here, since rehash_using_reg can
     fix up the table later, but doing this here eliminates the need to
     call that expensive function in the most common case where the only
     use of the register is in the comparison.  */

  if (code == COMPARE || GET_RTX_CLASS (code) == '<')
    {
      if (GET_CODE (XEXP (x, 0)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
	if (insert_regs (XEXP (x, 0), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 0));
	    changed = 1;
	  }

      if (GET_CODE (XEXP (x, 1)) == REG
	  && ! REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
	if (insert_regs (XEXP (x, 1), NULL, 0))
	  {
	    rehash_using_reg (XEXP (x, 1));
	    changed = 1;
	  }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      changed |= mention_regs (XEXP (x, i));
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	changed |= mention_regs (XVECEXP (x, i, j));

  return changed;
}
/* Update the register quantities for inserting X into the hash table
   with a value equivalent to CLASSP.
   (If the class does not contain a REG, it is irrelevant.)
   If MODIFIED is nonzero, X is a destination; it is being modified.
   Note that delete_reg_equiv should be called on a register
   before insert_regs is done on that register with MODIFIED != 0.

   Nonzero value means that elements of reg_qty have changed
   so X's hash code may be different.  */

static int
insert_regs (x, classp, modified)
     rtx x;
     struct table_elt *classp;
     int modified;
{
  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);
      int qty_valid;

      /* If REGNO is in the equivalence table already but is of the
	 wrong mode for that equivalence, don't do anything here.  */

      qty_valid = REGNO_QTY_VALID_P (regno);
      if (qty_valid)
	{
	  struct qty_table_elem *ent = &qty_table[REG_QTY (regno)];

	  if (ent->mode != GET_MODE (x))
	    return 0;
	}

      if (modified || ! qty_valid)
	{
	  if (classp)
	    for (classp = classp->first_same_value;
		 classp != 0;
		 classp = classp->next_same_value)
	      if (GET_CODE (classp->exp) == REG
		  && GET_MODE (classp->exp) == GET_MODE (x))
		{
		  make_regs_eqv (regno, REGNO (classp->exp));
		  return 1;
		}

	  /* Mention_regs for a SUBREG checks if REG_TICK is exactly one larger
	     than REG_IN_TABLE to find out if there was only a single preceding
	     invalidation - for the SUBREG - or another one, which would be
	     for the full register.  However, if we find here that REG_TICK
	     indicates that the register is invalid, it means that it has
	     been invalidated in a separate operation.  The SUBREG might be used
	     now (then this is a recursive call), or we might use the full REG
	     now and a SUBREG of it later.  So bump up REG_TICK so that
	     mention_regs will do the right thing.  */
	  if (! modified
	      && REG_IN_TABLE (regno) >= 0
	      && REG_TICK (regno) == REG_IN_TABLE (regno) + 1)
	    REG_TICK (regno)++;
	  make_new_qty (regno, GET_MODE (x));
	  return 1;
	}

      return 0;
    }

  /* If X is a SUBREG, we will likely be inserting the inner register in the
     table.  If that register doesn't have an assigned quantity number at
     this point but does later, the insertion that we will be doing now will
     not be accessible because its hash code will have changed.  So assign
     a quantity number now.  */

  else if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == REG
	   && ! REGNO_QTY_VALID_P (REGNO (SUBREG_REG (x))))
    {
      insert_regs (SUBREG_REG (x), NULL, 0);
      mention_regs (x);
      return 1;
    }
  else
    return mention_regs (x);
}
/* Look in or update the hash table.  */

/* Remove table element ELT from use in the table.
   HASH is its hash code, made using the HASH macro.
   It's an argument because often that is known in advance
   and we save much time not recomputing it.  */

static void
remove_from_table (elt, hash)
     struct table_elt *elt;
     unsigned hash;
{
  if (elt == 0)
    return;

  /* Mark this element as removed.  See cse_insn.  */
  elt->first_same_value = 0;

  /* Remove the table element from its equivalence class.  */

  {
    struct table_elt *prev = elt->prev_same_value;
    struct table_elt *next = elt->next_same_value;

    if (next)
      next->prev_same_value = prev;

    if (prev)
      prev->next_same_value = next;
    else
      {
	struct table_elt *newfirst = next;
	while (next)
	  {
	    next->first_same_value = newfirst;
	    next = next->next_same_value;
	  }
      }
  }

  /* Remove the table element from its hash bucket.  */

  {
    struct table_elt *prev = elt->prev_same_hash;
    struct table_elt *next = elt->next_same_hash;

    if (next)
      next->prev_same_hash = prev;

    if (prev)
      prev->next_same_hash = next;
    else if (table[hash] == elt)
      table[hash] = next;
    else
      {
	/* This entry is not in the proper hash bucket.  This can happen
	   when two classes were merged by `merge_equiv_classes'.  Search
	   for the hash bucket that it heads.  This happens only very
	   rarely, so the cost is acceptable.  */
	for (hash = 0; hash < HASH_SIZE; hash++)
	  if (table[hash] == elt)
	    table[hash] = next;
      }
  }

  /* Remove the table element from its related-value circular chain.  */

  if (elt->related_value != 0 && elt->related_value != elt)
    {
      struct table_elt *p = elt->related_value;

      while (p->related_value != elt)
	p = p->related_value;
      p->related_value = elt->related_value;
      if (p->related_value == p)
	p->related_value = 0;
    }

  /* Now add it to the free element chain.  */
  elt->next_same_hash = free_element_chain;
  free_element_chain = elt;
}
/* Look up X in the hash table and return its table element,
   or 0 if X is not in the table.

   MODE is the machine-mode of X, or if X is an integer constant
   with VOIDmode then MODE is the mode with which X will be used.

   Here we are satisfied to find an expression whose tree structure
   looks like X.  */

static struct table_elt *
lookup (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  for (p = table[hash]; p; p = p->next_same_hash)
    if (mode == p->mode && ((x == p->exp && GET_CODE (x) == REG)
			    || exp_equiv_p (x, p->exp, GET_CODE (x) != REG, 0)))
      return p;

  return 0;
}

/* Like `lookup' but don't care whether the table element uses invalid regs.
   Also ignore discrepancies in the machine mode of a register.  */

static struct table_elt *
lookup_for_remove (x, hash, mode)
     rtx x;
     unsigned hash;
     enum machine_mode mode;
{
  struct table_elt *p;

  if (GET_CODE (x) == REG)
    {
      unsigned int regno = REGNO (x);

      /* Don't check the machine mode when comparing registers;
	 invalidating (REG:SI 0) also invalidates (REG:DF 0).  */
      for (p = table[hash]; p; p = p->next_same_hash)
	if (GET_CODE (p->exp) == REG
	    && REGNO (p->exp) == regno)
	  return p;
    }
  else
    {
      for (p = table[hash]; p; p = p->next_same_hash)
	if (mode == p->mode && (x == p->exp || exp_equiv_p (x, p->exp, 0, 0)))
	  return p;
    }

  return 0;
}

/* Look for an expression equivalent to X and with code CODE.
   If one is found, return that expression.  */

static rtx
lookup_as_function (x, code)
     rtx x;
     enum rtx_code code;
{
  struct table_elt *p
    = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, GET_MODE (x));

  /* If we are looking for a CONST_INT, the mode doesn't really matter, as
     long as we are narrowing.  So if we looked in vain for a mode narrower
     than word_mode before, look for word_mode now.  */
  if (p == 0 && code == CONST_INT
      && GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (word_mode))
    {
      x = copy_rtx (x);
      PUT_MODE (x, word_mode);
      p = lookup (x, safe_hash (x, VOIDmode) & HASH_MASK, word_mode);
    }

  if (p == 0)
    return 0;

  for (p = p->first_same_value; p; p = p->next_same_value)
    if (GET_CODE (p->exp) == code
	/* Make sure this is a valid entry in the table.  */
	&& exp_equiv_p (p->exp, p->exp, 1, 0))
      return p->exp;

  return 0;
}
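
/* A typical (illustrative) use of lookup_as_function is to ask whether
   some expression X is known to be equal to a constant:

	rtx c = lookup_as_function (x, CONST_INT);

   If X's equivalence class contains a CONST_INT element, that rtx is
   returned and X can be folded to it; otherwise 0 is returned.  */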
1530/* Insert X in the hash table, assuming HASH is its hash code
1531 and CLASSP is an element of the class it should go in
1532 (or 0 if a new class should be made).
1533 It is inserted at the proper position to keep the class in
1534 the order cheapest first.
1535
1536 MODE is the machine-mode of X, or if X is an integer constant
1537 with VOIDmode then MODE is the mode with which X will be used.
1538
1539 For elements of equal cheapness, the most recent one
1540 goes in front, except that the first element in the list
1541 remains first unless a cheaper element is added. The order of
1542 pseudo-registers does not matter, as canon_reg will be called to
1543 find the cheapest when a register is retrieved from the table.
1544
1545 The in_memory field in the hash table element is set to 0.
1546 The caller must set it nonzero if appropriate.
1547
1548 You should call insert_regs (X, CLASSP, MODIFY) before calling here,
1549 and if insert_regs returns a nonzero value
1550 you must then recompute its hash code before calling here.
1551
1552 If necessary, update table showing constant values of quantities. */
1553
1554#define CHEAPER(X, Y) \
1555 (preferrable ((X)->cost, (X)->regcost, (Y)->cost, (Y)->regcost) < 0)
1556
1557static struct table_elt *
1558insert (x, classp, hash, mode)
1559 rtx x;
1560 struct table_elt *classp;
1561 unsigned hash;
1562 enum machine_mode mode;
1563{
1564 struct table_elt *elt;
1565
1566 /* If X is a register and we haven't made a quantity for it,
1567 something is wrong. */
1568 if (GET_CODE (x) == REG && ! REGNO_QTY_VALID_P (REGNO (x)))
1569 abort ();
1570
1571 /* If X is a hard register, show it is being put in the table. */
1572 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
1573 {
1574 unsigned int regno = REGNO (x);
1575 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1576 unsigned int i;
1577
1578 for (i = regno; i < endregno; i++)
1579 SET_HARD_REG_BIT (hard_regs_in_table, i);
1580 }
1581
1582 /* Put an element for X into the right hash bucket. */
1583
1584 elt = free_element_chain;
1585 if (elt)
1586 free_element_chain = elt->next_same_hash;
1587 else
1588 {
1589 n_elements_made++;
1590 elt = (struct table_elt *) xmalloc (sizeof (struct table_elt));
1591 }
1592
1593 elt->exp = x;
1594 elt->canon_exp = NULL_RTX;
1595 elt->cost = COST (x);
1596 elt->regcost = approx_reg_cost (x);
1597 elt->next_same_value = 0;
1598 elt->prev_same_value = 0;
1599 elt->next_same_hash = table[hash];
1600 elt->prev_same_hash = 0;
1601 elt->related_value = 0;
1602 elt->in_memory = 0;
1603 elt->mode = mode;
1604 elt->is_const = (CONSTANT_P (x)
1605 /* GNU C++ takes advantage of this for `this'
1606 (and other const values). */
1607 || (RTX_UNCHANGING_P (x)
1608 && GET_CODE (x) == REG
1609 && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1610 || FIXED_BASE_PLUS_P (x));
1611
1612 if (table[hash])
1613 table[hash]->prev_same_hash = elt;
1614 table[hash] = elt;
1615
1616 /* Put it into the proper value-class. */
1617 if (classp)
1618 {
1619 classp = classp->first_same_value;
1620 if (CHEAPER (elt, classp))
1621 /* Insert at the head of the class */
1622 {
1623 struct table_elt *p;
1624 elt->next_same_value = classp;
1625 classp->prev_same_value = elt;
1626 elt->first_same_value = elt;
1627
1628 for (p = classp; p; p = p->next_same_value)
1629 p->first_same_value = elt;
1630 }
1631 else
1632 {
1633 /* Insert not at head of the class. */
1634 /* Put it after the last element cheaper than X. */
1635 struct table_elt *p, *next;
1636
1637 for (p = classp; (next = p->next_same_value) && CHEAPER (next, elt);
1638 p = next);
1639
1640 /* Put it after P and before NEXT. */
1641 elt->next_same_value = next;
1642 if (next)
1643 next->prev_same_value = elt;
1644
1645 elt->prev_same_value = p;
1646 p->next_same_value = elt;
1647 elt->first_same_value = classp;
1648 }
1649 }
1650 else
1651 elt->first_same_value = elt;
1652
1653 /* If this is a constant being set equivalent to a register or a register
1654 being set equivalent to a constant, note the constant equivalence.
1655
1656 If this is a constant, it cannot be equivalent to a different constant,
1657 and a constant is the only thing that can be cheaper than a register. So
1658 we know the register is the head of the class (before the constant was
1659 inserted).
1660
1661 If this is a register that is not already known equivalent to a
1662 constant, we must check the entire class.
1663
1664 If this is a register that is already known equivalent to an insn,
1665 update the qtys `const_insn' to show that `this_insn' is the latest
1666 insn making that quantity equivalent to the constant. */
1667
1668 if (elt->is_const && classp && GET_CODE (classp->exp) == REG
1669 && GET_CODE (x) != REG)
1670 {
1671 int exp_q = REG_QTY (REGNO (classp->exp));
1672 struct qty_table_elem *exp_ent = &qty_table[exp_q];
1673
1674 exp_ent->const_rtx = gen_lowpart_if_possible (exp_ent->mode, x);
1675 exp_ent->const_insn = this_insn;
1676 }
1677
1678 else if (GET_CODE (x) == REG
1679 && classp
1680 && ! qty_table[REG_QTY (REGNO (x))].const_rtx
1681 && ! elt->is_const)
1682 {
1683 struct table_elt *p;
1684
1685 for (p = classp; p != 0; p = p->next_same_value)
1686 {
1687 if (p->is_const && GET_CODE (p->exp) != REG)
1688 {
1689 int x_q = REG_QTY (REGNO (x));
1690 struct qty_table_elem *x_ent = &qty_table[x_q];
1691
1692 x_ent->const_rtx
1693 = gen_lowpart_if_possible (GET_MODE (x), p->exp);
1694 x_ent->const_insn = this_insn;
1695 break;
1696 }
1697 }
1698 }
1699
1700 else if (GET_CODE (x) == REG
1701 && qty_table[REG_QTY (REGNO (x))].const_rtx
1702 && GET_MODE (x) == qty_table[REG_QTY (REGNO (x))].mode)
1703 qty_table[REG_QTY (REGNO (x))].const_insn = this_insn;
1704
1705 /* If this is a constant with symbolic value,
1706 and it has a term with an explicit integer value,
1707 link it up with related expressions. */
1708 if (GET_CODE (x) == CONST)
1709 {
1710 rtx subexp = get_related_value (x);
1711 unsigned subhash;
1712 struct table_elt *subelt, *subelt_prev;
1713
1714 if (subexp != 0)
1715 {
1716 /* Get the integer-free subexpression in the hash table. */
1717 subhash = safe_hash (subexp, mode) & HASH_MASK;
1718 subelt = lookup (subexp, subhash, mode);
1719 if (subelt == 0)
1720 subelt = insert (subexp, NULL, subhash, mode);
1721 /* Initialize SUBELT's circular chain if it has none. */
1722 if (subelt->related_value == 0)
1723 subelt->related_value = subelt;
1724 /* Find the element in the circular chain that precedes SUBELT. */
1725 subelt_prev = subelt;
1726 while (subelt_prev->related_value != subelt)
1727 subelt_prev = subelt_prev->related_value;
1728 /* Put new ELT into SUBELT's circular chain just before SUBELT.
1729 This way the element that follows SUBELT is the oldest one. */
1730 elt->related_value = subelt_prev->related_value;
1731 subelt_prev->related_value = elt;
1732 }
1733 }
1734
1735 return elt;
1736}
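
/* An illustrative sketch, not part of the pass: how a caller might use
   `lookup' and `insert' (defined above) to record that a register and a
   constant are equivalent.  REG_X and CST are hypothetical locals; the
   register must already have a quantity (made via insert_regs), or the
   abort at the top of insert triggers.  Because a constant is cheaper
   than any register, the second insert puts CST at the head of the
   class and repoints every element's first_same_value at it.  */
#if 0
{
  rtx reg_x = gen_rtx_REG (SImode, 100);  /* (reg:SI 100) */
  rtx cst = GEN_INT (4);                  /* the equivalent constant */
  unsigned hash = HASH (reg_x, SImode);
  struct table_elt *classp = lookup (reg_x, hash, SImode);

  if (classp == 0)
    classp = insert (reg_x, NULL, hash, SImode);

  insert (cst, classp, HASH (cst, SImode), SImode);
}
#endif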
1737
1738
1739/* Given two equivalence classes, CLASS1 and CLASS2, put all the entries from
1740 CLASS2 into CLASS1. This is done when we have reached an insn which makes
1741 the two classes equivalent.
1742
1743 CLASS1 will be the surviving class; CLASS2 should not be used after this
1744 call.
1745
1746 Any invalid entries in CLASS2 will not be copied. */
1747
1748static void
1749merge_equiv_classes (class1, class2)
1750 struct table_elt *class1, *class2;
1751{
1752 struct table_elt *elt, *next, *new;
1753
1754 /* Ensure we start with the head of the classes. */
1755 class1 = class1->first_same_value;
1756 class2 = class2->first_same_value;
1757
1758 /* If they were already equal, forget it. */
1759 if (class1 == class2)
1760 return;
1761
1762 for (elt = class2; elt; elt = next)
1763 {
1764 unsigned int hash;
1765 rtx exp = elt->exp;
1766 enum machine_mode mode = elt->mode;
1767
1768 next = elt->next_same_value;
1769
1770 /* Remove old entry, make a new one in CLASS1's class.
1771 Don't do this for invalid entries as we cannot find their
1772 hash code (it also isn't necessary). */
1773 if (GET_CODE (exp) == REG || exp_equiv_p (exp, exp, 1, 0))
1774 {
1775 hash_arg_in_memory = 0;
1776 hash = HASH (exp, mode);
1777
1778 if (GET_CODE (exp) == REG)
1779 delete_reg_equiv (REGNO (exp));
1780
1781 remove_from_table (elt, hash);
1782
1783 if (insert_regs (exp, class1, 0))
1784 {
1785 rehash_using_reg (exp);
1786 hash = HASH (exp, mode);
1787 }
1788 new = insert (exp, class1, hash, mode);
1789 new->in_memory = hash_arg_in_memory;
1790 }
1791 }
1792}
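
/* Worked example, for exposition only: suppose CLASS1 holds
   { (reg:SI 100), (mem:SI (reg:SI 10)) } and CLASS2 holds
   { (reg:SI 101), (plus:SI (reg:SI 3) (const_int 8)) }.  Reaching an
   insn such as (set (reg:SI 101) (reg:SI 100)) makes the classes
   equivalent; each valid CLASS2 entry is removed, rehashed and
   reinserted into CLASS1, so afterwards all four expressions share one
   first_same_value and CLASS2 must not be referenced again.  */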
1793
1794
1795/* Flush the entire hash table. */
1796
1797static void
1798flush_hash_table ()
1799{
1800 int i;
1801 struct table_elt *p;
1802
1803 for (i = 0; i < HASH_SIZE; i++)
1804 for (p = table[i]; p; p = table[i])
1805 {
1806 /* Note that invalidate can remove elements
1807 after P in the current hash chain. */
1808 if (GET_CODE (p->exp) == REG)
1809 invalidate (p->exp, p->mode);
1810 else
1811 remove_from_table (p, i);
1812 }
1813}
1814
1815
1816/* Function called for each rtx to check whether a true dependence exists. */
1817struct check_dependence_data
1818{
1819 enum machine_mode mode;
1820 rtx exp;
1821};
1822
1823static int
1824check_dependence (x, data)
1825 rtx *x;
1826 void *data;
1827{
1828 struct check_dependence_data *d = (struct check_dependence_data *) data;
1829 if (*x && GET_CODE (*x) == MEM)
1830 return true_dependence (d->exp, d->mode, *x, cse_rtx_varies_p);
1831 else
1832 return 0;
1833}
1834
1835
1836/* Remove from the hash table, or mark as invalid, all expressions whose
1837 values could be altered by storing in X. X is a register, a subreg, or
1838 a memory reference with nonvarying address (because, when a memory
1839 reference with a varying address is stored in, all memory references are
1840 removed by invalidate_memory so specific invalidation is superfluous).
1841 FULL_MODE, if not VOIDmode, indicates that this much should be
1842 invalidated instead of just the amount indicated by the mode of X. This
1843 is only used for bitfield stores into memory.
1844
1845 A nonvarying address may be just a register or just a symbol reference,
1846 or it may be either of those plus a numeric offset. */
1847
1848static void
1849invalidate (x, full_mode)
1850 rtx x;
1851 enum machine_mode full_mode;
1852{
1853 int i;
1854 struct table_elt *p;
1855
1856 switch (GET_CODE (x))
1857 {
1858 case REG:
1859 {
1860 /* If X is a register, dependencies on its contents are recorded
1861 through the qty number mechanism. Just change the qty number of
1862 the register, mark it as invalid for expressions that refer to it,
1863 and remove the register itself from the table. */
1864 unsigned int regno = REGNO (x);
1865 unsigned int hash = HASH (x, GET_MODE (x));
1866
1867 /* Remove REGNO from any quantity list it might be on and indicate
1868 that its value might have changed. If it is a pseudo, remove its
1869 entry from the hash table.
1870
1871 For a hard register, we do the first two actions above for any
1872 additional hard registers corresponding to X. Then, if any of these
1873 registers are in the table, we must remove any REG entries that
1874 overlap these registers. */
1875
1876 delete_reg_equiv (regno);
1877 REG_TICK (regno)++;
1878
1879 if (regno >= FIRST_PSEUDO_REGISTER)
1880 {
1881 /* Because a register can be referenced in more than one mode,
1882 we might have to remove more than one table entry. */
1883 struct table_elt *elt;
1884
1885 while ((elt = lookup_for_remove (x, hash, GET_MODE (x))))
1886 remove_from_table (elt, hash);
1887 }
1888 else
1889 {
1890 HOST_WIDE_INT in_table
1891 = TEST_HARD_REG_BIT (hard_regs_in_table, regno);
1892 unsigned int endregno
1893 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
1894 unsigned int tregno, tendregno, rn;
1895 struct table_elt *p, *next;
1896
1897 CLEAR_HARD_REG_BIT (hard_regs_in_table, regno);
1898
1899 for (rn = regno + 1; rn < endregno; rn++)
1900 {
1901 in_table |= TEST_HARD_REG_BIT (hard_regs_in_table, rn);
1902 CLEAR_HARD_REG_BIT (hard_regs_in_table, rn);
1903 delete_reg_equiv (rn);
1904 REG_TICK (rn)++;
1905 }
1906
1907 if (in_table)
1908 for (hash = 0; hash < HASH_SIZE; hash++)
1909 for (p = table[hash]; p; p = next)
1910 {
1911 next = p->next_same_hash;
1912
1913 if (GET_CODE (p->exp) != REG
1914 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
1915 continue;
1916
1917 tregno = REGNO (p->exp);
1918 tendregno
1919 = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (p->exp));
1920 if (tendregno > regno && tregno < endregno)
1921 remove_from_table (p, hash);
1922 }
1923 }
1924 }
1925 return;
1926
1927 case SUBREG:
1928 invalidate (SUBREG_REG (x), VOIDmode);
1929 return;
1930
1931 case PARALLEL:
1932 for (i = XVECLEN (x, 0) - 1; i >= 0; --i)
1933 invalidate (XVECEXP (x, 0, i), VOIDmode);
1934 return;
1935
1936 case EXPR_LIST:
1937 /* This is part of a disjoint return value; extract the location in
1938 question ignoring the offset. */
1939 invalidate (XEXP (x, 0), VOIDmode);
1940 return;
1941
1942 case MEM:
1943 /* Calculate the canonical version of X here so that
1944 true_dependence doesn't generate new RTL for X on each call. */
1945 x = canon_rtx (x);
1946
1947 /* Remove all hash table elements that refer to overlapping pieces of
1948 memory. */
1949 if (full_mode == VOIDmode)
1950 full_mode = GET_MODE (x);
1951
1952 for (i = 0; i < HASH_SIZE; i++)
1953 {
1954 struct table_elt *next;
1955
1956 for (p = table[i]; p; p = next)
1957 {
1958 next = p->next_same_hash;
1959 if (p->in_memory)
1960 {
1961 struct check_dependence_data d;
1962
1963 /* Just canonicalize the expression once;
1964 otherwise each time we call invalidate
1965 true_dependence will canonicalize the
1966 expression again. */
1967 if (!p->canon_exp)
1968 p->canon_exp = canon_rtx (p->exp);
1969 d.exp = x;
1970 d.mode = full_mode;
1971 if (for_each_rtx (&p->canon_exp, check_dependence, &d))
1972 remove_from_table (p, i);
1973 }
1974 }
1975 }
1976 return;
1977
1978 default:
1979 abort ();
1980 }
1981}
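
/* A concrete instance of the hard register case above (illustrative
   only): on a target where HARD_REGNO_NREGS (0, DImode) == 2, a
   recorded (reg:DI 0) occupies hard registers 0 and 1, so a store into
   (reg:SI 1) must remove the (reg:DI 0) entry even though the register
   numbers differ.  That is what the overlap test
   `tendregno > regno && tregno < endregno' catches.  */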
1982
1983
1984/* Remove all expressions that refer to register REGNO,
1985 since they are already invalid, and we are about to
1986 mark that register valid again and don't want the old
1987 expressions to reappear as valid. */
1988
1989static void
1990remove_invalid_refs (regno)
1991 unsigned int regno;
1992{
1993 unsigned int i;
1994 struct table_elt *p, *next;
1995
1996 for (i = 0; i < HASH_SIZE; i++)
1997 for (p = table[i]; p; p = next)
1998 {
1999 next = p->next_same_hash;
2000 if (GET_CODE (p->exp) != REG
2001 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
2002 remove_from_table (p, i);
2003 }
2004}
2005
2006/* Likewise for a subreg with subreg_reg REGNO, subreg_byte OFFSET,
2007 and mode MODE. */
2008static void
2009remove_invalid_subreg_refs (regno, offset, mode)
2010 unsigned int regno;
2011 unsigned int offset;
2012 enum machine_mode mode;
2013{
2014 unsigned int i;
2015 struct table_elt *p, *next;
2016 unsigned int end = offset + (GET_MODE_SIZE (mode) - 1);
2017
2018 for (i = 0; i < HASH_SIZE; i++)
2019 for (p = table[i]; p; p = next)
2020 {
2021 rtx exp = p->exp;
2022 next = p->next_same_hash;
2023
2024 if (GET_CODE (exp) != REG
2025 && (GET_CODE (exp) != SUBREG
2026 || GET_CODE (SUBREG_REG (exp)) != REG
2027 || REGNO (SUBREG_REG (exp)) != regno
2028 || (((SUBREG_BYTE (exp)
2029 + (GET_MODE_SIZE (GET_MODE (exp)) - 1)) >= offset)
2030 && SUBREG_BYTE (exp) <= end))
2031 && refers_to_regno_p (regno, regno + 1, p->exp, (rtx*) 0))
2032 remove_from_table (p, i);
2033 }
2034}
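
/* A worked instance of the overlap test above, assuming 4-byte SImode
   and 2-byte HImode: with OFFSET == 4 and MODE == SImode, END is
   4 + (4 - 1) == 7, so bytes [4,7] of REGNO are affected.  A
   (subreg:HI (reg:DI REGNO) 6) spans bytes [6,7], overlaps, and is
   removed; a (subreg:HI (reg:DI REGNO) 0) spans bytes [0,1] and is
   kept.  */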
2035
2036
2037/* Recompute the hash codes of any valid entries in the hash table that
2038 reference X, if X is a register, or SUBREG_REG (X) if X is a SUBREG.
2039
2040 This is called when we make a jump equivalence. */
2041
2042static void
2043rehash_using_reg (x)
2044 rtx x;
2045{
2046 unsigned int i;
2047 struct table_elt *p, *next;
2048 unsigned hash;
2049
2050 if (GET_CODE (x) == SUBREG)
2051 x = SUBREG_REG (x);
2052
2053 /* If X is not a register or if the register is known not to be in any
2054 valid entries in the table, we have no work to do. */
2055
2056 if (GET_CODE (x) != REG
2057 || REG_IN_TABLE (REGNO (x)) < 0
2058 || REG_IN_TABLE (REGNO (x)) != REG_TICK (REGNO (x)))
2059 return;
2060
2061 /* Scan all hash chains looking for valid entries that mention X.
2062 If we find one and it is in the wrong hash chain, move it. We can skip
2063 objects that are registers, since they are handled specially. */
2064
2065 for (i = 0; i < HASH_SIZE; i++)
2066 for (p = table[i]; p; p = next)
2067 {
2068 next = p->next_same_hash;
2069 if (GET_CODE (p->exp) != REG && reg_mentioned_p (x, p->exp)
2070 && exp_equiv_p (p->exp, p->exp, 1, 0)
2071 && i != (hash = safe_hash (p->exp, p->mode) & HASH_MASK))
2072 {
2073 if (p->next_same_hash)
2074 p->next_same_hash->prev_same_hash = p->prev_same_hash;
2075
2076 if (p->prev_same_hash)
2077 p->prev_same_hash->next_same_hash = p->next_same_hash;
2078 else
2079 table[i] = p->next_same_hash;
2080
2081 p->next_same_hash = table[hash];
2082 p->prev_same_hash = 0;
2083 if (table[hash])
2084 table[hash]->prev_same_hash = p;
2085 table[hash] = p;
2086 }
2087 }
2088}
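
/* Illustrative scenario: when a jump establishes that (reg:SI 100)
   equals (reg:SI 99), the quantity number of reg 100 changes, so an
   entry such as (plus:SI (reg:SI 100) (const_int 4)) may no longer
   hash to the bucket it sits in (canon_hash of a REG depends on
   REG_QTY).  The loop above recomputes each affected entry's hash with
   safe_hash and relinks it into the proper chain.  */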
2089
2090
2091/* Remove from the hash table any expressions that are call-clobbered
2092 registers. Also update their TICK values. */
2093
2094static void
2095invalidate_for_call ()
2096{
2097 unsigned int regno, endregno;
2098 unsigned int i;
2099 unsigned hash;
2100 struct table_elt *p, *next;
2101 int in_table = 0;
2102
2103 /* Go through all the hard registers. For each that is clobbered in
2104 a CALL_INSN, remove the register from quantity chains and update
2105 reg_tick if defined. Also see if any of these registers is currently
2106 in the table. */
2107
2108 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2109 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
2110 {
2111 delete_reg_equiv (regno);
2112 if (REG_TICK (regno) >= 0)
2113 REG_TICK (regno)++;
2114
2115 in_table |= (TEST_HARD_REG_BIT (hard_regs_in_table, regno) != 0);
2116 }
2117
2118 /* In the case where we have no call-clobbered hard registers in the
2119 table, we are done. Otherwise, scan the table and remove any
2120 entry that overlaps a call-clobbered register. */
2121
2122 if (in_table)
2123 for (hash = 0; hash < HASH_SIZE; hash++)
2124 for (p = table[hash]; p; p = next)
2125 {
2126 next = p->next_same_hash;
2127
2128 if (GET_CODE (p->exp) != REG
2129 || REGNO (p->exp) >= FIRST_PSEUDO_REGISTER)
2130 continue;
2131
2132 regno = REGNO (p->exp);
2133 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (p->exp));
2134
2135 for (i = regno; i < endregno; i++)
2136 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2137 {
2138 remove_from_table (p, hash);
2139 break;
2140 }
2141 }
2142}
2143
2144
2145/* Given an expression X of type CONST,
2146 and ELT which is its table entry (or 0 if it
2147 is not in the hash table),
2148 return an alternate expression for X as a register plus integer.
2149 If none can be found, return 0. */
2150
2151static rtx
2152use_related_value (x, elt)
2153 rtx x;
2154 struct table_elt *elt;
2155{
2156 struct table_elt *relt = 0;
2157 struct table_elt *p, *q;
2158 HOST_WIDE_INT offset;
2159
2160 /* First, is there anything related known?
2161 If we have a table element, we can tell from that.
2162 Otherwise, we must look it up. */
2163
2164 if (elt != 0 && elt->related_value != 0)
2165 relt = elt;
2166 else if (elt == 0 && GET_CODE (x) == CONST)
2167 {
2168 rtx subexp = get_related_value (x);
2169 if (subexp != 0)
2170 relt = lookup (subexp,
2171 safe_hash (subexp, GET_MODE (subexp)) & HASH_MASK,
2172 GET_MODE (subexp));
2173 }
2174
2175 if (relt == 0)
2176 return 0;
2177
2178 /* Search all related table entries for one that has an
2179 equivalent register. */
2180
2181 p = relt;
2182 while (1)
2183 {
2184 /* This loop is strange in that it is executed in two different cases.
2185 The first is when X is already in the table. Then it is searching
2186 the RELATED_VALUE list of X's class (RELT). The second case is when
2187 X is not in the table. Then RELT points to a class for the related
2188 value.
2189
2190 Ensure that, whatever case we are in, we ignore classes that have
2191 the same value as X. */
2192
2193 if (rtx_equal_p (x, p->exp))
2194 q = 0;
2195 else
2196 for (q = p->first_same_value; q; q = q->next_same_value)
2197 if (GET_CODE (q->exp) == REG)
2198 break;
2199
2200 if (q)
2201 break;
2202
2203 p = p->related_value;
2204
2205 /* We went all the way around, so there is nothing to be found.
2206 Alternatively, perhaps RELT was in the table for some other reason
2207 and it has no related values recorded. */
2208 if (p == relt || p == 0)
2209 break;
2210 }
2211
2212 if (q == 0)
2213 return 0;
2214
2215 offset = (get_integer_term (x) - get_integer_term (p->exp));
2216 /* Note: OFFSET may be 0 if P->exp and X are related by commutativity. */
2217 return plus_constant (q->exp, offset);
2218}
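
/* Worked example with hypothetical RTL: let X be
   (const (plus (symbol_ref "table") (const_int 12))) and suppose the
   related-value chain already holds
   (const (plus (symbol_ref "table") (const_int 4))) in a class that
   contains (reg:SI 65).  The integer terms differ by 8, so this
   returns (plus (reg:SI 65) (const_int 8)), letting the address be
   formed from the register that already holds the related constant.  */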
2219
2220
2221/* Hash a string. Just add its bytes up. */
2222static inline unsigned
2223canon_hash_string (ps)
2224 const char *ps;
2225{
2226 unsigned hash = 0;
2227 const unsigned char *p = (const unsigned char *)ps;
2228
2229 if (p)
2230 while (*p)
2231 hash += *p++;
2232
2233 return hash;
2234}
2235
2236/* Hash an rtx. We are careful to make sure the value is never negative.
2237 Equivalent registers hash identically.
2238 MODE is used in hashing for CONST_INTs only;
2239 otherwise the mode of X is used.
2240
2241 Store 1 in do_not_record if any subexpression is volatile.
2242
2243 Store 1 in hash_arg_in_memory if X contains a MEM rtx
2244 which does not have the RTX_UNCHANGING_P bit set.
2245
2246 Note that cse_insn knows that the hash code of a MEM expression
2247 is just (int) MEM plus the hash code of the address. */
2248
2249static unsigned
2250canon_hash (x, mode)
2251 rtx x;
2252 enum machine_mode mode;
2253{
2254 int i, j;
2255 unsigned hash = 0;
2256 enum rtx_code code;
2257 const char *fmt;
2258
2259 /* repeat is used to turn tail-recursion into iteration. */
2260 repeat:
2261 if (x == 0)
2262 return hash;
2263
2264 code = GET_CODE (x);
2265 switch (code)
2266 {
2267 case REG:
2268 {
2269 unsigned int regno = REGNO (x);
2270 bool record;
2271
2272 /* On some machines, we can't record any non-fixed hard register,
2273 because extending its life will cause reload problems. We
2274 consider ap, fp, sp, gp to be fixed for this purpose.
2275
2276 We also consider CCmode registers to be fixed for this purpose;
2277 failure to do so leads to failure to simplify conditionals of
2278 the 0<100 type.
2279
2280 On all machines, we can't record any global registers.
2281 Nor should we record any register that is in a small
2282 class, as defined by CLASS_LIKELY_SPILLED_P. */
2283
2284 if (regno >= FIRST_PSEUDO_REGISTER)
2285 record = true;
2286 else if (x == frame_pointer_rtx
2287 || x == hard_frame_pointer_rtx
2288 || x == arg_pointer_rtx
2289 || x == stack_pointer_rtx
2290 || x == pic_offset_table_rtx)
2291 record = true;
2292 else if (global_regs[regno])
2293 record = false;
2294 else if (fixed_regs[regno])
2295 record = true;
2296 else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_CC)
2297 record = true;
2298 else if (SMALL_REGISTER_CLASSES)
2299 record = false;
2300 else if (CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno)))
2301 record = false;
2302 else
2303 record = true;
2304
2305 if (!record)
2306 {
2307 do_not_record = 1;
2308 return 0;
2309 }
2310
2311 hash += ((unsigned) REG << 7) + (unsigned) REG_QTY (regno);
2312 return hash;
2313 }
2314
2315 /* We handle SUBREG of a REG specially because the underlying
2316 reg changes its hash value with every value change; we don't
2317 want to have to forget unrelated subregs when one subreg changes. */
2318 case SUBREG:
2319 {
2320 if (GET_CODE (SUBREG_REG (x)) == REG)
2321 {
2322 hash += (((unsigned) SUBREG << 7)
2323 + REGNO (SUBREG_REG (x))
2324 + (SUBREG_BYTE (x) / UNITS_PER_WORD));
2325 return hash;
2326 }
2327 break;
2328 }
2329
2330 case CONST_INT:
2331 {
2332 unsigned HOST_WIDE_INT tem = INTVAL (x);
2333 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
2334 return hash;
2335 }
2336
2337 case CONST_DOUBLE:
2338 /* This is like the general case, except that it only counts
2339 the integers representing the constant. */
2340 hash += (unsigned) code + (unsigned) GET_MODE (x);
2341 if (GET_MODE (x) != VOIDmode)
2342 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
2343 {
2344 unsigned HOST_WIDE_INT tem = XWINT (x, i);
2345 hash += tem;
2346 }
2347 else
2348 hash += ((unsigned) CONST_DOUBLE_LOW (x)
2349 + (unsigned) CONST_DOUBLE_HIGH (x));
2350 return hash;
2351
2352 case CONST_VECTOR:
2353 {
2354 int units;
2355 rtx elt;
2356
2357 units = CONST_VECTOR_NUNITS (x);
2358
2359 for (i = 0; i < units; ++i)
2360 {
2361 elt = CONST_VECTOR_ELT (x, i);
2362 hash += canon_hash (elt, GET_MODE (elt));
2363 }
2364
2365 return hash;
2366 }
2367
2368 /* Assume there is only one rtx object for any given label. */
2369 case LABEL_REF:
2370 hash += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
2371 return hash;
2372
2373 case SYMBOL_REF:
2374 hash += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
2375 return hash;
2376
2377 case MEM:
2378 /* We don't record if marked volatile or if BLKmode since we don't
2379 know the size of the move. */
2380 if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
2381 {
2382 do_not_record = 1;
2383 return 0;
2384 }
2385 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2386 {
2387 hash_arg_in_memory = 1;
2388 }
2389 /* Now that we have already found this special case,
2390 might as well speed it up as much as possible. */
2391 hash += (unsigned) MEM;
2392 x = XEXP (x, 0);
2393 goto repeat;
2394
2395 case USE:
2396 /* A USE that mentions non-volatile memory needs special
2397 handling since the MEM may be BLKmode which normally
2398 prevents an entry from being made. Pure calls are
2399 marked by a USE which mentions BLKmode memory. */
2400 if (GET_CODE (XEXP (x, 0)) == MEM
2401 && ! MEM_VOLATILE_P (XEXP (x, 0)))
2402 {
2403 hash += (unsigned)USE;
2404 x = XEXP (x, 0);
2405
2406 if (! RTX_UNCHANGING_P (x) || FIXED_BASE_PLUS_P (XEXP (x, 0)))
2407 hash_arg_in_memory = 1;
2408
2409 /* Now that we have already found this special case,
2410 might as well speed it up as much as possible. */
2411 hash += (unsigned) MEM;
2412 x = XEXP (x, 0);
2413 goto repeat;
2414 }
2415 break;
2416
2417 case PRE_DEC:
2418 case PRE_INC:
2419 case POST_DEC:
2420 case POST_INC:
2421 case PRE_MODIFY:
2422 case POST_MODIFY:
2423 case PC:
2424 case CC0:
2425 case CALL:
2426 case UNSPEC_VOLATILE:
2427 do_not_record = 1;
2428 return 0;
2429
2430 case ASM_OPERANDS:
2431 if (MEM_VOLATILE_P (x))
2432 {
2433 do_not_record = 1;
2434 return 0;
2435 }
2436 else
2437 {
2438 /* We don't want to take the filename and line into account. */
2439 hash += (unsigned) code + (unsigned) GET_MODE (x)
2440 + canon_hash_string (ASM_OPERANDS_TEMPLATE (x))
2441 + canon_hash_string (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
2442 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);
2443
2444 if (ASM_OPERANDS_INPUT_LENGTH (x))
2445 {
2446 for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2447 {
2448 hash += (canon_hash (ASM_OPERANDS_INPUT (x, i),
2449 GET_MODE (ASM_OPERANDS_INPUT (x, i)))
2450 + canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT
2451 (x, i)));
2452 }
2453
2454 hash += canon_hash_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
2455 x = ASM_OPERANDS_INPUT (x, 0);
2456 mode = GET_MODE (x);
2457 goto repeat;
2458 }
2459
2460 return hash;
2461 }
2462 break;
2463
2464 default:
2465 break;
2466 }
2467
2468 i = GET_RTX_LENGTH (code) - 1;
2469 hash += (unsigned) code + (unsigned) GET_MODE (x);
2470 fmt = GET_RTX_FORMAT (code);
2471 for (; i >= 0; i--)
2472 {
2473 if (fmt[i] == 'e')
2474 {
2475 rtx tem = XEXP (x, i);
2476
2477 /* If we are about to do the last recursive call
2478 needed at this level, change it into iteration.
2479 This function is called enough to be worth it. */
2480 if (i == 0)
2481 {
2482 x = tem;
2483 goto repeat;
2484 }
2485 hash += canon_hash (tem, 0);
2486 }
2487 else if (fmt[i] == 'E')
2488 for (j = 0; j < XVECLEN (x, i); j++)
2489 hash += canon_hash (XVECEXP (x, i, j), 0);
2490 else if (fmt[i] == 's')
2491 hash += canon_hash_string (XSTR (x, i));
2492 else if (fmt[i] == 'i')
2493 {
2494 unsigned tem = XINT (x, i);
2495 hash += tem;
2496 }
2497 else if (fmt[i] == '0' || fmt[i] == 't')
2498 /* Unused. */
2499 ;
2500 else
2501 abort ();
2502 }
2503 return hash;
2504}
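
/* A minimal sketch (not compiled) of the property documented above:
   once an insn copying reg 100 into reg 101 has been scanned, the two
   registers share a quantity number, so they hash identically and a
   lookup of one can find an entry recorded under the other.  */
#if 0
{
  /* Assume (set (reg:SI 101) (reg:SI 100)) has been processed, so
     REG_QTY (100) == REG_QTY (101).  */
  unsigned h100 = canon_hash (gen_rtx_REG (SImode, 100), SImode);
  unsigned h101 = canon_hash (gen_rtx_REG (SImode, 101), SImode);

  /* h100 == h101, since the REG case above hashes
     ((unsigned) REG << 7) + REG_QTY (regno).  */
}
#endif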
2505
2506/* Like canon_hash but with no side effects. */
2507
2508static unsigned
2509safe_hash (x, mode)
2510 rtx x;
2511 enum machine_mode mode;
2512{
2513 int save_do_not_record = do_not_record;
2514 int save_hash_arg_in_memory = hash_arg_in_memory;
2515 unsigned hash = canon_hash (x, mode);
2516 hash_arg_in_memory = save_hash_arg_in_memory;
2517 do_not_record = save_do_not_record;
2518 return hash;
2519}
2520
2521
2522/* Return 1 iff X and Y would canonicalize into the same thing,
2523 without actually constructing the canonicalization of either one.
2524 If VALIDATE is nonzero,
2525 we assume X is an expression being processed from the rtl
2526 and Y was found in the hash table. We check register refs
2527 in Y for being marked as valid.
2528
2529 If EQUAL_VALUES is nonzero, we allow a register to match a constant value
2530 that is known to be in the register. Ordinarily, we don't allow them
2531 to match, because letting them match would cause unpredictable results
2532 in all the places that search a hash table chain for an equivalent
2533 for a given value. A possible equivalent that has different structure
2534 has its hash code computed from different data. Whether the hash code
2535 is the same as that of the given value is pure luck. */
2536
2537static int
2538exp_equiv_p (x, y, validate, equal_values)
2539 rtx x, y;
2540 int validate;
2541 int equal_values;
2542{
2543 int i, j;
2544 enum rtx_code code;
2545 const char *fmt;
2546
2547 /* Note: it is incorrect to assume an expression is equivalent to itself
2548 if VALIDATE is nonzero. */
2549 if (x == y && !validate)
2550 return 1;
2551 if (x == 0 || y == 0)
2552 return x == y;
2553
2554 code = GET_CODE (x);
2555 if (code != GET_CODE (y))
2556 {
2557 if (!equal_values)
2558 return 0;
2559
2560 /* If X is a constant and Y is a register or vice versa, they may be
2561 equivalent. We only have to validate if Y is a register. */
2562 if (CONSTANT_P (x) && GET_CODE (y) == REG
2563 && REGNO_QTY_VALID_P (REGNO (y)))
2564 {
2565 int y_q = REG_QTY (REGNO (y));
2566 struct qty_table_elem *y_ent = &qty_table[y_q];
2567
2568 if (GET_MODE (y) == y_ent->mode
2569 && rtx_equal_p (x, y_ent->const_rtx)
2570 && (! validate || REG_IN_TABLE (REGNO (y)) == REG_TICK (REGNO (y))))
2571 return 1;
2572 }
2573
2574 if (CONSTANT_P (y) && code == REG
2575 && REGNO_QTY_VALID_P (REGNO (x)))
2576 {
2577 int x_q = REG_QTY (REGNO (x));
2578 struct qty_table_elem *x_ent = &qty_table[x_q];
2579
2580 if (GET_MODE (x) == x_ent->mode
2581 && rtx_equal_p (y, x_ent->const_rtx))
2582 return 1;
2583 }
2584
2585 return 0;
2586 }
2587
2588 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2589 if (GET_MODE (x) != GET_MODE (y))
2590 return 0;
2591
2592 switch (code)
2593 {
2594 case PC:
2595 case CC0:
2596 case CONST_INT:
2597 return x == y;
2598
2599 case LABEL_REF:
2600 return XEXP (x, 0) == XEXP (y, 0);
2601
2602 case SYMBOL_REF:
2603 return XSTR (x, 0) == XSTR (y, 0);
2604
2605 case REG:
2606 {
2607 unsigned int regno = REGNO (y);
2608 unsigned int endregno
2609 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
2610 : HARD_REGNO_NREGS (regno, GET_MODE (y)));
2611 unsigned int i;
2612
2613 /* If the quantities are not the same, the expressions are not
2614 equivalent. If they are the same and we are not to validate, they
2615 are equivalent. Otherwise, ensure all regs are up-to-date. */
2616
2617 if (REG_QTY (REGNO (x)) != REG_QTY (regno))
2618 return 0;
2619
2620 if (! validate)
2621 return 1;
2622
2623 for (i = regno; i < endregno; i++)
2624 if (REG_IN_TABLE (i) != REG_TICK (i))
2625 return 0;
2626
2627 return 1;
2628 }
2629
2630 /* For commutative operations, check both orders. */
2631 case PLUS:
2632 case MULT:
2633 case AND:
2634 case IOR:
2635 case XOR:
2636 case NE:
2637 case EQ:
2638 return ((exp_equiv_p (XEXP (x, 0), XEXP (y, 0), validate, equal_values)
2639 && exp_equiv_p (XEXP (x, 1), XEXP (y, 1),
2640 validate, equal_values))
2641 || (exp_equiv_p (XEXP (x, 0), XEXP (y, 1),
2642 validate, equal_values)
2643 && exp_equiv_p (XEXP (x, 1), XEXP (y, 0),
2644 validate, equal_values)));
2645
2646 case ASM_OPERANDS:
2647 /* We don't use the generic code below because we want to
2648 disregard filename and line numbers. */
2649
2650 /* A volatile asm isn't equivalent to any other. */
2651 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
2652 return 0;
2653
2654 if (GET_MODE (x) != GET_MODE (y)
2655 || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
2656 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2657 ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
2658 || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
2659 || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
2660 return 0;
2661
2662 if (ASM_OPERANDS_INPUT_LENGTH (x))
2663 {
2664 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
2665 if (! exp_equiv_p (ASM_OPERANDS_INPUT (x, i),
2666 ASM_OPERANDS_INPUT (y, i),
2667 validate, equal_values)
2668 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
2669 ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
2670 return 0;
2671 }
2672
2673 return 1;
2674
2675 default:
2676 break;
2677 }
2678
2679 /* Compare the elements. If any pair of corresponding elements
2680 fail to match, return 0 for the whole thing. */
2681
2682 fmt = GET_RTX_FORMAT (code);
2683 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2684 {
2685 switch (fmt[i])
2686 {
2687 case 'e':
2688 if (! exp_equiv_p (XEXP (x, i), XEXP (y, i), validate, equal_values))
2689 return 0;
2690 break;
2691
2692 case 'E':
2693 if (XVECLEN (x, i) != XVECLEN (y, i))
2694 return 0;
2695 for (j = 0; j < XVECLEN (x, i); j++)
2696 if (! exp_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j),
2697 validate, equal_values))
2698 return 0;
2699 break;
2700
2701 case 's':
2702 if (strcmp (XSTR (x, i), XSTR (y, i)))
2703 return 0;
2704 break;
2705
2706 case 'i':
2707 if (XINT (x, i) != XINT (y, i))
2708 return 0;
2709 break;
2710
2711 case 'w':
2712 if (XWINT (x, i) != XWINT (y, i))
2713 return 0;
2714 break;
2715
2716 case '0':
2717 case 't':
2718 break;
2719
2720 default:
2721 abort ();
2722 }
2723 }
2724
2725 return 1;
2726}
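
/* Two illustrative consequences of the rules above, with hypothetical
   operands: (plus:SI (reg:SI 100) (reg:SI 101)) is equivalent to
   (plus:SI (reg:SI 101) (reg:SI 100)) via the commutative PLUS case;
   and with EQUAL_VALUES nonzero, (const_int 0) can match (reg:SI 100)
   when qty_table records that the register currently holds const0_rtx
   in SImode.  */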
2727
2728
2729/* Return 1 if X has a value that can vary even between two
2730 executions of the program. 0 means X can be compared reliably
2731 against certain constants or near-constants. */
2732
2733static int
2734cse_rtx_varies_p (x, from_alias)
2735 rtx x;
2736 int from_alias;
2737{
2738 /* We need not check for X and the equivalence class being of the same
2739 mode because if X is equivalent to a constant in some mode, it
2740 doesn't vary in any mode. */
2741
2742 if (GET_CODE (x) == REG
2743 && REGNO_QTY_VALID_P (REGNO (x)))
2744 {
2745 int x_q = REG_QTY (REGNO (x));
2746 struct qty_table_elem *x_ent = &qty_table[x_q];
2747
2748 if (GET_MODE (x) == x_ent->mode
2749 && x_ent->const_rtx != NULL_RTX)
2750 return 0;
2751 }
2752
2753 if (GET_CODE (x) == PLUS
2754 && GET_CODE (XEXP (x, 1)) == CONST_INT
2755 && GET_CODE (XEXP (x, 0)) == REG
2756 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0))))
2757 {
2758 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2759 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2760
2761 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2762 && x0_ent->const_rtx != NULL_RTX)
2763 return 0;
2764 }
2765
2766 /* This can happen as the result of virtual register instantiation, if
2767 the initial constant is too large to be a valid address. This gives
2768 us a three-instruction sequence: load the large offset into a register,
2769 load fp minus a constant into a register, then a MEM which is the
2770 sum of the two `constant' registers. */
2771 if (GET_CODE (x) == PLUS
2772 && GET_CODE (XEXP (x, 0)) == REG
2773 && GET_CODE (XEXP (x, 1)) == REG
2774 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 0)))
2775 && REGNO_QTY_VALID_P (REGNO (XEXP (x, 1))))
2776 {
2777 int x0_q = REG_QTY (REGNO (XEXP (x, 0)));
2778 int x1_q = REG_QTY (REGNO (XEXP (x, 1)));
2779 struct qty_table_elem *x0_ent = &qty_table[x0_q];
2780 struct qty_table_elem *x1_ent = &qty_table[x1_q];
2781
2782 if ((GET_MODE (XEXP (x, 0)) == x0_ent->mode)
2783 && x0_ent->const_rtx != NULL_RTX
2784 && (GET_MODE (XEXP (x, 1)) == x1_ent->mode)
2785 && x1_ent->const_rtx != NULL_RTX)
2786 return 0;
2787 }
2788
2789 return rtx_varies_p (x, from_alias);
2790}
2791
2792
2793/* Canonicalize an expression:
2794 replace each register reference inside it
2795 with the "oldest" equivalent register.
2796
2797 If INSN is non-zero and we are replacing a pseudo with a hard register
2798 or vice versa, validate_change is used to ensure that INSN remains valid
2799 after we make our substitution. The calls are made with IN_GROUP non-zero
2800 so apply_change_group must be called upon the outermost return from this
2801 function (unless INSN is zero). The result of apply_change_group can
2802 generally be discarded since the changes we are making are optional. */
2803
2804static rtx
2805canon_reg (x, insn)
2806 rtx x;
2807 rtx insn;
2808{
2809 int i;
2810 enum rtx_code code;
2811 const char *fmt;
2812
2813 if (x == 0)
2814 return x;
2815
2816 code = GET_CODE (x);
2817 switch (code)
2818 {
2819 case PC:
2820 case CC0:
2821 case CONST:
2822 case CONST_INT:
2823 case CONST_DOUBLE:
2824 case CONST_VECTOR:
2825 case SYMBOL_REF:
2826 case LABEL_REF:
2827 case ADDR_VEC:
2828 case ADDR_DIFF_VEC:
2829 return x;
2830
2831 case REG:
2832 {
2833 int first;
2834 int q;
2835 struct qty_table_elem *ent;
2836
2837 /* Never replace a hard reg, because hard regs can appear
2838 in more than one machine mode, and we must preserve the mode
2839 of each occurrence. Also, some hard regs appear in
2840 MEMs that are shared and mustn't be altered. Don't try to
2841 replace any reg that maps to a reg of class NO_REGS. */
2842 if (REGNO (x) < FIRST_PSEUDO_REGISTER
2843 || ! REGNO_QTY_VALID_P (REGNO (x)))
2844 return x;
2845
2846 q = REG_QTY (REGNO (x));
2847 ent = &qty_table[q];
2848 first = ent->first_reg;
2849 return (first >= FIRST_PSEUDO_REGISTER ? regno_reg_rtx[first]
2850 : REGNO_REG_CLASS (first) == NO_REGS ? x
2851 : gen_rtx_REG (ent->mode, first));
2852 }
2853
2854 default:
2855 break;
2856 }
2857
2858 fmt = GET_RTX_FORMAT (code);
2859 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2860 {
2861 int j;
2862
2863 if (fmt[i] == 'e')
2864 {
2865 rtx new = canon_reg (XEXP (x, i), insn);
2866 int insn_code;
2867
2868 /* If replacing pseudo with hard reg or vice versa, ensure the
2869 insn remains valid. Likewise if the insn has MATCH_DUPs. */
2870 if (insn != 0 && new != 0
2871 && GET_CODE (new) == REG && GET_CODE (XEXP (x, i)) == REG
2872 && (((REGNO (new) < FIRST_PSEUDO_REGISTER)
2873 != (REGNO (XEXP (x, i)) < FIRST_PSEUDO_REGISTER))
2874 || (insn_code = recog_memoized (insn)) < 0
2875 || insn_data[insn_code].n_dups > 0))
2876 validate_change (insn, &XEXP (x, i), new, 1);
2877 else
2878 XEXP (x, i) = new;
2879 }
2880 else if (fmt[i] == 'E')
2881 for (j = 0; j < XVECLEN (x, i); j++)
2882 XVECEXP (x, i, j) = canon_reg (XVECEXP (x, i, j), insn);
2883 }
2884
2885 return x;
2886}
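
/* Illustrative effect, with hypothetical pseudos: after
   (set (reg:SI 105) (reg:SI 100)) both registers share a quantity
   whose first_reg is 100, so canon_reg rewrites
   (plus:SI (reg:SI 105) (const_int 4)) as
   (plus:SI (reg:SI 100) (const_int 4)), giving every equivalent
   expression a single canonical spelling before it is hashed.  */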
2887
2888
2889/* LOC is a location within INSN that is an operand address (the contents of
2890 a MEM). Find the best equivalent address to use that is valid for this
2891 insn.
2892
2893 On most CISC machines, complicated address modes are costly, and rtx_cost
2894 is a good approximation for that cost. However, most RISC machines have
2895 only a few (usually only one) memory reference formats. If an address is
2896 valid at all, it is often just as cheap as any other address. Hence, for
2897 RISC machines, we use the configuration macro `ADDRESS_COST' to compare the
2898 costs of various addresses. For two addresses of equal cost, choose the one
2899 with the highest `rtx_cost' value as that has the potential of eliminating
2900 the most insns. For equal costs, we choose the first in the equivalence
2901 class. Note that we ignore the fact that pseudo registers are cheaper
2902 than hard registers here because we would also prefer the pseudo registers.
2903 */
2904
2905static void
2906find_best_addr (insn, loc, mode)
2907 rtx insn;
2908 rtx *loc;
2909 enum machine_mode mode;
2910{
2911 struct table_elt *elt;
2912 rtx addr = *loc;
2913#ifdef ADDRESS_COST
2914 struct table_elt *p;
2915 int found_better = 1;
2916#endif
2917 int save_do_not_record = do_not_record;
2918 int save_hash_arg_in_memory = hash_arg_in_memory;
2919 int addr_volatile;
2920 int regno;
2921 unsigned hash;
2922
2923 /* Do not try to replace constant addresses or addresses of local and
2924 argument slots. These MEM expressions are made only once and inserted
2925 in many instructions, as well as being used to control symbol table
2926 output. It is not safe to clobber them.
2927
2928 There are some uncommon cases where the address is already in a register
2929 for some reason, but we cannot take advantage of that because we have
2930 no easy way to unshare the MEM. In addition, looking up all stack
2931 addresses is costly. */
2932 if ((GET_CODE (addr) == PLUS
2933 && GET_CODE (XEXP (addr, 0)) == REG
2934 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2935 && (regno = REGNO (XEXP (addr, 0)),
2936 regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM
2937 || regno == ARG_POINTER_REGNUM))
2938 || (GET_CODE (addr) == REG
2939 && (regno = REGNO (addr), regno == FRAME_POINTER_REGNUM
2940 || regno == HARD_FRAME_POINTER_REGNUM
2941 || regno == ARG_POINTER_REGNUM))
2942 || GET_CODE (addr) == ADDRESSOF
2943 || CONSTANT_ADDRESS_P (addr))
2944 return;
2945
2946 /* If this address is not simply a register, try to fold it. This will
2947 sometimes simplify the expression. Many simplifications
2948 will not be valid, but some, usually applying the associative rule, will
2949 be valid and produce better code. */
2950 if (GET_CODE (addr) != REG)
2951 {
2952 rtx folded = fold_rtx (copy_rtx (addr), NULL_RTX);
2953 int addr_folded_cost = address_cost (folded, mode);
2954 int addr_cost = address_cost (addr, mode);
2955
2956 if ((addr_folded_cost < addr_cost
2957 || (addr_folded_cost == addr_cost
2958 /* ??? The rtx_cost comparison is left over from an older
2959 version of this code. It is probably no longer helpful. */
2960 && (rtx_cost (folded, MEM) > rtx_cost (addr, MEM)
2961 || approx_reg_cost (folded) < approx_reg_cost (addr))))
2962 && validate_change (insn, loc, folded, 0))
2963 addr = folded;
2964 }
2965
2966 /* If this address is not in the hash table, we can't look for equivalences
2967 of the whole address. Also, ignore if volatile. */
2968
2969 do_not_record = 0;
2970 hash = HASH (addr, Pmode);
2971 addr_volatile = do_not_record;
2972 do_not_record = save_do_not_record;
2973 hash_arg_in_memory = save_hash_arg_in_memory;
2974
2975 if (addr_volatile)
2976 return;
2977
2978 elt = lookup (addr, hash, Pmode);
2979
2980#ifndef ADDRESS_COST
2981 if (elt)
2982 {
2983 int our_cost = elt->cost;
2984
2985 /* Find the lowest cost below ours that works. */
2986 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
2987 if (elt->cost < our_cost
2988 && (GET_CODE (elt->exp) == REG
2989 || exp_equiv_p (elt->exp, elt->exp, 1, 0))
2990 && validate_change (insn, loc,
2991 canon_reg (copy_rtx (elt->exp), NULL_RTX), 0))
2992 return;
2993 }
2994#else
2995
2996 if (elt)
2997 {
2998 /* We need to find the best (under the criteria documented above) entry
2999 in the class that is valid. We use the `flag' field to indicate
3000 choices that were invalid and iterate until we can't find a better
3001 one that hasn't already been tried. */
3002
3003 for (p = elt->first_same_value; p; p = p->next_same_value)
3004 p->flag = 0;
3005
3006 while (found_better)
3007 {
3008 int best_addr_cost = address_cost (*loc, mode);
3009 int best_rtx_cost = (elt->cost + 1) >> 1;
3010 int exp_cost;
3011 struct table_elt *best_elt = elt;
3012
3013 found_better = 0;
3014 for (p = elt->first_same_value; p; p = p->next_same_value)
3015 if (! p->flag)
3016 {
3017 if ((GET_CODE (p->exp) == REG
3018 || exp_equiv_p (p->exp, p->exp, 1, 0))
3019 && ((exp_cost = address_cost (p->exp, mode)) < best_addr_cost
3020 || (exp_cost == best_addr_cost
3021 && ((p->cost + 1) >> 1) > best_rtx_cost)))
3022 {
3023 found_better = 1;
3024 best_addr_cost = exp_cost;
3025 best_rtx_cost = (p->cost + 1) >> 1;
3026 best_elt = p;
3027 }
3028 }
3029
3030 if (found_better)
3031 {
3032 if (validate_change (insn, loc,
3033 canon_reg (copy_rtx (best_elt->exp),
3034 NULL_RTX), 0))
3035 return;
3036 else
3037 best_elt->flag = 1;
3038 }
3039 }
3040 }
3041
3042 /* If the address is a binary operation with the first operand a register
3043 and the second a constant, do the same as above, but looking for
3044 equivalences of the register. Then try to simplify before checking for
3045 the best address to use. This catches a few cases: First is when we
3046 have REG+const and the register is equivalent to another REG+const. We can often merge
3047 the constants and eliminate one insn and one register. It may also be
3048 that a machine has a cheap REG+REG+const. Finally, this improves the
3049 code on the Alpha for unaligned byte stores. */
3050
3051 if (flag_expensive_optimizations
3052 && (GET_RTX_CLASS (GET_CODE (*loc)) == '2'
3053 || GET_RTX_CLASS (GET_CODE (*loc)) == 'c')
3054 && GET_CODE (XEXP (*loc, 0)) == REG
3055 && GET_CODE (XEXP (*loc, 1)) == CONST_INT)
3056 {
3057 rtx c = XEXP (*loc, 1);
3058
3059 do_not_record = 0;
3060 hash = HASH (XEXP (*loc, 0), Pmode);
3061 do_not_record = save_do_not_record;
3062 hash_arg_in_memory = save_hash_arg_in_memory;
3063
3064 elt = lookup (XEXP (*loc, 0), hash, Pmode);
3065 if (elt == 0)
3066 return;
3067
3068 /* We need to find the best (under the criteria documented above) entry
3069 in the class that is valid. We use the `flag' field to indicate
3070 choices that were invalid and iterate until we can't find a better
3071 one that hasn't already been tried. */
3072
3073 for (p = elt->first_same_value; p; p = p->next_same_value)
3074 p->flag = 0;
3075
3076 while (found_better)
3077 {
3078 int best_addr_cost = address_cost (*loc, mode);
3079 int best_rtx_cost = (COST (*loc) + 1) >> 1;
3080 struct table_elt *best_elt = elt;
3081 rtx best_rtx = *loc;
3082 int count;
3083
3084 /* This is in the worst case an O(n^2) algorithm, so limit our search
3085 to the first 32 elements on the list. This avoids trouble
3086 compiling code with very long basic blocks that can easily
3087 call simplify_gen_binary so many times that we run out of
3088 memory. */
3089
3090 found_better = 0;
3091 for (p = elt->first_same_value, count = 0;
3092 p && count < 32;
3093 p = p->next_same_value, count++)
3094 if (! p->flag
3095 && (GET_CODE (p->exp) == REG
3096 || exp_equiv_p (p->exp, p->exp, 1, 0)))
3097 {
3098 rtx new = simplify_gen_binary (GET_CODE (*loc), Pmode,
3099 p->exp, c);
3100 int new_cost;
3101 new_cost = address_cost (new, mode);
3102
3103 if (new_cost < best_addr_cost
3104 || (new_cost == best_addr_cost
3105 && (COST (new) + 1) >> 1 > best_rtx_cost))
3106 {
3107 found_better = 1;
3108 best_addr_cost = new_cost;
3109 best_rtx_cost = (COST (new) + 1) >> 1;
3110 best_elt = p;
3111 best_rtx = new;
3112 }
3113 }
3114
3115 if (found_better)
3116 {
3117 if (validate_change (insn, loc,
3118 canon_reg (copy_rtx (best_rtx),
3119 NULL_RTX), 0))
3120 return;
3121 else
3122 best_elt->flag = 1;
3123 }
3124 }
3125 }
3126#endif
3127}
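
/* A worked instance of the REG+const case above, with hypothetical
   registers: if *LOC is (plus:SI (reg:SI 101) (const_int 8)) and the
   class of reg 101 contains (plus:SI (reg:SI 100) (const_int 16)),
   simplify_gen_binary folds the candidate to
   (plus:SI (reg:SI 100) (const_int 24)).  If that address is cheaper
   by ADDRESS_COST, validate_change installs it, potentially freeing
   reg 101 and the insn that computed it.  */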
3128
3129
3130/* Given an operation (CODE, *PARG1, *PARG2), where code is a comparison
3131 operation (EQ, NE, GT, etc.), follow it back through the hash table and
3132 find what values are being compared.
3133
3134 *PARG1 and *PARG2 are updated to contain the rtx representing the values
3135 actually being compared. For example, if *PARG1 was (cc0) and *PARG2
3136 was (const_int 0), *PARG1 and *PARG2 will be set to the objects that were
3137 compared to produce cc0.
3138
3139 The return value is the comparison operator and is either the code of
3140 the comparison found or the code corresponding to its inverse. */
3141
3142static enum rtx_code
3143find_comparison_args (code, parg1, parg2, pmode1, pmode2)
3144 enum rtx_code code;
3145 rtx *parg1, *parg2;
3146 enum machine_mode *pmode1, *pmode2;
3147{
3148 rtx arg1, arg2;
3149
3150 arg1 = *parg1, arg2 = *parg2;
3151
3152 /* If ARG2 is const0_rtx, see what ARG1 is equivalent to. */
3153
3154 while (arg2 == CONST0_RTX (GET_MODE (arg1)))
3155 {
3156 /* Set nonzero when we find something of interest. */
3157 rtx x = 0;
3158 int reverse_code = 0;
3159 struct table_elt *p = 0;
3160
3161 /* If arg1 is a COMPARE, extract the comparison arguments from it.
3162 On machines with CC0, this is the only case that can occur, since
3163 fold_rtx will return the COMPARE or item being compared with zero
3164 when given CC0. */
3165
3166 if (GET_CODE (arg1) == COMPARE && arg2 == const0_rtx)
3167 x = arg1;
3168
3169 /* If ARG1 is a comparison operator and CODE is testing for
3170 STORE_FLAG_VALUE, get the inner arguments. */
3171
3172 else if (GET_RTX_CLASS (GET_CODE (arg1)) == '<')
3173 {
3174 if (code == NE
3175 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3176 && code == LT && STORE_FLAG_VALUE == -1)
3177#ifdef FLOAT_STORE_FLAG_VALUE
3178 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3179 && (REAL_VALUE_NEGATIVE
3180 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3181#endif
3182 )
3183 x = arg1;
3184 else if (code == EQ
3185 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_INT
3186 && code == GE && STORE_FLAG_VALUE == -1)
3187#ifdef FLOAT_STORE_FLAG_VALUE
3188 || (GET_MODE_CLASS (GET_MODE (arg1)) == MODE_FLOAT
3189 && (REAL_VALUE_NEGATIVE
3190 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3191#endif
3192 )
3193 x = arg1, reverse_code = 1;
3194 }
3195
3196 /* ??? We could also check for
3197
3198 (ne (and (eq (...) (const_int 1))) (const_int 0))
3199
3200 and related forms, but let's wait until we see them occurring. */
3201
3202 if (x == 0)
3203 /* Look up ARG1 in the hash table and see if it has an equivalence
3204 that lets us see what is being compared. */
3205 p = lookup (arg1, safe_hash (arg1, GET_MODE (arg1)) & HASH_MASK,
3206 GET_MODE (arg1));
3207 if (p)
3208 {
3209 p = p->first_same_value;
3210
3211 /* If what we compare is already known to be constant, that is as
3212 good as it gets.
3213 We need to break the loop in this case, because otherwise we
3214 can have an infinite loop when looking at a reg that is known
3215 to be a constant which is the same as a comparison of a reg
3216 against zero which appears later in the insn stream, which in
3217 turn is constant and the same as the comparison of the first reg
3218 against zero... */
3219 if (p->is_const)
3220 break;
3221 }
3222
3223 for (; p; p = p->next_same_value)
3224 {
3225 enum machine_mode inner_mode = GET_MODE (p->exp);
3226
3227 /* If the entry isn't valid, skip it. */
3228 if (! exp_equiv_p (p->exp, p->exp, 1, 0))
3229 continue;
3230
3231 if (GET_CODE (p->exp) == COMPARE
3232 /* Another possibility is that this machine has a compare insn
3233 that includes the comparison code. In that case, ARG1 would
3234 be equivalent to a comparison operation that would set ARG1 to
3235 either STORE_FLAG_VALUE or zero. If this is an NE operation,
3236 ORIG_CODE is the actual comparison being done; if it is an EQ,
3237 we must reverse ORIG_CODE. On machine with a negative value
3238 for STORE_FLAG_VALUE, also look at LT and GE operations. */
3239 || ((code == NE
3240 || (code == LT
3241 && GET_MODE_CLASS (inner_mode) == MODE_INT
3242 && (GET_MODE_BITSIZE (inner_mode)
3243 <= HOST_BITS_PER_WIDE_INT)
3244 && (STORE_FLAG_VALUE
3245 & ((HOST_WIDE_INT) 1
3246 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3247#ifdef FLOAT_STORE_FLAG_VALUE
3248 || (code == LT
3249 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3250 && (REAL_VALUE_NEGATIVE
3251 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3252#endif
3253 )
3254 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<'))
3255 {
3256 x = p->exp;
3257 break;
3258 }
3259 else if ((code == EQ
3260 || (code == GE
3261 && GET_MODE_CLASS (inner_mode) == MODE_INT
3262 && (GET_MODE_BITSIZE (inner_mode)
3263 <= HOST_BITS_PER_WIDE_INT)
3264 && (STORE_FLAG_VALUE
3265 & ((HOST_WIDE_INT) 1
3266 << (GET_MODE_BITSIZE (inner_mode) - 1))))
3267#ifdef FLOAT_STORE_FLAG_VALUE
3268 || (code == GE
3269 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
3270 && (REAL_VALUE_NEGATIVE
3271 (FLOAT_STORE_FLAG_VALUE (GET_MODE (arg1)))))
3272#endif
3273 )
3274 && GET_RTX_CLASS (GET_CODE (p->exp)) == '<')
3275 {
3276 reverse_code = 1;
3277 x = p->exp;
3278 break;
3279 }
3280
3281 /* If this is fp + constant, the equivalent is a better operand since
3282 it may let us predict the value of the comparison. */
3283 else if (NONZERO_BASE_PLUS_P (p->exp))
3284 {
3285 arg1 = p->exp;
3286 continue;
3287 }
3288 }
3289
3290 /* If we didn't find a useful equivalence for ARG1, we are done.
3291 Otherwise, set up for the next iteration. */
3292 if (x == 0)
3293 break;
3294
3295 /* If we need to reverse the comparison, make sure that that is
3296 possible -- we can't necessarily infer the value of GE from LT
3297 with floating-point operands. */
3298 if (reverse_code)
3299 {
3300 enum rtx_code reversed = reversed_comparison_code (x, NULL_RTX);
3301 if (reversed == UNKNOWN)
3302 break;
3303 else code = reversed;
3304 }
3305 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
3306 code = GET_CODE (x);
3307 arg1 = XEXP (x, 0), arg2 = XEXP (x, 1);
3308 }
3309
3310 /* Return our results. Return the modes from before fold_rtx
3311 because fold_rtx might produce const_int, and then it's too late. */
3312 *pmode1 = GET_MODE (arg1), *pmode2 = GET_MODE (arg2);
3313 *parg1 = fold_rtx (arg1, 0), *parg2 = fold_rtx (arg2, 0);
3314
3315 return code;
3316}
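
/* Worked example with hypothetical insns: given CODE == NE,
   *PARG1 == (cc0) and *PARG2 == (const_int 0), if the table records
   that cc0 was set from (compare (reg:SI 100) (reg:SI 101)), the loop
   above rewrites the arguments so that *PARG1 becomes (reg:SI 100)
   and *PARG2 becomes (reg:SI 101), with NE still relating them.  Had
   the equivalence instead been an EQ-style store-flag operation, the
   code would have been reversed.  */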
3317
3318
3319/* If X is a nontrivial arithmetic operation on an argument
3320 for which a constant value can be determined, return
3321 the result of operating on that value, as a constant.
3322 Otherwise, return X, possibly with one or more operands
3323 modified by recursive calls to this function.
3324
3325 If X is a register whose contents are known, we do NOT
3326 return those contents here. equiv_constant is called to
3327 perform that task.
3328
3329 INSN is the insn that we may be modifying. If it is 0, make a copy
3330 of X before modifying it. */
3331
3332static rtx
3333fold_rtx (x, insn)
3334 rtx x;
3335 rtx insn;
3336{
3337 enum rtx_code code;
3338 enum machine_mode mode;
3339 const char *fmt;
3340 int i;
3341 rtx new = 0;
3342 int copied = 0;
3343 int must_swap = 0;
3344
3345 /* Folded equivalents of first two operands of X. */
3346 rtx folded_arg0;
3347 rtx folded_arg1;
3348
3349 /* Constant equivalents of first three operands of X;
3350 0 when no such equivalent is known. */
3351 rtx const_arg0;
3352 rtx const_arg1;
3353 rtx const_arg2;
3354
3355 /* The mode of the first operand of X. We need this for sign and zero
3356 extends. */
3357 enum machine_mode mode_arg0;
3358
3359 if (x == 0)
3360 return x;
3361
3362 mode = GET_MODE (x);
3363 code = GET_CODE (x);
3364 switch (code)
3365 {
3366 case CONST:
3367 case CONST_INT:
3368 case CONST_DOUBLE:
3369 case CONST_VECTOR:
3370 case SYMBOL_REF:
3371 case LABEL_REF:
3372 case REG:
3373 /* No use simplifying an EXPR_LIST
3374 since they are used only for lists of args
3375 in a function call's REG_EQUAL note. */
3376 case EXPR_LIST:
3377 /* Changing anything inside an ADDRESSOF is incorrect; we don't
3378 want to (e.g.,) make (addressof (const_int 0)) just because
3379 the location is known to be zero. */
3380 case ADDRESSOF:
3381 return x;
3382
3383#ifdef HAVE_cc0
3384 case CC0:
3385 return prev_insn_cc0;
3386#endif
3387
3388 case PC:
3389 /* If the next insn is a CODE_LABEL followed by a jump table,
3390 PC's value is a LABEL_REF pointing to that label. That
3391 lets us fold switch statements on the VAX. */
3392 if (insn && GET_CODE (insn) == JUMP_INSN)
3393 {
3394 rtx next = next_nonnote_insn (insn);
3395
3396 if (next && GET_CODE (next) == CODE_LABEL
3397 && NEXT_INSN (next) != 0
3398 && GET_CODE (NEXT_INSN (next)) == JUMP_INSN
3399 && (GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_VEC
3400 || GET_CODE (PATTERN (NEXT_INSN (next))) == ADDR_DIFF_VEC))
3401 return gen_rtx_LABEL_REF (Pmode, next);
3402 }
3403 break;
3404
3405 case SUBREG:
3406 /* See if we previously assigned a constant value to this SUBREG. */
3407 if ((new = lookup_as_function (x, CONST_INT)) != 0
3408 || (new = lookup_as_function (x, CONST_DOUBLE)) != 0)
3409 return new;
3410
3411 /* If this is a paradoxical SUBREG, we have no idea what value the
3412 extra bits would have. However, if the operand is equivalent
3413 to a SUBREG whose operand is the same as our mode, and all the
3414 modes are within a word, we can just use the inner operand
3415 because these SUBREGs just say how to treat the register.
3416
3417 Similarly if we find an integer constant. */
3418
3419 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3420 {
3421 enum machine_mode imode = GET_MODE (SUBREG_REG (x));
3422 struct table_elt *elt;
3423
3424 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD
3425 && GET_MODE_SIZE (imode) <= UNITS_PER_WORD
3426 && (elt = lookup (SUBREG_REG (x), HASH (SUBREG_REG (x), imode),
3427 imode)) != 0)
3428 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
3429 {
3430 if (CONSTANT_P (elt->exp)
3431 && GET_MODE (elt->exp) == VOIDmode)
3432 return elt->exp;
3433
3434 if (GET_CODE (elt->exp) == SUBREG
3435 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3436 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3437 return copy_rtx (SUBREG_REG (elt->exp));
3438 }
3439
3440 return x;
3441 }
3442
3443 /* Fold SUBREG_REG. If it changed, see if we can simplify the SUBREG.
3444 We might be able to if the SUBREG is extracting a single word in an
3445 integral mode or extracting the low part. */
3446
3447 folded_arg0 = fold_rtx (SUBREG_REG (x), insn);
3448 const_arg0 = equiv_constant (folded_arg0);
3449 if (const_arg0)
3450 folded_arg0 = const_arg0;
3451
3452 if (folded_arg0 != SUBREG_REG (x))
3453 {
3454 new = simplify_subreg (mode, folded_arg0,
3455 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
3456 if (new)
3457 return new;
3458 }
3459
3460 /* If this is a narrowing SUBREG and our operand is a REG, see if
3461 we can find an equivalence for REG that is an arithmetic operation
3462 in a wider mode where both operands are paradoxical SUBREGs
3463 from objects of our result mode. In that case, we couldn't report
3464 an equivalent value for that operation, since we don't know what the
3465 extra bits will be. But we can find an equivalence for this SUBREG
3466 by folding that operation in the narrow mode. This allows us to
3467 fold arithmetic in narrow modes when the machine only supports
3468 word-sized arithmetic.
3469
3470 Also look for a case where we have a SUBREG whose operand is the
3471 same as our result. If both modes are smaller than a word, we
3472 are simply interpreting a register in different modes and we
3473 can use the inner value. */
3474
3475 if (GET_CODE (folded_arg0) == REG
3476 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (folded_arg0))
3477 && subreg_lowpart_p (x))
3478 {
3479 struct table_elt *elt;
3480
3481 /* We can use HASH here since we know that canon_hash won't be
3482 called. */
3483 elt = lookup (folded_arg0,
3484 HASH (folded_arg0, GET_MODE (folded_arg0)),
3485 GET_MODE (folded_arg0));
3486
3487 if (elt)
3488 elt = elt->first_same_value;
3489
3490 for (; elt; elt = elt->next_same_value)
3491 {
3492 enum rtx_code eltcode = GET_CODE (elt->exp);
3493
3494 /* Just check for unary and binary operations. */
3495 if (GET_RTX_CLASS (GET_CODE (elt->exp)) == '1'
3496 && GET_CODE (elt->exp) != SIGN_EXTEND
3497 && GET_CODE (elt->exp) != ZERO_EXTEND
3498 && GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3499 && GET_MODE (SUBREG_REG (XEXP (elt->exp, 0))) == mode
3500 && (GET_MODE_CLASS (mode)
3501 == GET_MODE_CLASS (GET_MODE (XEXP (elt->exp, 0)))))
3502 {
3503 rtx op0 = SUBREG_REG (XEXP (elt->exp, 0));
3504
3505 if (GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3506 op0 = fold_rtx (op0, NULL_RTX);
3507
3508 op0 = equiv_constant (op0);
3509 if (op0)
3510 new = simplify_unary_operation (GET_CODE (elt->exp), mode,
3511 op0, mode);
3512 }
3513 else if ((GET_RTX_CLASS (GET_CODE (elt->exp)) == '2'
3514 || GET_RTX_CLASS (GET_CODE (elt->exp)) == 'c')
3515 && eltcode != DIV && eltcode != MOD
3516 && eltcode != UDIV && eltcode != UMOD
3517 && eltcode != ASHIFTRT && eltcode != LSHIFTRT
3518 && eltcode != ROTATE && eltcode != ROTATERT
3519 && ((GET_CODE (XEXP (elt->exp, 0)) == SUBREG
3520 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 0)))
3521 == mode))
3522 || CONSTANT_P (XEXP (elt->exp, 0)))
3523 && ((GET_CODE (XEXP (elt->exp, 1)) == SUBREG
3524 && (GET_MODE (SUBREG_REG (XEXP (elt->exp, 1)))
3525 == mode))
3526 || CONSTANT_P (XEXP (elt->exp, 1))))
3527 {
3528 rtx op0 = gen_lowpart_common (mode, XEXP (elt->exp, 0));
3529 rtx op1 = gen_lowpart_common (mode, XEXP (elt->exp, 1));
3530
3531 if (op0 && GET_CODE (op0) != REG && ! CONSTANT_P (op0))
3532 op0 = fold_rtx (op0, NULL_RTX);
3533
3534 if (op0)
3535 op0 = equiv_constant (op0);
3536
3537 if (op1 && GET_CODE (op1) != REG && ! CONSTANT_P (op1))
3538 op1 = fold_rtx (op1, NULL_RTX);
3539
3540 if (op1)
3541 op1 = equiv_constant (op1);
3542
3543 /* If we are looking for the low SImode part of
3544 (ashift:DI c (const_int 32)), it doesn't work
3545 to compute that in SImode, because a 32-bit shift
3546 in SImode is unpredictable. We know the value is 0. */
3547 if (op0 && op1
3548 && GET_CODE (elt->exp) == ASHIFT
3549 && GET_CODE (op1) == CONST_INT
3550 && INTVAL (op1) >= GET_MODE_BITSIZE (mode))
3551 {
3552 if (INTVAL (op1) < GET_MODE_BITSIZE (GET_MODE (elt->exp)))
3553
3554 /* If the count fits in the inner mode's width,
3555 but exceeds the outer mode's width,
3556 the value will get truncated to 0
3557 by the subreg. */
3558 new = const0_rtx;
3559 else
3560 /* If the count exceeds even the inner mode's width,
3561 don't fold this expression. */
3562 new = 0;
3563 }
3564 else if (op0 && op1)
3565 new = simplify_binary_operation (GET_CODE (elt->exp), mode,
3566 op0, op1);
3567 }
3568
3569 else if (GET_CODE (elt->exp) == SUBREG
3570 && GET_MODE (SUBREG_REG (elt->exp)) == mode
3571 && (GET_MODE_SIZE (GET_MODE (folded_arg0))
3572 <= UNITS_PER_WORD)
3573 && exp_equiv_p (elt->exp, elt->exp, 1, 0))
3574 new = copy_rtx (SUBREG_REG (elt->exp));
3575
3576 if (new)
3577 return new;
3578 }
3579 }
3580
3581 return x;
3582
3583 case NOT:
3584 case NEG:
3585 /* If we have (NOT Y), see if Y is known to be (NOT Z).
3586 If so, (NOT Y) simplifies to Z. Similarly for NEG. */
3587 new = lookup_as_function (XEXP (x, 0), code);
3588 if (new)
3589 return fold_rtx (copy_rtx (XEXP (new, 0)), insn);
3590 break;
3591
3592 case MEM:
3593 /* If we are not actually processing an insn, don't try to find the
3594 best address. Not only don't we care, but we could modify the
3595 MEM in an invalid way since we have no insn to validate against. */
3596 if (insn != 0)
3597 find_best_addr (insn, &XEXP (x, 0), GET_MODE (x));
3598
3599 {
3600 /* Even if we don't fold in the insn itself,
3601 we can safely do so here, in hopes of getting a constant. */
3602 rtx addr = fold_rtx (XEXP (x, 0), NULL_RTX);
3603 rtx base = 0;
3604 HOST_WIDE_INT offset = 0;
3605
3606 if (GET_CODE (addr) == REG
3607 && REGNO_QTY_VALID_P (REGNO (addr)))
3608 {
3609 int addr_q = REG_QTY (REGNO (addr));
3610 struct qty_table_elem *addr_ent = &qty_table[addr_q];
3611
3612 if (GET_MODE (addr) == addr_ent->mode
3613 && addr_ent->const_rtx != NULL_RTX)
3614 addr = addr_ent->const_rtx;
3615 }
3616
3617 /* If address is constant, split it into a base and integer offset. */
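	/* For example, (const:SI (plus:SI (symbol_ref:SI "x")
	   (const_int 8))) gives BASE = (symbol_ref:SI "x") and
	   OFFSET = 8; the symbol name is purely illustrative. */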
3618 if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
3619 base = addr;
3620 else if (GET_CODE (addr) == CONST && GET_CODE (XEXP (addr, 0)) == PLUS
3621 && GET_CODE (XEXP (XEXP (addr, 0), 1)) == CONST_INT)
3622 {
3623 base = XEXP (XEXP (addr, 0), 0);
3624 offset = INTVAL (XEXP (XEXP (addr, 0), 1));
3625 }
3626 else if (GET_CODE (addr) == LO_SUM
3627 && GET_CODE (XEXP (addr, 1)) == SYMBOL_REF)
3628 base = XEXP (addr, 1);
3629 else if (GET_CODE (addr) == ADDRESSOF)
3630 return change_address (x, VOIDmode, addr);
3631
3632 /* If this is a constant pool reference, we can fold it into its
3633 constant to allow better value tracking. */
3634 if (base && GET_CODE (base) == SYMBOL_REF
3635 && CONSTANT_POOL_ADDRESS_P (base))
3636 {
3637 rtx constant = get_pool_constant (base);
3638 enum machine_mode const_mode = get_pool_mode (base);
3639 rtx new;
3640
3641 if (CONSTANT_P (constant) && GET_CODE (constant) != CONST_INT)
3642 constant_pool_entries_cost = COST (constant);
3643
3644 /* If we are loading the full constant, we have an equivalence. */
3645 if (offset == 0 && mode == const_mode)
3646 return constant;
3647
3648 /* If this actually isn't a constant (weird!), we can't do
3649 anything. Otherwise, handle the two most common cases:
3650 extracting a word from a multi-word constant, and extracting
3651 the low-order bits. Other cases don't seem common enough to
3652 worry about. */
3653 if (! CONSTANT_P (constant))
3654 return x;
3655
3656 if (GET_MODE_CLASS (mode) == MODE_INT
3657 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3658 && offset % UNITS_PER_WORD == 0
3659 && (new = operand_subword (constant,
3660 offset / UNITS_PER_WORD,
3661 0, const_mode)) != 0)
3662 return new;
3663
3664 if (((BYTES_BIG_ENDIAN
3665 && offset == GET_MODE_SIZE (GET_MODE (constant)) - 1)
3666 || (! BYTES_BIG_ENDIAN && offset == 0))
3667 && (new = gen_lowpart_if_possible (mode, constant)) != 0)
3668 return new;
3669 }
3670
3671 /* If this is a reference to a label at a known position in a jump
3672 table, we also know its value. */
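	/* For instance, a load from (label_ref L) + 8, where L labels an
	   ADDR_VEC of SImode entries, is simply entry number 2 of the
	   vector (offset 8 / mode size 4). */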
3673 if (base && GET_CODE (base) == LABEL_REF)
3674 {
3675 rtx label = XEXP (base, 0);
3676 rtx table_insn = NEXT_INSN (label);
3677
3678 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3679 && GET_CODE (PATTERN (table_insn)) == ADDR_VEC)
3680 {
3681 rtx table = PATTERN (table_insn);
3682
3683 if (offset >= 0
3684 && (offset / GET_MODE_SIZE (GET_MODE (table))
3685 < XVECLEN (table, 0)))
3686 return XVECEXP (table, 0,
3687 offset / GET_MODE_SIZE (GET_MODE (table)));
3688 }
3689 if (table_insn && GET_CODE (table_insn) == JUMP_INSN
3690 && GET_CODE (PATTERN (table_insn)) == ADDR_DIFF_VEC)
3691 {
3692 rtx table = PATTERN (table_insn);
3693
3694 if (offset >= 0
3695 && (offset / GET_MODE_SIZE (GET_MODE (table))
3696 < XVECLEN (table, 1)))
3697 {
3698 offset /= GET_MODE_SIZE (GET_MODE (table));
3699 new = gen_rtx_MINUS (Pmode, XVECEXP (table, 1, offset),
3700 XEXP (table, 0));
3701
3702 if (GET_MODE (table) != Pmode)
3703 new = gen_rtx_TRUNCATE (GET_MODE (table), new);
3704
3705 /* Indicate this is a constant. This isn't a
3706 valid form of CONST, but it will only be used
3707 to fold the next insns and then discarded, so
3708 it should be safe.
3709
3710 Note this expression must be explicitly discarded,
3711 by cse_insn, else it may end up in a REG_EQUAL note
3712 and "escape" to cause problems elsewhere. */
3713 return gen_rtx_CONST (GET_MODE (new), new);
3714 }
3715 }
3716 }
3717
3718 return x;
3719 }
3720
3721#ifdef NO_FUNCTION_CSE
3722 case CALL:
3723 if (CONSTANT_P (XEXP (XEXP (x, 0), 0)))
3724 return x;
3725 break;
3726#endif
3727
3728 case ASM_OPERANDS:
3729 for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
3730 validate_change (insn, &ASM_OPERANDS_INPUT (x, i),
3731 fold_rtx (ASM_OPERANDS_INPUT (x, i), insn), 0);
3732 break;
3733
3734 default:
3735 break;
3736 }
3737
3738 const_arg0 = 0;
3739 const_arg1 = 0;
3740 const_arg2 = 0;
3741 mode_arg0 = VOIDmode;
3742
3743 /* Try folding our operands.
3744 Then see which ones have constant values known. */
3745
3746 fmt = GET_RTX_FORMAT (code);
3747 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3748 if (fmt[i] == 'e')
3749 {
3750 rtx arg = XEXP (x, i);
3751 rtx folded_arg = arg, const_arg = 0;
3752 enum machine_mode mode_arg = GET_MODE (arg);
3753 rtx cheap_arg, expensive_arg;
3754 rtx replacements[2];
3755 int j;
3756
3757 /* Most arguments are cheap, so handle them specially. */
3758 switch (GET_CODE (arg))
3759 {
3760 case REG:
3761 /* This is the same as calling equiv_constant; it is duplicated
3762 here for speed. */
3763 if (REGNO_QTY_VALID_P (REGNO (arg)))
3764 {
3765 int arg_q = REG_QTY (REGNO (arg));
3766 struct qty_table_elem *arg_ent = &qty_table[arg_q];
3767
3768 if (arg_ent->const_rtx != NULL_RTX
3769 && GET_CODE (arg_ent->const_rtx) != REG
3770 && GET_CODE (arg_ent->const_rtx) != PLUS)
3771 const_arg
3772 = gen_lowpart_if_possible (GET_MODE (arg),
3773 arg_ent->const_rtx);
3774 }
3775 break;
3776
3777 case CONST:
3778 case CONST_INT:
3779 case SYMBOL_REF:
3780 case LABEL_REF:
3781 case CONST_DOUBLE:
3782 case CONST_VECTOR:
3783 const_arg = arg;
3784 break;
3785
3786#ifdef HAVE_cc0
3787 case CC0:
3788 folded_arg = prev_insn_cc0;
3789 mode_arg = prev_insn_cc0_mode;
3790 const_arg = equiv_constant (folded_arg);
3791 break;
3792#endif
3793
3794 default:
3795 folded_arg = fold_rtx (arg, insn);
3796 const_arg = equiv_constant (folded_arg);
3797 }
3798
3799 /* For the first three operands, see if the operand
3800 is constant or equivalent to a constant. */
3801 switch (i)
3802 {
3803 case 0:
3804 folded_arg0 = folded_arg;
3805 const_arg0 = const_arg;
3806 mode_arg0 = mode_arg;
3807 break;
3808 case 1:
3809 folded_arg1 = folded_arg;
3810 const_arg1 = const_arg;
3811 break;
3812 case 2:
3813 const_arg2 = const_arg;
3814 break;
3815 }
3816
3817 /* Pick the least expensive of the folded argument and an
3818 equivalent constant argument. */
3819 if (const_arg == 0 || const_arg == folded_arg
3820 || COST_IN (const_arg, code) > COST_IN (folded_arg, code))
3821 cheap_arg = folded_arg, expensive_arg = const_arg;
3822 else
3823 cheap_arg = const_arg, expensive_arg = folded_arg;
3824
3825 /* Try to replace the operand with the cheapest of the two
3826 possibilities. If it doesn't work and this is either of the first
3827 two operands of a commutative operation, try swapping them.
3828	 If THAT fails, try the more expensive one, provided it is cheaper
3829 than what is already there. */
3830
3831 if (cheap_arg == XEXP (x, i))
3832 continue;
3833
3834 if (insn == 0 && ! copied)
3835 {
3836 x = copy_rtx (x);
3837 copied = 1;
3838 }
3839
3840 /* Order the replacements from cheapest to most expensive. */
3841 replacements[0] = cheap_arg;
3842 replacements[1] = expensive_arg;
3843
3844 for (j = 0; j < 2 && replacements[j]; j++)
3845 {
3846 int old_cost = COST_IN (XEXP (x, i), code);
3847 int new_cost = COST_IN (replacements[j], code);
3848
3849 /* Stop if what existed before was cheaper. Prefer constants
3850 in the case of a tie. */
3851 if (new_cost > old_cost
3852 || (new_cost == old_cost && CONSTANT_P (XEXP (x, i))))
3853 break;
3854
3855 if (validate_change (insn, &XEXP (x, i), replacements[j], 0))
3856 break;
3857
3858 if (code == NE || code == EQ || GET_RTX_CLASS (code) == 'c'
3859 || code == LTGT || code == UNEQ || code == ORDERED
3860 || code == UNORDERED)
3861 {
3862 validate_change (insn, &XEXP (x, i), XEXP (x, 1 - i), 1);
3863 validate_change (insn, &XEXP (x, 1 - i), replacements[j], 1);
3864
3865 if (apply_change_group ())
3866 {
3867 /* Swap them back to be invalid so that this loop can
3868 continue and flag them to be swapped back later. */
3869 rtx tem;
3870
3871 tem = XEXP (x, 0); XEXP (x, 0) = XEXP (x, 1);
3872 XEXP (x, 1) = tem;
3873 must_swap = 1;
3874 break;
3875 }
3876 }
3877 }
3878 }
3879
3880 else
3881 {
3882 if (fmt[i] == 'E')
3883 /* Don't try to fold inside of a vector of expressions.
3884 Doing nothing is harmless. */
3885 {;}
3886 }
3887
3888 /* If a commutative operation, place a constant integer as the second
3889 operand unless the first operand is also a constant integer. Otherwise,
3890 place any constant second unless the first operand is also a constant. */
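  /* E.g. (plus:SI (const_int 4) (reg:SI 100)) is rewritten here as
     (plus:SI (reg:SI 100) (const_int 4)); the register number is
     purely illustrative. */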
3891
3892 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c'
3893 || code == LTGT || code == UNEQ || code == ORDERED
3894 || code == UNORDERED)
3895 {
3896 if (must_swap || (const_arg0
3897 && (const_arg1 == 0
3898 || (GET_CODE (const_arg0) == CONST_INT
3899 && GET_CODE (const_arg1) != CONST_INT))))
3900 {
3901 rtx tem = XEXP (x, 0);
3902
3903 if (insn == 0 && ! copied)
3904 {
3905 x = copy_rtx (x);
3906 copied = 1;
3907 }
3908
3909 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
3910 validate_change (insn, &XEXP (x, 1), tem, 1);
3911 if (apply_change_group ())
3912 {
3913 tem = const_arg0, const_arg0 = const_arg1, const_arg1 = tem;
3914 tem = folded_arg0, folded_arg0 = folded_arg1, folded_arg1 = tem;
3915 }
3916 }
3917 }
3918
3919 /* If X is an arithmetic operation, see if we can simplify it. */
3920
3921 switch (GET_RTX_CLASS (code))
3922 {
3923 case '1':
3924 {
3925 int is_const = 0;
3926
3927 /* We can't simplify extension ops unless we know the
3928 original mode. */
3929 if ((code == ZERO_EXTEND || code == SIGN_EXTEND)
3930 && mode_arg0 == VOIDmode)
3931 break;
3932
3933 /* If we had a CONST, strip it off and put it back later if we
3934 fold. */
3935 if (const_arg0 != 0 && GET_CODE (const_arg0) == CONST)
3936 is_const = 1, const_arg0 = XEXP (const_arg0, 0);
3937
3938 new = simplify_unary_operation (code, mode,
3939 const_arg0 ? const_arg0 : folded_arg0,
3940 mode_arg0);
3941 if (new != 0 && is_const)
3942 new = gen_rtx_CONST (mode, new);
3943 }
3944 break;
3945
3946 case '<':
3947 /* See what items are actually being compared and set FOLDED_ARG[01]
3948 to those values and CODE to the actual comparison code. If any are
3949 constant, set CONST_ARG0 and CONST_ARG1 appropriately. We needn't
3950 do anything if both operands are already known to be constant. */
3951
3952 if (const_arg0 == 0 || const_arg1 == 0)
3953 {
3954 struct table_elt *p0, *p1;
3955 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
3956 enum machine_mode mode_arg1;
3957
3958#ifdef FLOAT_STORE_FLAG_VALUE
3959 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
3960 {
3961 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
3962 (FLOAT_STORE_FLAG_VALUE (mode), mode));
3963 false_rtx = CONST0_RTX (mode);
3964 }
3965#endif
3966
3967 code = find_comparison_args (code, &folded_arg0, &folded_arg1,
3968 &mode_arg0, &mode_arg1);
3969 const_arg0 = equiv_constant (folded_arg0);
3970 const_arg1 = equiv_constant (folded_arg1);
3971
3972 /* If the mode is VOIDmode or a MODE_CC mode, we don't know
3973 what kinds of things are being compared, so we can't do
3974 anything with this comparison. */
3975
3976 if (mode_arg0 == VOIDmode || GET_MODE_CLASS (mode_arg0) == MODE_CC)
3977 break;
3978
3979 /* If we do not now have two constants being compared, see
3980 if we can nevertheless deduce some things about the
3981 comparison. */
3982 if (const_arg0 == 0 || const_arg1 == 0)
3983 {
3984 /* Is FOLDED_ARG0 frame-pointer plus a constant? Or
3985 non-explicit constant? These aren't zero, but we
3986 don't know their sign. */
3987 if (const_arg1 == const0_rtx
3988 && (NONZERO_BASE_PLUS_P (folded_arg0)
3989#if 0 /* Sad to say, on sysvr4, #pragma weak can make a symbol address
3990 come out as 0. */
3991 || GET_CODE (folded_arg0) == SYMBOL_REF
3992#endif
3993 || GET_CODE (folded_arg0) == LABEL_REF
3994 || GET_CODE (folded_arg0) == CONST))
3995 {
3996 if (code == EQ)
3997 return false_rtx;
3998 else if (code == NE)
3999 return true_rtx;
4000 }
4001
4002 /* See if the two operands are the same. */
4003
4004 if (folded_arg0 == folded_arg1
4005 || (GET_CODE (folded_arg0) == REG
4006 && GET_CODE (folded_arg1) == REG
4007 && (REG_QTY (REGNO (folded_arg0))
4008 == REG_QTY (REGNO (folded_arg1))))
4009 || ((p0 = lookup (folded_arg0,
4010 (safe_hash (folded_arg0, mode_arg0)
4011 & HASH_MASK), mode_arg0))
4012 && (p1 = lookup (folded_arg1,
4013 (safe_hash (folded_arg1, mode_arg0)
4014 & HASH_MASK), mode_arg0))
4015 && p0->first_same_value == p1->first_same_value))
4016 {
4017 /* Sadly two equal NaNs are not equivalent. */
4018 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4019 || ! FLOAT_MODE_P (mode_arg0)
4020 || flag_unsafe_math_optimizations)
4021 return ((code == EQ || code == LE || code == GE
4022 || code == LEU || code == GEU || code == UNEQ
4023 || code == UNLE || code == UNGE || code == ORDERED)
4024 ? true_rtx : false_rtx);
4025		  /* Take care of the FP compares we can resolve. */
4026 if (code == UNEQ || code == UNLE || code == UNGE)
4027 return true_rtx;
4028 if (code == LTGT || code == LT || code == GT)
4029 return false_rtx;
4030 }
4031
4032 /* If FOLDED_ARG0 is a register, see if the comparison we are
4033 doing now is either the same as we did before or the reverse
4034 (we only check the reverse if not floating-point). */
4035 else if (GET_CODE (folded_arg0) == REG)
4036 {
4037 int qty = REG_QTY (REGNO (folded_arg0));
4038
4039 if (REGNO_QTY_VALID_P (REGNO (folded_arg0)))
4040 {
4041 struct qty_table_elem *ent = &qty_table[qty];
4042
4043 if ((comparison_dominates_p (ent->comparison_code, code)
4044 || (! FLOAT_MODE_P (mode_arg0)
4045 && comparison_dominates_p (ent->comparison_code,
4046 reverse_condition (code))))
4047 && (rtx_equal_p (ent->comparison_const, folded_arg1)
4048 || (const_arg1
4049 && rtx_equal_p (ent->comparison_const,
4050 const_arg1))
4051 || (GET_CODE (folded_arg1) == REG
4052 && (REG_QTY (REGNO (folded_arg1)) == ent->comparison_qty))))
4053 return (comparison_dominates_p (ent->comparison_code, code)
4054 ? true_rtx : false_rtx);
4055 }
4056 }
4057 }
4058 }
4059
4060 /* If we are comparing against zero, see if the first operand is
4061 equivalent to an IOR with a constant. If so, we may be able to
4062 determine the result of this comparison. */
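      /* Sketch (hypothetical register): if FOLDED_ARG0 is known equal
	 to (ior:SI (reg:SI 100) (const_int 4)), it cannot be zero, so
	 EQ folds to false and NE to true; if the IOR'd constant has the
	 sign bit set, LT/LE and GT/GE can be resolved as well. */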
4063
4064 if (const_arg1 == const0_rtx)
4065 {
4066 rtx y = lookup_as_function (folded_arg0, IOR);
4067 rtx inner_const;
4068
4069 if (y != 0
4070 && (inner_const = equiv_constant (XEXP (y, 1))) != 0
4071 && GET_CODE (inner_const) == CONST_INT
4072 && INTVAL (inner_const) != 0)
4073 {
4074 int sign_bitnum = GET_MODE_BITSIZE (mode_arg0) - 1;
4075 int has_sign = (HOST_BITS_PER_WIDE_INT >= sign_bitnum
4076 && (INTVAL (inner_const)
4077 & ((HOST_WIDE_INT) 1 << sign_bitnum)));
4078 rtx true_rtx = const_true_rtx, false_rtx = const0_rtx;
4079
4080#ifdef FLOAT_STORE_FLAG_VALUE
4081 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
4082 {
4083 true_rtx = (CONST_DOUBLE_FROM_REAL_VALUE
4084 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4085 false_rtx = CONST0_RTX (mode);
4086 }
4087#endif
4088
4089 switch (code)
4090 {
4091 case EQ:
4092 return false_rtx;
4093 case NE:
4094 return true_rtx;
4095 case LT: case LE:
4096 if (has_sign)
4097 return true_rtx;
4098 break;
4099 case GT: case GE:
4100 if (has_sign)
4101 return false_rtx;
4102 break;
4103 default:
4104 break;
4105 }
4106 }
4107 }
4108
4109 new = simplify_relational_operation (code,
4110 (mode_arg0 != VOIDmode
4111 ? mode_arg0
4112 : (GET_MODE (const_arg0
4113 ? const_arg0
4114 : folded_arg0)
4115 != VOIDmode)
4116 ? GET_MODE (const_arg0
4117 ? const_arg0
4118 : folded_arg0)
4119 : GET_MODE (const_arg1
4120 ? const_arg1
4121 : folded_arg1)),
4122 const_arg0 ? const_arg0 : folded_arg0,
4123 const_arg1 ? const_arg1 : folded_arg1);
4124#ifdef FLOAT_STORE_FLAG_VALUE
4125 if (new != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT)
4126 {
4127 if (new == const0_rtx)
4128 new = CONST0_RTX (mode);
4129 else
4130 new = (CONST_DOUBLE_FROM_REAL_VALUE
4131 (FLOAT_STORE_FLAG_VALUE (mode), mode));
4132 }
4133#endif
4134 break;
4135
4136 case '2':
4137 case 'c':
4138 switch (code)
4139 {
4140 case PLUS:
4141 /* If the second operand is a LABEL_REF, see if the first is a MINUS
4142 with that LABEL_REF as its second operand. If so, the result is
4143 the first operand of that MINUS. This handles switches with an
4144 ADDR_DIFF_VEC table. */
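	  /* Sketch: (plus (minus (label_ref L2) (label_ref L1))
			   (label_ref L1))
	     folds to (label_ref L2), recovering a tablejump target. */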
4145 if (const_arg1 && GET_CODE (const_arg1) == LABEL_REF)
4146 {
4147 rtx y
4148 = GET_CODE (folded_arg0) == MINUS ? folded_arg0
4149 : lookup_as_function (folded_arg0, MINUS);
4150
4151 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4152 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg1, 0))
4153 return XEXP (y, 0);
4154
4155 /* Now try for a CONST of a MINUS like the above. */
4156 if ((y = (GET_CODE (folded_arg0) == CONST ? folded_arg0
4157 : lookup_as_function (folded_arg0, CONST))) != 0
4158 && GET_CODE (XEXP (y, 0)) == MINUS
4159 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4160 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg1, 0))
4161 return XEXP (XEXP (y, 0), 0);
4162 }
4163
4164 /* Likewise if the operands are in the other order. */
4165 if (const_arg0 && GET_CODE (const_arg0) == LABEL_REF)
4166 {
4167 rtx y
4168 = GET_CODE (folded_arg1) == MINUS ? folded_arg1
4169 : lookup_as_function (folded_arg1, MINUS);
4170
4171 if (y != 0 && GET_CODE (XEXP (y, 1)) == LABEL_REF
4172 && XEXP (XEXP (y, 1), 0) == XEXP (const_arg0, 0))
4173 return XEXP (y, 0);
4174
4175 /* Now try for a CONST of a MINUS like the above. */
4176 if ((y = (GET_CODE (folded_arg1) == CONST ? folded_arg1
4177 : lookup_as_function (folded_arg1, CONST))) != 0
4178 && GET_CODE (XEXP (y, 0)) == MINUS
4179 && GET_CODE (XEXP (XEXP (y, 0), 1)) == LABEL_REF
4180 && XEXP (XEXP (XEXP (y, 0), 1), 0) == XEXP (const_arg0, 0))
4181 return XEXP (XEXP (y, 0), 0);
4182 }
4183
4184 /* If second operand is a register equivalent to a negative
4185 CONST_INT, see if we can find a register equivalent to the
4186 positive constant. Make a MINUS if so. Don't do this for
4187 a non-negative constant since we might then alternate between
4188 choosing positive and negative constants. Having the positive
4189 constant previously-used is the more common case. Be sure
4190 the resulting constant is non-negative; if const_arg1 were
4191 the smallest negative number this would overflow: depending
4192 on the mode, this would either just be the same value (and
4193 hence not save anything) or be incorrect. */
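	  /* Sketch: (plus:SI (reg:SI 100) (const_int -4)) becomes
	     (minus:SI (reg:SI 100) (reg:SI 101)) if (reg:SI 101) is
	     known to hold 4 (register numbers invented). */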
4194 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT
4195 && INTVAL (const_arg1) < 0
4196 /* This used to test
4197
4198 -INTVAL (const_arg1) >= 0
4199
4200		 But the Sun V5.0 compilers mis-compiled that test. So
4201 instead we test for the problematic value in a more direct
4202 manner and hope the Sun compilers get it correct. */
4203 && INTVAL (const_arg1) !=
4204 ((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1))
4205 && GET_CODE (folded_arg1) == REG)
4206 {
4207 rtx new_const = GEN_INT (-INTVAL (const_arg1));
4208 struct table_elt *p
4209 = lookup (new_const, safe_hash (new_const, mode) & HASH_MASK,
4210 mode);
4211
4212 if (p)
4213 for (p = p->first_same_value; p; p = p->next_same_value)
4214 if (GET_CODE (p->exp) == REG)
4215 return simplify_gen_binary (MINUS, mode, folded_arg0,
4216 canon_reg (p->exp, NULL_RTX));
4217 }
4218 goto from_plus;
4219
4220 case MINUS:
4221 /* If we have (MINUS Y C), see if Y is known to be (PLUS Z C2).
4222 If so, produce (PLUS Z C2-C). */
4223 if (const_arg1 != 0 && GET_CODE (const_arg1) == CONST_INT)
4224 {
4225 rtx y = lookup_as_function (XEXP (x, 0), PLUS);
4226 if (y && GET_CODE (XEXP (y, 1)) == CONST_INT)
4227 return fold_rtx (plus_constant (copy_rtx (y),
4228 -INTVAL (const_arg1)),
4229 NULL_RTX);
4230 }
4231
4232 /* Fall through. */
4233
4234 from_plus:
4235 case SMIN: case SMAX: case UMIN: case UMAX:
4236 case IOR: case AND: case XOR:
4237 case MULT: case DIV: case UDIV:
4238 case ASHIFT: case LSHIFTRT: case ASHIFTRT:
4239 /* If we have (<op> <reg> <const_int>) for an associative OP and REG
4240 is known to be of similar form, we may be able to replace the
4241 operation with a combined operation. This may eliminate the
4242 intermediate operation if every use is simplified in this way.
4243 Note that the similar optimization done by combine.c only works
4244 if the intermediate operation's result has only one reference. */
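	  /* Illustrative sketch (registers invented): if (reg:SI 100) is
	     known equal to (ashift:SI (reg:SI 99) (const_int 2)), then
	     (ashift:SI (reg:SI 100) (const_int 3)) is replaced by
	     (ashift:SI (reg:SI 99) (const_int 5)). */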
4245
4246 if (GET_CODE (folded_arg0) == REG
4247 && const_arg1 && GET_CODE (const_arg1) == CONST_INT)
4248 {
4249 int is_shift
4250 = (code == ASHIFT || code == ASHIFTRT || code == LSHIFTRT);
4251 rtx y = lookup_as_function (folded_arg0, code);
4252 rtx inner_const;
4253 enum rtx_code associate_code;
4254 rtx new_const;
4255
4256 if (y == 0
4257 || 0 == (inner_const
4258 = equiv_constant (fold_rtx (XEXP (y, 1), 0)))
4259 || GET_CODE (inner_const) != CONST_INT
4260 /* If we have compiled a statement like
4261 "if (x == (x & mask1))", and now are looking at
4262 "x & mask2", we will have a case where the first operand
4263 of Y is the same as our first operand. Unless we detect
4264 this case, an infinite loop will result. */
4265 || XEXP (y, 0) == folded_arg0)
4266 break;
4267
4268 /* Don't associate these operations if they are a PLUS with the
4269 same constant and it is a power of two. These might be doable
4270 with a pre- or post-increment. Similarly for two subtracts of
4271 identical powers of two with post decrement. */
4272
4273 if (code == PLUS && INTVAL (const_arg1) == INTVAL (inner_const)
4274 && ((HAVE_PRE_INCREMENT
4275 && exact_log2 (INTVAL (const_arg1)) >= 0)
4276 || (HAVE_POST_INCREMENT
4277 && exact_log2 (INTVAL (const_arg1)) >= 0)
4278 || (HAVE_PRE_DECREMENT
4279 && exact_log2 (- INTVAL (const_arg1)) >= 0)
4280 || (HAVE_POST_DECREMENT
4281 && exact_log2 (- INTVAL (const_arg1)) >= 0)))
4282 break;
4283
4284 /* Compute the code used to compose the constants. For example,
4285 A/C1/C2 is A/(C1 * C2), so if CODE == DIV, we want MULT. */
4286
4287 associate_code
4288 = (code == MULT || code == DIV || code == UDIV ? MULT
4289 : is_shift || code == PLUS || code == MINUS ? PLUS : code);
4290
4291 new_const = simplify_binary_operation (associate_code, mode,
4292 const_arg1, inner_const);
4293
4294 if (new_const == 0)
4295 break;
4296
4297 /* If we are associating shift operations, don't let this
4298 produce a shift of the size of the object or larger.
4299 This could occur when we follow a sign-extend by a right
4300 shift on a machine that does a sign-extend as a pair
4301 of shifts. */
4302
4303 if (is_shift && GET_CODE (new_const) == CONST_INT
4304 && INTVAL (new_const) >= GET_MODE_BITSIZE (mode))
4305 {
4306 /* As an exception, we can turn an ASHIFTRT of this
4307 form into a shift of the number of bits - 1. */
4308 if (code == ASHIFTRT)
4309 new_const = GEN_INT (GET_MODE_BITSIZE (mode) - 1);
4310 else
4311 break;
4312 }
4313
4314 y = copy_rtx (XEXP (y, 0));
4315
4316 /* If Y contains our first operand (the most common way this
4317		 can happen is if Y is a MEM), we would go into an infinite
4318 loop if we tried to fold it. So don't in that case. */
4319
4320 if (! reg_mentioned_p (folded_arg0, y))
4321 y = fold_rtx (y, insn);
4322
4323 return simplify_gen_binary (code, mode, y, new_const);
4324 }
4325 break;
4326
4327 default:
4328 break;
4329 }
4330
4331 new = simplify_binary_operation (code, mode,
4332 const_arg0 ? const_arg0 : folded_arg0,
4333 const_arg1 ? const_arg1 : folded_arg1);
4334 break;
4335
4336 case 'o':
4337 /* (lo_sum (high X) X) is simply X. */
4338 if (code == LO_SUM && const_arg0 != 0
4339 && GET_CODE (const_arg0) == HIGH
4340 && rtx_equal_p (XEXP (const_arg0, 0), const_arg1))
4341 return const_arg1;
4342 break;
4343
4344 case '3':
4345 case 'b':
4346 new = simplify_ternary_operation (code, mode, mode_arg0,
4347 const_arg0 ? const_arg0 : folded_arg0,
4348 const_arg1 ? const_arg1 : folded_arg1,
4349 const_arg2 ? const_arg2 : XEXP (x, 2));
4350 break;
4351
4352 case 'x':
4353 /* Always eliminate CONSTANT_P_RTX at this stage. */
4354 if (code == CONSTANT_P_RTX)
4355 return (const_arg0 ? const1_rtx : const0_rtx);
4356 break;
4357 }
4358
4359 return new ? new : x;
4360}
4361
4362
4363/* Return a constant value currently equivalent to X.
4364 Return 0 if we don't know one. */
4365
4366static rtx
4367equiv_constant (x)
4368 rtx x;
4369{
4370 if (GET_CODE (x) == REG
4371 && REGNO_QTY_VALID_P (REGNO (x)))
4372 {
4373 int x_q = REG_QTY (REGNO (x));
4374 struct qty_table_elem *x_ent = &qty_table[x_q];
4375
4376 if (x_ent->const_rtx)
4377 x = gen_lowpart_if_possible (GET_MODE (x), x_ent->const_rtx);
4378 }
4379
4380 if (x == 0 || CONSTANT_P (x))
4381 return x;
4382
4383 /* If X is a MEM, try to fold it outside the context of any insn to see if
4384 it might be equivalent to a constant. That handles the case where it
4385 is a constant-pool reference. Then try to look it up in the hash table
4386 in case it is something whose value we have seen before. */
4387
4388 if (GET_CODE (x) == MEM)
4389 {
4390 struct table_elt *elt;
4391
4392 x = fold_rtx (x, NULL_RTX);
4393 if (CONSTANT_P (x))
4394 return x;
4395
4396 elt = lookup (x, safe_hash (x, GET_MODE (x)) & HASH_MASK, GET_MODE (x));
4397 if (elt == 0)
4398 return 0;
4399
4400 for (elt = elt->first_same_value; elt; elt = elt->next_same_value)
4401 if (elt->is_const && CONSTANT_P (elt->exp))
4402 return elt->exp;
4403 }
4404
4405 return 0;
4406}
4407
4408
4409/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a fixed-point
4410 number, return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
4411 least-significant part of X.
4412 MODE specifies how big a part of X to return.
4413
4414 If the requested operation cannot be done, 0 is returned.
4415
4416 This is similar to gen_lowpart in emit-rtl.c. */
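/* For instance, gen_lowpart_if_possible (QImode, x) with
   x == (reg:SI 100) typically yields (subreg:QI (reg:SI 100) 0) on a
   little-endian target; for a MEM the address is adjusted instead.
   The register number is purely illustrative. */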
4417
4418rtx
4419gen_lowpart_if_possible (mode, x)
4420 enum machine_mode mode;
4421 rtx x;
4422{
4423 rtx result = gen_lowpart_common (mode, x);
4424
4425 if (result)
4426 return result;
4427 else if (GET_CODE (x) == MEM)
4428 {
4429 /* This is the only other case we handle. */
4430 int offset = 0;
4431 rtx new;
4432
4433 if (WORDS_BIG_ENDIAN)
4434 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
4435 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4436 if (BYTES_BIG_ENDIAN)
4437 /* Adjust the address so that the address-after-the-data is
4438 unchanged. */
4439 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
4440 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
4441
4442 new = adjust_address_nv (x, mode, offset);
4443 if (! memory_address_p (mode, XEXP (new, 0)))
4444 return 0;
4445
4446 return new;
4447 }
4448 else
4449 return 0;
4450}
4451
4452
4453/* Given INSN, a jump insn, TAKEN indicates if we are following the "taken"
4454 branch. It will be zero if not.
4455
4456 In certain cases, this can cause us to add an equivalence. For example,
4457 if we are following the taken case of
4458 if (i == 2)
4459 we can add the fact that `i' and '2' are now equivalent.
4460
4461 In any case, we can record that this comparison was passed. If the same
4462 comparison is seen later, we will know its value. */
4463
4464static void
4465record_jump_equiv (insn, taken)
4466 rtx insn;
4467 int taken;
4468{
4469 int cond_known_true;
4470 rtx op0, op1;
4471 rtx set;
4472 enum machine_mode mode, mode0, mode1;
4473 int reversed_nonequality = 0;
4474 enum rtx_code code;
4475
4476 /* Ensure this is the right kind of insn. */
4477 if (! any_condjump_p (insn))
4478 return;
4479 set = pc_set (insn);
4480
4481 /* See if this jump condition is known true or false. */
4482 if (taken)
4483 cond_known_true = (XEXP (SET_SRC (set), 2) == pc_rtx);
4484 else
4485 cond_known_true = (XEXP (SET_SRC (set), 1) == pc_rtx);
4486
4487 /* Get the type of comparison being done and the operands being compared.
4488 If we had to reverse a non-equality condition, record that fact so we
4489 know that it isn't valid for floating-point. */
4490 code = GET_CODE (XEXP (SET_SRC (set), 0));
4491 op0 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 0), insn);
4492 op1 = fold_rtx (XEXP (XEXP (SET_SRC (set), 0), 1), insn);
4493
4494 code = find_comparison_args (code, &op0, &op1, &mode0, &mode1);
4495 if (! cond_known_true)
4496 {
4497 code = reversed_comparison_code_parts (code, op0, op1, insn);
4498
4499 /* Don't remember if we can't find the inverse. */
4500 if (code == UNKNOWN)
4501 return;
4502 }
4503
4504 /* The mode is the mode of the non-constant. */
4505 mode = mode0;
4506 if (mode1 != VOIDmode)
4507 mode = mode1;
4508
4509 record_jump_cond (code, mode, op0, op1, reversed_nonequality);
4510}
4511
4512/* We know that comparison CODE applied to OP0 and OP1 in MODE is true.
4513 REVERSED_NONEQUALITY is nonzero if CODE had to be swapped.
4514 Make any useful entries we can with that information. Called from
4515 above function and called recursively. */
4516
4517static void
4518record_jump_cond (code, mode, op0, op1, reversed_nonequality)
4519 enum rtx_code code;
4520 enum machine_mode mode;
4521 rtx op0, op1;
4522 int reversed_nonequality;
4523{
4524 unsigned op0_hash, op1_hash;
4525 int op0_in_memory, op1_in_memory;
4526 struct table_elt *op0_elt, *op1_elt;
4527
4528 /* If OP0 and OP1 are known equal, and either is a paradoxical SUBREG,
4529 we know that they are also equal in the smaller mode (this is also
4530 true for all smaller modes whether or not there is a SUBREG, but
4531 is not worth testing for with no SUBREG). */
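  /* Sketch (hypothetical registers): given
     (eq (subreg:DI (reg:SI 100) 0) (reg:DI 101)),
     we also record that (reg:SI 100) equals the SImode low part of
     (reg:DI 101). */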
4532
4533 /* Note that GET_MODE (op0) may not equal MODE. */
4534 if (code == EQ && GET_CODE (op0) == SUBREG
4535 && (GET_MODE_SIZE (GET_MODE (op0))
4536 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4537 {
4538 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4539 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4540
4541 record_jump_cond (code, mode, SUBREG_REG (op0),
4542 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4543 reversed_nonequality);
4544 }
4545
4546 if (code == EQ && GET_CODE (op1) == SUBREG
4547 && (GET_MODE_SIZE (GET_MODE (op1))
4548 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4549 {
4550 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4551 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4552
4553 record_jump_cond (code, mode, SUBREG_REG (op1),
4554 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4555 reversed_nonequality);
4556 }
4557
4558 /* Similarly, if this is an NE comparison, and either is a SUBREG
4559 making a smaller mode, we know the whole thing is also NE. */
4560
4561 /* Note that GET_MODE (op0) may not equal MODE;
4562 if we test MODE instead, we can get an infinite recursion
4563 alternating between two modes each wider than MODE. */
4564
4565 if (code == NE && GET_CODE (op0) == SUBREG
4566 && subreg_lowpart_p (op0)
4567 && (GET_MODE_SIZE (GET_MODE (op0))
4568 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
4569 {
4570 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
4571 rtx tem = gen_lowpart_if_possible (inner_mode, op1);
4572
4573 record_jump_cond (code, mode, SUBREG_REG (op0),
4574 tem ? tem : gen_rtx_SUBREG (inner_mode, op1, 0),
4575 reversed_nonequality);
4576 }
4577
4578 if (code == NE && GET_CODE (op1) == SUBREG
4579 && subreg_lowpart_p (op1)
4580 && (GET_MODE_SIZE (GET_MODE (op1))
4581 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1)))))
4582 {
4583 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op1));
4584 rtx tem = gen_lowpart_if_possible (inner_mode, op0);
4585
4586 record_jump_cond (code, mode, SUBREG_REG (op1),
4587 tem ? tem : gen_rtx_SUBREG (inner_mode, op0, 0),
4588 reversed_nonequality);
4589 }
4590
4591 /* Hash both operands. */
4592
4593 do_not_record = 0;
4594 hash_arg_in_memory = 0;
4595 op0_hash = HASH (op0, mode);
4596 op0_in_memory = hash_arg_in_memory;
4597
4598 if (do_not_record)
4599 return;
4600
4601 do_not_record = 0;
4602 hash_arg_in_memory = 0;
4603 op1_hash = HASH (op1, mode);
4604 op1_in_memory = hash_arg_in_memory;
4605
4606 if (do_not_record)
4607 return;
4608
4609 /* Look up both operands. */
4610 op0_elt = lookup (op0, op0_hash, mode);
4611 op1_elt = lookup (op1, op1_hash, mode);
4612
4613 /* If both operands are already equivalent or if they are not in the
4614 table but are identical, do nothing. */
4615 if ((op0_elt != 0 && op1_elt != 0
4616 && op0_elt->first_same_value == op1_elt->first_same_value)
4617 || op0 == op1 || rtx_equal_p (op0, op1))
4618 return;
4619
4620	 /* If we aren't setting two things equal, all we can do is save this
4621 comparison. Similarly if this is floating-point. In the latter
4622 case, OP1 might be zero and both -0.0 and 0.0 are equal to it.
4623 If we record the equality, we might inadvertently delete code
4624 whose intent was to change -0 to +0. */
4625
4626 if (code != EQ || FLOAT_MODE_P (GET_MODE (op0)))
4627 {
4628 struct qty_table_elem *ent;
4629 int qty;
4630
4631 /* If we reversed a floating-point comparison, if OP0 is not a
4632	 register, or if OP1 is neither a register nor a constant, we can't
4633 do anything. */
4634
4635 if (GET_CODE (op1) != REG)
4636 op1 = equiv_constant (op1);
4637
4638 if ((reversed_nonequality && FLOAT_MODE_P (mode))
4639 || GET_CODE (op0) != REG || op1 == 0)
4640 return;
4641
4642 /* Put OP0 in the hash table if it isn't already. This gives it a
4643 new quantity number. */
4644 if (op0_elt == 0)
4645 {
4646 if (insert_regs (op0, NULL, 0))
4647 {
4648 rehash_using_reg (op0);
4649 op0_hash = HASH (op0, mode);
4650
4651 /* If OP0 is contained in OP1, this changes its hash code
4652 as well. Faster to rehash than to check, except
4653 for the simple case of a constant. */
4654 if (! CONSTANT_P (op1))
4655		op1_hash = HASH (op1, mode);
4656 }
4657
4658 op0_elt = insert (op0, NULL, op0_hash, mode);
4659 op0_elt->in_memory = op0_in_memory;
4660 }
4661
4662 qty = REG_QTY (REGNO (op0));
4663 ent = &qty_table[qty];
4664
4665 ent->comparison_code = code;
4666 if (GET_CODE (op1) == REG)
4667 {
4668 /* Look it up again--in case op0 and op1 are the same. */
4669 op1_elt = lookup (op1, op1_hash, mode);
4670
4671 /* Put OP1 in the hash table so it gets a new quantity number. */
4672 if (op1_elt == 0)
4673 {
4674 if (insert_regs (op1, NULL, 0))
4675 {
4676 rehash_using_reg (op1);
4677 op1_hash = HASH (op1, mode);
4678 }
4679
4680 op1_elt = insert (op1, NULL, op1_hash, mode);
4681 op1_elt->in_memory = op1_in_memory;
4682 }
4683
4684 ent->comparison_const = NULL_RTX;
4685 ent->comparison_qty = REG_QTY (REGNO (op1));
4686 }
4687 else
4688 {
4689 ent->comparison_const = op1;
4690 ent->comparison_qty = -1;
4691 }
4692
4693 return;
4694 }
4695
4696 /* If either side is still missing an equivalence, make it now,
4697 then merge the equivalences. */
4698
4699 if (op0_elt == 0)
4700 {
4701 if (insert_regs (op0, NULL, 0))
4702 {
4703 rehash_using_reg (op0);
4704 op0_hash = HASH (op0, mode);
4705 }
4706
4707 op0_elt = insert (op0, NULL, op0_hash, mode);
4708 op0_elt->in_memory = op0_in_memory;
4709 }
4710
4711 if (op1_elt == 0)
4712 {
4713 if (insert_regs (op1, NULL, 0))
4714 {
4715 rehash_using_reg (op1);
4716 op1_hash = HASH (op1, mode);
4717 }
4718
4719 op1_elt = insert (op1, NULL, op1_hash, mode);
4720 op1_elt->in_memory = op1_in_memory;
4721 }
4722
4723 merge_equiv_classes (op0_elt, op1_elt);
4724 last_jump_equiv_class = op0_elt;
4725}
4726
4727
4728/* CSE processing for one instruction.
4729 First simplify sources and addresses of all assignments
4730	 in the instruction, using previously-computed equivalent values.
4731 Then install the new sources and destinations in the table
4732 of available values.
4733
4734 If LIBCALL_INSN is nonzero, don't record any equivalence made in
4735 the insn. It means that INSN is inside libcall block. In this
4736	 the insn. It means that INSN is inside a libcall block. In this
4737
4738/* Data on one SET contained in the instruction. */
4739
4740struct set
4741{
4742 /* The SET rtx itself. */
4743 rtx rtl;
4744 /* The SET_SRC of the rtx (the original value, if it is changing). */
4745 rtx src;
4746 /* The hash-table element for the SET_SRC of the SET. */
4747 struct table_elt *src_elt;
4748 /* Hash value for the SET_SRC. */
4749 unsigned src_hash;
4750 /* Hash value for the SET_DEST. */
4751 unsigned dest_hash;
4752 /* The SET_DEST, with SUBREG, etc., stripped. */
4753 rtx inner_dest;
4754 /* Nonzero if the SET_SRC is in memory. */
4755 char src_in_memory;
4756 /* Nonzero if the SET_SRC contains something
4757 whose value cannot be predicted and understood. */
4758 char src_volatile;
4759 /* Original machine mode, in case it becomes a CONST_INT. */
4760 enum machine_mode mode;
4761 /* A constant equivalent for SET_SRC, if any. */
4762 rtx src_const;
4763 /* Original SET_SRC value used for libcall notes. */
4764 rtx orig_src;
4765 /* Hash value of constant equivalent for SET_SRC. */
4766 unsigned src_const_hash;
4767 /* Table entry for constant equivalent for SET_SRC, if any. */
4768 struct table_elt *src_const_elt;
4769};
4770
4771static void
4772cse_insn (insn, libcall_insn)
4773 rtx insn;
4774 rtx libcall_insn;
4775{
4776 rtx x = PATTERN (insn);
4777 int i;
4778 rtx tem;
4779 int n_sets = 0;
4780
4781#ifdef HAVE_cc0
4782 /* Records what this insn does to set CC0. */
4783 rtx this_insn_cc0 = 0;
4784 enum machine_mode this_insn_cc0_mode = VOIDmode;
4785#endif
4786
4787 rtx src_eqv = 0;
4788 struct table_elt *src_eqv_elt = 0;
4789 int src_eqv_volatile = 0;
4790 int src_eqv_in_memory = 0;
4791 unsigned src_eqv_hash = 0;
4792
4793 struct set *sets = (struct set *) 0;
4794
4795 this_insn = insn;
4796
4797 /* Find all the SETs and CLOBBERs in this instruction.
4798 Record all the SETs in the array `set' and count them.
4799 Also determine whether there is a CLOBBER that invalidates
4800 all memory references, or all references at varying addresses. */
4801
4802 if (GET_CODE (insn) == CALL_INSN)
4803 {
4804 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4805 {
4806 if (GET_CODE (XEXP (tem, 0)) == CLOBBER)
4807 invalidate (SET_DEST (XEXP (tem, 0)), VOIDmode);
4808 XEXP (tem, 0) = canon_reg (XEXP (tem, 0), insn);
4809 }
4810 }
4811
4812 if (GET_CODE (x) == SET)
4813 {
4814 sets = (struct set *) alloca (sizeof (struct set));
4815 sets[0].rtl = x;
4816
4817 /* Ignore SETs that are unconditional jumps.
4818 They never need cse processing, so this does not hurt.
4819 The reason is not efficiency but rather
4820 so that we can test at the end for instructions
4821 that have been simplified to unconditional jumps
4822 and not be misled by unchanged instructions
4823 that were unconditional jumps to begin with. */
4824 if (SET_DEST (x) == pc_rtx
4825 && GET_CODE (SET_SRC (x)) == LABEL_REF)
4826 ;
4827
4828 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
4829 The hard function value register is used only once, to copy to
4830 someplace else, so it isn't worth cse'ing (and on 80386 is unsafe)!
4831 Ensure we invalidate the destination register. On the 80386 no
4832 other code would invalidate it since it is a fixed_reg.
4833 We need not check the return of apply_change_group; see canon_reg. */
4834
4835 else if (GET_CODE (SET_SRC (x)) == CALL)
4836 {
4837 canon_reg (SET_SRC (x), insn);
4838 apply_change_group ();
4839 fold_rtx (SET_SRC (x), insn);
4840 invalidate (SET_DEST (x), VOIDmode);
4841 }
4842 else
4843 n_sets = 1;
4844 }
4845 else if (GET_CODE (x) == PARALLEL)
4846 {
4847 int lim = XVECLEN (x, 0);
4848
4849 sets = (struct set *) alloca (lim * sizeof (struct set));
4850
4851 /* Find all regs explicitly clobbered in this insn,
4852 and ensure they are not replaced with any other regs
4853 elsewhere in this insn.
4854 When a reg that is clobbered is also used for input,
4855 we should presume that that is for a reason,
4856 and we should not substitute some other register
4857 which is not supposed to be clobbered.
4858 Therefore, this loop cannot be merged into the one below
4859 because a CALL may precede a CLOBBER and refer to the
4860 value clobbered. We must not let a canonicalization do
4861 anything in that case. */
4862 for (i = 0; i < lim; i++)
4863 {
4864 rtx y = XVECEXP (x, 0, i);
4865 if (GET_CODE (y) == CLOBBER)
4866 {
4867 rtx clobbered = XEXP (y, 0);
4868
4869 if (GET_CODE (clobbered) == REG
4870 || GET_CODE (clobbered) == SUBREG)
4871 invalidate (clobbered, VOIDmode);
4872 else if (GET_CODE (clobbered) == STRICT_LOW_PART
4873 || GET_CODE (clobbered) == ZERO_EXTRACT)
4874 invalidate (XEXP (clobbered, 0), GET_MODE (clobbered));
4875 }
4876 }
4877
4878 for (i = 0; i < lim; i++)
4879 {
4880 rtx y = XVECEXP (x, 0, i);
4881 if (GET_CODE (y) == SET)
4882 {
4883 /* As above, we ignore unconditional jumps and call-insns and
4884 ignore the result of apply_change_group. */
4885 if (GET_CODE (SET_SRC (y)) == CALL)
4886 {
4887 canon_reg (SET_SRC (y), insn);
4888 apply_change_group ();
4889 fold_rtx (SET_SRC (y), insn);
4890 invalidate (SET_DEST (y), VOIDmode);
4891 }
4892 else if (SET_DEST (y) == pc_rtx
4893 && GET_CODE (SET_SRC (y)) == LABEL_REF)
4894 ;
4895 else
4896 sets[n_sets++].rtl = y;
4897 }
4898 else if (GET_CODE (y) == CLOBBER)
4899 {
4900 /* If we clobber memory, canon the address.
4901 This does nothing when a register is clobbered
4902 because we have already invalidated the reg. */
4903 if (GET_CODE (XEXP (y, 0)) == MEM)
4904 canon_reg (XEXP (y, 0), NULL_RTX);
4905 }
4906 else if (GET_CODE (y) == USE
4907 && ! (GET_CODE (XEXP (y, 0)) == REG
4908 && REGNO (XEXP (y, 0)) < FIRST_PSEUDO_REGISTER))
4909 canon_reg (y, NULL_RTX);
4910 else if (GET_CODE (y) == CALL)
4911 {
4912 /* The result of apply_change_group can be ignored; see
4913 canon_reg. */
4914 canon_reg (y, insn);
4915 apply_change_group ();
4916 fold_rtx (y, insn);
4917 }
4918 }
4919 }
4920 else if (GET_CODE (x) == CLOBBER)
4921 {
4922 if (GET_CODE (XEXP (x, 0)) == MEM)
4923 canon_reg (XEXP (x, 0), NULL_RTX);
4924 }
4925
4926 /* Canonicalize a USE of a pseudo register or memory location. */
4927 else if (GET_CODE (x) == USE
4928 && ! (GET_CODE (XEXP (x, 0)) == REG
4929 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER))
4930 canon_reg (XEXP (x, 0), NULL_RTX);
4931 else if (GET_CODE (x) == CALL)
4932 {
4933 /* The result of apply_change_group can be ignored; see canon_reg. */
4934 canon_reg (x, insn);
4935 apply_change_group ();
4936 fold_rtx (x, insn);
4937 }
4938
4939 /* Store the equivalent value in SRC_EQV, if different, or if the DEST
4940 is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV
4941 is handled specially for this case, and if it isn't set, then there will
4942 be no equivalence for the destination. */
4943 if (n_sets == 1 && REG_NOTES (insn) != 0
4944 && (tem = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0
4945 && (! rtx_equal_p (XEXP (tem, 0), SET_SRC (sets[0].rtl))
4946 || GET_CODE (SET_DEST (sets[0].rtl)) == STRICT_LOW_PART))
4947 {
4948 src_eqv = fold_rtx (canon_reg (XEXP (tem, 0), NULL_RTX), insn);
4949 XEXP (tem, 0) = src_eqv;
4950 }
4951
4952 /* Canonicalize sources and addresses of destinations.
4953 We do this in a separate pass to avoid problems when a MATCH_DUP is
4954 present in the insn pattern. In that case, we want to ensure that
4955 we don't break the duplicate nature of the pattern. So we will replace
4956 both operands at the same time. Otherwise, we would fail to find an
4957 equivalent substitution in the loop calling validate_change below.
4958
4959 We used to suppress canonicalization of DEST if it appears in SRC,
4960 but we don't do this any more. */
4961
4962 for (i = 0; i < n_sets; i++)
4963 {
4964 rtx dest = SET_DEST (sets[i].rtl);
4965 rtx src = SET_SRC (sets[i].rtl);
4966 rtx new = canon_reg (src, insn);
4967 int insn_code;
4968
4969 sets[i].orig_src = src;
4970 if ((GET_CODE (new) == REG && GET_CODE (src) == REG
4971 && ((REGNO (new) < FIRST_PSEUDO_REGISTER)
4972 != (REGNO (src) < FIRST_PSEUDO_REGISTER)))
4973 || (insn_code = recog_memoized (insn)) < 0
4974 || insn_data[insn_code].n_dups > 0)
4975 validate_change (insn, &SET_SRC (sets[i].rtl), new, 1);
4976 else
4977 SET_SRC (sets[i].rtl) = new;
4978
4979 if (GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SIGN_EXTRACT)
4980 {
4981 validate_change (insn, &XEXP (dest, 1),
4982 canon_reg (XEXP (dest, 1), insn), 1);
4983 validate_change (insn, &XEXP (dest, 2),
4984 canon_reg (XEXP (dest, 2), insn), 1);
4985 }
4986
4987 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
4988 || GET_CODE (dest) == ZERO_EXTRACT
4989 || GET_CODE (dest) == SIGN_EXTRACT)
4990 dest = XEXP (dest, 0);
4991
4992 if (GET_CODE (dest) == MEM)
4993 canon_reg (dest, insn);
4994 }
4995
4996 /* Now that we have done all the replacements, we can apply the change
4997 group and see if they all work. Note that this will cause some
4998 canonicalizations that would have worked individually not to be applied
4999 because some other canonicalization didn't work, but this should not
5000 occur often.
5001
5002 The result of apply_change_group can be ignored; see canon_reg. */
5003
5004 apply_change_group ();
5005
5006 /* Set sets[i].src_elt to the class each source belongs to.
5007 Detect assignments from or to volatile things
5008	 and set sets[i] to zero so they will be ignored
5009 in the rest of this function.
5010
5011 Nothing in this loop changes the hash table or the register chains. */
5012
5013 for (i = 0; i < n_sets; i++)
5014 {
5015 rtx src, dest;
5016 rtx src_folded;
5017 struct table_elt *elt = 0, *p;
5018 enum machine_mode mode;
5019 rtx src_eqv_here;
5020 rtx src_const = 0;
5021 rtx src_related = 0;
5022 struct table_elt *src_const_elt = 0;
5023 int src_cost = MAX_COST;
5024 int src_eqv_cost = MAX_COST;
5025 int src_folded_cost = MAX_COST;
5026 int src_related_cost = MAX_COST;
5027 int src_elt_cost = MAX_COST;
5028 int src_regcost = MAX_COST;
5029 int src_eqv_regcost = MAX_COST;
5030 int src_folded_regcost = MAX_COST;
5031 int src_related_regcost = MAX_COST;
5032 int src_elt_regcost = MAX_COST;
5033	 /* Set nonzero if we need to call force_const_mem on the
5034	    contents of src_folded before using it. */
5035 int src_folded_force_flag = 0;
5036
5037 dest = SET_DEST (sets[i].rtl);
5038 src = SET_SRC (sets[i].rtl);
5039
5040 /* If SRC is a constant that has no machine mode,
5041 hash it with the destination's machine mode.
5042 This way we can keep different modes separate. */
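	 /* A (const_int 5), for instance, carries no mode of its own;
	    hashing it with the destination's mode keeps an SImode 5 and
	    a DImode 5 in separate equivalence classes. */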
5043
5044 mode = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5045 sets[i].mode = mode;
5046
5047 if (src_eqv)
5048 {
5049 enum machine_mode eqvmode = mode;
5050 if (GET_CODE (dest) == STRICT_LOW_PART)
5051 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5052 do_not_record = 0;
5053 hash_arg_in_memory = 0;
5054 src_eqv_hash = HASH (src_eqv, eqvmode);
5055
5056 /* Find the equivalence class for the equivalent expression. */
5057
5058 if (!do_not_record)
5059 src_eqv_elt = lookup (src_eqv, src_eqv_hash, eqvmode);
5060
5061 src_eqv_volatile = do_not_record;
5062 src_eqv_in_memory = hash_arg_in_memory;
5063 }
5064
5065 /* If this is a STRICT_LOW_PART assignment, src_eqv corresponds to the
5066 value of the INNER register, not the destination. So it is not
5067 a valid substitution for the source. But save it for later. */
5068 if (GET_CODE (dest) == STRICT_LOW_PART)
5069 src_eqv_here = 0;
5070 else
5071 src_eqv_here = src_eqv;
5072
5073	 /* Simplify any foldable subexpressions in SRC. Then get the fully-
5074 simplified result, which may not necessarily be valid. */
5075 src_folded = fold_rtx (src, insn);
5076
5077#if 0
5078 /* ??? This caused bad code to be generated for the m68k port with -O2.
5079 Suppose src is (CONST_INT -1), and that after truncation src_folded
5080 is (CONST_INT 3). Suppose src_folded is then used for src_const.
5081 At the end we will add src and src_const to the same equivalence
5082 class. We now have 3 and -1 on the same equivalence class. This
5083 causes later instructions to be mis-optimized. */
5084 /* If storing a constant in a bitfield, pre-truncate the constant
5085 so we will be able to record it later. */
5086 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5087 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5088 {
5089 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5090
5091 if (GET_CODE (src) == CONST_INT
5092 && GET_CODE (width) == CONST_INT
5093 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5094 && (INTVAL (src) & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5095 src_folded
5096 = GEN_INT (INTVAL (src) & (((HOST_WIDE_INT) 1
5097 << INTVAL (width)) - 1));
5098 }
5099#endif
5100
5101 /* Compute SRC's hash code, and also notice if it
5102 should not be recorded at all. In that case,
5103 prevent any further processing of this assignment. */
5104 do_not_record = 0;
5105 hash_arg_in_memory = 0;
5106
5107 sets[i].src = src;
5108 sets[i].src_hash = HASH (src, mode);
5109 sets[i].src_volatile = do_not_record;
5110 sets[i].src_in_memory = hash_arg_in_memory;
5111
5112 /* If SRC is a MEM, there is a REG_EQUIV note for SRC, and DEST is
5113 a pseudo, do not record SRC. Using SRC as a replacement for
5114 anything else will be incorrect in that situation. Note that
5115 this usually occurs only for stack slots, in which case all the
5116 RTL would be referring to SRC, so we don't lose any optimization
5117 opportunities by not having SRC in the hash table. */
5118
5119 if (GET_CODE (src) == MEM
5120 && find_reg_note (insn, REG_EQUIV, NULL_RTX) != 0
5121 && GET_CODE (dest) == REG
5122 && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5123 sets[i].src_volatile = 1;
5124
5125#if 0
5126 /* It is no longer clear why we used to do this, but it doesn't
5127 appear to still be needed. So let's try without it since this
5128 code hurts cse'ing widened ops. */
5129 /* If source is a perverse subreg (such as QI treated as an SI),
5130 treat it as volatile. It may do the work of an SI in one context
5131 where the extra bits are not being used, but cannot replace an SI
5132 in general. */
5133 if (GET_CODE (src) == SUBREG
5134 && (GET_MODE_SIZE (GET_MODE (src))
5135 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
5136 sets[i].src_volatile = 1;
5137#endif
5138
5139 /* Locate all possible equivalent forms for SRC. Try to replace
5140 SRC in the insn with each cheaper equivalent.
5141
5142 We have the following types of equivalents: SRC itself, a folded
5143 version, a value given in a REG_EQUAL note, or a value related
5144 to a constant.
5145
5146 Each of these equivalents may be part of an additional class
5147 of equivalents (if more than one is in the table, they must be in
5148 the same class; we check for this).
5149
5150 If the source is volatile, we don't do any table lookups.
5151
5152 We note any constant equivalent for possible later use in a
5153 REG_NOTE. */
5154
5155 if (!sets[i].src_volatile)
5156 elt = lookup (src, sets[i].src_hash, mode);
5157
5158 sets[i].src_elt = elt;
5159
5160 if (elt && src_eqv_here && src_eqv_elt)
5161 {
5162 if (elt->first_same_value != src_eqv_elt->first_same_value)
5163 {
5164 /* The REG_EQUAL is indicating that two formerly distinct
5165 classes are now equivalent. So merge them. */
5166 merge_equiv_classes (elt, src_eqv_elt);
5167 src_eqv_hash = HASH (src_eqv, elt->mode);
5168 src_eqv_elt = lookup (src_eqv, src_eqv_hash, elt->mode);
5169 }
5170
5171 src_eqv_here = 0;
5172 }
5173
5174 else if (src_eqv_elt)
5175 elt = src_eqv_elt;
5176
5177 /* Try to find a constant somewhere and record it in `src_const'.
5178 Record its table element, if any, in `src_const_elt'. Look in
5179 any known equivalences first. (If the constant is not in the
5180 table, also set `sets[i].src_const_hash'). */
5181 if (elt)
5182 for (p = elt->first_same_value; p; p = p->next_same_value)
5183 if (p->is_const)
5184 {
5185 src_const = p->exp;
5186 src_const_elt = elt;
5187 break;
5188 }
5189
5190 if (src_const == 0
5191 && (CONSTANT_P (src_folded)
5192 /* Consider (minus (label_ref L1) (label_ref L2)) as
5193 "constant" here so we will record it. This allows us
5194 to fold switch statements when an ADDR_DIFF_VEC is used. */
5195 || (GET_CODE (src_folded) == MINUS
5196 && GET_CODE (XEXP (src_folded, 0)) == LABEL_REF
5197 && GET_CODE (XEXP (src_folded, 1)) == LABEL_REF)))
5198 src_const = src_folded, src_const_elt = elt;
5199 else if (src_const == 0 && src_eqv_here && CONSTANT_P (src_eqv_here))
5200 src_const = src_eqv_here, src_const_elt = src_eqv_elt;
5201
5202 /* If we don't know if the constant is in the table, get its
5203 hash code and look it up. */
5204 if (src_const && src_const_elt == 0)
5205 {
5206 sets[i].src_const_hash = HASH (src_const, mode);
5207 src_const_elt = lookup (src_const, sets[i].src_const_hash, mode);
5208 }
5209
5210 sets[i].src_const = src_const;
5211 sets[i].src_const_elt = src_const_elt;
5212
5213 /* If the constant and our source are both in the table, mark them as
5214 equivalent. Otherwise, if a constant is in the table but the source
5215 isn't, set ELT to it. */
5216 if (src_const_elt && elt
5217 && src_const_elt->first_same_value != elt->first_same_value)
5218 merge_equiv_classes (elt, src_const_elt);
5219 else if (src_const_elt && elt == 0)
5220 elt = src_const_elt;
5221
5222 /* See if there is a register linearly related to a constant
5223 equivalent of SRC. */
5224 if (src_const
5225 && (GET_CODE (src_const) == CONST
5226 || (src_const_elt && src_const_elt->related_value != 0)))
5227 {
5228 src_related = use_related_value (src_const, src_const_elt);
5229 if (src_related)
5230 {
5231 struct table_elt *src_related_elt
5232 = lookup (src_related, HASH (src_related, mode), mode);
5233 if (src_related_elt && elt)
5234 {
5235 if (elt->first_same_value
5236 != src_related_elt->first_same_value)
5237 /* This can occur when we previously saw a CONST
5238 involving a SYMBOL_REF and then see the SYMBOL_REF
5239 twice. Merge the involved classes. */
5240 merge_equiv_classes (elt, src_related_elt);
5241
5242 src_related = 0;
5243 src_related_elt = 0;
5244 }
5245 else if (src_related_elt && elt == 0)
5246 elt = src_related_elt;
5247 }
5248 }
5249
5250 /* See if we have a CONST_INT that is already in a register in a
5251 wider mode. */
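	 /* E.g. when loading (const_int 7) in HImode: if some
	    (reg:SI 100) is already known to hold 7, the lowpart
	    (subreg:HI (reg:SI 100) 0) may be cheaper than the immediate
	    (register number invented). */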
5252
5253 if (src_const && src_related == 0 && GET_CODE (src_const) == CONST_INT
5254 && GET_MODE_CLASS (mode) == MODE_INT
5255 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD)
5256 {
5257 enum machine_mode wider_mode;
5258
5259 for (wider_mode = GET_MODE_WIDER_MODE (mode);
5260 GET_MODE_BITSIZE (wider_mode) <= BITS_PER_WORD
5261 && src_related == 0;
5262 wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5263 {
5264 struct table_elt *const_elt
5265 = lookup (src_const, HASH (src_const, wider_mode), wider_mode);
5266
5267 if (const_elt == 0)
5268 continue;
5269
5270 for (const_elt = const_elt->first_same_value;
5271 const_elt; const_elt = const_elt->next_same_value)
5272 if (GET_CODE (const_elt->exp) == REG)
5273 {
5274 src_related = gen_lowpart_if_possible (mode,
5275 const_elt->exp);
5276 break;
5277 }
5278 }
5279 }
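      /* Rough sketch of the case above (pseudo numbers hypothetical):
	 if we earlier recorded (set (reg:SI 70) (const_int 65)) and now
	 need (const_int 65) in QImode, the SImode lookup finds reg 70
	 and src_related becomes (subreg:QI (reg:SI 70) 0).  */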
5280
5281 /* Another possibility is that we have an AND with a constant in
5282 a mode narrower than a word. If so, it might have been generated
5283 as part of an "if" which would narrow the AND. If we already
5284 have done the AND in a wider mode, we can use a SUBREG of that
5285 value. */
5286
5287 if (flag_expensive_optimizations && ! src_related
5288 && GET_CODE (src) == AND && GET_CODE (XEXP (src, 1)) == CONST_INT
5289 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5290 {
5291 enum machine_mode tmode;
5292 rtx new_and = gen_rtx_AND (VOIDmode, NULL_RTX, XEXP (src, 1));
5293
5294 for (tmode = GET_MODE_WIDER_MODE (mode);
5295 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5296 tmode = GET_MODE_WIDER_MODE (tmode))
5297 {
5298 rtx inner = gen_lowpart_if_possible (tmode, XEXP (src, 0));
5299 struct table_elt *larger_elt;
5300
5301 if (inner)
5302 {
5303 PUT_MODE (new_and, tmode);
5304 XEXP (new_and, 0) = inner;
5305 larger_elt = lookup (new_and, HASH (new_and, tmode), tmode);
5306 if (larger_elt == 0)
5307 continue;
5308
5309 for (larger_elt = larger_elt->first_same_value;
5310 larger_elt; larger_elt = larger_elt->next_same_value)
5311 if (GET_CODE (larger_elt->exp) == REG)
5312 {
5313 src_related
5314 = gen_lowpart_if_possible (mode, larger_elt->exp);
5315 break;
5316 }
5317
5318 if (src_related)
5319 break;
5320 }
5321 }
5322 }
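      /* Sketch (pseudo numbers hypothetical): after
	 (set (reg:SI 80) (and:SI (reg:SI 81) (const_int 255))),
	 a QImode (and:QI ... (const_int 255)) of the same operand can
	 look up the SImode AND above and use (subreg:QI (reg:SI 80) 0)
	 as src_related instead of redoing the AND.  */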
5323
5324#ifdef LOAD_EXTEND_OP
5325 /* See if a MEM has already been loaded with a widening operation;
5326 if it has, we can use a subreg of that. Many CISC machines
5327 also have such operations, but this is only likely to be
5328 beneficial on these machines. */
5329
5330 if (flag_expensive_optimizations && src_related == 0
5331 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
5332 && GET_MODE_CLASS (mode) == MODE_INT
5333 && GET_CODE (src) == MEM && ! do_not_record
5334 && LOAD_EXTEND_OP (mode) != NIL)
5335 {
5336 enum machine_mode tmode;
5337
5338 /* Set what we are trying to extend and the operation it might
5339 have been extended with. */
5340 PUT_CODE (memory_extend_rtx, LOAD_EXTEND_OP (mode));
5341 XEXP (memory_extend_rtx, 0) = src;
5342
5343 for (tmode = GET_MODE_WIDER_MODE (mode);
5344 GET_MODE_SIZE (tmode) <= UNITS_PER_WORD;
5345 tmode = GET_MODE_WIDER_MODE (tmode))
5346 {
5347 struct table_elt *larger_elt;
5348
5349 PUT_MODE (memory_extend_rtx, tmode);
5350 larger_elt = lookup (memory_extend_rtx,
5351 HASH (memory_extend_rtx, tmode), tmode);
5352 if (larger_elt == 0)
5353 continue;
5354
5355 for (larger_elt = larger_elt->first_same_value;
5356 larger_elt; larger_elt = larger_elt->next_same_value)
5357 if (GET_CODE (larger_elt->exp) == REG)
5358 {
5359 src_related = gen_lowpart_if_possible (mode,
5360 larger_elt->exp);
5361 break;
5362 }
5363
5364 if (src_related)
5365 break;
5366 }
5367 }
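      /* Sketch (assuming a target whose LOAD_EXTEND_OP is ZERO_EXTEND;
	 pseudo numbers hypothetical): if we earlier recorded
	 (set (reg:SI 90) (zero_extend:SI (mem:QI A))), a new QImode
	 load from A can become (subreg:QI (reg:SI 90) 0) instead of a
	 second memory reference.  */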
5368#endif /* LOAD_EXTEND_OP */
5369
5370 if (src == src_folded)
5371 src_folded = 0;
5372
5373 /* At this point, ELT, if non-zero, points to a class of expressions
5374 equivalent to the source of this SET, while SRC, SRC_EQV, SRC_FOLDED,
5375 and SRC_RELATED, if non-zero, each contain additional equivalent
5376 expressions. Prune these latter expressions by deleting expressions
5377 already in the equivalence class.
5378
5379 Check for an equivalent identical to the destination. If found,
5380 this is the preferred equivalent since it will likely lead to
5381 elimination of the insn. Indicate this by placing it in
5382 `src_related'. */
5383
5384 if (elt)
5385 elt = elt->first_same_value;
5386 for (p = elt; p; p = p->next_same_value)
5387 {
5388 enum rtx_code code = GET_CODE (p->exp);
5389
5390 /* If the expression is not valid, ignore it. Then we do not
5391 have to check for validity below. In most cases, we can use
5392 `rtx_equal_p', since canonicalization has already been done. */
5393 if (code != REG && ! exp_equiv_p (p->exp, p->exp, 1, 0))
5394 continue;
5395
5396 /* Also skip paradoxical subregs, unless that's what we're
5397 looking for. */
5398 if (code == SUBREG
5399 && (GET_MODE_SIZE (GET_MODE (p->exp))
5400 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))
5401 && ! (src != 0
5402 && GET_CODE (src) == SUBREG
5403 && GET_MODE (src) == GET_MODE (p->exp)
5404 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5405 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (p->exp))))))
5406 continue;
5407
5408 if (src && GET_CODE (src) == code && rtx_equal_p (src, p->exp))
5409 src = 0;
5410 else if (src_folded && GET_CODE (src_folded) == code
5411 && rtx_equal_p (src_folded, p->exp))
5412 src_folded = 0;
5413 else if (src_eqv_here && GET_CODE (src_eqv_here) == code
5414 && rtx_equal_p (src_eqv_here, p->exp))
5415 src_eqv_here = 0;
5416 else if (src_related && GET_CODE (src_related) == code
5417 && rtx_equal_p (src_related, p->exp))
5418 src_related = 0;
5419
5420 /* If this is the same as the destination of the insn, we want
5421 to prefer it. Copy it to src_related. The code below will
5422 then give it a negative cost. */
5423 if (GET_CODE (dest) == code && rtx_equal_p (p->exp, dest))
5424 src_related = dest;
5425 }
5426
5427 /* Find the cheapest valid equivalent, trying all the available
5428 possibilities. Prefer items not in the hash table to ones
5429 that are when they are equal cost. Note that we can never
5430 worsen an insn as the current contents will also succeed.
5431 If we find an equivalent identical to the destination, use it as best,
5432 since this insn will probably be eliminated in that case. */
5433 if (src)
5434 {
5435 if (rtx_equal_p (src, dest))
5436 src_cost = src_regcost = -1;
5437 else
5438 {
5439 src_cost = COST (src);
5440 src_regcost = approx_reg_cost (src);
5441 }
5442 }
5443
5444 if (src_eqv_here)
5445 {
5446 if (rtx_equal_p (src_eqv_here, dest))
5447 src_eqv_cost = src_eqv_regcost = -1;
5448 else
5449 {
5450 src_eqv_cost = COST (src_eqv_here);
5451 src_eqv_regcost = approx_reg_cost (src_eqv_here);
5452 }
5453 }
5454
5455 if (src_folded)
5456 {
5457 if (rtx_equal_p (src_folded, dest))
5458 src_folded_cost = src_folded_regcost = -1;
5459 else
5460 {
5461 src_folded_cost = COST (src_folded);
5462 src_folded_regcost = approx_reg_cost (src_folded);
5463 }
5464 }
5465
5466 if (src_related)
5467 {
5468 if (rtx_equal_p (src_related, dest))
5469 src_related_cost = src_related_regcost = -1;
5470 else
5471 {
5472 src_related_cost = COST (src_related);
5473 src_related_regcost = approx_reg_cost (src_related);
5474 }
5475 }
5476
5477 /* If this was an indirect jump insn, a known label will really be
5478 cheaper even though it looks more expensive. */
5479 if (dest == pc_rtx && src_const && GET_CODE (src_const) == LABEL_REF)
5480 src_folded = src_const, src_folded_cost = src_folded_regcost = -1;
5481
5482 /* Terminate loop when replacement made. This must terminate since
5483 the current contents will be tested and will always be valid. */
5484 while (1)
5485 {
5486 rtx trial;
5487
5488 /* Skip invalid entries. */
5489 while (elt && GET_CODE (elt->exp) != REG
5490 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
5491 elt = elt->next_same_value;
5492
5493 /* A paradoxical subreg would be bad here: it'll be the right
5494 size, but later may be adjusted so that the upper bits aren't
5495 what we want. So reject it. */
5496 if (elt != 0
5497 && GET_CODE (elt->exp) == SUBREG
5498 && (GET_MODE_SIZE (GET_MODE (elt->exp))
5499 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))
5500 /* It is okay, though, if the rtx we're trying to match
5501 will ignore any of the bits we can't predict. */
5502 && ! (src != 0
5503 && GET_CODE (src) == SUBREG
5504 && GET_MODE (src) == GET_MODE (elt->exp)
5505 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
5506 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (elt->exp))))))
5507 {
5508 elt = elt->next_same_value;
5509 continue;
5510 }
5511
5512 if (elt)
5513 {
5514 src_elt_cost = elt->cost;
5515 src_elt_regcost = elt->regcost;
5516 }
5517
5518 /* Find cheapest and skip it for the next time. For items
5519 of equal cost, use this order:
5520 src_folded, src, src_eqv, src_related and hash table entry. */
5521 if (src_folded
5522 && preferrable (src_folded_cost, src_folded_regcost,
5523 src_cost, src_regcost) <= 0
5524 && preferrable (src_folded_cost, src_folded_regcost,
5525 src_eqv_cost, src_eqv_regcost) <= 0
5526 && preferrable (src_folded_cost, src_folded_regcost,
5527 src_related_cost, src_related_regcost) <= 0
5528 && preferrable (src_folded_cost, src_folded_regcost,
5529 src_elt_cost, src_elt_regcost) <= 0)
5530 {
5531 trial = src_folded, src_folded_cost = MAX_COST;
5532 if (src_folded_force_flag)
5533 trial = force_const_mem (mode, trial);
5534 }
5535 else if (src
5536 && preferrable (src_cost, src_regcost,
5537 src_eqv_cost, src_eqv_regcost) <= 0
5538 && preferrable (src_cost, src_regcost,
5539 src_related_cost, src_related_regcost) <= 0
5540 && preferrable (src_cost, src_regcost,
5541 src_elt_cost, src_elt_regcost) <= 0)
5542 trial = src, src_cost = MAX_COST;
5543 else if (src_eqv_here
5544 && preferrable (src_eqv_cost, src_eqv_regcost,
5545 src_related_cost, src_related_regcost) <= 0
5546 && preferrable (src_eqv_cost, src_eqv_regcost,
5547 src_elt_cost, src_elt_regcost) <= 0)
5548 trial = copy_rtx (src_eqv_here), src_eqv_cost = MAX_COST;
5549 else if (src_related
5550 && preferrable (src_related_cost, src_related_regcost,
5551 src_elt_cost, src_elt_regcost) <= 0)
5552 trial = copy_rtx (src_related), src_related_cost = MAX_COST;
5553 else
5554 {
5555 trial = copy_rtx (elt->exp);
5556 elt = elt->next_same_value;
5557 src_elt_cost = MAX_COST;
5558 }
5559
5560 /* We don't normally have an insn matching (set (pc) (pc)), so
5561 check for this separately here. We will delete such an
5562 insn below.
5563
5564 For other cases such as a table jump or conditional jump
5565 where we know the ultimate target, go ahead and replace the
5566 operand. While that may not make a valid insn, we will
5567 reemit the jump below (and also insert any necessary
5568 barriers). */
5569 if (n_sets == 1 && dest == pc_rtx
5570 && (trial == pc_rtx
5571 || (GET_CODE (trial) == LABEL_REF
5572 && ! condjump_p (insn))))
5573 {
5574 SET_SRC (sets[i].rtl) = trial;
5575 cse_jumps_altered = 1;
5576 break;
5577 }
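	  /* E.g., a conditional jump whose condition folded to "always
	     true" and whose target is the fall-through label has its
	     source replaced by pc_rtx here, giving (set (pc) (pc));
	     such an insn is deleted further below.  */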
5578
5579 /* Look for a substitution that makes a valid insn. */
5580 else if (validate_change (insn, &SET_SRC (sets[i].rtl), trial, 0))
5581 {
5582 /* If we just made a substitution inside a libcall, then we
5583 need to make the same substitution in any notes attached
5584 to the RETVAL insn. */
5585 if (libcall_insn
5586 && (GET_CODE (sets[i].orig_src) == REG
5587 || GET_CODE (sets[i].orig_src) == SUBREG
5588 || GET_CODE (sets[i].orig_src) == MEM))
5589 replace_rtx (REG_NOTES (libcall_insn), sets[i].orig_src,
5590 canon_reg (SET_SRC (sets[i].rtl), insn));
5591
5592 /* The result of apply_change_group can be ignored; see
5593 canon_reg. */
5594
5595 validate_change (insn, &SET_SRC (sets[i].rtl),
5596 canon_reg (SET_SRC (sets[i].rtl), insn),
5597 1);
5598 apply_change_group ();
5599 break;
5600 }
5601
5602 /* If we previously found constant pool entries for
5603 constants and this is a constant, try making a
5604 pool entry. Put it in src_folded unless we have already done
5605 so, since that is where it likely came from. */
5606
5607 else if (constant_pool_entries_cost
5608 && CONSTANT_P (trial)
5609 /* Reject cases that will abort in decode_rtx_const.
5610 On the alpha when simplifying a switch, we get
5611 (const (truncate (minus (label_ref) (label_ref)))). */
5612 && ! (GET_CODE (trial) == CONST
5613 && GET_CODE (XEXP (trial, 0)) == TRUNCATE)
5614 /* Likewise on IA-64, except without the truncate. */
5615 && ! (GET_CODE (trial) == CONST
5616 && GET_CODE (XEXP (trial, 0)) == MINUS
5617 && GET_CODE (XEXP (XEXP (trial, 0), 0)) == LABEL_REF
5618 && GET_CODE (XEXP (XEXP (trial, 0), 1)) == LABEL_REF)
5619 && (src_folded == 0
5620 || (GET_CODE (src_folded) != MEM
5621 && ! src_folded_force_flag))
5622 && GET_MODE_CLASS (mode) != MODE_CC
5623 && mode != VOIDmode)
5624 {
5625 src_folded_force_flag = 1;
5626 src_folded = trial;
5627 src_folded_cost = constant_pool_entries_cost;
5628 }
5629 }
5630
5631 src = SET_SRC (sets[i].rtl);
5632
5633 /* In general, it is good to have a SET with SET_SRC == SET_DEST.
5634 However, there is an important exception: If both are registers
5635 that are not the head of their equivalence class, replace SET_SRC
5636 with the head of the class. If we do not do this, we will have
5637 both registers live over a portion of the basic block. This way,
5638 their lifetimes will likely abut instead of overlapping. */
5639 if (GET_CODE (dest) == REG
5640 && REGNO_QTY_VALID_P (REGNO (dest)))
5641 {
5642 int dest_q = REG_QTY (REGNO (dest));
5643 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5644
5645 if (dest_ent->mode == GET_MODE (dest)
5646 && dest_ent->first_reg != REGNO (dest)
5647 && GET_CODE (src) == REG && REGNO (src) == REGNO (dest)
5648 /* Don't do this if the original insn had a hard reg as
5649 SET_SRC or SET_DEST. */
5650 && (GET_CODE (sets[i].src) != REG
5651 || REGNO (sets[i].src) >= FIRST_PSEUDO_REGISTER)
5652 && (GET_CODE (dest) != REG || REGNO (dest) >= FIRST_PSEUDO_REGISTER))
5653 /* We can't call canon_reg here because it won't do anything if
5654 SRC is a hard register. */
5655 {
5656 int src_q = REG_QTY (REGNO (src));
5657 struct qty_table_elem *src_ent = &qty_table[src_q];
5658 int first = src_ent->first_reg;
5659 rtx new_src
5660 = (first >= FIRST_PSEUDO_REGISTER
5661 ? regno_reg_rtx[first] : gen_rtx_REG (GET_MODE (src), first));
5662
5663 /* We must use validate-change even for this, because this
5664 might be a special no-op instruction, suitable only to
5665 tag notes onto. */
5666 if (validate_change (insn, &SET_SRC (sets[i].rtl), new_src, 0))
5667 {
5668 src = new_src;
5669 /* If we had a constant that is cheaper than what we are now
5670 setting SRC to, use that constant. We ignored it when we
5671 thought we could make this into a no-op. */
5672 if (src_const && COST (src_const) < COST (src)
5673 && validate_change (insn, &SET_SRC (sets[i].rtl),
5674 src_const, 0))
5675 src = src_const;
5676 }
5677 }
5678 }
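      /* Sketch (pseudo numbers hypothetical): if reg 102's quantity is
	 headed by reg 100, a no-op (set (reg 102) (reg 102)) is rewritten
	 above into (set (reg 102) (reg 100)), so the two lifetimes abut
	 rather than overlap.  */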
5679
5680 /* If we made a change, recompute SRC values. */
5681 if (src != sets[i].src)
5682 {
5683 cse_altered = 1;
5684 do_not_record = 0;
5685 hash_arg_in_memory = 0;
5686 sets[i].src = src;
5687 sets[i].src_hash = HASH (src, mode);
5688 sets[i].src_volatile = do_not_record;
5689 sets[i].src_in_memory = hash_arg_in_memory;
5690 sets[i].src_elt = lookup (src, sets[i].src_hash, mode);
5691 }
5692
5693 /* If this is a single SET, we are setting a register, and we have an
5694 equivalent constant, we want to add a REG_NOTE. We don't want
5695 to write a REG_EQUAL note for a constant pseudo since verifying that
5696 that pseudo hasn't been eliminated is a pain. Such a note also
5697 won't help anything.
5698
5699 Avoid a REG_EQUAL note for (CONST (MINUS (LABEL_REF) (LABEL_REF)))
5700 which can be created for a reference to a compile time computable
5701 entry in a jump table. */
5702
5703 if (n_sets == 1 && src_const && GET_CODE (dest) == REG
5704 && GET_CODE (src_const) != REG
5705 && ! (GET_CODE (src_const) == CONST
5706 && GET_CODE (XEXP (src_const, 0)) == MINUS
5707 && GET_CODE (XEXP (XEXP (src_const, 0), 0)) == LABEL_REF
5708 && GET_CODE (XEXP (XEXP (src_const, 0), 1)) == LABEL_REF))
5709 {
5710 /* Make sure that the rtx is not shared with any other insn. */
5711 src_const = copy_rtx (src_const);
5712
5713 /* Record the actual constant value in a REG_EQUAL note, making
5714 a new one if one does not already exist. */
5715 set_unique_reg_note (insn, REG_EQUAL, src_const);
5716
5717 /* If storing a constant value in a register that
5718 previously held the constant value 0,
5719 record this fact with a REG_WAS_0 note on this insn.
5720
5721 Note that the *register* is required to have previously held 0,
5722 not just any register in the quantity, and we must point to the
5723 insn that set that register to zero.
5724
5725 Rather than track each register individually, we just see if
5726 the last set for this quantity was for this register. */
5727
5728 if (REGNO_QTY_VALID_P (REGNO (dest)))
5729 {
5730 int dest_q = REG_QTY (REGNO (dest));
5731 struct qty_table_elem *dest_ent = &qty_table[dest_q];
5732
5733 if (dest_ent->const_rtx == const0_rtx)
5734 {
5735 /* See if we previously had a REG_WAS_0 note. */
5736 rtx note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
5737 rtx const_insn = dest_ent->const_insn;
5738
5739 if ((tem = single_set (const_insn)) != 0
5740 && rtx_equal_p (SET_DEST (tem), dest))
5741 {
5742 if (note)
5743 XEXP (note, 0) = const_insn;
5744 else
5745 REG_NOTES (insn)
5746 = gen_rtx_INSN_LIST (REG_WAS_0, const_insn,
5747 REG_NOTES (insn));
5748 }
5749 }
5750 }
5751 }
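      /* Sketch (insn and pseudo numbers hypothetical): if insn 10 was
	 (set (reg 105) (const_int 0)) and the current insn stores some
	 other constant in reg 105, the current insn acquires
	 (insn_list:REG_WAS_0 10), recording that the register held zero
	 immediately before this store.  */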
5752
5753 /* Now deal with the destination. */
5754 do_not_record = 0;
5755
5756 /* Look within any SIGN_EXTRACT or ZERO_EXTRACT
5757 to the MEM or REG within it. */
5758 while (GET_CODE (dest) == SIGN_EXTRACT
5759 || GET_CODE (dest) == ZERO_EXTRACT
5760 || GET_CODE (dest) == SUBREG
5761 || GET_CODE (dest) == STRICT_LOW_PART)
5762 dest = XEXP (dest, 0);
5763
5764 sets[i].inner_dest = dest;
5765
5766 if (GET_CODE (dest) == MEM)
5767 {
5768#ifdef PUSH_ROUNDING
5769 /* Stack pushes invalidate the stack pointer. */
5770 rtx addr = XEXP (dest, 0);
5771 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
5772 && XEXP (addr, 0) == stack_pointer_rtx)
5773 invalidate (stack_pointer_rtx, Pmode);
5774#endif
5775 dest = fold_rtx (dest, insn);
5776 }
5777
5778 /* Compute the hash code of the destination now,
5779 before the effects of this instruction are recorded,
5780 since the register values used in the address computation
5781 are those before this instruction. */
5782 sets[i].dest_hash = HASH (dest, mode);
5783
5784 /* Don't enter a bit-field in the hash table
5785 because the value in it after the store
5786 may not equal what was stored, due to truncation. */
5787
5788 if (GET_CODE (SET_DEST (sets[i].rtl)) == ZERO_EXTRACT
5789 || GET_CODE (SET_DEST (sets[i].rtl)) == SIGN_EXTRACT)
5790 {
5791 rtx width = XEXP (SET_DEST (sets[i].rtl), 1);
5792
5793 if (src_const != 0 && GET_CODE (src_const) == CONST_INT
5794 && GET_CODE (width) == CONST_INT
5795 && INTVAL (width) < HOST_BITS_PER_WIDE_INT
5796 && ! (INTVAL (src_const)
5797 & ((HOST_WIDE_INT) (-1) << INTVAL (width))))
5798 /* Exception: if the value is constant,
5799 and it won't be truncated, record it. */
5800 ;
5801 else
5802 {
5803 /* This is chosen so that the destination will be invalidated
5804 but no new value will be recorded.
5805 We must invalidate because sometimes constant
5806 values can be recorded for bitfields. */
5807 sets[i].src_elt = 0;
5808 sets[i].src_volatile = 1;
5809 src_eqv = 0;
5810 src_eqv_elt = 0;
5811 }
5812 }
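      /* E.g., storing (const_int 3) into a two-bit ZERO_EXTRACT is
	 recorded, since 3 survives truncation to two bits, while
	 (const_int 5) would read back as 1 and so falls into the
	 invalidation arm above.  */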
5813
5814 /* If only one set in a JUMP_INSN and it is now a no-op, we can delete
5815 the insn. */
5816 else if (n_sets == 1 && dest == pc_rtx && src == pc_rtx)
5817 {
5818 /* One less use of the label this insn used to jump to. */
5819 delete_insn (insn);
5820 cse_jumps_altered = 1;
5821 /* No more processing for this set. */
5822 sets[i].rtl = 0;
5823 }
5824
5825 /* If this SET is now setting PC to a label, we know it used to
5826 be a conditional or computed branch. */
5827 else if (dest == pc_rtx && GET_CODE (src) == LABEL_REF)
5828 {
5829 /* Now emit a BARRIER after the unconditional jump. */
5830 if (NEXT_INSN (insn) == 0
5831 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5832 emit_barrier_after (insn);
5833
5834 /* We reemit the jump in as many cases as possible just in
5835 case the form of an unconditional jump is significantly
5836 different from that of a computed jump or conditional jump.
5837
5838 If this insn has multiple sets, then reemitting the
5839 jump is nontrivial. So instead we just force rerecognition
5840 and hope for the best. */
5841 if (n_sets == 1)
5842 {
5843 rtx new = emit_jump_insn_before (gen_jump (XEXP (src, 0)), insn);
5844
5845 JUMP_LABEL (new) = XEXP (src, 0);
5846 LABEL_NUSES (XEXP (src, 0))++;
5847 insn = new;
5848
5849 /* Now emit a BARRIER after the unconditional jump. */
5850 if (NEXT_INSN (insn) == 0
5851 || GET_CODE (NEXT_INSN (insn)) != BARRIER)
5852 emit_barrier_after (insn);
5853 }
5854 else
5855 INSN_CODE (insn) = -1;
5856
5857 never_reached_warning (insn, NULL);
5858
5859 /* Do not bother deleting any unreachable code,
5860 let jump/flow do that. */
5861
5862 cse_jumps_altered = 1;
5863 sets[i].rtl = 0;
5864 }
5865
5866 /* If destination is volatile, invalidate it and then do no further
5867 processing for this assignment. */
5868
5869 else if (do_not_record)
5870 {
5871 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
5872 invalidate (dest, VOIDmode);
5873 else if (GET_CODE (dest) == MEM)
5874 {
5875 /* Outgoing arguments for a libcall don't
5876 affect any recorded expressions. */
5877 if (! libcall_insn || insn == libcall_insn)
5878 invalidate (dest, VOIDmode);
5879 }
5880 else if (GET_CODE (dest) == STRICT_LOW_PART
5881 || GET_CODE (dest) == ZERO_EXTRACT)
5882 invalidate (XEXP (dest, 0), GET_MODE (dest));
5883 sets[i].rtl = 0;
5884 }
5885
5886 if (sets[i].rtl != 0 && dest != SET_DEST (sets[i].rtl))
5887 sets[i].dest_hash = HASH (SET_DEST (sets[i].rtl), mode);
5888
5889#ifdef HAVE_cc0
5890 /* If setting CC0, record what it was set to, or a constant, if it
5891 is equivalent to a constant. If it is being set to a floating-point
5892 value, make a COMPARE with the appropriate constant of 0. If we
5893 don't do this, later code can interpret this as a test against
5894 const0_rtx, which can cause problems if we try to put it into an
5895 insn as a floating-point operand. */
5896 if (dest == cc0_rtx)
5897 {
5898 this_insn_cc0 = src_const && mode != VOIDmode ? src_const : src;
5899 this_insn_cc0_mode = mode;
5900 if (FLOAT_MODE_P (mode))
5901 this_insn_cc0 = gen_rtx_COMPARE (VOIDmode, this_insn_cc0,
5902 CONST0_RTX (mode));
5903 }
5904#endif
5905 }
5906
5907 /* Now enter all non-volatile source expressions in the hash table
5908 if they are not already present.
5909 Record their equivalence classes in src_elt.
5910 This way we can insert the corresponding destinations into
5911 the same classes even if the actual sources are no longer in them
5912 (having been invalidated). */
5913
5914 if (src_eqv && src_eqv_elt == 0 && sets[0].rtl != 0 && ! src_eqv_volatile
5915 && ! rtx_equal_p (src_eqv, SET_DEST (sets[0].rtl)))
5916 {
5917 struct table_elt *elt;
5918 struct table_elt *classp = sets[0].src_elt;
5919 rtx dest = SET_DEST (sets[0].rtl);
5920 enum machine_mode eqvmode = GET_MODE (dest);
5921
5922 if (GET_CODE (dest) == STRICT_LOW_PART)
5923 {
5924 eqvmode = GET_MODE (SUBREG_REG (XEXP (dest, 0)));
5925 classp = 0;
5926 }
5927 if (insert_regs (src_eqv, classp, 0))
5928 {
5929 rehash_using_reg (src_eqv);
5930 src_eqv_hash = HASH (src_eqv, eqvmode);
5931 }
5932 elt = insert (src_eqv, classp, src_eqv_hash, eqvmode);
5933 elt->in_memory = src_eqv_in_memory;
5934 src_eqv_elt = elt;
5935
5936 /* Check to see if src_eqv_elt is the same as a set source which
5937 does not yet have an elt, and if so set the elt of the set source
5938 to src_eqv_elt. */
5939 for (i = 0; i < n_sets; i++)
5940 if (sets[i].rtl && sets[i].src_elt == 0
5941 && rtx_equal_p (SET_SRC (sets[i].rtl), src_eqv))
5942 sets[i].src_elt = src_eqv_elt;
5943 }
5944
5945 for (i = 0; i < n_sets; i++)
5946 if (sets[i].rtl && ! sets[i].src_volatile
5947 && ! rtx_equal_p (SET_SRC (sets[i].rtl), SET_DEST (sets[i].rtl)))
5948 {
5949 if (GET_CODE (SET_DEST (sets[i].rtl)) == STRICT_LOW_PART)
5950 {
5951 /* REG_EQUAL in setting a STRICT_LOW_PART
5952 gives an equivalent for the entire destination register,
5953 not just for the subreg being stored in now.
5954 This is a more interesting equivalence, so we arrange later
5955 to treat the entire reg as the destination. */
5956 sets[i].src_elt = src_eqv_elt;
5957 sets[i].src_hash = src_eqv_hash;
5958 }
5959 else
5960 {
5961 /* Insert source and constant equivalent into hash table, if not
5962 already present. */
5963 struct table_elt *classp = src_eqv_elt;
5964 rtx src = sets[i].src;
5965 rtx dest = SET_DEST (sets[i].rtl);
5966 enum machine_mode mode
5967 = GET_MODE (src) == VOIDmode ? GET_MODE (dest) : GET_MODE (src);
5968
5969 if (sets[i].src_elt == 0)
5970 {
5971 /* Don't put a hard register source into the table if this is
5972 the last insn of a libcall. In this case, we only need
5973 to put src_eqv_elt in src_elt. */
5974 if (! find_reg_note (insn, REG_RETVAL, NULL_RTX))
5975 {
5976 struct table_elt *elt;
5977
5978 /* Note that these insert_regs calls cannot remove
5979 any of the src_elt's, because they would have failed to
5980 match if not still valid. */
5981 if (insert_regs (src, classp, 0))
5982 {
5983 rehash_using_reg (src);
5984 sets[i].src_hash = HASH (src, mode);
5985 }
5986 elt = insert (src, classp, sets[i].src_hash, mode);
5987 elt->in_memory = sets[i].src_in_memory;
5988 sets[i].src_elt = classp = elt;
5989 }
5990 else
5991 sets[i].src_elt = classp;
5992 }
5993 if (sets[i].src_const && sets[i].src_const_elt == 0
5994 && src != sets[i].src_const
5995 && ! rtx_equal_p (sets[i].src_const, src))
5996 sets[i].src_elt = insert (sets[i].src_const, classp,
5997 sets[i].src_const_hash, mode);
5998 }
5999 }
6000 else if (sets[i].src_elt == 0)
6001 /* If we did not insert the source into the hash table (e.g., it was
6002 volatile), note the equivalence class for the REG_EQUAL value, if any,
6003 so that the destination goes into that class. */
6004 sets[i].src_elt = src_eqv_elt;
6005
6006 invalidate_from_clobbers (x);
6007
6008 /* Some registers are invalidated by subroutine calls. Memory is
6009 invalidated by non-constant calls. */
6010
6011 if (GET_CODE (insn) == CALL_INSN)
6012 {
6013 if (! CONST_OR_PURE_CALL_P (insn))
6014 invalidate_memory ();
6015 invalidate_for_call ();
6016 }
6017
6018 /* Now invalidate everything set by this instruction.
6019 If a SUBREG or other funny destination is being set,
6020 sets[i].rtl is still nonzero, so here we invalidate the register
6021 of which a part is being set.
6022
6023 for (i = 0; i < n_sets; i++)
6024 if (sets[i].rtl)
6025 {
6026 /* We can't use the inner dest, because the mode associated with
6027 a ZERO_EXTRACT is significant. */
6028 rtx dest = SET_DEST (sets[i].rtl);
6029
6030 /* Needed for registers to remove the register from its
6031 previous quantity's chain.
6032 Needed for memory if this is a nonvarying address, unless
6033 we have just done an invalidate_memory that covers even those. */
6034 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6035 invalidate (dest, VOIDmode);
6036 else if (GET_CODE (dest) == MEM)
6037 {
6038 /* Outgoing arguments for a libcall don't
6039 affect any recorded expressions. */
6040 if (! libcall_insn || insn == libcall_insn)
6041 invalidate (dest, VOIDmode);
6042 }
6043 else if (GET_CODE (dest) == STRICT_LOW_PART
6044 || GET_CODE (dest) == ZERO_EXTRACT)
6045 invalidate (XEXP (dest, 0), GET_MODE (dest));
6046 }
6047
6048 /* A volatile ASM invalidates everything. */
6049 if (GET_CODE (insn) == INSN
6050 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
6051 && MEM_VOLATILE_P (PATTERN (insn)))
6052 flush_hash_table ();
6053
6054 /* Make sure registers mentioned in destinations
6055 are safe for use in an expression to be inserted.
6056 This removes from the hash table
6057 any invalid entry that refers to one of these registers.
6058
6059 We don't care about the return value from mention_regs because
6060 we are going to hash the SET_DEST values unconditionally. */
6061
6062 for (i = 0; i < n_sets; i++)
6063 {
6064 if (sets[i].rtl)
6065 {
6066 rtx x = SET_DEST (sets[i].rtl);
6067
6068 if (GET_CODE (x) != REG)
6069 mention_regs (x);
6070 else
6071 {
6072 /* We used to rely on all references to a register becoming
6073 inaccessible when a register changes to a new quantity,
6074 since that changes the hash code. However, that is not
6075 safe, since after HASH_SIZE new quantities we get a
6076 hash 'collision' of a register with its own invalid
6077 entries. And since SUBREGs have been changed not to
6078 change their hash code with the hash code of the register,
6079 it wouldn't work any longer at all. So we have to check
6080 for any invalid references lying around now.
6081 This code is similar to the REG case in mention_regs,
6082 but it knows that reg_tick has been incremented, and
6083 it leaves reg_in_table as -1. */
6084 unsigned int regno = REGNO (x);
6085 unsigned int endregno
6086 = regno + (regno >= FIRST_PSEUDO_REGISTER ? 1
6087 : HARD_REGNO_NREGS (regno, GET_MODE (x)));
6088 unsigned int i;
6089
6090 for (i = regno; i < endregno; i++)
6091 {
6092 if (REG_IN_TABLE (i) >= 0)
6093 {
6094 remove_invalid_refs (i);
6095 REG_IN_TABLE (i) = -1;
6096 }
6097 }
6098 }
6099 }
6100 }
6101
6102 /* We may have just removed some of the src_elt's from the hash table.
6103 So replace each one with the current head of the same class. */
6104
6105 for (i = 0; i < n_sets; i++)
6106 if (sets[i].rtl)
6107 {
6108 if (sets[i].src_elt && sets[i].src_elt->first_same_value == 0)
6109 /* If elt was removed, find current head of same class,
6110 or 0 if nothing remains of that class. */
6111 {
6112 struct table_elt *elt = sets[i].src_elt;
6113
6114 while (elt && elt->prev_same_value)
6115 elt = elt->prev_same_value;
6116
6117 while (elt && elt->first_same_value == 0)
6118 elt = elt->next_same_value;
6119 sets[i].src_elt = elt ? elt->first_same_value : 0;
6120 }
6121 }
6122
6123 /* Now insert the destinations into their equivalence classes. */
6124
6125 for (i = 0; i < n_sets; i++)
6126 if (sets[i].rtl)
6127 {
6128 rtx dest = SET_DEST (sets[i].rtl);
6129 rtx inner_dest = sets[i].inner_dest;
6130 struct table_elt *elt;
6131
6132 /* Don't record value if we are not supposed to risk allocating
6133 floating-point values in registers that might be wider than
6134 memory. */
6135 if ((flag_float_store
6136 && GET_CODE (dest) == MEM
6137 && FLOAT_MODE_P (GET_MODE (dest)))
6138 /* Don't record BLKmode values, because we don't know the
6139 size of it, and can't be sure that other BLKmode values
6140 have the same or smaller size. */
6141 || GET_MODE (dest) == BLKmode
6142 /* Don't record values of destinations set inside a libcall block
6143 since we might delete the libcall. Things should have been set
6144 up so we won't want to reuse such a value, but we play it safe
6145 here. */
6146 || libcall_insn
6147 /* If we didn't put a REG_EQUAL value or a source into the hash
6148 table, there is no point in recording DEST. */
6149 || sets[i].src_elt == 0
6150 /* If DEST is a paradoxical SUBREG and SRC is a ZERO_EXTEND
6151 or SIGN_EXTEND, don't record DEST since it can cause
6152 some tracking to be wrong.
6153
6154 ??? Think about this more later. */
6155 || (GET_CODE (dest) == SUBREG
6156 && (GET_MODE_SIZE (GET_MODE (dest))
6157 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6158 && (GET_CODE (sets[i].src) == SIGN_EXTEND
6159 || GET_CODE (sets[i].src) == ZERO_EXTEND)))
6160 continue;
6161
6162 /* STRICT_LOW_PART isn't part of the value BEING set,
6163 and neither is the SUBREG inside it.
6164 Note that in this case SETS[I].SRC_ELT is really SRC_EQV_ELT. */
6165 if (GET_CODE (dest) == STRICT_LOW_PART)
6166 dest = SUBREG_REG (XEXP (dest, 0));
6167
6168 if (GET_CODE (dest) == REG || GET_CODE (dest) == SUBREG)
6169 /* Registers must also be inserted into chains for quantities. */
6170 if (insert_regs (dest, sets[i].src_elt, 1))
6171 {
6172 /* If `insert_regs' changes something, the hash code must be
6173 recalculated. */
6174 rehash_using_reg (dest);
6175 sets[i].dest_hash = HASH (dest, GET_MODE (dest));
6176 }
6177
6178 if (GET_CODE (inner_dest) == MEM
6179 && GET_CODE (XEXP (inner_dest, 0)) == ADDRESSOF)
6180 /* Given (SET (MEM (ADDRESSOF (X))) Y) we don't want to say
6181 that (MEM (ADDRESSOF (X))) is equivalent to Y.
6182 Consider the case in which the address of the MEM is
6183 passed to a function, which alters the MEM. Then, if we
6184 later use Y instead of the MEM we'll miss the update. */
6185 elt = insert (dest, 0, sets[i].dest_hash, GET_MODE (dest));
6186 else
6187 elt = insert (dest, sets[i].src_elt,
6188 sets[i].dest_hash, GET_MODE (dest));
6189
6190 elt->in_memory = (GET_CODE (sets[i].inner_dest) == MEM
6191 && (! RTX_UNCHANGING_P (sets[i].inner_dest)
6192 || FIXED_BASE_PLUS_P (XEXP (sets[i].inner_dest,
6193 0))));
6194
6195 /* If we have (set (subreg:m1 (reg:m2 foo) 0) (bar:m1)), M1 is no
6196 narrower than M2, and both M1 and M2 are the same number of words,
6197 we are also doing (set (reg:m2 foo) (subreg:m2 (bar:m1) 0)) so
6198 make that equivalence as well.
6199
6200 However, applying gen_lowpart_if_possible to one of BAR's
6201 equivalences may produce a simpler value than applying it to BAR
6202 itself (e.g., if BAR was ZERO_EXTENDed from M2), so we scan all
6203 of BAR's equivalences. If we don't get a simplified form, make
6204 the SUBREG. It will not be used in an equivalence, but will
6205 cause two similar assignments to be detected.
6206
6207 Note the loop below will find SUBREG_REG (DEST) since we have
6208 already entered SRC and DEST of the SET in the table. */
6209
6210 if (GET_CODE (dest) == SUBREG
6211 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) - 1)
6212 / UNITS_PER_WORD)
6213 == (GET_MODE_SIZE (GET_MODE (dest)) - 1) / UNITS_PER_WORD)
6214 && (GET_MODE_SIZE (GET_MODE (dest))
6215 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))))
6216 && sets[i].src_elt != 0)
6217 {
6218 enum machine_mode new_mode = GET_MODE (SUBREG_REG (dest));
6219 struct table_elt *elt, *classp = 0;
6220
6221 for (elt = sets[i].src_elt->first_same_value; elt;
6222 elt = elt->next_same_value)
6223 {
6224 rtx new_src = 0;
6225 unsigned src_hash;
6226 struct table_elt *src_elt;
6227
6228 /* Ignore invalid entries. */
6229 if (GET_CODE (elt->exp) != REG
6230 && ! exp_equiv_p (elt->exp, elt->exp, 1, 0))
6231 continue;
6232
6233 new_src = gen_lowpart_if_possible (new_mode, elt->exp);
6234 if (new_src == 0)
6235 new_src = gen_rtx_SUBREG (new_mode, elt->exp, 0);
6236
6237 src_hash = HASH (new_src, new_mode);
6238 src_elt = lookup (new_src, src_hash, new_mode);
6239
6240 /* Put the new source in the hash table if it isn't
6241 there already. */
6242 if (src_elt == 0)
6243 {
6244 if (insert_regs (new_src, classp, 0))
6245 {
6246 rehash_using_reg (new_src);
6247 src_hash = HASH (new_src, new_mode);
6248 }
6249 src_elt = insert (new_src, classp, src_hash, new_mode);
6250 src_elt->in_memory = elt->in_memory;
6251 }
6252 else if (classp && classp != src_elt->first_same_value)
6253 /* Show that two things that we've seen before are
6254 actually the same. */
6255 merge_equiv_classes (src_elt, classp);
6256
6257 classp = src_elt->first_same_value;
6258 /* Ignore invalid entries. */
6259 while (classp
6260 && GET_CODE (classp->exp) != REG
6261 && ! exp_equiv_p (classp->exp, classp->exp, 1, 0))
6262 classp = classp->next_same_value;
6263 }
6264 }
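	/* Sketch (hypothetical 32-bit target, made-up pseudos): for
	   (set (subreg:SI (reg:HI 110) 0) (reg:SI 111)), SImode is no
	   narrower than HImode and both are one word, so the loop above
	   also enters (subreg:HI (reg:SI 111) 0) in the class of
	   (reg:HI 110).  */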
6265 }
6266
6267 /* Special handling for (set REG0 REG1) where REG0 is the
6268 "cheapest", cheaper than REG1. After cse, REG1 will probably not
6269 be used in the sequel, so (if easily done) change this insn to
6270 (set REG1 REG0) and replace REG1 with REG0 in the previous insn
6271 that computed their value. Then REG1 will become a dead store
6272 and won't cloud the situation for later optimizations.
6273
6274 Do not make this change if REG1 is a hard register, because it will
6275 then be used in the sequel and we may be changing a two-operand insn
6276 into a three-operand insn.
6277
6278 Also do not do this if we are operating on a copy of INSN.
6279
6280 Also don't do this if INSN ends a libcall; this would cause an unrelated
6281 register to be set in the middle of a libcall, and we then get bad code
6282 if the libcall is deleted. */
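/* Sketch of the transformation (pseudo numbers hypothetical):

	(set (reg 121) <expr>)		;; previous insn
	(set (reg 120) (reg 121))	;; this insn; reg 120 heads the class

   becomes

	(set (reg 120) <expr>)
	(set (reg 121) (reg 120))

   so that reg 121 turns into a dead store.  */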
6283
6284 if (n_sets == 1 && sets[0].rtl && GET_CODE (SET_DEST (sets[0].rtl)) == REG
6285 && NEXT_INSN (PREV_INSN (insn)) == insn
6286 && GET_CODE (SET_SRC (sets[0].rtl)) == REG
6287 && REGNO (SET_SRC (sets[0].rtl)) >= FIRST_PSEUDO_REGISTER
6288 && REGNO_QTY_VALID_P (REGNO (SET_SRC (sets[0].rtl))))
6289 {
6290 int src_q = REG_QTY (REGNO (SET_SRC (sets[0].rtl)));
6291 struct qty_table_elem *src_ent = &qty_table[src_q];
6292
6293 if ((src_ent->first_reg == REGNO (SET_DEST (sets[0].rtl)))
6294 && ! find_reg_note (insn, REG_RETVAL, NULL_RTX))
6295 {
6296 rtx prev = prev_nonnote_insn (insn);
6297
6298 /* Do not swap the registers around if the previous instruction
6299 attaches a REG_EQUIV note to REG1.
6300
6301 ??? It's not entirely clear whether we can transfer a REG_EQUIV
6302 from the pseudo that originally shadowed an incoming argument
6303 to another register. Some uses of REG_EQUIV might rely on it
6304 being attached to REG1 rather than REG2.
6305
6306 This section previously turned the REG_EQUIV into a REG_EQUAL
6307 note. We cannot do that because REG_EQUIV may provide an
6308 uninitialised stack slot when REG_PARM_STACK_SPACE is used. */
6309
6310 if (prev != 0 && GET_CODE (prev) == INSN
6311 && GET_CODE (PATTERN (prev)) == SET
6312 && SET_DEST (PATTERN (prev)) == SET_SRC (sets[0].rtl)
6313 && ! find_reg_note (prev, REG_EQUIV, NULL_RTX))
6314 {
6315 rtx dest = SET_DEST (sets[0].rtl);
6316 rtx src = SET_SRC (sets[0].rtl);
6317 rtx note;
6318
6319 validate_change (prev, &SET_DEST (PATTERN (prev)), dest, 1);
6320 validate_change (insn, &SET_DEST (sets[0].rtl), src, 1);
6321 validate_change (insn, &SET_SRC (sets[0].rtl), dest, 1);
6322 apply_change_group ();
6323
6324 /* If there was a REG_WAS_0 note on PREV, remove it. Move
6325 any REG_WAS_0 note on INSN to PREV. */
6326 note = find_reg_note (prev, REG_WAS_0, NULL_RTX);
6327 if (note)
6328 remove_note (prev, note);
6329
6330 note = find_reg_note (insn, REG_WAS_0, NULL_RTX);
6331 if (note)
6332 {
6333 remove_note (insn, note);
6334 XEXP (note, 1) = REG_NOTES (prev);
6335 REG_NOTES (prev) = note;
6336 }
6337
6338 /* If INSN has a REG_EQUAL note, and this note mentions
6339 REG0, then we must delete it, because the value in
6340 REG0 has changed. If the note's value is REG1, we must
6341 also delete it because that is now this insn's dest. */
6342 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
6343 if (note != 0
6344 && (reg_mentioned_p (dest, XEXP (note, 0))
6345 || rtx_equal_p (src, XEXP (note, 0))))
6346 remove_note (insn, note);
6347 }
6348 }
6349 }
6350
6351 /* If this is a conditional jump insn, record any known equivalences due to
6352 the condition being tested. */
6353
6354 last_jump_equiv_class = 0;
6355 if (GET_CODE (insn) == JUMP_INSN
6356 && n_sets == 1 && GET_CODE (x) == SET
6357 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE)
6358 record_jump_equiv (insn, 0);
6359
6360#ifdef HAVE_cc0
6361 /* If the previous insn set CC0 and this insn no longer references CC0,
6362 delete the previous insn. Here we use the fact that nothing expects CC0
6363 to be valid over an insn, which is true until the final pass. */
6364 if (prev_insn && GET_CODE (prev_insn) == INSN
6365 && (tem = single_set (prev_insn)) != 0
6366 && SET_DEST (tem) == cc0_rtx
6367 && ! reg_mentioned_p (cc0_rtx, x))
6368 delete_insn (prev_insn);
6369
6370 prev_insn_cc0 = this_insn_cc0;
6371 prev_insn_cc0_mode = this_insn_cc0_mode;
6372#endif
6373
6374 prev_insn = insn;
6375}
6376
6377
6378/* Remove from the hash table all expressions that reference memory. */
6379
6380static void
6381invalidate_memory ()
6382{
6383 int i;
6384 struct table_elt *p, *next;
6385
6386 for (i = 0; i < HASH_SIZE; i++)
6387 for (p = table[i]; p; p = next)
6388 {
6389 next = p->next_same_hash;
6390 if (p->in_memory)
6391 remove_from_table (p, i);
6392 }
6393}
6394
6395/* If ADDR is an address that implicitly affects the stack pointer, return
6396 1 and update the register tables to show the effect. Else, return 0. */
6397
6398static int
6399addr_affects_sp_p (addr)
6400 rtx addr;
6401{
6402 if (GET_RTX_CLASS (GET_CODE (addr)) == 'a'
6403 && GET_CODE (XEXP (addr, 0)) == REG
6404 && REGNO (XEXP (addr, 0)) == STACK_POINTER_REGNUM)
6405 {
6406 if (REG_TICK (STACK_POINTER_REGNUM) >= 0)
6407 REG_TICK (STACK_POINTER_REGNUM)++;
6408
6409 /* This should be *very* rare. */
6410 if (TEST_HARD_REG_BIT (hard_regs_in_table, STACK_POINTER_REGNUM))
6411 invalidate (stack_pointer_rtx, VOIDmode);
6412
6413 return 1;
6414 }
6415
6416 return 0;
6417}
6418
6419/* Perform invalidation on the basis of everything about an insn
6420 except for invalidating the actual places that are SET in it.
6421 This includes the places CLOBBERed, and anything that might
6422 alias with something that is SET or CLOBBERed.
6423
6424 X is the pattern of the insn. */
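/* E.g., given (parallel [(set (reg 130) ...) (clobber (reg:CC 17))])
   (register numbers hypothetical), the PARALLEL arm below invalidates
   everything known about the clobbered CC register, while the SET is
   handled by the caller.  */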
6425
6426static void
6427invalidate_from_clobbers (x)
6428 rtx x;
6429{
6430 if (GET_CODE (x) == CLOBBER)
6431 {
6432 rtx ref = XEXP (x, 0);
6433 if (ref)
6434 {
6435 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6436 || GET_CODE (ref) == MEM)
6437 invalidate (ref, VOIDmode);
6438 else if (GET_CODE (ref) == STRICT_LOW_PART
6439 || GET_CODE (ref) == ZERO_EXTRACT)
6440 invalidate (XEXP (ref, 0), GET_MODE (ref));
6441 }
6442 }
6443 else if (GET_CODE (x) == PARALLEL)
6444 {
6445 int i;
6446 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
6447 {
6448 rtx y = XVECEXP (x, 0, i);
6449 if (GET_CODE (y) == CLOBBER)
6450 {
6451 rtx ref = XEXP (y, 0);
6452 if (GET_CODE (ref) == REG || GET_CODE (ref) == SUBREG
6453 || GET_CODE (ref) == MEM)
6454 invalidate (ref, VOIDmode);
6455 else if (GET_CODE (ref) == STRICT_LOW_PART
6456 || GET_CODE (ref) == ZERO_EXTRACT)
6457 invalidate (XEXP (ref, 0), GET_MODE (ref));
6458 }
6459 }
6460 }
6461}
6462
6463
6464/* Process X, part of the REG_NOTES of an insn. Look at any REG_EQUAL notes
6465 and replace any registers in them with either an equivalent constant
6466 or the canonical form of the register. If we are inside an address,
6467 only do this if the address remains valid.
6468
6469 OBJECT is 0 except when within a MEM in which case it is the MEM.
6470
6471 Return the replacement for X. */
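/* Sketch (pseudo numbers hypothetical): a REG_EQUAL note containing
   (plus:SI (reg 60) (const_int 4)), where reg 60 is in a class headed
   by reg 58, is rewritten to (plus:SI (reg 58) (const_int 4)); had
   reg 60 been equivalent to a constant, the constant would have been
   substituted instead.  */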
6472
6473static rtx
6474cse_process_notes (x, object)
6475 rtx x;
6476 rtx object;
6477{
6478 enum rtx_code code = GET_CODE (x);
6479 const char *fmt = GET_RTX_FORMAT (code);
6480 int i;
6481
6482 switch (code)
6483 {
6484 case CONST_INT:
6485 case CONST:
6486 case SYMBOL_REF:
6487 case LABEL_REF:
6488 case CONST_DOUBLE:
6489 case CONST_VECTOR:
6490 case PC:
6491 case CC0:
6492 case LO_SUM:
6493 return x;
6494
6495 case MEM:
6496 validate_change (x, &XEXP (x, 0),
6497 cse_process_notes (XEXP (x, 0), x), 0);
6498 return x;
6499
6500 case EXPR_LIST:
6501 case INSN_LIST:
6502 if (REG_NOTE_KIND (x) == REG_EQUAL)
6503 XEXP (x, 0) = cse_process_notes (XEXP (x, 0), NULL_RTX);
6504 if (XEXP (x, 1))
6505 XEXP (x, 1) = cse_process_notes (XEXP (x, 1), NULL_RTX);
6506 return x;
6507
6508 case SIGN_EXTEND:
6509 case ZERO_EXTEND:
6510 case SUBREG:
6511 {
6512 rtx new = cse_process_notes (XEXP (x, 0), object);
6513 /* We don't substitute VOIDmode constants into these rtx,
6514 since they would impede folding. */
6515 if (GET_MODE (new) != VOIDmode)
6516 validate_change (object, &XEXP (x, 0), new, 0);
6517 return x;
6518 }
6519
6520 case REG:
6521 i = REG_QTY (REGNO (x));
6522
6523 /* Return a constant or a constant register. */
6524 if (REGNO_QTY_VALID_P (REGNO (x)))
6525 {
6526 struct qty_table_elem *ent = &qty_table[i];
6527
6528 if (ent->const_rtx != NULL_RTX
6529 && (CONSTANT_P (ent->const_rtx)
6530 || GET_CODE (ent->const_rtx) == REG))
6531 {
6532 rtx new = gen_lowpart_if_possible (GET_MODE (x), ent->const_rtx);
6533 if (new)
6534 return new;
6535 }
6536 }
6537
6538 /* Otherwise, canonicalize this register. */
6539 return canon_reg (x, NULL_RTX);
6540
6541 default:
6542 break;
6543 }
6544
6545 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6546 if (fmt[i] == 'e')
6547 validate_change (object, &XEXP (x, i),
6548 cse_process_notes (XEXP (x, i), object), 0);
6549
6550 return x;
6551}
6552
6553
6554/* Find common subexpressions between the end test of a loop and the beginning
6555 of the loop. LOOP_START is the CODE_LABEL at the start of a loop.
6556
6557 Often we have a loop where an expression in the exit test is used
6558 in the body of the loop. For example "while (*p) *q++ = *p++;".
6559 Because of the way we duplicate the loop exit test in front of the loop,
6560 however, we don't detect that common subexpression. This will be caught
6561 when global cse is implemented, but this is quite a common case.
6562
6563 This function handles the most common cases of these common expressions.
6564 It is called after we have processed the basic block ending with the
6565 NOTE_INSN_LOOP_END note that ends a loop and the previous JUMP_INSN
6566 jumps to a label used only once. */
6567
6568static void
6569cse_around_loop (loop_start)
6570 rtx loop_start;
6571{
6572 rtx insn;
6573 int i;
6574 struct table_elt *p;
6575
6576 /* If the jump at the end of the loop doesn't go to the start, we don't
6577 do anything. */
6578 for (insn = PREV_INSN (loop_start);
6579 insn && (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0);
6580 insn = PREV_INSN (insn))
6581 ;
6582
6583 if (insn == 0
6584 || GET_CODE (insn) != NOTE
6585 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_BEG)
6586 return;
6587
6588 /* If the last insn of the loop (the end test) was an NE comparison,
6589 we will interpret it as an EQ comparison, since we fell through
6590 the loop. Any equivalences resulting from that comparison are
6591 therefore not valid and must be invalidated. */
6592 if (last_jump_equiv_class)
6593 for (p = last_jump_equiv_class->first_same_value; p;
6594 p = p->next_same_value)
6595 {
6596 if (GET_CODE (p->exp) == MEM || GET_CODE (p->exp) == REG
6597 || (GET_CODE (p->exp) == SUBREG
6598 && GET_CODE (SUBREG_REG (p->exp)) == REG))
6599 invalidate (p->exp, VOIDmode);
6600 else if (GET_CODE (p->exp) == STRICT_LOW_PART
6601 || GET_CODE (p->exp) == ZERO_EXTRACT)
6602 invalidate (XEXP (p->exp, 0), GET_MODE (p->exp));
6603 }
6604
6605 /* Process insns starting after LOOP_START until we hit a CALL_INSN or
6606 a CODE_LABEL (we could handle a CALL_INSN, but it isn't worth it).
6607
6608 The only thing we do with SET_DEST is invalidate entries, so we
6609 can safely process each SET in order. It is slightly less efficient
6610 to do so, but we only want to handle the most common cases.
6611
6612 The gen_move_insn call in cse_set_around_loop may create new pseudos.
6613 These pseudos won't have valid entries in any of the tables indexed
6614 by register number, such as reg_qty. We avoid out-of-range array
6615 accesses by not processing any instructions created after cse started. */
6616
6617 for (insn = NEXT_INSN (loop_start);
6618 GET_CODE (insn) != CALL_INSN && GET_CODE (insn) != CODE_LABEL
6619 && INSN_UID (insn) < max_insn_uid
6620 && ! (GET_CODE (insn) == NOTE
6621 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
6622 insn = NEXT_INSN (insn))
6623 {
6624 if (INSN_P (insn)
6625 && (GET_CODE (PATTERN (insn)) == SET
6626 || GET_CODE (PATTERN (insn)) == CLOBBER))
6627 cse_set_around_loop (PATTERN (insn), insn, loop_start);
6628 else if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == PARALLEL)
6629 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
6630 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET
6631 || GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == CLOBBER)
6632 cse_set_around_loop (XVECEXP (PATTERN (insn), 0, i), insn,
6633 loop_start);
6634 }
6635}
6636
6637
6638/* Process one SET of an insn that was skipped. We ignore CLOBBERs
6639 since they are done elsewhere. This function is called via note_stores. */
6640
6641static void
6642invalidate_skipped_set (dest, set, data)
6643 rtx dest;
6644 rtx set;
6645 void *data ATTRIBUTE_UNUSED;
6646{
6647 enum rtx_code code = GET_CODE (dest);
6648
6649 if (code == MEM
6650 && ! addr_affects_sp_p (dest) /* If this is not a stack push ... */
6651 /* There are times when an address can appear varying and be a PLUS
6652 during this scan when it would be a fixed address were we to know
6653 the proper equivalences. So invalidate all memory if there is
6654 a BLKmode or nonscalar memory reference or a reference to a
6655 variable address. */
6656 && (MEM_IN_STRUCT_P (dest) || GET_MODE (dest) == BLKmode
6657 || cse_rtx_varies_p (XEXP (dest, 0), 0)))
6658 {
6659 invalidate_memory ();
6660 return;
6661 }
6662
6663 if (GET_CODE (set) == CLOBBER
6664#ifdef HAVE_cc0
6665 || dest == cc0_rtx
6666#endif
6667 || dest == pc_rtx)
6668 return;
6669
6670 if (code == STRICT_LOW_PART || code == ZERO_EXTRACT)
6671 invalidate (XEXP (dest, 0), GET_MODE (dest));
6672 else if (code == REG || code == SUBREG || code == MEM)
6673 invalidate (dest, VOIDmode);
6674}
6675
6676/* Invalidate all insns from START up to the end of the function or the
6677 next label. This is called when we wish to CSE around a block that is
6678 conditionally executed. */
6679
6680static void
6681invalidate_skipped_block (start)
6682 rtx start;
6683{
6684 rtx insn;
6685
6686 for (insn = start; insn && GET_CODE (insn) != CODE_LABEL;
6687 insn = NEXT_INSN (insn))
6688 {
6689 if (! INSN_P (insn))
6690 continue;
6691
6692 if (GET_CODE (insn) == CALL_INSN)
6693 {
6694 if (! CONST_OR_PURE_CALL_P (insn))
6695 invalidate_memory ();
6696 invalidate_for_call ();
6697 }
6698
6699 invalidate_from_clobbers (PATTERN (insn));
6700 note_stores (PATTERN (insn), invalidate_skipped_set, NULL);
6701 }
6702}
6703
6704
6705/* If modifying X will modify the value in *DATA (which is really an
6706 `rtx *'), indicate that fact by setting the pointed-to value to
6707 NULL_RTX. */
6708
6709static void
6710cse_check_loop_start (x, set, data)
6711 rtx x;
6712 rtx set ATTRIBUTE_UNUSED;
6713 void *data;
6714{
6715 rtx *cse_check_loop_start_value = (rtx *) data;
6716
6717 if (*cse_check_loop_start_value == NULL_RTX
6718 || GET_CODE (x) == CC0 || GET_CODE (x) == PC)
6719 return;
6720
6721 if ((GET_CODE (x) == MEM && GET_CODE (*cse_check_loop_start_value) == MEM)
6722 || reg_overlap_mentioned_p (x, *cse_check_loop_start_value))
6723 *cse_check_loop_start_value = NULL_RTX;
6724}
6725
6726/* X is a SET or CLOBBER contained in INSN that was found near the start of
6727 a loop that starts with the label at LOOP_START.
6728
6729 If X is a SET, we see if its SET_SRC is currently in our hash table.
6730 If so, we see if it has a value equal to some register used only in the
6731 loop exit code (as marked by jump.c).
6732
6733 If those two conditions are true, we search backwards from the start of
6734 the loop to see if that same value was loaded into a register that still
6735 retains its value at the start of the loop.
6736
6737 If so, we insert an insn after the load to copy the destination of that
6738 load into the equivalent register and (try to) replace our SET_SRC with that
6739 register.
6740
6741 In any event, we invalidate whatever this SET or CLOBBER modifies. */
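/* Rough sketch (pseudo numbers hypothetical): the loop body computes
   (set (reg 140) <expr>), <expr> is known equal to (reg 141), a cheaper
   register used only in the exit test, and an insn P before the loop did
   (set (reg 142) <expr>).  If nothing between P and the loop start
   clobbers <expr>, we emit (set (reg 141) (reg 142)) after P and make
   the body use (reg 141).  */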
6742
6743static void
6744cse_set_around_loop (x, insn, loop_start)
6745 rtx x;
6746 rtx insn;
6747 rtx loop_start;
6748{
6749 struct table_elt *src_elt;
6750
6751 /* If this is a SET, see if we can replace SET_SRC, but ignore SETs that
6752 are setting PC or CC0 or whose SET_SRC is already a register. */
6753 if (GET_CODE (x) == SET
6754 && GET_CODE (SET_DEST (x)) != PC && GET_CODE (SET_DEST (x)) != CC0
6755 && GET_CODE (SET_SRC (x)) != REG)
6756 {
6757 src_elt = lookup (SET_SRC (x),
6758 HASH (SET_SRC (x), GET_MODE (SET_DEST (x))),
6759 GET_MODE (SET_DEST (x)));
6760
6761 if (src_elt)
6762 for (src_elt = src_elt->first_same_value; src_elt;
6763 src_elt = src_elt->next_same_value)
6764 if (GET_CODE (src_elt->exp) == REG && REG_LOOP_TEST_P (src_elt->exp)
6765 && COST (src_elt->exp) < COST (SET_SRC (x)))
6766 {
6767 rtx p, set;
6768
6769 /* Look for an insn in front of LOOP_START that sets
6770 something in the desired mode to SET_SRC (x) before we hit
6771 a label or CALL_INSN. */
6772
6773 for (p = prev_nonnote_insn (loop_start);
6774 p && GET_CODE (p) != CALL_INSN
6775 && GET_CODE (p) != CODE_LABEL;
6776 p = prev_nonnote_insn (p))
6777 if ((set = single_set (p)) != 0
6778 && GET_CODE (SET_DEST (set)) == REG
6779 && GET_MODE (SET_DEST (set)) == src_elt->mode
6780 && rtx_equal_p (SET_SRC (set), SET_SRC (x)))
6781 {
6782 /* We now have to ensure that nothing between P
6783 and LOOP_START modified anything referenced in
6784 SET_SRC (x). We know that nothing within the loop
6785 can modify it, or we would have invalidated it in
6786 the hash table. */
6787 rtx q;
6788 rtx cse_check_loop_start_value = SET_SRC (x);
6789 for (q = p; q != loop_start; q = NEXT_INSN (q))
6790 if (INSN_P (q))
6791 note_stores (PATTERN (q),
6792 cse_check_loop_start,
6793 &cse_check_loop_start_value);
6794
6795 /* If nothing was changed and we can replace our
6796 SET_SRC, add an insn after P to copy its destination
6797 to what we will be replacing SET_SRC with. */
6798 if (cse_check_loop_start_value
6799 && validate_change (insn, &SET_SRC (x),
6800 src_elt->exp, 0))
6801 {
6802 /* If this creates new pseudos, this is unsafe,
6803 because the regno of new pseudo is unsuitable
6804 to index into reg_qty when cse_insn processes
6805 the new insn. Therefore, if a new pseudo was
6806 created, discard this optimization. */
6807 int nregs = max_reg_num ();
6808 rtx move
6809 = gen_move_insn (src_elt->exp, SET_DEST (set));
6810 if (nregs != max_reg_num ())
6811 {
6812 if (! validate_change (insn, &SET_SRC (x),
6813 SET_SRC (set), 0))
6814 abort ();
6815 }
6816 else
6817 emit_insn_after (move, p);
6818 }
6819 break;
6820 }
6821 }
6822 }
6823
6824 /* Deal with the destination of X affecting the stack pointer. */
6825 addr_affects_sp_p (SET_DEST (x));
6826
6827 /* See comment on similar code in cse_insn for explanation of these
6828 tests. */
6829 if (GET_CODE (SET_DEST (x)) == REG || GET_CODE (SET_DEST (x)) == SUBREG
6830 || GET_CODE (SET_DEST (x)) == MEM)
6831 invalidate (SET_DEST (x), VOIDmode);
6832 else if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6833 || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
6834 invalidate (XEXP (SET_DEST (x), 0), GET_MODE (SET_DEST (x)));
6835}
6836
6837
6838/* Find the end of INSN's basic block and return its range,
6839 the total number of SETs in all the insns of the block, the last insn of the
6840 block, and the branch path.
6841
6842 The branch path indicates which branches should be followed. If a non-zero
6843 path size is specified, the block should be rescanned and a different set
6844 of branches will be taken. The branch path is only used if
6845 FLAG_CSE_FOLLOW_JUMPS or FLAG_CSE_SKIP_BLOCKS is non-zero.
6846
6847 DATA is a pointer to a struct cse_basic_block_data, defined below, that is
6848 used to describe the block. It is filled in with the information about
6849 the current block. The incoming structure's branch path, if any, is used
6850 to construct the output branch path. */
6851
6852void
6853cse_end_of_basic_block (insn, data, follow_jumps, after_loop, skip_blocks)
6854 rtx insn;
6855 struct cse_basic_block_data *data;
6856 int follow_jumps;
6857 int after_loop;
6858 int skip_blocks;
6859{
6860 rtx p = insn, q;
6861 int nsets = 0;
6862 int low_cuid = INSN_CUID (insn), high_cuid = INSN_CUID (insn);
6863 rtx next = INSN_P (insn) ? insn : next_real_insn (insn);
6864 int path_size = data->path_size;
6865 int path_entry = 0;
6866 int i;
6867
6868 /* Update the previous branch path, if any. If the last branch was
6869 previously TAKEN, mark it NOT_TAKEN. If it was previously NOT_TAKEN,
6870 shorten the path by one and look at the previous branch. We know that
6871 at least one branch must have been taken if PATH_SIZE is non-zero. */
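  /* E.g., a previous path [TAKEN, TAKEN] is revisited as
     [TAKEN, NOT_TAKEN]; [TAKEN, NOT_TAKEN] shortens to [NOT_TAKEN];
     once every entry is NOT_TAKEN the path is exhausted.  */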
6872 while (path_size > 0)
6873 {
6874 if (data->path[path_size - 1].status != NOT_TAKEN)
6875 {
6876 data->path[path_size - 1].status = NOT_TAKEN;
6877 break;
6878 }
6879 else
6880 path_size--;
6881 }
6882
6883 /* If the first instruction is marked with QImode, that means we've
6884 already processed this block. Our caller will look at DATA->LAST
6885 to figure out where to go next. We want to return the next block
6886 in the instruction stream, not some branched-to block somewhere
6887 else. We accomplish this by pretending our caller forbade us to
6888 follow jumps or skip blocks.
6889 if (GET_MODE (insn) == QImode)
6890 follow_jumps = skip_blocks = 0;
6891
6892 /* Scan to end of this basic block. */
6893 while (p && GET_CODE (p) != CODE_LABEL)
6894 {
6895 /* Don't cse out the end of a loop. This makes a difference
6896 only for the unusual loops that always execute at least once;
6897 all other loops have labels there so we will stop in any case.
6898 Cse'ing out the end of the loop is dangerous because it
6899 might cause an invariant expression inside the loop
6900 to be reused after the end of the loop. This would make it
6901 hard to move the expression out of the loop in loop.c,
6902 especially if it is one of several equivalent expressions
6903 and loop.c would like to eliminate it.
6904
6905 If we are running after loop.c has finished, we can ignore
6906 the NOTE_INSN_LOOP_END. */
6907
6908 if (! after_loop && GET_CODE (p) == NOTE
6909 && NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END)
6910 break;
6911
6912 /* Don't cse over a call to setjmp; on some machines (eg VAX)
6913 the regs restored by the longjmp come from
6914 a later time than the setjmp. */
6915 if (PREV_INSN (p) && GET_CODE (PREV_INSN (p)) == CALL_INSN
6916 && find_reg_note (PREV_INSN (p), REG_SETJMP, NULL))
6917 break;
6918
6919 /* A PARALLEL can have lots of SETs in it,
6920 especially if it is really an ASM_OPERANDS. */
6921 if (INSN_P (p) && GET_CODE (PATTERN (p)) == PARALLEL)
6922 nsets += XVECLEN (PATTERN (p), 0);
6923 else if (GET_CODE (p) != NOTE)
6924 nsets += 1;
6925
6926 /* Ignore insns made by CSE; they cannot affect the boundaries of
6927 the basic block. */
6928
6929 if (INSN_UID (p) <= max_uid && INSN_CUID (p) > high_cuid)
6930 high_cuid = INSN_CUID (p);
6931 if (INSN_UID (p) <= max_uid && INSN_CUID (p) < low_cuid)
6932 low_cuid = INSN_CUID (p);
6933
6934 /* See if this insn is in our branch path. If it is and we are to
6935 take it, do so. */
6936 if (path_entry < path_size && data->path[path_entry].branch == p)
6937 {
6938 if (data->path[path_entry].status != NOT_TAKEN)
6939 p = JUMP_LABEL (p);
6940
6941 /* Point to next entry in path, if any. */
6942 path_entry++;
6943 }
6944
6945 /* If this is a conditional jump, we can follow it if -fcse-follow-jumps
6946 was specified, we haven't reached our maximum path length, there are
6947 insns following the target of the jump, this is the only use of the
6948 jump label, and the target label is preceded by a BARRIER.
6949
6950 Alternatively, we can follow the jump if it branches around a
6951 block of code and there are no other branches into the block.
6952 In this case invalidate_skipped_block will be called to invalidate any
6953 registers set in the block when following the jump. */
6954
6955 else if ((follow_jumps || skip_blocks) && path_size < PATHLENGTH - 1
6956 && GET_CODE (p) == JUMP_INSN
6957 && GET_CODE (PATTERN (p)) == SET
6958 && GET_CODE (SET_SRC (PATTERN (p))) == IF_THEN_ELSE
6959 && JUMP_LABEL (p) != 0
6960 && LABEL_NUSES (JUMP_LABEL (p)) == 1
6961 && NEXT_INSN (JUMP_LABEL (p)) != 0)
6962 {
6963 for (q = PREV_INSN (JUMP_LABEL (p)); q; q = PREV_INSN (q))
6964 if ((GET_CODE (q) != NOTE
6965 || NOTE_LINE_NUMBER (q) == NOTE_INSN_LOOP_END
6966 || (PREV_INSN (q) && GET_CODE (PREV_INSN (q)) == CALL_INSN
6967 && find_reg_note (PREV_INSN (q), REG_SETJMP, NULL)))
6968 && (GET_CODE (q) != CODE_LABEL || LABEL_NUSES (q) != 0))
6969 break;
6970
6971 /* If we ran into a BARRIER, this code is an extension of the
6972 basic block when the branch is taken. */
6973 if (follow_jumps && q != 0 && GET_CODE (q) == BARRIER)
6974 {
6975 /* Don't allow ourself to keep walking around an
6976 always-executed loop. */
6977 if (next_real_insn (q) == next)
6978 {
6979 p = NEXT_INSN (p);
6980 continue;
6981 }
6982
6983 /* Similarly, don't put a branch in our path more than once. */
6984 for (i = 0; i < path_entry; i++)
6985 if (data->path[i].branch == p)
6986 break;
6987
6988 if (i != path_entry)
6989 break;
6990
6991 data->path[path_entry].branch = p;
6992 data->path[path_entry++].status = TAKEN;
6993
6994 /* This branch now ends our path. It was possible that we
6995 didn't see this branch the last time around (when the
6996 insn in front of the target was a JUMP_INSN that was
6997 turned into a no-op). */
6998 path_size = path_entry;
6999
7000 p = JUMP_LABEL (p);
7001 /* Mark block so we won't scan it again later. */
7002 PUT_MODE (NEXT_INSN (p), QImode);
7003 }
7004 /* Detect a branch around a block of code. */
7005 else if (skip_blocks && q != 0 && GET_CODE (q) != CODE_LABEL)
7006 {
7007 rtx tmp;
7008
7009 if (next_real_insn (q) == next)
7010 {
7011 p = NEXT_INSN (p);
7012 continue;
7013 }
7014
7015 for (i = 0; i < path_entry; i++)
7016 if (data->path[i].branch == p)
7017 break;
7018
7019 if (i != path_entry)
7020 break;
7021
7022 /* This is no_labels_between_p (p, q) with an added check for
7023 reaching the end of a function (in case Q precedes P). */
7024 for (tmp = NEXT_INSN (p); tmp && tmp != q; tmp = NEXT_INSN (tmp))
7025 if (GET_CODE (tmp) == CODE_LABEL)
7026 break;
7027
7028 if (tmp == q)
7029 {
7030 data->path[path_entry].branch = p;
7031 data->path[path_entry++].status = AROUND;
7032
7033 path_size = path_entry;
7034
7035 p = JUMP_LABEL (p);
7036 /* Mark block so we won't scan it again later. */
7037 PUT_MODE (NEXT_INSN (p), QImode);
7038 }
7039 }
7040 }
7041 p = NEXT_INSN (p);
7042 }
7043
7044 data->low_cuid = low_cuid;
7045 data->high_cuid = high_cuid;
7046 data->nsets = nsets;
7047 data->last = p;
7048
7049 /* If all jumps in the path are not taken, set our path length to zero
7050 so a rescan won't be done. */
7051 for (i = path_size - 1; i >= 0; i--)
7052 if (data->path[i].status != NOT_TAKEN)
7053 break;
7054
7055 if (i == -1)
7056 data->path_size = 0;
7057 else
7058 data->path_size = path_size;
7059
7060 /* End the current branch path. */
7061 data->path[path_size].branch = 0;
7062}
7063
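
/* An illustrative aside, not part of the pass: the path-update loop at
   the top of cse_end_of_basic_block enumerates alternative branch paths
   the way one decrements a binary counter -- pop trailing NOT_TAKEN
   entries, then demote the most recent taken branch to NOT_TAKEN.  A
   stand-alone sketch with a hypothetical two-state status (the real
   code also has an AROUND status, which the loop treats like TAKEN):  */
#if 0
enum toy_taken { TOY_TAKEN, TOY_NOT_TAKEN };

/* Rewrite PATH and *SIZE in place to describe the next path to try;
   *SIZE reaches 0 when every alternative has been explored.  */
static void
toy_next_path (path, size)
     enum toy_taken *path;
     int *size;
{
  while (*size > 0)
    {
      if (path[*size - 1] != TOY_NOT_TAKEN)
        {
          path[*size - 1] = TOY_NOT_TAKEN;
          break;
        }
      else
        --*size;
    }
}
#endif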

/* Perform cse on the instructions of a function.
   F is the first instruction.
   NREGS is one plus the highest pseudo-reg number used in the function.

   AFTER_LOOP is 1 if this is the cse call done after loop optimization
   (only if -frerun-cse-after-loop).

   Returns 1 if jump_optimize should be redone due to simplifications
   in conditional jump instructions.  */

int
cse_main (f, nregs, after_loop, file)
     rtx f;
     int nregs;
     int after_loop;
     FILE *file;
{
  struct cse_basic_block_data val;
  rtx insn = f;
  int i;

  cse_jumps_altered = 0;
  recorded_label_ref = 0;
  constant_pool_entries_cost = 0;
  val.path_size = 0;

  init_recog ();
  init_alias_analysis ();

  max_reg = nregs;

  max_insn_uid = get_max_uid ();

  reg_eqv_table = (struct reg_eqv_elem *)
    xmalloc (nregs * sizeof (struct reg_eqv_elem));

#ifdef LOAD_EXTEND_OP

  /* Allocate scratch rtl here.  cse_insn will fill in the memory reference
     and change the code and mode as appropriate.  */
  memory_extend_rtx = gen_rtx_ZERO_EXTEND (VOIDmode, NULL_RTX);
#endif

  /* Reset the counter indicating how many elements have been made
     thus far.  */
  n_elements_made = 0;

  /* Find the largest uid.  */

  max_uid = get_max_uid ();
  uid_cuid = (int *) xcalloc (max_uid + 1, sizeof (int));

  /* Compute the mapping from uids to cuids.
     CUIDs are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.
     Don't assign cuids to line-number NOTEs, so that the distance in cuids
     between two insns is not affected by -g.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) != NOTE
          || NOTE_LINE_NUMBER (insn) < 0)
        INSN_CUID (insn) = ++i;
      else
        /* Give a line number note the same cuid as preceding insn.  */
        INSN_CUID (insn) = i;
    }

  ggc_push_context ();

  /* Loop over basic blocks.
     Compute the maximum number of qty's needed for each basic block
     (which is 2 for each SET).  */
  insn = f;
  while (insn)
    {
      cse_altered = 0;
      cse_end_of_basic_block (insn, &val, flag_cse_follow_jumps, after_loop,
                              flag_cse_skip_blocks);

      /* If this basic block was already processed or has no sets, skip it.  */
      if (val.nsets == 0 || GET_MODE (insn) == QImode)
        {
          PUT_MODE (insn, VOIDmode);
          insn = (val.last ? NEXT_INSN (val.last) : 0);
          val.path_size = 0;
          continue;
        }

      cse_basic_block_start = val.low_cuid;
      cse_basic_block_end = val.high_cuid;
      max_qty = val.nsets * 2;

      if (file)
        fnotice (file, ";; Processing block from %d to %d, %d sets.\n",
                 INSN_UID (insn), val.last ? INSN_UID (val.last) : 0,
                 val.nsets);

      /* Make MAX_QTY bigger to give us room to optimize
         past the end of this basic block, if that should prove useful.  */
      if (max_qty < 500)
        max_qty = 500;

      max_qty += max_reg;

      /* If this basic block is being extended by following certain jumps,
         (see `cse_end_of_basic_block'), we reprocess the code from the start.
         Otherwise, we start after this basic block.  */
      if (val.path_size > 0)
        cse_basic_block (insn, val.last, val.path, 0);
      else
        {
          int old_cse_jumps_altered = cse_jumps_altered;
          rtx temp;

          /* When cse changes a conditional jump to an unconditional
             jump, we want to reprocess the block, since it will give
             us a new branch path to investigate.  */
          cse_jumps_altered = 0;
          temp = cse_basic_block (insn, val.last, val.path, ! after_loop);
          if (cse_jumps_altered == 0
              || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
            insn = temp;

          cse_jumps_altered |= old_cse_jumps_altered;
        }

      if (cse_altered)
        ggc_collect ();

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }

  ggc_pop_context ();

  if (max_elements_made < n_elements_made)
    max_elements_made = n_elements_made;

  /* Clean up.  */
  end_alias_analysis ();
  free (uid_cuid);
  free (reg_eqv_table);

  return cse_jumps_altered || recorded_label_ref;
}
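
/* An illustrative aside, not part of the pass: the uid-to-cuid loop
   above in isolation.  Cuids advance only on real insns; a line-number
   note inherits the cuid of the preceding insn, so cuid distances are
   the same with and without -g.  Hypothetical toy names throughout:  */
#if 0
/* N_INSNS entries; IS_LINE_NOTE[I] is nonzero when insn I is a
   line-number note.  On return CUIDS[I] is the cuid of insn I.  */
static void
toy_compute_cuids (n_insns, is_line_note, cuids)
     int n_insns;
     const char *is_line_note;
     int *cuids;
{
  int i, cuid = 0;

  for (i = 0; i < n_insns; i++)
    cuids[i] = is_line_note[i] ? cuid : ++cuid;
}
#endif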

/* Process a single basic block.  FROM and TO are the limits of the basic
   block.  NEXT_BRANCH points to the branch path when following jumps or
   a null path when not following jumps.

   AROUND_LOOP is non-zero if we are to try to cse around to the start of a
   loop.  This is true when we are being called for the last time on a
   block and this CSE pass is before loop.c.  */

static rtx
cse_basic_block (from, to, next_branch, around_loop)
     rtx from, to;
     struct branch_path *next_branch;
     int around_loop;
{
  rtx insn;
  int to_usage = 0;
  rtx libcall_insn = NULL_RTX;
  int num_insns = 0;

  /* This array is undefined before max_reg, so only allocate
     the space actually needed and adjust the start.  */

  qty_table
    = (struct qty_table_elem *) xmalloc ((max_qty - max_reg)
                                         * sizeof (struct qty_table_elem));
  qty_table -= max_reg;

  new_basic_block ();

  /* TO might be a label.  If so, protect it from being deleted.  */
  if (to != 0 && GET_CODE (to) == CODE_LABEL)
    ++LABEL_NUSES (to);

  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      enum rtx_code code = GET_CODE (insn);

      /* If we have processed 1,000 insns, flush the hash table to
         avoid extreme quadratic behavior.  We must not include NOTEs
         in the count since there may be more of them when generating
         debugging information.  If we clear the table at different
         times, code generated with -g -O might be different than code
         generated with -O but not -g.

         ??? This is a real kludge and needs to be done some other way.
         Perhaps for 2.9.  */
      if (code != NOTE && num_insns++ > 1000)
        {
          flush_hash_table ();
          num_insns = 0;
        }

      /* See if this is a branch that is part of the path.  If so, and it is
         to be taken, do so.  */
      if (next_branch->branch == insn)
        {
          enum taken status = next_branch++->status;
          if (status != NOT_TAKEN)
            {
              if (status == TAKEN)
                record_jump_equiv (insn, 1);
              else
                invalidate_skipped_block (NEXT_INSN (insn));

              /* Set the last insn as the jump insn; it doesn't affect cc0.
                 Then follow this branch.  */
#ifdef HAVE_cc0
              prev_insn_cc0 = 0;
#endif
              prev_insn = insn;
              insn = JUMP_LABEL (insn);
              continue;
            }
        }

      if (GET_MODE (insn) == QImode)
        PUT_MODE (insn, VOIDmode);

      if (GET_RTX_CLASS (code) == 'i')
        {
          rtx p;

          /* Process notes first so we have all notes in canonical forms when
             looking for duplicate operations.  */

          if (REG_NOTES (insn))
            REG_NOTES (insn) = cse_process_notes (REG_NOTES (insn), NULL_RTX);

          /* Track when we are inside a LIBCALL block.  Inside such a block,
             we do not want to record destinations.  The last insn of a
             LIBCALL block is not considered to be part of the block, since
             its destination is the result of the block and hence should be
             recorded.  */

          if (REG_NOTES (insn) != 0)
            {
              if ((p = find_reg_note (insn, REG_LIBCALL, NULL_RTX)))
                libcall_insn = XEXP (p, 0);
              else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
                libcall_insn = 0;
            }

          cse_insn (insn, libcall_insn);

          /* If we haven't already found an insn where we added a LABEL_REF,
             check this one.  */
          if (GET_CODE (insn) == INSN && ! recorded_label_ref
              && for_each_rtx (&PATTERN (insn), check_for_label_ref,
                               (void *) insn))
            recorded_label_ref = 1;
        }

      /* If INSN is now an unconditional jump, skip to the end of our
         basic block by pretending that we just did the last insn in the
         basic block.  If we are jumping to the end of our block, show
         that we can have one usage of TO.  */

      if (any_uncondjump_p (insn))
        {
          if (to == 0)
            {
              free (qty_table + max_reg);
              return 0;
            }

          if (JUMP_LABEL (insn) == to)
            to_usage = 1;

          /* Maybe TO was deleted because the jump is unconditional.
             If so, there is nothing left in this basic block.  */
          /* ??? Perhaps it would be smarter to set TO
             to whatever follows this insn,
             and pretend the basic block had always ended here.  */
          if (INSN_DELETED_P (to))
            break;

          insn = PREV_INSN (to);
        }

      /* See if it is ok to keep on going past the label
         which used to end our basic block.  Remember that we incremented
         the count of that label, so we decrement it here.  If we made
         a jump unconditional, TO_USAGE will be one; in that case, we don't
         want to count the use in that jump.  */

      if (to != 0 && NEXT_INSN (insn) == to
          && GET_CODE (to) == CODE_LABEL && --LABEL_NUSES (to) == to_usage)
        {
          struct cse_basic_block_data val;
          rtx prev;

          insn = NEXT_INSN (to);

          /* If TO was the last insn in the function, we are done.  */
          if (insn == 0)
            {
              free (qty_table + max_reg);
              return 0;
            }

          /* If TO was preceded by a BARRIER we are done with this block
             because it has no continuation.  */
          prev = prev_nonnote_insn (to);
          if (prev && GET_CODE (prev) == BARRIER)
            {
              free (qty_table + max_reg);
              return insn;
            }

          /* Find the end of the following block.  Note that we won't be
             following branches in this case.  */
          to_usage = 0;
          val.path_size = 0;
          cse_end_of_basic_block (insn, &val, 0, 0, 0);

          /* If the tables we allocated have enough space left
             to handle all the SETs in the next basic block,
             continue through it.  Otherwise, return,
             and that block will be scanned individually.  */
          if (val.nsets * 2 + next_qty > max_qty)
            break;

          cse_basic_block_start = val.low_cuid;
          cse_basic_block_end = val.high_cuid;
          to = val.last;

          /* Prevent TO from being deleted if it is a label.  */
          if (to != 0 && GET_CODE (to) == CODE_LABEL)
            ++LABEL_NUSES (to);

          /* Back up so we process the first insn in the extension.  */
          insn = PREV_INSN (insn);
        }
    }

  if (next_qty > max_qty)
    abort ();

  /* If we are running before loop.c, we stopped on a NOTE_INSN_LOOP_END, and
     the previous insn is the only insn that branches to the head of a loop,
     we can cse into the loop.  Don't do this if we changed the jump
     structure of a loop unless we aren't going to be following jumps.  */

  insn = prev_nonnote_insn (to);
  if ((cse_jumps_altered == 0
       || (flag_cse_follow_jumps == 0 && flag_cse_skip_blocks == 0))
      && around_loop && to != 0
      && GET_CODE (to) == NOTE && NOTE_LINE_NUMBER (to) == NOTE_INSN_LOOP_END
      && GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn) != 0
      && LABEL_NUSES (JUMP_LABEL (insn)) == 1)
    cse_around_loop (JUMP_LABEL (insn));

  free (qty_table + max_reg);

  return to ? NEXT_INSN (to) : 0;
}

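
/* An illustrative aside, not part of the pass: the qty_table allocation
   in cse_basic_block uses a biased-pointer idiom so the table can be
   indexed directly by quantity number even though quantities below
   max_reg have no storage.  The same idiom in isolation, with
   hypothetical names (the bias must be undone before freeing, exactly
   as the `free (qty_table + max_reg)' calls above do):  */
#if 0
/* Return a pointer P such that P[q] is valid for LO <= q < HI.
   The caller must free (P + LO), not P.  */
static int *
toy_alloc_biased (lo, hi)
     int lo, hi;
{
  int *table = (int *) xmalloc ((hi - lo) * sizeof (int));
  return table - lo;
}
#endif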

/* Called via for_each_rtx to see if an insn is using a LABEL_REF for which
   there isn't a REG_LABEL note.  Return one if so.  DATA is the insn.  */

static int
check_for_label_ref (rtl, data)
     rtx *rtl;
     void *data;
{
  rtx insn = (rtx) data;

  /* If this insn uses a LABEL_REF and there isn't a REG_LABEL note for it,
     we must rerun jump since it needs to place the note.  If this is a
     LABEL_REF for a CODE_LABEL that isn't in the insn chain, don't do this
     since no REG_LABEL will be added.  */
  return (GET_CODE (*rtl) == LABEL_REF
          && ! LABEL_REF_NONLOCAL_P (*rtl)
          && LABEL_P (XEXP (*rtl, 0))
          && INSN_UID (XEXP (*rtl, 0)) != 0
          && ! find_reg_note (insn, REG_LABEL, XEXP (*rtl, 0)));
}

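
/* An illustrative aside, not part of the pass: check_for_label_ref is
   written against the for_each_rtx protocol -- the callback is applied
   to each subexpression in turn, and a nonzero return value stops the
   walk and becomes the value of the traversal.  A stand-alone model of
   that protocol over a hypothetical toy tree (this is not the real rtl
   walker, which has additional conventions):  */
#if 0
struct toy_node { int n_kids; struct toy_node **kids; };

static int
toy_for_each (node, f, data)
     struct toy_node *node;
     int (*f) (struct toy_node *, void *);
     void *data;
{
  int i, r = f (node, data);

  if (r != 0)
    return r;                   /* Callback asked us to stop.  */
  for (i = 0; i < node->n_kids; i++)
    if ((r = toy_for_each (node->kids[i], f, data)) != 0)
      return r;
  return 0;
}
#endif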

/* Count the number of times registers are used (not set) in X.
   COUNTS is an array in which we accumulate the count, INCR is how much
   we count each register usage.

   Don't count a usage of DEST, which is the SET_DEST of a SET which
   contains X in its SET_SRC.  This is because such a SET does not
   modify the liveness of DEST.  */

static void
count_reg_usage (x, counts, dest, incr)
     rtx x;
     int *counts;
     rtx dest;
     int incr;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (x == 0)
    return;

  switch (code = GET_CODE (x))
    {
    case REG:
      if (x != dest)
        counts[REGNO (x)] += incr;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        count_reg_usage (XEXP (XEXP (x, 0), 0), counts, NULL_RTX, incr);
      return;

    case SET:
      /* Unless we are setting a REG, count everything in SET_DEST.  */
      if (GET_CODE (SET_DEST (x)) != REG)
        count_reg_usage (SET_DEST (x), counts, NULL_RTX, incr);

      /* If SRC has side-effects, then we can't delete this insn, so the
         usage of SET_DEST inside SRC counts.

         ??? Strictly-speaking, we might be preserving this insn
         because some other SET has side-effects, but that's hard
         to do and can't happen now.  */
      count_reg_usage (SET_SRC (x), counts,
                       side_effects_p (SET_SRC (x)) ? NULL_RTX : SET_DEST (x),
                       incr);
      return;

    case CALL_INSN:
      count_reg_usage (CALL_INSN_FUNCTION_USAGE (x), counts, NULL_RTX, incr);
      /* Fall through.  */

    case INSN:
    case JUMP_INSN:
      count_reg_usage (PATTERN (x), counts, NULL_RTX, incr);

      /* Things used in a REG_EQUAL note aren't dead since loop may try to
         use them.  */

      count_reg_usage (REG_NOTES (x), counts, NULL_RTX, incr);
      return;

    case EXPR_LIST:
    case INSN_LIST:
      if (REG_NOTE_KIND (x) == REG_EQUAL
          || (REG_NOTE_KIND (x) != REG_NONNEG
              && GET_CODE (XEXP (x, 0)) == USE))
        count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
      count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        count_reg_usage (XEXP (x, i), counts, dest, incr);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count_reg_usage (XVECEXP (x, i, j), counts, dest, incr);
    }
}

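
/* An illustrative aside, not part of the pass: the fallthrough at the
   bottom of count_reg_usage is the standard format-driven rtl walk.
   GET_RTX_FORMAT yields one character per operand -- 'e' for a single
   subexpression, 'E' for a vector of them -- and the walker recurses
   accordingly.  A stand-alone model over hypothetical toy types:  */
#if 0
struct toy_vec { int len; struct toy_expr **elem; };
struct toy_expr
{
  const char *fmt;              /* E.g. "ee" or "E"; one char per operand.  */
  struct toy_expr **ops;        /* Operand I, when fmt[I] == 'e'.  */
  struct toy_vec **vecs;        /* Operand I, when fmt[I] == 'E'.  */
};

static void
toy_walk (x, visit)
     struct toy_expr *x;
     void (*visit) (struct toy_expr *);
{
  int i, j;

  visit (x);
  for (i = 0; x->fmt[i]; i++)
    {
      if (x->fmt[i] == 'e')
        toy_walk (x->ops[i], visit);
      else if (x->fmt[i] == 'E')
        for (j = 0; j < x->vecs[i]->len; j++)
          toy_walk (x->vecs[i]->elem[j], visit);
    }
}
#endif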

/* Return true if SET is live.  */

static bool
set_live_p (set, insn, counts)
     rtx set;
     rtx insn ATTRIBUTE_UNUSED; /* Only used with HAVE_cc0.  */
     int *counts;
{
#ifdef HAVE_cc0
  rtx tem;
#endif

  if (set_noop_p (set))
    ;

#ifdef HAVE_cc0
  else if (GET_CODE (SET_DEST (set)) == CC0
           && !side_effects_p (SET_SRC (set))
           && ((tem = next_nonnote_insn (insn)) == 0
               || !INSN_P (tem)
               || !reg_referenced_p (cc0_rtx, PATTERN (tem))))
    return false;
#endif
  else if (GET_CODE (SET_DEST (set)) != REG
           || REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
           || counts[REGNO (SET_DEST (set))] != 0
           || side_effects_p (SET_SRC (set))
           /* An ADDRESSOF expression can turn into a use of the
              internal arg pointer, so always consider the
              internal arg pointer live.  If it is truly dead,
              flow will delete the initializing insn.  */
           || (SET_DEST (set) == current_function_internal_arg_pointer))
    return true;
  return false;
}
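
/* An illustrative aside, not part of the pass: set_live_p boiled down
   to its decision table, ignoring the cc0 and internal-arg-pointer
   special cases, with hypothetical boolean inputs standing in for the
   rtl predicates:  */
#if 0
static int
toy_set_live_p (is_noop, dest_is_pseudo_reg, dest_use_count,
                src_has_side_effects)
     int is_noop, dest_is_pseudo_reg, dest_use_count, src_has_side_effects;
{
  if (is_noop)
    return 0;                    /* E.g. a register copied to itself.  */
  return (! dest_is_pseudo_reg   /* Hard regs and non-regs are kept.  */
          || dest_use_count != 0 /* The stored value is read somewhere.  */
          || src_has_side_effects);
}
#endif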

/* Return true if INSN is live.  */

static bool
insn_live_p (insn, counts)
     rtx insn;
     int *counts;
{
  int i;

  if (GET_CODE (PATTERN (insn)) == SET)
    return set_live_p (PATTERN (insn), insn, counts);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          if (GET_CODE (elt) == SET)
            {
              if (set_live_p (elt, insn, counts))
                return true;
            }
          else if (GET_CODE (elt) != CLOBBER && GET_CODE (elt) != USE)
            return true;
        }
      return false;
    }
  else
    return true;
}

/* Return true if the libcall is dead as a whole.  */

static bool
dead_libcall_p (insn)
     rtx insn;
{
  rtx note;

  /* See if there's a REG_EQUAL note on this insn and try to
     replace the source with the REG_EQUAL expression.

     We assume that insns with REG_RETVALs can only be reg->reg
     copies at this point.  */
  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (note)
    {
      rtx set = single_set (insn);
      rtx new = simplify_rtx (XEXP (note, 0));

      if (!new)
        new = XEXP (note, 0);

      if (set && validate_change (insn, &SET_SRC (set), new, 0))
        {
          remove_note (insn, find_reg_note (insn, REG_RETVAL, NULL_RTX));
          return true;
        }
    }
  return false;
}
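
/* An illustrative aside, not part of the pass: REG_LIBCALL and
   REG_RETVAL notes bracket a libcall block, and the backward scan in
   delete_trivially_dead_insns below treats the block as a unit -- the
   REG_RETVAL insn, seen first when scanning backwards, decides the fate
   of the whole block.  A stand-alone model of that state machine, with
   hypothetical flags standing in for the find_reg_note tests:  */
#if 0
static int toy_in_libcall, toy_block_is_dead;

/* Return nonzero if the current insn must be kept.  Call once per
   insn, scanning backwards.  */
static int
toy_libcall_live_p (has_retval_note, has_libcall_note,
                    whole_block_dead, plain_live)
     int has_retval_note, has_libcall_note, whole_block_dead, plain_live;
{
  int live;

  if (has_retval_note)
    {
      toy_in_libcall = 1;
      toy_block_is_dead = whole_block_dead;
      live = 1;                 /* The result copy itself is kept.  */
    }
  else if (toy_in_libcall)
    live = ! toy_block_is_dead;
  else
    live = plain_live;

  if (has_libcall_note)
    toy_in_libcall = 0;         /* First insn of the block: block ends.  */
  return live;
}
#endif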

/* Scan all the insns and delete any that are dead; i.e., they store a register
   that is never used or they copy a register to itself.

   This is used to remove insns made obviously dead by cse, loop or other
   optimizations.  It improves the heuristics in loop since it won't try to
   move dead invariants out of loops or make givs for dead quantities.  The
   remaining passes of the compilation are also sped up.  */

void
delete_trivially_dead_insns (insns, nreg, preserve_basic_blocks)
     rtx insns;
     int nreg;
     int preserve_basic_blocks;
{
  int *counts;
  rtx insn, prev;
  int i;
  int in_libcall = 0, dead_libcall = 0;
  basic_block bb;

  /* First count the number of times each register is used.  */
  counts = (int *) xcalloc (nreg, sizeof (int));
  for (insn = next_real_insn (insns); insn; insn = next_real_insn (insn))
    count_reg_usage (insn, counts, NULL_RTX, 1);

  /* Go from the last insn to the first and delete insns that only set unused
     registers or copy a register to itself.  As we delete an insn, remove
     usage counts for registers it uses.

     The first jump optimization pass may leave a real insn as the last
     insn in the function.  We must not skip that insn or we may end
     up deleting code that is not really dead.  */
  insn = get_last_insn ();
  if (! INSN_P (insn))
    insn = prev_real_insn (insn);

  if (!preserve_basic_blocks)
    for (; insn; insn = prev)
      {
        int live_insn = 0;

        prev = prev_real_insn (insn);

        /* Don't delete any insns that are part of a libcall block unless
           we can delete the whole libcall block.

           Flow or loop might get confused if we did that.  Remember
           that we are scanning backwards.  */
        if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
          {
            in_libcall = 1;
            live_insn = 1;
            dead_libcall = dead_libcall_p (insn);
          }
        else if (in_libcall)
          live_insn = ! dead_libcall;
        else
          live_insn = insn_live_p (insn, counts);

        /* If this is a dead insn, delete it and show registers in it aren't
           being used.  */

        if (! live_insn)
          {
            count_reg_usage (insn, counts, NULL_RTX, -1);
            delete_related_insns (insn);
          }

        if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
          {
            in_libcall = 0;
            dead_libcall = 0;
          }
      }
  else
    for (i = 0; i < n_basic_blocks; i++)
      for (bb = BASIC_BLOCK (i), insn = bb->end; insn != bb->head; insn = prev)
        {
          int live_insn = 0;

          prev = PREV_INSN (insn);
          if (!INSN_P (insn))
            continue;

          /* Don't delete any insns that are part of a libcall block unless
             we can delete the whole libcall block.

             Flow or loop might get confused if we did that.  Remember
             that we are scanning backwards.  */
          if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
            {
              in_libcall = 1;
              live_insn = 1;
              dead_libcall = dead_libcall_p (insn);
            }
          else if (in_libcall)
            live_insn = ! dead_libcall;
          else
            live_insn = insn_live_p (insn, counts);

          /* If this is a dead insn, delete it and show registers in it aren't
             being used.  */

          if (! live_insn)
            {
              count_reg_usage (insn, counts, NULL_RTX, -1);
              delete_insn (insn);
            }

          if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
            {
              in_libcall = 0;
              dead_libcall = 0;
            }
        }

  /* Clean up.  */
  free (counts);
}
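
/* An illustrative aside, not part of the pass: the whole pass reduced
   to its core algorithm -- count the uses of every register, then sweep
   backwards deleting stores whose result is unused and decrementing the
   counts of their operands, which can expose further dead stores
   earlier in the stream.  A stand-alone model over a hypothetical toy
   IR of simple "dest = src" copies, with no hard regs, side effects or
   libcall blocks:  */
#if 0
struct toy_copy { int dest, src, deleted; };

/* COUNTS must have one zero-initialized entry per register.  */
static void
toy_delete_dead_copies (insns, n, counts)
     struct toy_copy *insns;
     int n;
     int *counts;
{
  int i;

  /* Pass 1: count uses.  Mirroring count_reg_usage, a SET's own
     destination is not counted as a use inside its source.  */
  for (i = 0; i < n; i++)
    if (insns[i].src != insns[i].dest)
      counts[insns[i].src]++;

  /* Pass 2: backward sweep.  Deleting an insn removes its use of SRC,
     which may make an earlier insn setting SRC dead as well.  */
  for (i = n - 1; i >= 0; i--)
    if (counts[insns[i].dest] == 0         /* Result never used.  */
        || insns[i].dest == insns[i].src)  /* No-op copy.  */
      {
        insns[i].deleted = 1;
        if (insns[i].src != insns[i].dest)
          counts[insns[i].src]--;
      }
}
#endif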