1/* Expand builtin functions.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "machmode.h"
27#include "real.h"
28#include "rtl.h"
29#include "tree.h"
30#include "tree-gimple.h"
31#include "flags.h"
32#include "regs.h"
33#include "hard-reg-set.h"
34#include "except.h"
35#include "function.h"
36#include "insn-config.h"
37#include "expr.h"
38#include "optabs.h"
39#include "libfuncs.h"
40#include "recog.h"
41#include "output.h"
42#include "typeclass.h"
43#include "toplev.h"
44#include "predict.h"
45#include "tm_p.h"
46#include "target.h"
47#include "langhooks.h"
48#include "basic-block.h"
49#include "tree-mudflap.h"
50
51#ifndef PAD_VARARGS_DOWN
52#define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
53#endif
54
55/* Define the names of the builtin function types and codes.  */
56const char *const built_in_class_names[4]
57  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
58
59#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
60const char * built_in_names[(int) END_BUILTINS] =
61{
62#include "builtins.def"
63};
64#undef DEF_BUILTIN
65
66/* Setup an array of _DECL trees, make sure each element is
67   initialized to NULL_TREE.  */
68tree built_in_decls[(int) END_BUILTINS];
69/* Declarations used when constructing the builtin implicitly in the compiler.
70   It may be NULL_TREE when this is invalid (for instance runtime is not
71   required to implement the function call in all cases).  */
72tree implicit_built_in_decls[(int) END_BUILTINS];
73
74static int get_pointer_alignment (tree, unsigned int);
75static const char *c_getstr (tree);
76static rtx c_readstr (const char *, enum machine_mode);
77static int target_char_cast (tree, char *);
78static rtx get_memory_rtx (tree, tree);
79static int apply_args_size (void);
80static int apply_result_size (void);
81#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
82static rtx result_vector (int, rtx);
83#endif
84static void expand_builtin_update_setjmp_buf (rtx);
85static void expand_builtin_prefetch (tree);
86static rtx expand_builtin_apply_args (void);
87static rtx expand_builtin_apply_args_1 (void);
88static rtx expand_builtin_apply (rtx, rtx, rtx);
89static void expand_builtin_return (rtx);
90static enum type_class type_to_class (tree);
91static rtx expand_builtin_classify_type (tree);
92static void expand_errno_check (tree, rtx);
93static rtx expand_builtin_mathfn (tree, rtx, rtx);
94static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
95static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
96static rtx expand_builtin_sincos (tree);
97static rtx expand_builtin_int_roundingfn (tree, rtx, rtx);
98static rtx expand_builtin_args_info (tree);
99static rtx expand_builtin_next_arg (void);
100static rtx expand_builtin_va_start (tree);
101static rtx expand_builtin_va_end (tree);
102static rtx expand_builtin_va_copy (tree);
103static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode);
104static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode);
105static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
106static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
107static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode);
108static rtx expand_builtin_strncat (tree, rtx, enum machine_mode);
109static rtx expand_builtin_strspn (tree, rtx, enum machine_mode);
110static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode);
111static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode);
112static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int);
113static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree);
114static rtx expand_builtin_bcopy (tree);
115static rtx expand_builtin_strcpy (tree, tree, rtx, enum machine_mode);
116static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
117static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
118static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode);
119static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode);
120static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
121static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree);
122static rtx expand_builtin_bzero (tree);
123static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
124static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode);
125static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode);
126static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode);
127static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode);
128static rtx expand_builtin_alloca (tree, rtx);
129static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
130static rtx expand_builtin_frame_address (tree, tree);
131static rtx expand_builtin_fputs (tree, rtx, bool);
132static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool);
133static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool);
134static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode);
135static tree stabilize_va_list (tree, int);
136static rtx expand_builtin_expect (tree, rtx);
137static tree fold_builtin_constant_p (tree);
138static tree fold_builtin_classify_type (tree);
139static tree fold_builtin_strlen (tree);
140static tree fold_builtin_inf (tree, int);
141static tree fold_builtin_nan (tree, tree, int);
142static int validate_arglist (tree, ...);
143static bool integer_valued_real_p (tree);
144static tree fold_trunc_transparent_mathfn (tree, tree);
145static bool readonly_data_expr (tree);
146static rtx expand_builtin_fabs (tree, rtx, rtx);
147static rtx expand_builtin_signbit (tree, rtx);
148static tree fold_builtin_sqrt (tree, tree);
149static tree fold_builtin_cbrt (tree, tree);
150static tree fold_builtin_pow (tree, tree, tree);
151static tree fold_builtin_powi (tree, tree, tree);
152static tree fold_builtin_sin (tree);
153static tree fold_builtin_cos (tree, tree, tree);
154static tree fold_builtin_tan (tree);
155static tree fold_builtin_atan (tree, tree);
156static tree fold_builtin_trunc (tree, tree);
157static tree fold_builtin_floor (tree, tree);
158static tree fold_builtin_ceil (tree, tree);
159static tree fold_builtin_round (tree, tree);
160static tree fold_builtin_int_roundingfn (tree, tree);
161static tree fold_builtin_bitop (tree, tree);
162static tree fold_builtin_memory_op (tree, tree, bool, int);
163static tree fold_builtin_strchr (tree, tree);
164static tree fold_builtin_memcmp (tree);
165static tree fold_builtin_strcmp (tree);
166static tree fold_builtin_strncmp (tree);
167static tree fold_builtin_signbit (tree, tree);
168static tree fold_builtin_copysign (tree, tree, tree);
169static tree fold_builtin_isascii (tree);
170static tree fold_builtin_toascii (tree);
171static tree fold_builtin_isdigit (tree);
172static tree fold_builtin_fabs (tree, tree);
173static tree fold_builtin_abs (tree, tree);
174static tree fold_builtin_unordered_cmp (tree, tree, enum tree_code,
175					enum tree_code);
176static tree fold_builtin_1 (tree, tree, bool);
177
178static tree fold_builtin_strpbrk (tree, tree);
179static tree fold_builtin_strstr (tree, tree);
180static tree fold_builtin_strrchr (tree, tree);
181static tree fold_builtin_strcat (tree);
182static tree fold_builtin_strncat (tree);
183static tree fold_builtin_strspn (tree);
184static tree fold_builtin_strcspn (tree);
185static tree fold_builtin_sprintf (tree, int);
186
187static rtx expand_builtin_object_size (tree);
188static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
189				      enum built_in_function);
190static void maybe_emit_chk_warning (tree, enum built_in_function);
191static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
192static tree fold_builtin_object_size (tree);
193static tree fold_builtin_strcat_chk (tree, tree);
194static tree fold_builtin_strncat_chk (tree, tree);
195static tree fold_builtin_sprintf_chk (tree, enum built_in_function);
196static tree fold_builtin_printf (tree, tree, bool, enum built_in_function);
197static tree fold_builtin_fprintf (tree, tree, bool, enum built_in_function);
198static bool init_target_chars (void);
199
200static unsigned HOST_WIDE_INT target_newline;
201static unsigned HOST_WIDE_INT target_percent;
202static unsigned HOST_WIDE_INT target_c;
203static unsigned HOST_WIDE_INT target_s;
204static char target_percent_c[3];
205static char target_percent_s[3];
206static char target_percent_s_newline[4];
207
208/* Return true if NODE should be considered for inline expansion regardless
209   of the optimization level.  This means whenever a function is invoked with
210   its "internal" name, which normally contains the prefix "__builtin".  */
211
212static bool called_as_built_in (tree node)
213{
214  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
215  if (strncmp (name, "__builtin_", 10) == 0)
216    return true;
217  if (strncmp (name, "__sync_", 7) == 0)
218    return true;
219  return false;
220}
221
222/* Return the alignment in bits of EXP, a pointer valued expression.
223   But don't return more than MAX_ALIGN no matter what.
224   The alignment returned is, by default, the alignment of the thing that
225   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.
226
227   Otherwise, look at the expression to see if we can do better, i.e., if the
228   expression is actually pointing at an object whose alignment is tighter.  */
229
230static int
231get_pointer_alignment (tree exp, unsigned int max_align)
232{
233  unsigned int align, inner;
234
235  /* We rely on TER to compute accurate alignment information.  */
236  if (!(optimize && flag_tree_ter))
237    return 0;
238
239  if (!POINTER_TYPE_P (TREE_TYPE (exp)))
240    return 0;
241
242  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
243  align = MIN (align, max_align);
244
245  while (1)
246    {
247      switch (TREE_CODE (exp))
248	{
249	case NOP_EXPR:
250	case CONVERT_EXPR:
251	case NON_LVALUE_EXPR:
252	  exp = TREE_OPERAND (exp, 0);
253	  if (! POINTER_TYPE_P (TREE_TYPE (exp)))
254	    return align;
255
256	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
257	  align = MIN (inner, max_align);
258	  break;
259
260	case PLUS_EXPR:
261	  /* If sum of pointer + int, restrict our maximum alignment to that
262	     imposed by the integer.  If not, we can't do any better than
263	     ALIGN.  */
264	  if (! host_integerp (TREE_OPERAND (exp, 1), 1))
265	    return align;
266
267	  while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
268		  & (max_align / BITS_PER_UNIT - 1))
269		 != 0)
270	    max_align >>= 1;
271
272	  exp = TREE_OPERAND (exp, 0);
273	  break;
274
275	case ADDR_EXPR:
276	  /* See what we are pointing at and look at its alignment.  */
277	  exp = TREE_OPERAND (exp, 0);
278	  inner = max_align;
279	  if (handled_component_p (exp))
280	    {
281	      HOST_WIDE_INT bitsize, bitpos;
282	      tree offset;
283	      enum machine_mode mode;
284	      int unsignedp, volatilep;
285
286	      exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
287					 &mode, &unsignedp, &volatilep, true);
288	      if (bitpos)
289		inner = MIN (inner, (unsigned) (bitpos & -bitpos));
290	      if (offset && TREE_CODE (offset) == PLUS_EXPR
291		  && host_integerp (TREE_OPERAND (offset, 1), 1))
292	        {
293		  /* Any overflow in calculating offset_bits won't change
294		     the alignment.  */
295		  unsigned offset_bits
296		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
297		       * BITS_PER_UNIT);
298
299		  if (offset_bits)
300		    inner = MIN (inner, (offset_bits & -offset_bits));
301		  offset = TREE_OPERAND (offset, 0);
302		}
303	      if (offset && TREE_CODE (offset) == MULT_EXPR
304		  && host_integerp (TREE_OPERAND (offset, 1), 1))
305	        {
306		  /* Any overflow in calculating offset_factor won't change
307		     the alignment.  */
308		  unsigned offset_factor
309		    = ((unsigned) tree_low_cst (TREE_OPERAND (offset, 1), 1)
310		       * BITS_PER_UNIT);
311
312		  if (offset_factor)
313		    inner = MIN (inner, (offset_factor & -offset_factor));
314		}
315	      else if (offset)
316		inner = MIN (inner, BITS_PER_UNIT);
317	    }
318	  if (DECL_P (exp))
319	    align = MIN (inner, DECL_ALIGN (exp));
320#ifdef CONSTANT_ALIGNMENT
321	  else if (CONSTANT_CLASS_P (exp))
322	    align = MIN (inner, (unsigned)CONSTANT_ALIGNMENT (exp, align));
323#endif
324	  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR
325		   || TREE_CODE (exp) == INDIRECT_REF)
326	    align = MIN (TYPE_ALIGN (TREE_TYPE (exp)), inner);
327	  else
328	    align = MIN (align, inner);
329	  return MIN (align, max_align);
330
331	default:
332	  return align;
333	}
334    }
335}
336
337/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
338   way, because it could contain a zero byte in the middle.
339   TREE_STRING_LENGTH is the size of the character array, not the string.
340
341   ONLY_VALUE should be nonzero if the result is not going to be emitted
342   into the instruction stream and zero if it is going to be expanded.
343   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
344   is returned, otherwise NULL, since
345   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
346   evaluate the side-effects.
347
348   The value returned is of type `ssizetype'.
349
350   Unfortunately, string_constant can't access the values of const char
351   arrays with initializers, so neither can we do so here.  */
352
353tree
354c_strlen (tree src, int only_value)
355{
356  tree offset_node;
357  HOST_WIDE_INT offset;
358  int max;
359  const char *ptr;
360
361  STRIP_NOPS (src);
362  if (TREE_CODE (src) == COND_EXPR
363      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
364    {
365      tree len1, len2;
366
367      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
368      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
369      if (tree_int_cst_equal (len1, len2))
370	return len1;
371    }
372
373  if (TREE_CODE (src) == COMPOUND_EXPR
374      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
375    return c_strlen (TREE_OPERAND (src, 1), only_value);
376
377  src = string_constant (src, &offset_node);
378  if (src == 0)
379    return 0;
380
381  max = TREE_STRING_LENGTH (src) - 1;
382  ptr = TREE_STRING_POINTER (src);
383
384  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
385    {
386      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
387	 compute the offset to the following null if we don't know where to
388	 start searching for it.  */
389      int i;
390
391      for (i = 0; i < max; i++)
392	if (ptr[i] == 0)
393	  return 0;
394
395      /* We don't know the starting offset, but we do know that the string
396	 has no internal zero bytes.  We can assume that the offset falls
397	 within the bounds of the string; otherwise, the programmer deserves
398	 what he gets.  Subtract the offset from the length of the string,
399	 and return that.  This would perhaps not be valid if we were dealing
400	 with named arrays in addition to literal string constants.  */
401
402      return size_diffop (size_int (max), offset_node);
403    }
404
405  /* We have a known offset into the string.  Start searching there for
406     a null character if we can represent it as a single HOST_WIDE_INT.  */
407  if (offset_node == 0)
408    offset = 0;
409  else if (! host_integerp (offset_node, 0))
410    offset = -1;
411  else
412    offset = tree_low_cst (offset_node, 0);
413
414  /* If the offset is known to be out of bounds, warn, and call strlen at
415     runtime.  */
416  if (offset < 0 || offset > max)
417    {
418      warning (0, "offset outside bounds of constant string");
419      return 0;
420    }
421
422  /* Use strlen to search for the first zero byte.  Since any strings
423     constructed with build_string will have nulls appended, we win even
424     if we get handed something like (char[4])"abcd".
425
426     Since OFFSET is our starting index into the string, no further
427     calculation is needed.  */
428  return ssize_int (strlen (ptr + offset));
429}
430
431/* Return a char pointer for a C string if it is a string constant
432   or sum of string constant and integer constant.  */
433
434static const char *
435c_getstr (tree src)
436{
437  tree offset_node;
438
439  src = string_constant (src, &offset_node);
440  if (src == 0)
441    return 0;
442
443  if (offset_node == 0)
444    return TREE_STRING_POINTER (src);
445  else if (!host_integerp (offset_node, 1)
446	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
447    return 0;
448
449  return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
450}
451
452/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
453   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
454
455static rtx
456c_readstr (const char *str, enum machine_mode mode)
457{
458  HOST_WIDE_INT c[2];
459  HOST_WIDE_INT ch;
460  unsigned int i, j;
461
462  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
463
464  c[0] = 0;
465  c[1] = 0;
466  ch = 1;
467  for (i = 0; i < GET_MODE_SIZE (mode); i++)
468    {
469      j = i;
470      if (WORDS_BIG_ENDIAN)
471	j = GET_MODE_SIZE (mode) - i - 1;
472      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
473	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
474	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
475      j *= BITS_PER_UNIT;
476      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
477
478      if (ch)
479	ch = (unsigned char) str[i];
480      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
481    }
482  return immed_double_const (c[0], c[1], mode);
483}
484
485/* Cast a target constant CST to target CHAR and if that value fits into
486   host char type, return zero and put that value into variable pointed to by
487   P.  */
488
489static int
490target_char_cast (tree cst, char *p)
491{
492  unsigned HOST_WIDE_INT val, hostval;
493
494  if (!host_integerp (cst, 1)
495      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
496    return 1;
497
498  val = tree_low_cst (cst, 1);
499  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
500    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
501
502  hostval = val;
503  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
504    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
505
506  if (val != hostval)
507    return 1;
508
509  *p = hostval;
510  return 0;
511}
512
513/* Similar to save_expr, but assumes that arbitrary code is not executed
514   in between the multiple evaluations.  In particular, we assume that a
515   non-addressable local variable will not be modified.  */
516
517static tree
518builtin_save_expr (tree exp)
519{
520  if (TREE_ADDRESSABLE (exp) == 0
521      && (TREE_CODE (exp) == PARM_DECL
522	  || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp))))
523    return exp;
524
525  return save_expr (exp);
526}
527
528/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
529   times to get the address of either a higher stack frame, or a return
530   address located within it (depending on FNDECL_CODE).  */
531
532static rtx
533expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
534{
535  int i;
536
537#ifdef INITIAL_FRAME_ADDRESS_RTX
538  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
539#else
540  rtx tem;
541
542  /* For a zero count with __builtin_return_address, we don't care what
543     frame address we return, because target-specific definitions will
544     override us.  Therefore frame pointer elimination is OK, and using
545     the soft frame pointer is OK.
546
547     For a non-zero count, or a zero count with __builtin_frame_address,
548     we require a stable offset from the current frame pointer to the
549     previous one, so we must use the hard frame pointer, and
550     we must disable frame pointer elimination.  */
551  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
552    tem = frame_pointer_rtx;
553  else
554    {
555      tem = hard_frame_pointer_rtx;
556
557      /* Tell reload not to eliminate the frame pointer.  */
558      current_function_accesses_prior_frames = 1;
559    }
560#endif
561
562  /* Some machines need special handling before we can access
563     arbitrary frames.  For example, on the SPARC, we must first flush
564     all register windows to the stack.  */
565#ifdef SETUP_FRAME_ADDRESSES
566  if (count > 0)
567    SETUP_FRAME_ADDRESSES ();
568#endif
569
570  /* On the SPARC, the return address is not in the frame, it is in a
571     register.  There is no way to access it off of the current frame
572     pointer, but it can be accessed off the previous frame pointer by
573     reading the value from the register window save area.  */
574#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
575  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
576    count--;
577#endif
578
579  /* Scan back COUNT frames to the specified frame.  */
580  for (i = 0; i < count; i++)
581    {
582      /* Assume the dynamic chain pointer is in the word that the
583	 frame address points to, unless otherwise specified.  */
584#ifdef DYNAMIC_CHAIN_ADDRESS
585      tem = DYNAMIC_CHAIN_ADDRESS (tem);
586#endif
587      tem = memory_address (Pmode, tem);
588      tem = gen_frame_mem (Pmode, tem);
589      tem = copy_to_reg (tem);
590    }
591
592  /* For __builtin_frame_address, return what we've got.  But, on
593     the SPARC for example, we may have to add a bias.  */
594  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
595#ifdef FRAME_ADDR_RTX
596    return FRAME_ADDR_RTX (tem);
597#else
598    return tem;
599#endif
600
601  /* For __builtin_return_address, get the return address from that frame.  */
602#ifdef RETURN_ADDR_RTX
603  tem = RETURN_ADDR_RTX (count, tem);
604#else
605  tem = memory_address (Pmode,
606			plus_constant (tem, GET_MODE_SIZE (Pmode)));
607  tem = gen_frame_mem (Pmode, tem);
608#endif
609  return tem;
610}
611
612/* Alias set used for setjmp buffer.  */
613static HOST_WIDE_INT setjmp_alias_set = -1;
614
615/* Construct the leading half of a __builtin_setjmp call.  Control will
616   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
617   exception handling code.  */
618
619void
620expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
621{
622  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
623  rtx stack_save;
624  rtx mem;
625
626  if (setjmp_alias_set == -1)
627    setjmp_alias_set = new_alias_set ();
628
629  buf_addr = convert_memory_address (Pmode, buf_addr);
630
631  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
632
633  /* We store the frame pointer and the address of receiver_label in
634     the buffer and use the rest of it for the stack save area, which
635     is machine-dependent.  */
636
637  mem = gen_rtx_MEM (Pmode, buf_addr);
638  set_mem_alias_set (mem, setjmp_alias_set);
639  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
640
641  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
642  set_mem_alias_set (mem, setjmp_alias_set);
643
644  emit_move_insn (validize_mem (mem),
645		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
646
647  stack_save = gen_rtx_MEM (sa_mode,
648			    plus_constant (buf_addr,
649					   2 * GET_MODE_SIZE (Pmode)));
650  set_mem_alias_set (stack_save, setjmp_alias_set);
651  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
652
653  /* If there is further processing to do, do it.  */
654#ifdef HAVE_builtin_setjmp_setup
655  if (HAVE_builtin_setjmp_setup)
656    emit_insn (gen_builtin_setjmp_setup (buf_addr));
657#endif
658
659  /* Tell optimize_save_area_alloca that extra work is going to
660     need to go on during alloca.  */
661  current_function_calls_setjmp = 1;
662
663  /* Set this so all the registers get saved in our frame; we need to be
664     able to copy the saved values for any registers from frames we unwind.  */
665  current_function_has_nonlocal_label = 1;
666}
667
668/* Construct the trailing part of a __builtin_setjmp call.  This is
669   also called directly by the SJLJ exception handling code.  */
670
671void
672expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
673{
674  /* Clobber the FP when we get here, so we have to make sure it's
675     marked as used by this function.  */
676  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
677
678  /* Mark the static chain as clobbered here so life information
679     doesn't get messed up for it.  */
680  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
681
682  /* Now put in the code to restore the frame pointer, and argument
683     pointer, if needed.  */
684#ifdef HAVE_nonlocal_goto
685  if (! HAVE_nonlocal_goto)
686#endif
687    {
688      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
689      /* This might change the hard frame pointer in ways that aren't
690	 apparent to early optimization passes, so force a clobber.  */
691      emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx));
692    }
693
694#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
695  if (fixed_regs[ARG_POINTER_REGNUM])
696    {
697#ifdef ELIMINABLE_REGS
698      size_t i;
699      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
700
701      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
702	if (elim_regs[i].from == ARG_POINTER_REGNUM
703	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
704	  break;
705
706      if (i == ARRAY_SIZE (elim_regs))
707#endif
708	{
709	  /* Now restore our arg pointer from the address at which it
710	     was saved in our stack frame.  */
711	  emit_move_insn (virtual_incoming_args_rtx,
712			  copy_to_reg (get_arg_pointer_save_area (cfun)));
713	}
714    }
715#endif
716
717#ifdef HAVE_builtin_setjmp_receiver
718  if (HAVE_builtin_setjmp_receiver)
719    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
720  else
721#endif
722#ifdef HAVE_nonlocal_goto_receiver
723    if (HAVE_nonlocal_goto_receiver)
724      emit_insn (gen_nonlocal_goto_receiver ());
725    else
726#endif
727      { /* Nothing */ }
728
729  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
730     insn, but we must not allow the code we just generated to be reordered
731     by scheduling.  Specifically, the update of the frame pointer must
732     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
733     insn.  */
734  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
735}
736
737/* __builtin_longjmp is passed a pointer to an array of five words (not
738   all will be used on all machines).  It operates similarly to the C
739   library function of the same name, but is more efficient.  Much of
740   the code below is copied from the handling of non-local gotos.  */
741
742static void
743expand_builtin_longjmp (rtx buf_addr, rtx value)
744{
745  rtx fp, lab, stack, insn, last;
746  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
747
748  if (setjmp_alias_set == -1)
749    setjmp_alias_set = new_alias_set ();
750
751  buf_addr = convert_memory_address (Pmode, buf_addr);
752
753  buf_addr = force_reg (Pmode, buf_addr);
754
755  /* We used to store value in static_chain_rtx, but that fails if pointers
756     are smaller than integers.  We instead require that the user must pass
757     a second argument of 1, because that is what builtin_setjmp will
758     return.  This also makes EH slightly more efficient, since we are no
759     longer copying around a value that we don't care about.  */
760  gcc_assert (value == const1_rtx);
761
762  last = get_last_insn ();
763#ifdef HAVE_builtin_longjmp
764  if (HAVE_builtin_longjmp)
765    emit_insn (gen_builtin_longjmp (buf_addr));
766  else
767#endif
768    {
769      fp = gen_rtx_MEM (Pmode, buf_addr);
770      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
771					       GET_MODE_SIZE (Pmode)));
772
773      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
774						   2 * GET_MODE_SIZE (Pmode)));
775      set_mem_alias_set (fp, setjmp_alias_set);
776      set_mem_alias_set (lab, setjmp_alias_set);
777      set_mem_alias_set (stack, setjmp_alias_set);
778
779      /* Pick up FP, label, and SP from the block and jump.  This code is
780	 from expand_goto in stmt.c; see there for detailed comments.  */
781#ifdef HAVE_nonlocal_goto
782      if (HAVE_nonlocal_goto)
783	/* We have to pass a value to the nonlocal_goto pattern that will
784	   get copied into the static_chain pointer, but it does not matter
785	   what that value is, because builtin_setjmp does not use it.  */
786	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
787      else
788#endif
789	{
790	  lab = copy_to_reg (lab);
791
792	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
793				      gen_rtx_MEM (BLKmode,
794						   gen_rtx_SCRATCH (VOIDmode))));
795	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
796				      gen_rtx_MEM (BLKmode,
797						   hard_frame_pointer_rtx)));
798
799	  emit_move_insn (hard_frame_pointer_rtx, fp);
800	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
801
802	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
803	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
804	  emit_indirect_jump (lab);
805	}
806    }
807
808  /* Search backwards and mark the jump insn as a non-local goto.
809     Note that this precludes the use of __builtin_longjmp to a
810     __builtin_setjmp target in the same function.  However, we've
811     already cautioned the user that these functions are for
812     internal exception handling use only.  */
813  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
814    {
815      gcc_assert (insn != last);
816
817      if (JUMP_P (insn))
818	{
819	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
820					      REG_NOTES (insn));
821	  break;
822	}
823      else if (CALL_P (insn))
824	break;
825    }
826}
827
828/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
829   and the address of the save area.  */
830
831static rtx
832expand_builtin_nonlocal_goto (tree arglist)
833{
834  tree t_label, t_save_area;
835  rtx r_label, r_save_area, r_fp, r_sp, insn;
836
837  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
838    return NULL_RTX;
839
840  t_label = TREE_VALUE (arglist);
841  arglist = TREE_CHAIN (arglist);
842  t_save_area = TREE_VALUE (arglist);
843
844  r_label = expand_normal (t_label);
845  r_label = convert_memory_address (Pmode, r_label);
846  r_save_area = expand_normal (t_save_area);
847  r_save_area = convert_memory_address (Pmode, r_save_area);
848  r_fp = gen_rtx_MEM (Pmode, r_save_area);
849  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
850		      plus_constant (r_save_area, GET_MODE_SIZE (Pmode)));
851
852  current_function_has_nonlocal_goto = 1;
853
854#ifdef HAVE_nonlocal_goto
855  /* ??? We no longer need to pass the static chain value, afaik.  */
856  if (HAVE_nonlocal_goto)
857    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
858  else
859#endif
860    {
861      r_label = copy_to_reg (r_label);
862
863      emit_insn (gen_rtx_CLOBBER (VOIDmode,
864				  gen_rtx_MEM (BLKmode,
865					       gen_rtx_SCRATCH (VOIDmode))));
866
867      emit_insn (gen_rtx_CLOBBER (VOIDmode,
868				  gen_rtx_MEM (BLKmode,
869					       hard_frame_pointer_rtx)));
870
871      /* Restore frame pointer for containing function.
872	 This sets the actual hard register used for the frame pointer
873	 to the location of the function's incoming static chain info.
874	 The non-local goto handler will then adjust it to contain the
875	 proper value and reload the argument pointer, if needed.  */
876      emit_move_insn (hard_frame_pointer_rtx, r_fp);
877      emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX);
878
879      /* USE of hard_frame_pointer_rtx added for consistency;
880	 not clear if really needed.  */
881      emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
882      emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
883      emit_indirect_jump (r_label);
884    }
885
886  /* Search backwards to the jump insn and mark it as a
887     non-local goto.  */
888  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
889    {
890      if (JUMP_P (insn))
891	{
892	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
893					      const0_rtx, REG_NOTES (insn));
894	  break;
895	}
896      else if (CALL_P (insn))
897	break;
898    }
899
900  return const0_rtx;
901}
902
903/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
904   (not all will be used on all machines) that was passed to __builtin_setjmp.
905   It updates the stack pointer in that block to correspond to the current
906   stack pointer.  */
907
908static void
909expand_builtin_update_setjmp_buf (rtx buf_addr)
910{
911  enum machine_mode sa_mode = Pmode;
912  rtx stack_save;
913
914
915#ifdef HAVE_save_stack_nonlocal
916  if (HAVE_save_stack_nonlocal)
917    sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode;
918#endif
919#ifdef STACK_SAVEAREA_MODE
920  sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
921#endif
922
923  stack_save
924    = gen_rtx_MEM (sa_mode,
925		   memory_address
926		   (sa_mode,
927		    plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode))));
928
929#ifdef HAVE_setjmp
930  if (HAVE_setjmp)
931    emit_insn (gen_setjmp ());
932#endif
933
934  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
935}
936
937/* Expand a call to __builtin_prefetch.  For a target that does not support
938   data prefetch, evaluate the memory address argument in case it has side
939   effects.  */
940
941static void
942expand_builtin_prefetch (tree arglist)
943{
944  tree arg0, arg1, arg2;
945  rtx op0, op1, op2;
946
947  if (!validate_arglist (arglist, POINTER_TYPE, 0))
948    return;
949
950  arg0 = TREE_VALUE (arglist);
951  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
952     zero (read) and argument 2 (locality) defaults to 3 (high degree of
953     locality).  */
954  if (TREE_CHAIN (arglist))
955    {
956      arg1 = TREE_VALUE (TREE_CHAIN (arglist));
957      if (TREE_CHAIN (TREE_CHAIN (arglist)))
958	arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
959      else
960	arg2 = build_int_cst (NULL_TREE, 3);
961    }
962  else
963    {
964      arg1 = integer_zero_node;
965      arg2 = build_int_cst (NULL_TREE, 3);
966    }
967
968  /* Argument 0 is an address.  */
969  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
970
971  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
972  if (TREE_CODE (arg1) != INTEGER_CST)
973    {
974      error ("second argument to %<__builtin_prefetch%> must be a constant");
975      arg1 = integer_zero_node;
976    }
977  op1 = expand_normal (arg1);
978  /* Argument 1 must be either zero or one.  */
979  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
980    {
981      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
982	       " using zero");
983      op1 = const0_rtx;
984    }
985
986  /* Argument 2 (locality) must be a compile-time constant int.  */
987  if (TREE_CODE (arg2) != INTEGER_CST)
988    {
989      error ("third argument to %<__builtin_prefetch%> must be a constant");
990      arg2 = integer_zero_node;
991    }
992  op2 = expand_normal (arg2);
993  /* Argument 2 must be 0, 1, 2, or 3.  */
994  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
995    {
996      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
997      op2 = const0_rtx;
998    }
999
1000#ifdef HAVE_prefetch
1001  if (HAVE_prefetch)
1002    {
1003      if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
1004	     (op0,
1005	      insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
1006	  || (GET_MODE (op0) != Pmode))
1007	{
1008	  op0 = convert_memory_address (Pmode, op0);
1009	  op0 = force_reg (Pmode, op0);
1010	}
1011      emit_insn (gen_prefetch (op0, op1, op2));
1012    }
1013#endif
1014
1015  /* Don't do anything with direct references to volatile memory, but
1016     generate code to handle other side effects.  */
1017  if (!MEM_P (op0) && side_effects_p (op0))
1018    emit_insn (op0);
1019}
1020
1021/* Get a MEM rtx for expression EXP which is the address of an operand
1022   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1023   the maximum length of the block of memory that might be accessed or
1024   NULL if unknown.  */
1025
1026static rtx
1027get_memory_rtx (tree exp, tree len)
1028{
1029  rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1030  rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1031
1032  /* Get an expression we can use to find the attributes to assign to MEM.
1033     If it is an ADDR_EXPR, use the operand.  Otherwise, dereference it if
1034     we can.  First remove any nops.  */
1035  while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
1036	  || TREE_CODE (exp) == NON_LVALUE_EXPR)
1037	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1038    exp = TREE_OPERAND (exp, 0);
1039
1040  if (TREE_CODE (exp) == ADDR_EXPR)
1041    exp = TREE_OPERAND (exp, 0);
1042  else if (POINTER_TYPE_P (TREE_TYPE (exp)))
1043    exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
1044  else
1045    exp = NULL;
1046
1047  /* Honor attributes derived from exp, except for the alias set
1048     (as builtin stringops may alias with anything) and the size
1049     (as stringops may access multiple array elements).  */
1050  if (exp)
1051    {
1052      set_mem_attributes (mem, exp, 0);
1053
1054      /* Allow the string and memory builtins to overflow from one
1055	 field into another, see http://gcc.gnu.org/PR23561.
1056	 Thus avoid COMPONENT_REFs in MEM_EXPR unless we know the whole
1057	 memory accessed by the string or memory builtin will fit
1058	 within the field.  */
1059      if (MEM_EXPR (mem) && TREE_CODE (MEM_EXPR (mem)) == COMPONENT_REF)
1060	{
1061	  tree mem_expr = MEM_EXPR (mem);
1062	  HOST_WIDE_INT offset = -1, length = -1;
1063	  tree inner = exp;
1064
1065	  while (TREE_CODE (inner) == ARRAY_REF
1066		 || TREE_CODE (inner) == NOP_EXPR
1067		 || TREE_CODE (inner) == CONVERT_EXPR
1068		 || TREE_CODE (inner) == NON_LVALUE_EXPR
1069		 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1070		 || TREE_CODE (inner) == SAVE_EXPR)
1071	    inner = TREE_OPERAND (inner, 0);
1072
1073	  gcc_assert (TREE_CODE (inner) == COMPONENT_REF);
1074
1075	  if (MEM_OFFSET (mem)
1076	      && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
1077	    offset = INTVAL (MEM_OFFSET (mem));
1078
1079	  if (offset >= 0 && len && host_integerp (len, 0))
1080	    length = tree_low_cst (len, 0);
1081
1082	  while (TREE_CODE (inner) == COMPONENT_REF)
1083	    {
1084	      tree field = TREE_OPERAND (inner, 1);
1085	      gcc_assert (! DECL_BIT_FIELD (field));
1086	      gcc_assert (TREE_CODE (mem_expr) == COMPONENT_REF);
1087	      gcc_assert (field == TREE_OPERAND (mem_expr, 1));
1088
1089	      if (length >= 0
1090		  && TYPE_SIZE_UNIT (TREE_TYPE (inner))
1091		  && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0))
1092		{
1093		  HOST_WIDE_INT size
1094		    = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (inner)), 0);
1095		  /* If we can prove the memory starting at XEXP (mem, 0)
1096		     and ending at XEXP (mem, 0) + LENGTH will fit into
1097		     this field, we can keep that COMPONENT_REF in MEM_EXPR.  */
1098		  if (offset <= size
1099		      && length <= size
1100		      && offset + length <= size)
1101		    break;
1102		}
1103
1104	      if (offset >= 0
1105		  && host_integerp (DECL_FIELD_OFFSET (field), 0))
1106		offset += tree_low_cst (DECL_FIELD_OFFSET (field), 0)
1107			  + tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1108			    / BITS_PER_UNIT;
1109	      else
1110		{
1111		  offset = -1;
1112		  length = -1;
1113		}
1114
1115	      mem_expr = TREE_OPERAND (mem_expr, 0);
1116	      inner = TREE_OPERAND (inner, 0);
1117	    }
1118
1119	  if (mem_expr == NULL)
1120	    offset = -1;
1121	  if (mem_expr != MEM_EXPR (mem))
1122	    {
1123	      set_mem_expr (mem, mem_expr);
1124	      set_mem_offset (mem, offset >= 0 ? GEN_INT (offset) : NULL_RTX);
1125	    }
1126	}
1127      set_mem_alias_set (mem, 0);
1128      set_mem_size (mem, NULL_RTX);
1129    }
1130
1131  return mem;
1132}
1133
1134/* Built-in functions to perform an untyped call and return.  */
1135
1136/* For each register that may be used for calling a function, this
1137   gives a mode used to copy the register's value.  VOIDmode indicates
1138   the register is not used for calling a function.  If the machine
1139   has register windows, this gives only the outbound registers.
1140   INCOMING_REGNO gives the corresponding inbound register.  */
1141static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
1142
1143/* For each register that may be used for returning values, this gives
1144   a mode used to copy the register's value.  VOIDmode indicates the
1145   register is not used for returning values.  If the machine has
1146   register windows, this gives only the outbound registers.
1147   INCOMING_REGNO gives the corresponding inbound register.  */
1148static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
1149
1150/* For each register that may be used for calling a function, this
1151   gives the offset of that register into the block returned by
1152   __builtin_apply_args.  0 indicates that the register is not
1153   used for calling a function.  */
1154static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
1155
1156/* Return the size required for the block returned by __builtin_apply_args,
1157   and initialize apply_args_mode.  */
1158
1159static int
1160apply_args_size (void)
1161{
1162  static int size = -1;
1163  int align;
1164  unsigned int regno;
1165  enum machine_mode mode;
1166
1167  /* The values computed by this function never change.  */
1168  if (size < 0)
1169    {
1170      /* The first value is the incoming arg-pointer.  */
1171      size = GET_MODE_SIZE (Pmode);
1172
1173      /* The second value is the structure value address unless this is
1174	 passed as an "invisible" first argument.  */
1175      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1176	size += GET_MODE_SIZE (Pmode);
1177
1178      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1179	if (FUNCTION_ARG_REGNO_P (regno))
1180	  {
1181	    mode = reg_raw_mode[regno];
1182
1183	    gcc_assert (mode != VOIDmode);
1184
1185	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1186	    if (size % align != 0)
1187	      size = CEIL (size, align) * align;
1188	    apply_args_reg_offset[regno] = size;
1189	    size += GET_MODE_SIZE (mode);
1190	    apply_args_mode[regno] = mode;
1191	  }
1192	else
1193	  {
1194	    apply_args_mode[regno] = VOIDmode;
1195	    apply_args_reg_offset[regno] = 0;
1196	  }
1197    }
1198  return size;
1199}
1200
1201/* Return the size required for the block returned by __builtin_apply,
1202   and initialize apply_result_mode.  */
1203
1204static int
1205apply_result_size (void)
1206{
1207  static int size = -1;
1208  int align, regno;
1209  enum machine_mode mode;
1210
1211  /* The values computed by this function never change.  */
1212  if (size < 0)
1213    {
1214      size = 0;
1215
1216      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1217	if (FUNCTION_VALUE_REGNO_P (regno))
1218	  {
1219	    mode = reg_raw_mode[regno];
1220
1221	    gcc_assert (mode != VOIDmode);
1222
1223	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1224	    if (size % align != 0)
1225	      size = CEIL (size, align) * align;
1226	    size += GET_MODE_SIZE (mode);
1227	    apply_result_mode[regno] = mode;
1228	  }
1229	else
1230	  apply_result_mode[regno] = VOIDmode;
1231
1232      /* Allow targets that use untyped_call and untyped_return to override
1233	 the size so that machine-specific information can be stored here.  */
1234#ifdef APPLY_RESULT_SIZE
1235      size = APPLY_RESULT_SIZE;
1236#endif
1237    }
1238  return size;
1239}
1240
1241#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1242/* Create a vector describing the result block RESULT.  If SAVEP is true,
1243   the result block is used to save the values; otherwise it is used to
1244   restore the values.  */
1245
1246static rtx
1247result_vector (int savep, rtx result)
1248{
1249  int regno, size, align, nelts;
1250  enum machine_mode mode;
1251  rtx reg, mem;
1252  rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1253
1254  size = nelts = 0;
1255  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1256    if ((mode = apply_result_mode[regno]) != VOIDmode)
1257      {
1258	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1259	if (size % align != 0)
1260	  size = CEIL (size, align) * align;
1261	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1262	mem = adjust_address (result, mode, size);
1263	savevec[nelts++] = (savep
1264			    ? gen_rtx_SET (VOIDmode, mem, reg)
1265			    : gen_rtx_SET (VOIDmode, reg, mem));
1266	size += GET_MODE_SIZE (mode);
1267      }
1268  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1269}
1270#endif /* HAVE_untyped_call or HAVE_untyped_return */
1271
1272/* Save the state required to perform an untyped call with the same
1273   arguments as were passed to the current function.  */
1274
1275static rtx
1276expand_builtin_apply_args_1 (void)
1277{
1278  rtx registers, tem;
1279  int size, align, regno;
1280  enum machine_mode mode;
1281  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1282
1283  /* Create a block where the arg-pointer, structure value address,
1284     and argument registers can be saved.  */
1285  registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1286
1287  /* Walk past the arg-pointer and structure value address.  */
1288  size = GET_MODE_SIZE (Pmode);
1289  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1290    size += GET_MODE_SIZE (Pmode);
1291
1292  /* Save each register used in calling a function to the block.  */
1293  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1294    if ((mode = apply_args_mode[regno]) != VOIDmode)
1295      {
1296	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1297	if (size % align != 0)
1298	  size = CEIL (size, align) * align;
1299
1300	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1301
1302	emit_move_insn (adjust_address (registers, mode, size), tem);
1303	size += GET_MODE_SIZE (mode);
1304      }
1305
1306  /* Save the arg pointer to the block.  */
1307  tem = copy_to_reg (virtual_incoming_args_rtx);
1308#ifdef STACK_GROWS_DOWNWARD
1309  /* We need the pointer as the caller actually passed them to us, not
1310     as we might have pretended they were passed.  Make sure it's a valid
1311     operand, as emit_move_insn isn't expected to handle a PLUS.  */
1312  tem
1313    = force_operand (plus_constant (tem, current_function_pretend_args_size),
1314		     NULL_RTX);
1315#endif
1316  emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1317
1318  size = GET_MODE_SIZE (Pmode);
1319
1320  /* Save the structure value address unless this is passed as an
1321     "invisible" first argument.  */
1322  if (struct_incoming_value)
1323    {
1324      emit_move_insn (adjust_address (registers, Pmode, size),
1325		      copy_to_reg (struct_incoming_value));
1326      size += GET_MODE_SIZE (Pmode);
1327    }
1328
1329  /* Return the address of the block.  */
1330  return copy_addr_to_reg (XEXP (registers, 0));
1331}
1332
1333/* __builtin_apply_args returns block of memory allocated on
1334   the stack into which is stored the arg pointer, structure
1335   value address, static chain, and all the registers that might
1336   possibly be used in performing a function call.  The code is
1337   moved to the start of the function so the incoming values are
1338   saved.  */
1339
1340static rtx
1341expand_builtin_apply_args (void)
1342{
1343  /* Don't do __builtin_apply_args more than once in a function.
1344     Save the result of the first call and reuse it.  */
1345  if (apply_args_value != 0)
1346    return apply_args_value;
1347  {
1348    /* When this function is called, it means that registers must be
1349       saved on entry to this function.  So we migrate the
1350       call to the first insn of this function.  */
1351    rtx temp;
1352    rtx seq;
1353
1354    start_sequence ();
1355    temp = expand_builtin_apply_args_1 ();
1356    seq = get_insns ();
1357    end_sequence ();
1358
1359    apply_args_value = temp;
1360
1361    /* Put the insns after the NOTE that starts the function.
1362       If this is inside a start_sequence, make the outer-level insn
1363       chain current, so the code is placed at the start of the
1364       function.  */
1365    push_topmost_sequence ();
1366    emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1367    pop_topmost_sequence ();
1368    return temp;
1369  }
1370}
1371
1372/* Perform an untyped call and save the state required to perform an
1373   untyped return of whatever value was returned by the given function.  */
1374
1375static rtx
1376expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1377{
1378  int size, align, regno;
1379  enum machine_mode mode;
1380  rtx incoming_args, result, reg, dest, src, call_insn;
1381  rtx old_stack_level = 0;
1382  rtx call_fusage = 0;
1383  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1384
1385  arguments = convert_memory_address (Pmode, arguments);
1386
1387  /* Create a block where the return registers can be saved.  */
1388  result = assign_stack_local (BLKmode, apply_result_size (), -1);
1389
1390  /* Fetch the arg pointer from the ARGUMENTS block.  */
1391  incoming_args = gen_reg_rtx (Pmode);
1392  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1393#ifndef STACK_GROWS_DOWNWARD
1394  incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1395				       incoming_args, 0, OPTAB_LIB_WIDEN);
1396#endif
1397
1398  /* Push a new argument block and copy the arguments.  Do not allow
1399     the (potential) memcpy call below to interfere with our stack
1400     manipulations.  */
1401  do_pending_stack_adjust ();
1402  NO_DEFER_POP;
1403
1404  /* Save the stack with nonlocal if available.  */
1405#ifdef HAVE_save_stack_nonlocal
1406  if (HAVE_save_stack_nonlocal)
1407    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1408  else
1409#endif
1410    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1411
1412  /* Allocate a block of memory onto the stack and copy the memory
1413     arguments to the outgoing arguments address.  */
1414  allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1415  dest = virtual_outgoing_args_rtx;
1416#ifndef STACK_GROWS_DOWNWARD
1417  if (GET_CODE (argsize) == CONST_INT)
1418    dest = plus_constant (dest, -INTVAL (argsize));
1419  else
1420    dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1421#endif
1422  dest = gen_rtx_MEM (BLKmode, dest);
1423  set_mem_align (dest, PARM_BOUNDARY);
1424  src = gen_rtx_MEM (BLKmode, incoming_args);
1425  set_mem_align (src, PARM_BOUNDARY);
1426  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1427
1428  /* Refer to the argument block.  */
1429  apply_args_size ();
1430  arguments = gen_rtx_MEM (BLKmode, arguments);
1431  set_mem_align (arguments, PARM_BOUNDARY);
1432
1433  /* Walk past the arg-pointer and structure value address.  */
1434  size = GET_MODE_SIZE (Pmode);
1435  if (struct_value)
1436    size += GET_MODE_SIZE (Pmode);
1437
1438  /* Restore each of the registers previously saved.  Make USE insns
1439     for each of these registers for use in making the call.  */
1440  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1441    if ((mode = apply_args_mode[regno]) != VOIDmode)
1442      {
1443	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1444	if (size % align != 0)
1445	  size = CEIL (size, align) * align;
1446	reg = gen_rtx_REG (mode, regno);
1447	emit_move_insn (reg, adjust_address (arguments, mode, size));
1448	use_reg (&call_fusage, reg);
1449	size += GET_MODE_SIZE (mode);
1450      }
1451
1452  /* Restore the structure value address unless this is passed as an
1453     "invisible" first argument.  */
1454  size = GET_MODE_SIZE (Pmode);
1455  if (struct_value)
1456    {
1457      rtx value = gen_reg_rtx (Pmode);
1458      emit_move_insn (value, adjust_address (arguments, Pmode, size));
1459      emit_move_insn (struct_value, value);
1460      if (REG_P (struct_value))
1461	use_reg (&call_fusage, struct_value);
1462      size += GET_MODE_SIZE (Pmode);
1463    }
1464
1465  /* All arguments and registers used for the call are set up by now!  */
1466  function = prepare_call_address (function, NULL, &call_fusage, 0, 0);
1467
1468  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1469     and we don't want to load it into a register as an optimization,
1470     because prepare_call_address already did it if it should be done.  */
1471  if (GET_CODE (function) != SYMBOL_REF)
1472    function = memory_address (FUNCTION_MODE, function);
1473
1474  /* Generate the actual call instruction and save the return value.  */
1475#ifdef HAVE_untyped_call
1476  if (HAVE_untyped_call)
1477    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1478				      result, result_vector (1, result)));
1479  else
1480#endif
1481#ifdef HAVE_call_value
1482  if (HAVE_call_value)
1483    {
1484      rtx valreg = 0;
1485
1486      /* Locate the unique return register.  It is not possible to
1487	 express a call that sets more than one return register using
1488	 call_value; use untyped_call for that.  In fact, untyped_call
1489	 only needs to save the return registers in the given block.  */
1490      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1491	if ((mode = apply_result_mode[regno]) != VOIDmode)
1492	  {
1493	    gcc_assert (!valreg); /* HAVE_untyped_call required.  */
1494
1495	    valreg = gen_rtx_REG (mode, regno);
1496	  }
1497
1498      emit_call_insn (GEN_CALL_VALUE (valreg,
1499				      gen_rtx_MEM (FUNCTION_MODE, function),
1500				      const0_rtx, NULL_RTX, const0_rtx));
1501
1502      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1503    }
1504  else
1505#endif
1506    gcc_unreachable ();
1507
1508  /* Find the CALL insn we just emitted, and attach the register usage
1509     information.  */
1510  call_insn = last_call_insn ();
1511  add_function_usage_to (call_insn, call_fusage);
1512
1513  /* Restore the stack.  */
1514#ifdef HAVE_save_stack_nonlocal
1515  if (HAVE_save_stack_nonlocal)
1516    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1517  else
1518#endif
1519    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1520
1521  OK_DEFER_POP;
1522
1523  /* Return the address of the result block.  */
1524  result = copy_addr_to_reg (XEXP (result, 0));
1525  return convert_memory_address (ptr_mode, result);
1526}
1527
1528/* Perform an untyped return.  */
1529
1530static void
1531expand_builtin_return (rtx result)
1532{
1533  int size, align, regno;
1534  enum machine_mode mode;
1535  rtx reg;
1536  rtx call_fusage = 0;
1537
1538  result = convert_memory_address (Pmode, result);
1539
1540  apply_result_size ();
1541  result = gen_rtx_MEM (BLKmode, result);
1542
1543#ifdef HAVE_untyped_return
1544  if (HAVE_untyped_return)
1545    {
1546      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1547      emit_barrier ();
1548      return;
1549    }
1550#endif
1551
1552  /* Restore the return value and note that each value is used.  */
1553  size = 0;
1554  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1555    if ((mode = apply_result_mode[regno]) != VOIDmode)
1556      {
1557	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1558	if (size % align != 0)
1559	  size = CEIL (size, align) * align;
1560	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1561	emit_move_insn (reg, adjust_address (result, mode, size));
1562
1563	push_to_sequence (call_fusage);
1564	emit_insn (gen_rtx_USE (VOIDmode, reg));
1565	call_fusage = get_insns ();
1566	end_sequence ();
1567	size += GET_MODE_SIZE (mode);
1568      }
1569
1570  /* Put the USE insns before the return.  */
1571  emit_insn (call_fusage);
1572
1573  /* Return whatever values was restored by jumping directly to the end
1574     of the function.  */
1575  expand_naked_return ();
1576}
1577
1578/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1579
1580static enum type_class
1581type_to_class (tree type)
1582{
1583  switch (TREE_CODE (type))
1584    {
1585    case VOID_TYPE:	   return void_type_class;
1586    case INTEGER_TYPE:	   return integer_type_class;
1587    case ENUMERAL_TYPE:	   return enumeral_type_class;
1588    case BOOLEAN_TYPE:	   return boolean_type_class;
1589    case POINTER_TYPE:	   return pointer_type_class;
1590    case REFERENCE_TYPE:   return reference_type_class;
1591    case OFFSET_TYPE:	   return offset_type_class;
1592    case REAL_TYPE:	   return real_type_class;
1593    case COMPLEX_TYPE:	   return complex_type_class;
1594    case FUNCTION_TYPE:	   return function_type_class;
1595    case METHOD_TYPE:	   return method_type_class;
1596    case RECORD_TYPE:	   return record_type_class;
1597    case UNION_TYPE:
1598    case QUAL_UNION_TYPE:  return union_type_class;
1599    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1600				   ? string_type_class : array_type_class);
1601    case LANG_TYPE:	   return lang_type_class;
1602    default:		   return no_type_class;
1603    }
1604}
1605
1606/* Expand a call to __builtin_classify_type with arguments found in
1607   ARGLIST.  */
1608
1609static rtx
1610expand_builtin_classify_type (tree arglist)
1611{
1612  if (arglist != 0)
1613    return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1614  return GEN_INT (no_type_class);
1615}
1616
1617/* This helper macro, meant to be used in mathfn_built_in below,
1618   determines which among a set of three builtin math functions is
1619   appropriate for a given type mode.  The `F' and `L' cases are
1620   automatically generated from the `double' case.  */
1621#define CASE_MATHFN(BUILT_IN_MATHFN) \
1622  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1623  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1624  fcodel = BUILT_IN_MATHFN##L ; break;
1625
1626/* Return mathematic function equivalent to FN but operating directly
1627   on TYPE, if available.  If we can't do the conversion, return zero.  */
1628tree
1629mathfn_built_in (tree type, enum built_in_function fn)
1630{
1631  enum built_in_function fcode, fcodef, fcodel;
1632
1633  switch (fn)
1634    {
1635      CASE_MATHFN (BUILT_IN_ACOS)
1636      CASE_MATHFN (BUILT_IN_ACOSH)
1637      CASE_MATHFN (BUILT_IN_ASIN)
1638      CASE_MATHFN (BUILT_IN_ASINH)
1639      CASE_MATHFN (BUILT_IN_ATAN)
1640      CASE_MATHFN (BUILT_IN_ATAN2)
1641      CASE_MATHFN (BUILT_IN_ATANH)
1642      CASE_MATHFN (BUILT_IN_CBRT)
1643      CASE_MATHFN (BUILT_IN_CEIL)
1644      CASE_MATHFN (BUILT_IN_COPYSIGN)
1645      CASE_MATHFN (BUILT_IN_COS)
1646      CASE_MATHFN (BUILT_IN_COSH)
1647      CASE_MATHFN (BUILT_IN_DREM)
1648      CASE_MATHFN (BUILT_IN_ERF)
1649      CASE_MATHFN (BUILT_IN_ERFC)
1650      CASE_MATHFN (BUILT_IN_EXP)
1651      CASE_MATHFN (BUILT_IN_EXP10)
1652      CASE_MATHFN (BUILT_IN_EXP2)
1653      CASE_MATHFN (BUILT_IN_EXPM1)
1654      CASE_MATHFN (BUILT_IN_FABS)
1655      CASE_MATHFN (BUILT_IN_FDIM)
1656      CASE_MATHFN (BUILT_IN_FLOOR)
1657      CASE_MATHFN (BUILT_IN_FMA)
1658      CASE_MATHFN (BUILT_IN_FMAX)
1659      CASE_MATHFN (BUILT_IN_FMIN)
1660      CASE_MATHFN (BUILT_IN_FMOD)
1661      CASE_MATHFN (BUILT_IN_FREXP)
1662      CASE_MATHFN (BUILT_IN_GAMMA)
1663      CASE_MATHFN (BUILT_IN_HUGE_VAL)
1664      CASE_MATHFN (BUILT_IN_HYPOT)
1665      CASE_MATHFN (BUILT_IN_ILOGB)
1666      CASE_MATHFN (BUILT_IN_INF)
1667      CASE_MATHFN (BUILT_IN_J0)
1668      CASE_MATHFN (BUILT_IN_J1)
1669      CASE_MATHFN (BUILT_IN_JN)
1670      CASE_MATHFN (BUILT_IN_LCEIL)
1671      CASE_MATHFN (BUILT_IN_LDEXP)
1672      CASE_MATHFN (BUILT_IN_LFLOOR)
1673      CASE_MATHFN (BUILT_IN_LGAMMA)
1674      CASE_MATHFN (BUILT_IN_LLCEIL)
1675      CASE_MATHFN (BUILT_IN_LLFLOOR)
1676      CASE_MATHFN (BUILT_IN_LLRINT)
1677      CASE_MATHFN (BUILT_IN_LLROUND)
1678      CASE_MATHFN (BUILT_IN_LOG)
1679      CASE_MATHFN (BUILT_IN_LOG10)
1680      CASE_MATHFN (BUILT_IN_LOG1P)
1681      CASE_MATHFN (BUILT_IN_LOG2)
1682      CASE_MATHFN (BUILT_IN_LOGB)
1683      CASE_MATHFN (BUILT_IN_LRINT)
1684      CASE_MATHFN (BUILT_IN_LROUND)
1685      CASE_MATHFN (BUILT_IN_MODF)
1686      CASE_MATHFN (BUILT_IN_NAN)
1687      CASE_MATHFN (BUILT_IN_NANS)
1688      CASE_MATHFN (BUILT_IN_NEARBYINT)
1689      CASE_MATHFN (BUILT_IN_NEXTAFTER)
1690      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1691      CASE_MATHFN (BUILT_IN_POW)
1692      CASE_MATHFN (BUILT_IN_POWI)
1693      CASE_MATHFN (BUILT_IN_POW10)
1694      CASE_MATHFN (BUILT_IN_REMAINDER)
1695      CASE_MATHFN (BUILT_IN_REMQUO)
1696      CASE_MATHFN (BUILT_IN_RINT)
1697      CASE_MATHFN (BUILT_IN_ROUND)
1698      CASE_MATHFN (BUILT_IN_SCALB)
1699      CASE_MATHFN (BUILT_IN_SCALBLN)
1700      CASE_MATHFN (BUILT_IN_SCALBN)
1701      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1702      CASE_MATHFN (BUILT_IN_SIN)
1703      CASE_MATHFN (BUILT_IN_SINCOS)
1704      CASE_MATHFN (BUILT_IN_SINH)
1705      CASE_MATHFN (BUILT_IN_SQRT)
1706      CASE_MATHFN (BUILT_IN_TAN)
1707      CASE_MATHFN (BUILT_IN_TANH)
1708      CASE_MATHFN (BUILT_IN_TGAMMA)
1709      CASE_MATHFN (BUILT_IN_TRUNC)
1710      CASE_MATHFN (BUILT_IN_Y0)
1711      CASE_MATHFN (BUILT_IN_Y1)
1712      CASE_MATHFN (BUILT_IN_YN)
1713
1714      default:
1715	return 0;
1716      }
1717
1718  if (TYPE_MAIN_VARIANT (type) == double_type_node)
1719    return implicit_built_in_decls[fcode];
1720  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1721    return implicit_built_in_decls[fcodef];
1722  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1723    return implicit_built_in_decls[fcodel];
1724  else
1725    return 0;
1726}
1727
1728/* If errno must be maintained, expand the RTL to check if the result,
1729   TARGET, of a built-in function call, EXP, is NaN, and if so set
1730   errno to EDOM.  */
1731
1732static void
1733expand_errno_check (tree exp, rtx target)
1734{
1735  rtx lab = gen_label_rtx ();
1736
1737  /* Test the result; if it is NaN, set errno=EDOM because
1738     the argument was not in the domain.  */
1739  emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1740			   0, lab);
1741
1742#ifdef TARGET_EDOM
1743  /* If this built-in doesn't throw an exception, set errno directly.  */
1744  if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
1745    {
1746#ifdef GEN_ERRNO_RTX
1747      rtx errno_rtx = GEN_ERRNO_RTX;
1748#else
1749      rtx errno_rtx
1750	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1751#endif
1752      emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1753      emit_label (lab);
1754      return;
1755    }
1756#endif
1757
1758  /* We can't set errno=EDOM directly; let the library call do it.
1759     Pop the arguments right away in case the call gets deleted.  */
1760  NO_DEFER_POP;
1761  expand_call (exp, target, 0);
1762  OK_DEFER_POP;
1763  emit_label (lab);
1764}
1765
1766
1767/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
1768   Return 0 if a normal call should be emitted rather than expanding the
1769   function in-line.  EXP is the expression that is a call to the builtin
1770   function; if convenient, the result should be placed in TARGET.
1771   SUBTARGET may be used as the target for computing one of EXP's operands.  */
1772
1773static rtx
1774expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
1775{
1776  optab builtin_optab;
1777  rtx op0, insns, before_call;
1778  tree fndecl = get_callee_fndecl (exp);
1779  tree arglist = TREE_OPERAND (exp, 1);
1780  enum machine_mode mode;
1781  bool errno_set = false;
1782  tree arg, narg;
1783
1784  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1785    return 0;
1786
1787  arg = TREE_VALUE (arglist);
1788
1789  switch (DECL_FUNCTION_CODE (fndecl))
1790    {
1791    CASE_FLT_FN (BUILT_IN_SQRT):
1792      errno_set = ! tree_expr_nonnegative_p (arg);
1793      builtin_optab = sqrt_optab;
1794      break;
1795    CASE_FLT_FN (BUILT_IN_EXP):
1796      errno_set = true; builtin_optab = exp_optab; break;
1797    CASE_FLT_FN (BUILT_IN_EXP10):
1798    CASE_FLT_FN (BUILT_IN_POW10):
1799      errno_set = true; builtin_optab = exp10_optab; break;
1800    CASE_FLT_FN (BUILT_IN_EXP2):
1801      errno_set = true; builtin_optab = exp2_optab; break;
1802    CASE_FLT_FN (BUILT_IN_EXPM1):
1803      errno_set = true; builtin_optab = expm1_optab; break;
1804    CASE_FLT_FN (BUILT_IN_LOGB):
1805      errno_set = true; builtin_optab = logb_optab; break;
1806    CASE_FLT_FN (BUILT_IN_ILOGB):
1807      errno_set = true; builtin_optab = ilogb_optab; break;
1808    CASE_FLT_FN (BUILT_IN_LOG):
1809      errno_set = true; builtin_optab = log_optab; break;
1810    CASE_FLT_FN (BUILT_IN_LOG10):
1811      errno_set = true; builtin_optab = log10_optab; break;
1812    CASE_FLT_FN (BUILT_IN_LOG2):
1813      errno_set = true; builtin_optab = log2_optab; break;
1814    CASE_FLT_FN (BUILT_IN_LOG1P):
1815      errno_set = true; builtin_optab = log1p_optab; break;
1816    CASE_FLT_FN (BUILT_IN_ASIN):
1817      builtin_optab = asin_optab; break;
1818    CASE_FLT_FN (BUILT_IN_ACOS):
1819      builtin_optab = acos_optab; break;
1820    CASE_FLT_FN (BUILT_IN_TAN):
1821      builtin_optab = tan_optab; break;
1822    CASE_FLT_FN (BUILT_IN_ATAN):
1823      builtin_optab = atan_optab; break;
1824    CASE_FLT_FN (BUILT_IN_FLOOR):
1825      builtin_optab = floor_optab; break;
1826    CASE_FLT_FN (BUILT_IN_CEIL):
1827      builtin_optab = ceil_optab; break;
1828    CASE_FLT_FN (BUILT_IN_TRUNC):
1829      builtin_optab = btrunc_optab; break;
1830    CASE_FLT_FN (BUILT_IN_ROUND):
1831      builtin_optab = round_optab; break;
1832    CASE_FLT_FN (BUILT_IN_NEARBYINT):
1833      builtin_optab = nearbyint_optab; break;
1834    CASE_FLT_FN (BUILT_IN_RINT):
1835      builtin_optab = rint_optab; break;
1836    CASE_FLT_FN (BUILT_IN_LRINT):
1837    CASE_FLT_FN (BUILT_IN_LLRINT):
1838      builtin_optab = lrint_optab; break;
1839    default:
1840      gcc_unreachable ();
1841    }
1842
1843  /* Make a suitable register to place result in.  */
1844  mode = TYPE_MODE (TREE_TYPE (exp));
1845
1846  if (! flag_errno_math || ! HONOR_NANS (mode))
1847    errno_set = false;
1848
1849  /* Before working hard, check whether the instruction is available.  */
1850  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1851    {
1852      target = gen_reg_rtx (mode);
1853
1854      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
1855	 need to expand the argument again.  This way, we will not perform
1856	 side-effects more the once.  */
1857      narg = builtin_save_expr (arg);
1858      if (narg != arg)
1859	{
1860	  arg = narg;
1861	  arglist = build_tree_list (NULL_TREE, arg);
1862	  exp = build_function_call_expr (fndecl, arglist);
1863	}
1864
1865      op0 = expand_expr (arg, subtarget, VOIDmode, 0);
1866
1867      start_sequence ();
1868
1869      /* Compute into TARGET.
1870	 Set TARGET to wherever the result comes back.  */
1871      target = expand_unop (mode, builtin_optab, op0, target, 0);
1872
1873      if (target != 0)
1874	{
1875	  if (errno_set)
1876	    expand_errno_check (exp, target);
1877
1878	  /* Output the entire sequence.  */
1879	  insns = get_insns ();
1880	  end_sequence ();
1881	  emit_insn (insns);
1882	  return target;
1883	}
1884
1885      /* If we were unable to expand via the builtin, stop the sequence
1886	 (without outputting the insns) and call to the library function
1887	 with the stabilized argument list.  */
1888      end_sequence ();
1889    }
1890
1891  before_call = get_last_insn ();
1892
1893  target = expand_call (exp, target, target == const0_rtx);
1894
1895  /* If this is a sqrt operation and we don't care about errno, try to
1896     attach a REG_EQUAL note with a SQRT rtx to the emitted libcall.
1897     This allows the semantics of the libcall to be visible to the RTL
1898     optimizers.  */
1899  if (builtin_optab == sqrt_optab && !errno_set)
1900    {
1901      /* Search backwards through the insns emitted by expand_call looking
1902	 for the instruction with the REG_RETVAL note.  */
1903      rtx last = get_last_insn ();
1904      while (last != before_call)
1905	{
1906	  if (find_reg_note (last, REG_RETVAL, NULL))
1907	    {
1908	      rtx note = find_reg_note (last, REG_EQUAL, NULL);
1909	      /* Check that the REQ_EQUAL note is an EXPR_LIST with
1910		 two elements, i.e. symbol_ref(sqrt) and the operand.  */
1911	      if (note
1912		  && GET_CODE (note) == EXPR_LIST
1913		  && GET_CODE (XEXP (note, 0)) == EXPR_LIST
1914		  && XEXP (XEXP (note, 0), 1) != NULL_RTX
1915		  && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX)
1916		{
1917		  rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0);
1918		  /* Check operand is a register with expected mode.  */
1919		  if (operand
1920		      && REG_P (operand)
1921		      && GET_MODE (operand) == mode)
1922		    {
1923		      /* Replace the REG_EQUAL note with a SQRT rtx.  */
1924		      rtx equiv = gen_rtx_SQRT (mode, operand);
1925		      set_unique_reg_note (last, REG_EQUAL, equiv);
1926		    }
1927		}
1928	      break;
1929	    }
1930	  last = PREV_INSN (last);
1931	}
1932    }
1933
1934  return target;
1935}
1936
1937/* Expand a call to the builtin binary math functions (pow and atan2).
1938   Return 0 if a normal call should be emitted rather than expanding the
1939   function in-line.  EXP is the expression that is a call to the builtin
1940   function; if convenient, the result should be placed in TARGET.
1941   SUBTARGET may be used as the target for computing one of EXP's
1942   operands.  */
1943
1944static rtx
1945expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
1946{
1947  optab builtin_optab;
1948  rtx op0, op1, insns;
1949  int op1_type = REAL_TYPE;
1950  tree fndecl = get_callee_fndecl (exp);
1951  tree arglist = TREE_OPERAND (exp, 1);
1952  tree arg0, arg1, temp, narg;
1953  enum machine_mode mode;
1954  bool errno_set = true;
1955  bool stable = true;
1956
1957  if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP)
1958      || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF)
1959      || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL))
1960    op1_type = INTEGER_TYPE;
1961
1962  if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE))
1963    return 0;
1964
1965  arg0 = TREE_VALUE (arglist);
1966  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
1967
1968  switch (DECL_FUNCTION_CODE (fndecl))
1969    {
1970    CASE_FLT_FN (BUILT_IN_POW):
1971      builtin_optab = pow_optab; break;
1972    CASE_FLT_FN (BUILT_IN_ATAN2):
1973      builtin_optab = atan2_optab; break;
1974    CASE_FLT_FN (BUILT_IN_LDEXP):
1975      builtin_optab = ldexp_optab; break;
1976    CASE_FLT_FN (BUILT_IN_FMOD):
1977      builtin_optab = fmod_optab; break;
1978    CASE_FLT_FN (BUILT_IN_DREM):
1979      builtin_optab = drem_optab; break;
1980    default:
1981      gcc_unreachable ();
1982    }
1983
1984  /* Make a suitable register to place result in.  */
1985  mode = TYPE_MODE (TREE_TYPE (exp));
1986
1987  /* Before working hard, check whether the instruction is available.  */
1988  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
1989    return 0;
1990
1991  target = gen_reg_rtx (mode);
1992
1993  if (! flag_errno_math || ! HONOR_NANS (mode))
1994    errno_set = false;
1995
1996  /* Always stabilize the argument list.  */
1997  narg = builtin_save_expr (arg1);
1998  if (narg != arg1)
1999    {
2000      arg1 = narg;
2001      temp = build_tree_list (NULL_TREE, narg);
2002      stable = false;
2003    }
2004  else
2005    temp = TREE_CHAIN (arglist);
2006
2007  narg = builtin_save_expr (arg0);
2008  if (narg != arg0)
2009    {
2010      arg0 = narg;
2011      arglist = tree_cons (NULL_TREE, narg, temp);
2012      stable = false;
2013    }
2014  else if (! stable)
2015    arglist = tree_cons (NULL_TREE, arg0, temp);
2016
2017  if (! stable)
2018    exp = build_function_call_expr (fndecl, arglist);
2019
2020  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2021  op1 = expand_normal (arg1);
2022
2023  start_sequence ();
2024
2025  /* Compute into TARGET.
2026     Set TARGET to wherever the result comes back.  */
2027  target = expand_binop (mode, builtin_optab, op0, op1,
2028			 target, 0, OPTAB_DIRECT);
2029
2030  /* If we were unable to expand via the builtin, stop the sequence
2031     (without outputting the insns) and call to the library function
2032     with the stabilized argument list.  */
2033  if (target == 0)
2034    {
2035      end_sequence ();
2036      return expand_call (exp, target, target == const0_rtx);
2037    }
2038
2039  if (errno_set)
2040    expand_errno_check (exp, target);
2041
2042  /* Output the entire sequence.  */
2043  insns = get_insns ();
2044  end_sequence ();
2045  emit_insn (insns);
2046
2047  return target;
2048}
2049
2050/* Expand a call to the builtin sin and cos math functions.
2051   Return 0 if a normal call should be emitted rather than expanding the
2052   function in-line.  EXP is the expression that is a call to the builtin
2053   function; if convenient, the result should be placed in TARGET.
2054   SUBTARGET may be used as the target for computing one of EXP's
2055   operands.  */
2056
2057static rtx
2058expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2059{
2060  optab builtin_optab;
2061  rtx op0, insns;
2062  tree fndecl = get_callee_fndecl (exp);
2063  tree arglist = TREE_OPERAND (exp, 1);
2064  enum machine_mode mode;
2065  tree arg, narg;
2066
2067  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2068    return 0;
2069
2070  arg = TREE_VALUE (arglist);
2071
2072  switch (DECL_FUNCTION_CODE (fndecl))
2073    {
2074    CASE_FLT_FN (BUILT_IN_SIN):
2075    CASE_FLT_FN (BUILT_IN_COS):
2076      builtin_optab = sincos_optab; break;
2077    default:
2078      gcc_unreachable ();
2079    }
2080
2081  /* Make a suitable register to place result in.  */
2082  mode = TYPE_MODE (TREE_TYPE (exp));
2083
2084  /* Check if sincos insn is available, otherwise fallback
2085     to sin or cos insn.  */
2086  if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) {
2087    switch (DECL_FUNCTION_CODE (fndecl))
2088      {
2089      CASE_FLT_FN (BUILT_IN_SIN):
2090	builtin_optab = sin_optab; break;
2091      CASE_FLT_FN (BUILT_IN_COS):
2092	builtin_optab = cos_optab; break;
2093      default:
2094	gcc_unreachable ();
2095      }
2096  }
2097
2098  /* Before working hard, check whether the instruction is available.  */
2099  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2100    {
2101      target = gen_reg_rtx (mode);
2102
2103      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2104	 need to expand the argument again.  This way, we will not perform
2105	 side-effects more the once.  */
2106      narg = save_expr (arg);
2107      if (narg != arg)
2108	{
2109	  arg = narg;
2110	  arglist = build_tree_list (NULL_TREE, arg);
2111	  exp = build_function_call_expr (fndecl, arglist);
2112	}
2113
2114      op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2115
2116      start_sequence ();
2117
2118      /* Compute into TARGET.
2119	 Set TARGET to wherever the result comes back.  */
2120      if (builtin_optab == sincos_optab)
2121	{
2122	  int result;
2123
2124	  switch (DECL_FUNCTION_CODE (fndecl))
2125	    {
2126	    CASE_FLT_FN (BUILT_IN_SIN):
2127	      result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
2128	      break;
2129	    CASE_FLT_FN (BUILT_IN_COS):
2130	      result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
2131	      break;
2132	    default:
2133	      gcc_unreachable ();
2134	    }
2135	  gcc_assert (result);
2136	}
2137      else
2138	{
2139	  target = expand_unop (mode, builtin_optab, op0, target, 0);
2140	}
2141
2142      if (target != 0)
2143	{
2144	  /* Output the entire sequence.  */
2145	  insns = get_insns ();
2146	  end_sequence ();
2147	  emit_insn (insns);
2148	  return target;
2149	}
2150
2151      /* If we were unable to expand via the builtin, stop the sequence
2152	 (without outputting the insns) and call to the library function
2153	 with the stabilized argument list.  */
2154      end_sequence ();
2155    }
2156
2157  target = expand_call (exp, target, target == const0_rtx);
2158
2159  return target;
2160}
2161
2162/* Expand a call to the builtin sincos math function.
2163   Return 0 if a normal call should be emitted rather than expanding the
2164   function in-line.  EXP is the expression that is a call to the builtin
2165   function.  */
2166
2167static rtx
2168expand_builtin_sincos (tree exp)
2169{
2170  rtx op0, op1, op2, target1, target2;
2171  tree arglist = TREE_OPERAND (exp, 1);
2172  enum machine_mode mode;
2173  tree arg, sinp, cosp;
2174  int result;
2175
2176  if (!validate_arglist (arglist, REAL_TYPE,
2177			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2178    return 0;
2179
2180  arg = TREE_VALUE (arglist);
2181  sinp = TREE_VALUE (TREE_CHAIN (arglist));
2182  cosp = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2183
2184  /* Make a suitable register to place result in.  */
2185  mode = TYPE_MODE (TREE_TYPE (arg));
2186
2187  /* Check if sincos insn is available, otherwise emit the call.  */
2188  if (sincos_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2189    return NULL_RTX;
2190
2191  target1 = gen_reg_rtx (mode);
2192  target2 = gen_reg_rtx (mode);
2193
2194  op0 = expand_normal (arg);
2195  op1 = expand_normal (build_fold_indirect_ref (sinp));
2196  op2 = expand_normal (build_fold_indirect_ref (cosp));
2197
2198  /* Compute into target1 and target2.
2199     Set TARGET to wherever the result comes back.  */
2200  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2201  gcc_assert (result);
2202
2203  /* Move target1 and target2 to the memory locations indicated
2204     by op1 and op2.  */
2205  emit_move_insn (op1, target1);
2206  emit_move_insn (op2, target2);
2207
2208  return const0_rtx;
2209}
2210
2211/* Expand a call to one of the builtin rounding functions (lfloor).
2212   If expanding via optab fails, lower expression to (int)(floor(x)).
2213   EXP is the expression that is a call to the builtin function;
2214   if convenient, the result should be placed in TARGET.  SUBTARGET may
2215   be used as the target for computing one of EXP's operands.  */
2216
2217static rtx
2218expand_builtin_int_roundingfn (tree exp, rtx target, rtx subtarget)
2219{
2220  optab builtin_optab;
2221  rtx op0, insns, tmp;
2222  tree fndecl = get_callee_fndecl (exp);
2223  tree arglist = TREE_OPERAND (exp, 1);
2224  enum built_in_function fallback_fn;
2225  tree fallback_fndecl;
2226  enum machine_mode mode;
2227  tree arg, narg;
2228
2229  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
2230    gcc_unreachable ();
2231
2232  arg = TREE_VALUE (arglist);
2233
2234  switch (DECL_FUNCTION_CODE (fndecl))
2235    {
2236    CASE_FLT_FN (BUILT_IN_LCEIL):
2237    CASE_FLT_FN (BUILT_IN_LLCEIL):
2238      builtin_optab = lceil_optab;
2239      fallback_fn = BUILT_IN_CEIL;
2240      break;
2241
2242    CASE_FLT_FN (BUILT_IN_LFLOOR):
2243    CASE_FLT_FN (BUILT_IN_LLFLOOR):
2244      builtin_optab = lfloor_optab;
2245      fallback_fn = BUILT_IN_FLOOR;
2246      break;
2247
2248    default:
2249      gcc_unreachable ();
2250    }
2251
2252  /* Make a suitable register to place result in.  */
2253  mode = TYPE_MODE (TREE_TYPE (exp));
2254
2255  /* Before working hard, check whether the instruction is available.  */
2256  if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2257    {
2258      target = gen_reg_rtx (mode);
2259
2260      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2261	 need to expand the argument again.  This way, we will not perform
2262	 side-effects more the once.  */
2263      narg = builtin_save_expr (arg);
2264      if (narg != arg)
2265	{
2266	  arg = narg;
2267	  arglist = build_tree_list (NULL_TREE, arg);
2268	  exp = build_function_call_expr (fndecl, arglist);
2269	}
2270
2271      op0 = expand_expr (arg, subtarget, VOIDmode, 0);
2272
2273      start_sequence ();
2274
2275      /* Compute into TARGET.
2276	 Set TARGET to wherever the result comes back.  */
2277      target = expand_unop (mode, builtin_optab, op0, target, 0);
2278
2279      if (target != 0)
2280	{
2281	  /* Output the entire sequence.  */
2282	  insns = get_insns ();
2283	  end_sequence ();
2284	  emit_insn (insns);
2285	  return target;
2286	}
2287
2288      /* If we were unable to expand via the builtin, stop the sequence
2289	 (without outputting the insns).  */
2290      end_sequence ();
2291    }
2292
2293  /* Fall back to floating point rounding optab.  */
2294  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2295  /* We shouldn't get here on targets without TARGET_C99_FUNCTIONS.
2296     ??? Perhaps convert (int)floorf(x) into (int)floor((double)x).  */
2297  gcc_assert (fallback_fndecl != NULL_TREE);
2298  exp = build_function_call_expr (fallback_fndecl, arglist);
2299
2300  tmp = expand_normal (exp);
2301
2302  /* Truncate the result of floating point optab to integer
2303     via expand_fix ().  */
2304  target = gen_reg_rtx (mode);
2305  expand_fix (target, tmp, 0);
2306
2307  return target;
2308}
2309
2310/* To evaluate powi(x,n), the floating point value x raised to the
2311   constant integer exponent n, we use a hybrid algorithm that
2312   combines the "window method" with look-up tables.  For an
2313   introduction to exponentiation algorithms and "addition chains",
2314   see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth,
2315   "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming",
2316   3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation
2317   Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998.  */
2318
2319/* Provide a default value for POWI_MAX_MULTS, the maximum number of
2320   multiplications to inline before calling the system library's pow
2321   function.  powi(x,n) requires at worst 2*bits(n)-2 multiplications,
2322   so this default never requires calling pow, powf or powl.  */
2323
2324#ifndef POWI_MAX_MULTS
2325#define POWI_MAX_MULTS  (2*HOST_BITS_PER_WIDE_INT-2)
2326#endif
2327
2328/* The size of the "optimal power tree" lookup table.  All
2329   exponents less than this value are simply looked up in the
2330   powi_table below.  This threshold is also used to size the
2331   cache of pseudo registers that hold intermediate results.  */
2332#define POWI_TABLE_SIZE 256
2333
2334/* The size, in bits of the window, used in the "window method"
2335   exponentiation algorithm.  This is equivalent to a radix of
2336   (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method".  */
2337#define POWI_WINDOW_SIZE 3
2338
2339/* The following table is an efficient representation of an
2340   "optimal power tree".  For each value, i, the corresponding
2341   value, j, in the table states than an optimal evaluation
2342   sequence for calculating pow(x,i) can be found by evaluating
2343   pow(x,j)*pow(x,i-j).  An optimal power tree for the first
2344   100 integers is given in Knuth's "Seminumerical algorithms".  */
2345
2346static const unsigned char powi_table[POWI_TABLE_SIZE] =
2347  {
2348      0,   1,   1,   2,   2,   3,   3,   4,  /*   0 -   7 */
2349      4,   6,   5,   6,   6,  10,   7,   9,  /*   8 -  15 */
2350      8,  16,   9,  16,  10,  12,  11,  13,  /*  16 -  23 */
2351     12,  17,  13,  18,  14,  24,  15,  26,  /*  24 -  31 */
2352     16,  17,  17,  19,  18,  33,  19,  26,  /*  32 -  39 */
2353     20,  25,  21,  40,  22,  27,  23,  44,  /*  40 -  47 */
2354     24,  32,  25,  34,  26,  29,  27,  44,  /*  48 -  55 */
2355     28,  31,  29,  34,  30,  60,  31,  36,  /*  56 -  63 */
2356     32,  64,  33,  34,  34,  46,  35,  37,  /*  64 -  71 */
2357     36,  65,  37,  50,  38,  48,  39,  69,  /*  72 -  79 */
2358     40,  49,  41,  43,  42,  51,  43,  58,  /*  80 -  87 */
2359     44,  64,  45,  47,  46,  59,  47,  76,  /*  88 -  95 */
2360     48,  65,  49,  66,  50,  67,  51,  66,  /*  96 - 103 */
2361     52,  70,  53,  74,  54, 104,  55,  74,  /* 104 - 111 */
2362     56,  64,  57,  69,  58,  78,  59,  68,  /* 112 - 119 */
2363     60,  61,  61,  80,  62,  75,  63,  68,  /* 120 - 127 */
2364     64,  65,  65, 128,  66, 129,  67,  90,  /* 128 - 135 */
2365     68,  73,  69, 131,  70,  94,  71,  88,  /* 136 - 143 */
2366     72, 128,  73,  98,  74, 132,  75, 121,  /* 144 - 151 */
2367     76, 102,  77, 124,  78, 132,  79, 106,  /* 152 - 159 */
2368     80,  97,  81, 160,  82,  99,  83, 134,  /* 160 - 167 */
2369     84,  86,  85,  95,  86, 160,  87, 100,  /* 168 - 175 */
2370     88, 113,  89,  98,  90, 107,  91, 122,  /* 176 - 183 */
2371     92, 111,  93, 102,  94, 126,  95, 150,  /* 184 - 191 */
2372     96, 128,  97, 130,  98, 133,  99, 195,  /* 192 - 199 */
2373    100, 128, 101, 123, 102, 164, 103, 138,  /* 200 - 207 */
2374    104, 145, 105, 146, 106, 109, 107, 149,  /* 208 - 215 */
2375    108, 200, 109, 146, 110, 170, 111, 157,  /* 216 - 223 */
2376    112, 128, 113, 130, 114, 182, 115, 132,  /* 224 - 231 */
2377    116, 200, 117, 132, 118, 158, 119, 206,  /* 232 - 239 */
2378    120, 240, 121, 162, 122, 147, 123, 152,  /* 240 - 247 */
2379    124, 166, 125, 214, 126, 138, 127, 153,  /* 248 - 255 */
2380  };
2381
2382
2383/* Return the number of multiplications required to calculate
2384   powi(x,n) where n is less than POWI_TABLE_SIZE.  This is a
2385   subroutine of powi_cost.  CACHE is an array indicating
2386   which exponents have already been calculated.  */
2387
2388static int
2389powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache)
2390{
2391  /* If we've already calculated this exponent, then this evaluation
2392     doesn't require any additional multiplications.  */
2393  if (cache[n])
2394    return 0;
2395
2396  cache[n] = true;
2397  return powi_lookup_cost (n - powi_table[n], cache)
2398	 + powi_lookup_cost (powi_table[n], cache) + 1;
2399}
2400
2401/* Return the number of multiplications required to calculate
2402   powi(x,n) for an arbitrary x, given the exponent N.  This
2403   function needs to be kept in sync with expand_powi below.  */
2404
2405static int
2406powi_cost (HOST_WIDE_INT n)
2407{
2408  bool cache[POWI_TABLE_SIZE];
2409  unsigned HOST_WIDE_INT digit;
2410  unsigned HOST_WIDE_INT val;
2411  int result;
2412
2413  if (n == 0)
2414    return 0;
2415
2416  /* Ignore the reciprocal when calculating the cost.  */
2417  val = (n < 0) ? -n : n;
2418
2419  /* Initialize the exponent cache.  */
2420  memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool));
2421  cache[1] = true;
2422
2423  result = 0;
2424
2425  while (val >= POWI_TABLE_SIZE)
2426    {
2427      if (val & 1)
2428	{
2429	  digit = val & ((1 << POWI_WINDOW_SIZE) - 1);
2430	  result += powi_lookup_cost (digit, cache)
2431		    + POWI_WINDOW_SIZE + 1;
2432	  val >>= POWI_WINDOW_SIZE;
2433	}
2434      else
2435	{
2436	  val >>= 1;
2437	  result++;
2438	}
2439    }
2440
2441  return result + powi_lookup_cost (val, cache);
2442}
2443
2444/* Recursive subroutine of expand_powi.  This function takes the array,
2445   CACHE, of already calculated exponents and an exponent N and returns
2446   an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE.  */
2447
2448static rtx
2449expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache)
2450{
2451  unsigned HOST_WIDE_INT digit;
2452  rtx target, result;
2453  rtx op0, op1;
2454
2455  if (n < POWI_TABLE_SIZE)
2456    {
2457      if (cache[n])
2458	return cache[n];
2459
2460      target = gen_reg_rtx (mode);
2461      cache[n] = target;
2462
2463      op0 = expand_powi_1 (mode, n - powi_table[n], cache);
2464      op1 = expand_powi_1 (mode, powi_table[n], cache);
2465    }
2466  else if (n & 1)
2467    {
2468      target = gen_reg_rtx (mode);
2469      digit = n & ((1 << POWI_WINDOW_SIZE) - 1);
2470      op0 = expand_powi_1 (mode, n - digit, cache);
2471      op1 = expand_powi_1 (mode, digit, cache);
2472    }
2473  else
2474    {
2475      target = gen_reg_rtx (mode);
2476      op0 = expand_powi_1 (mode, n >> 1, cache);
2477      op1 = op0;
2478    }
2479
2480  result = expand_mult (mode, op0, op1, target, 0);
2481  if (result != target)
2482    emit_move_insn (target, result);
2483  return target;
2484}
2485
2486/* Expand the RTL to evaluate powi(x,n) in mode MODE.  X is the
2487   floating point operand in mode MODE, and N is the exponent.  This
2488   function needs to be kept in sync with powi_cost above.  */
2489
2490static rtx
2491expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n)
2492{
2493  unsigned HOST_WIDE_INT val;
2494  rtx cache[POWI_TABLE_SIZE];
2495  rtx result;
2496
2497  if (n == 0)
2498    return CONST1_RTX (mode);
2499
2500  val = (n < 0) ? -n : n;
2501
2502  memset (cache, 0, sizeof (cache));
2503  cache[1] = x;
2504
2505  result = expand_powi_1 (mode, (n < 0) ? -n : n, cache);
2506
2507  /* If the original exponent was negative, reciprocate the result.  */
2508  if (n < 0)
2509    result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode),
2510			   result, NULL_RTX, 0, OPTAB_LIB_WIDEN);
2511
2512  return result;
2513}
2514
2515/* Expand a call to the pow built-in mathematical function.  Return 0 if
2516   a normal call should be emitted rather than expanding the function
2517   in-line.  EXP is the expression that is a call to the builtin
2518   function; if convenient, the result should be placed in TARGET.  */
2519
2520static rtx
2521expand_builtin_pow (tree exp, rtx target, rtx subtarget)
2522{
2523  tree arglist = TREE_OPERAND (exp, 1);
2524  tree arg0, arg1;
2525
2526  if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2527    return 0;
2528
2529  arg0 = TREE_VALUE (arglist);
2530  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2531
2532  if (TREE_CODE (arg1) == REAL_CST
2533      && ! TREE_CONSTANT_OVERFLOW (arg1))
2534    {
2535      REAL_VALUE_TYPE cint;
2536      REAL_VALUE_TYPE c;
2537      HOST_WIDE_INT n;
2538
2539      c = TREE_REAL_CST (arg1);
2540      n = real_to_integer (&c);
2541      real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
2542      if (real_identical (&c, &cint))
2543	{
2544	  /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2545	     Otherwise, check the number of multiplications required.
2546	     Note that pow never sets errno for an integer exponent.  */
2547	  if ((n >= -1 && n <= 2)
2548	      || (flag_unsafe_math_optimizations
2549		  && ! optimize_size
2550		  && powi_cost (n) <= POWI_MAX_MULTS))
2551	    {
2552	      enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2553	      rtx op = expand_expr (arg0, subtarget, VOIDmode, 0);
2554	      op = force_reg (mode, op);
2555	      return expand_powi (op, mode, n);
2556	    }
2557	}
2558    }
2559
2560  if (! flag_unsafe_math_optimizations)
2561    return NULL_RTX;
2562  return expand_builtin_mathfn_2 (exp, target, subtarget);
2563}
2564
2565/* Expand a call to the powi built-in mathematical function.  Return 0 if
2566   a normal call should be emitted rather than expanding the function
2567   in-line.  EXP is the expression that is a call to the builtin
2568   function; if convenient, the result should be placed in TARGET.  */
2569
2570static rtx
2571expand_builtin_powi (tree exp, rtx target, rtx subtarget)
2572{
2573  tree arglist = TREE_OPERAND (exp, 1);
2574  tree arg0, arg1;
2575  rtx op0, op1;
2576  enum machine_mode mode;
2577  enum machine_mode mode2;
2578
2579  if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2580    return 0;
2581
2582  arg0 = TREE_VALUE (arglist);
2583  arg1 = TREE_VALUE (TREE_CHAIN (arglist));
2584  mode = TYPE_MODE (TREE_TYPE (exp));
2585
2586  /* Handle constant power.  */
2587
2588  if (TREE_CODE (arg1) == INTEGER_CST
2589      && ! TREE_CONSTANT_OVERFLOW (arg1))
2590    {
2591      HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1);
2592
2593      /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact.
2594	 Otherwise, check the number of multiplications required.  */
2595      if ((TREE_INT_CST_HIGH (arg1) == 0
2596	   || TREE_INT_CST_HIGH (arg1) == -1)
2597	  && ((n >= -1 && n <= 2)
2598	      || (! optimize_size
2599		  && powi_cost (n) <= POWI_MAX_MULTS)))
2600	{
2601	  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
2602	  op0 = force_reg (mode, op0);
2603	  return expand_powi (op0, mode, n);
2604	}
2605    }
2606
2607  /* Emit a libcall to libgcc.  */
2608
2609  /* Mode of the 2nd argument must match that of an int. */
2610  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2611
2612  if (target == NULL_RTX)
2613    target = gen_reg_rtx (mode);
2614
2615  op0 = expand_expr (arg0, subtarget, mode, 0);
2616  if (GET_MODE (op0) != mode)
2617    op0 = convert_to_mode (mode, op0, 0);
2618  op1 = expand_expr (arg1, 0, mode2, 0);
2619  if (GET_MODE (op1) != mode2)
2620    op1 = convert_to_mode (mode2, op1, 0);
2621
2622  target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc,
2623				    target, LCT_CONST_MAKE_BLOCK, mode, 2,
2624				    op0, mode, op1, mode2);
2625
2626  return target;
2627}
2628
2629/* Expand expression EXP which is a call to the strlen builtin.  Return 0
2630   if we failed the caller should emit a normal call, otherwise
2631   try to get the result in TARGET, if convenient.  */
2632
2633static rtx
2634expand_builtin_strlen (tree arglist, rtx target,
2635		       enum machine_mode target_mode)
2636{
2637  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
2638    return 0;
2639  else
2640    {
2641      rtx pat;
2642      tree len, src = TREE_VALUE (arglist);
2643      rtx result, src_reg, char_rtx, before_strlen;
2644      enum machine_mode insn_mode = target_mode, char_mode;
2645      enum insn_code icode = CODE_FOR_nothing;
2646      int align;
2647
2648      /* If the length can be computed at compile-time, return it.  */
2649      len = c_strlen (src, 0);
2650      if (len)
2651	return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2652
2653      /* If the length can be computed at compile-time and is constant
2654	 integer, but there are side-effects in src, evaluate
2655	 src for side-effects, then return len.
2656	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2657	 can be optimized into: i++; x = 3;  */
2658      len = c_strlen (src, 1);
2659      if (len && TREE_CODE (len) == INTEGER_CST)
2660	{
2661	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2662	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2663	}
2664
2665      align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2666
2667      /* If SRC is not a pointer type, don't do this operation inline.  */
2668      if (align == 0)
2669	return 0;
2670
2671      /* Bail out if we can't compute strlen in the right mode.  */
2672      while (insn_mode != VOIDmode)
2673	{
2674	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
2675	  if (icode != CODE_FOR_nothing)
2676	    break;
2677
2678	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2679	}
2680      if (insn_mode == VOIDmode)
2681	return 0;
2682
2683      /* Make a place to write the result of the instruction.  */
2684      result = target;
2685      if (! (result != 0
2686	     && REG_P (result)
2687	     && GET_MODE (result) == insn_mode
2688	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2689	result = gen_reg_rtx (insn_mode);
2690
2691      /* Make a place to hold the source address.  We will not expand
2692	 the actual source until we are sure that the expansion will
2693	 not fail -- there are trees that cannot be expanded twice.  */
2694      src_reg = gen_reg_rtx (Pmode);
2695
2696      /* Mark the beginning of the strlen sequence so we can emit the
2697	 source operand later.  */
2698      before_strlen = get_last_insn ();
2699
2700      char_rtx = const0_rtx;
2701      char_mode = insn_data[(int) icode].operand[2].mode;
2702      if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
2703							    char_mode))
2704	char_rtx = copy_to_mode_reg (char_mode, char_rtx);
2705
2706      pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
2707			     char_rtx, GEN_INT (align));
2708      if (! pat)
2709	return 0;
2710      emit_insn (pat);
2711
2712      /* Now that we are assured of success, expand the source.  */
2713      start_sequence ();
2714      pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL);
2715      if (pat != src_reg)
2716	emit_move_insn (src_reg, pat);
2717      pat = get_insns ();
2718      end_sequence ();
2719
2720      if (before_strlen)
2721	emit_insn_after (pat, before_strlen);
2722      else
2723	emit_insn_before (pat, get_insns ());
2724
2725      /* Return the value in the proper mode for this function.  */
2726      if (GET_MODE (result) == target_mode)
2727	target = result;
2728      else if (target != 0)
2729	convert_move (target, result, 0);
2730      else
2731	target = convert_to_mode (target_mode, result, 0);
2732
2733      return target;
2734    }
2735}
2736
2737/* Expand a call to the strstr builtin.  Return 0 if we failed the
2738   caller should emit a normal call, otherwise try to get the result
2739   in TARGET, if convenient (and in mode MODE if that's convenient).  */
2740
2741static rtx
2742expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode)
2743{
2744  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2745    {
2746      tree result = fold_builtin_strstr (arglist, type);
2747      if (result)
2748	return expand_expr (result, target, mode, EXPAND_NORMAL);
2749    }
2750  return 0;
2751}
2752
2753/* Expand a call to the strchr builtin.  Return 0 if we failed the
2754   caller should emit a normal call, otherwise try to get the result
2755   in TARGET, if convenient (and in mode MODE if that's convenient).  */
2756
2757static rtx
2758expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2759{
2760  if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2761    {
2762      tree result = fold_builtin_strchr (arglist, type);
2763      if (result)
2764	return expand_expr (result, target, mode, EXPAND_NORMAL);
2765
2766      /* FIXME: Should use strchrM optab so that ports can optimize this.  */
2767    }
2768  return 0;
2769}
2770
2771/* Expand a call to the strrchr builtin.  Return 0 if we failed the
2772   caller should emit a normal call, otherwise try to get the result
2773   in TARGET, if convenient (and in mode MODE if that's convenient).  */
2774
2775static rtx
2776expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode)
2777{
2778  if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2779    {
2780      tree result = fold_builtin_strrchr (arglist, type);
2781      if (result)
2782	return expand_expr (result, target, mode, EXPAND_NORMAL);
2783    }
2784  return 0;
2785}
2786
2787/* Expand a call to the strpbrk builtin.  Return 0 if we failed the
2788   caller should emit a normal call, otherwise try to get the result
2789   in TARGET, if convenient (and in mode MODE if that's convenient).  */
2790
2791static rtx
2792expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode)
2793{
2794  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2795    {
2796      tree result = fold_builtin_strpbrk (arglist, type);
2797      if (result)
2798	return expand_expr (result, target, mode, EXPAND_NORMAL);
2799    }
2800  return 0;
2801}
2802
2803/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
2804   bytes from constant string DATA + OFFSET and return it as target
2805   constant.  */
2806
2807static rtx
2808builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2809			 enum machine_mode mode)
2810{
2811  const char *str = (const char *) data;
2812
2813  gcc_assert (offset >= 0
2814	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2815		  <= strlen (str) + 1));
2816
2817  return c_readstr (str + offset, mode);
2818}
2819
2820/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
2821   Return 0 if we failed, the caller should emit a normal call,
2822   otherwise try to get the result in TARGET, if convenient (and in
2823   mode MODE if that's convenient).  */
2824static rtx
2825expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode)
2826{
2827  tree fndecl = get_callee_fndecl (exp);
2828  tree arglist = TREE_OPERAND (exp, 1);
2829  if (!validate_arglist (arglist,
2830			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2831    return 0;
2832  else
2833    {
2834      tree dest = TREE_VALUE (arglist);
2835      tree src = TREE_VALUE (TREE_CHAIN (arglist));
2836      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2837      const char *src_str;
2838      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2839      unsigned int dest_align
2840	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2841      rtx dest_mem, src_mem, dest_addr, len_rtx;
2842      tree result = fold_builtin_memory_op (arglist, TREE_TYPE (TREE_TYPE (fndecl)),
2843					    false, /*endp=*/0);
2844
2845      if (result)
2846	{
2847	  while (TREE_CODE (result) == COMPOUND_EXPR)
2848	    {
2849	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
2850			   EXPAND_NORMAL);
2851	      result = TREE_OPERAND (result, 1);
2852	    }
2853	  return expand_expr (result, target, mode, EXPAND_NORMAL);
2854	}
2855
2856      /* If DEST is not a pointer type, call the normal function.  */
2857      if (dest_align == 0)
2858	return 0;
2859
2860      /* If either SRC is not a pointer type, don't do this
2861	 operation in-line.  */
2862      if (src_align == 0)
2863	return 0;
2864
2865      dest_mem = get_memory_rtx (dest, len);
2866      set_mem_align (dest_mem, dest_align);
2867      len_rtx = expand_normal (len);
2868      src_str = c_getstr (src);
2869
2870      /* If SRC is a string constant and block move would be done
2871	 by pieces, we can avoid loading the string from memory
2872	 and only stored the computed constants.  */
2873      if (src_str
2874	  && GET_CODE (len_rtx) == CONST_INT
2875	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2876	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2877				  (void *) src_str, dest_align))
2878	{
2879	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2880				      builtin_memcpy_read_str,
2881				      (void *) src_str, dest_align, 0);
2882	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2883	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
2884	  return dest_mem;
2885	}
2886
2887      src_mem = get_memory_rtx (src, len);
2888      set_mem_align (src_mem, src_align);
2889
2890      /* Copy word part most expediently.  */
2891      dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2892				   CALL_EXPR_TAILCALL (exp)
2893				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
2894
2895      if (dest_addr == 0)
2896	{
2897	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2898	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
2899	}
2900      return dest_addr;
2901    }
2902}
2903
2904/* Expand a call to the mempcpy builtin, with arguments in ARGLIST.
2905   Return 0 if we failed; the caller should emit a normal call,
2906   otherwise try to get the result in TARGET, if convenient (and in
2907   mode MODE if that's convenient).  If ENDP is 0 return the
2908   destination pointer, if ENDP is 1 return the end pointer ala
2909   mempcpy, and if ENDP is 2 return the end pointer minus one ala
2910   stpcpy.  */
2911
2912static rtx
2913expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode,
2914			int endp)
2915{
2916  if (!validate_arglist (arglist,
2917			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2918    return 0;
2919  /* If return value is ignored, transform mempcpy into memcpy.  */
2920  else if (target == const0_rtx)
2921    {
2922      tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
2923
2924      if (!fn)
2925	return 0;
2926
2927      return expand_expr (build_function_call_expr (fn, arglist),
2928			  target, mode, EXPAND_NORMAL);
2929    }
2930  else
2931    {
2932      tree dest = TREE_VALUE (arglist);
2933      tree src = TREE_VALUE (TREE_CHAIN (arglist));
2934      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2935      const char *src_str;
2936      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
2937      unsigned int dest_align
2938	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2939      rtx dest_mem, src_mem, len_rtx;
2940      tree result = fold_builtin_memory_op (arglist, type, false, endp);
2941
2942      if (result)
2943	{
2944	  while (TREE_CODE (result) == COMPOUND_EXPR)
2945	    {
2946	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
2947			   EXPAND_NORMAL);
2948	      result = TREE_OPERAND (result, 1);
2949	    }
2950	  return expand_expr (result, target, mode, EXPAND_NORMAL);
2951	}
2952
2953      /* If either SRC or DEST is not a pointer type, don't do this
2954	 operation in-line.  */
2955      if (dest_align == 0 || src_align == 0)
2956	return 0;
2957
2958      /* If LEN is not constant, call the normal function.  */
2959      if (! host_integerp (len, 1))
2960	return 0;
2961
2962      len_rtx = expand_normal (len);
2963      src_str = c_getstr (src);
2964
2965      /* If SRC is a string constant and block move would be done
2966	 by pieces, we can avoid loading the string from memory
2967	 and only stored the computed constants.  */
2968      if (src_str
2969	  && GET_CODE (len_rtx) == CONST_INT
2970	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2971	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2972				  (void *) src_str, dest_align))
2973	{
2974	  dest_mem = get_memory_rtx (dest, len);
2975	  set_mem_align (dest_mem, dest_align);
2976	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2977				      builtin_memcpy_read_str,
2978				      (void *) src_str, dest_align, endp);
2979	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2980	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
2981	  return dest_mem;
2982	}
2983
2984      if (GET_CODE (len_rtx) == CONST_INT
2985	  && can_move_by_pieces (INTVAL (len_rtx),
2986				 MIN (dest_align, src_align)))
2987	{
2988	  dest_mem = get_memory_rtx (dest, len);
2989	  set_mem_align (dest_mem, dest_align);
2990	  src_mem = get_memory_rtx (src, len);
2991	  set_mem_align (src_mem, src_align);
2992	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
2993				     MIN (dest_align, src_align), endp);
2994	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2995	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
2996	  return dest_mem;
2997	}
2998
2999      return 0;
3000    }
3001}
3002
3003/* Expand expression EXP, which is a call to the memmove builtin.  Return 0
3004   if we failed; the caller should emit a normal call.  */
3005
3006static rtx
3007expand_builtin_memmove (tree arglist, tree type, rtx target,
3008			enum machine_mode mode, tree orig_exp)
3009{
3010  if (!validate_arglist (arglist,
3011			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3012    return 0;
3013  else
3014    {
3015      tree dest = TREE_VALUE (arglist);
3016      tree src = TREE_VALUE (TREE_CHAIN (arglist));
3017      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3018
3019      unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
3020      unsigned int dest_align
3021	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3022      tree result = fold_builtin_memory_op (arglist, type, false, /*endp=*/3);
3023
3024      if (result)
3025	{
3026	  while (TREE_CODE (result) == COMPOUND_EXPR)
3027	    {
3028	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3029			   EXPAND_NORMAL);
3030	      result = TREE_OPERAND (result, 1);
3031	    }
3032	  return expand_expr (result, target, mode, EXPAND_NORMAL);
3033	}
3034
3035      /* If DEST is not a pointer type, call the normal function.  */
3036      if (dest_align == 0)
3037	return 0;
3038
3039      /* If either SRC is not a pointer type, don't do this
3040	 operation in-line.  */
3041      if (src_align == 0)
3042	return 0;
3043
3044      /* If src is categorized for a readonly section we can use
3045	 normal memcpy.  */
3046      if (readonly_data_expr (src))
3047	{
3048	  tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY];
3049	  if (!fn)
3050	    return 0;
3051	  fn = build_function_call_expr (fn, arglist);
3052	  if (TREE_CODE (fn) == CALL_EXPR)
3053	    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3054	  return expand_expr (fn, target, mode, EXPAND_NORMAL);
3055	}
3056
3057      /* If length is 1 and we can expand memcpy call inline,
3058	 it is ok to use memcpy as well.  */
3059      if (integer_onep (len))
3060	{
3061	  rtx ret = expand_builtin_mempcpy (arglist, type, target, mode,
3062					    /*endp=*/0);
3063	  if (ret)
3064	    return ret;
3065	}
3066
3067      /* Otherwise, call the normal function.  */
3068      return 0;
3069   }
3070}
3071
3072/* Expand expression EXP, which is a call to the bcopy builtin.  Return 0
3073   if we failed the caller should emit a normal call.  */
3074
3075static rtx
3076expand_builtin_bcopy (tree exp)
3077{
3078  tree arglist = TREE_OPERAND (exp, 1);
3079  tree type = TREE_TYPE (exp);
3080  tree src, dest, size, newarglist;
3081
3082  if (!validate_arglist (arglist,
3083			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3084    return NULL_RTX;
3085
3086  src = TREE_VALUE (arglist);
3087  dest = TREE_VALUE (TREE_CHAIN (arglist));
3088  size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3089
3090  /* New argument list transforming bcopy(ptr x, ptr y, int z) to
3091     memmove(ptr y, ptr x, size_t z).   This is done this way
3092     so that if it isn't expanded inline, we fallback to
3093     calling bcopy instead of memmove.  */
3094
3095  newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3096  newarglist = tree_cons (NULL_TREE, src, newarglist);
3097  newarglist = tree_cons (NULL_TREE, dest, newarglist);
3098
3099  return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp);
3100}
3101
3102#ifndef HAVE_movstr
3103# define HAVE_movstr 0
3104# define CODE_FOR_movstr CODE_FOR_nothing
3105#endif
3106
3107/* Expand into a movstr instruction, if one is available.  Return 0 if
3108   we failed, the caller should emit a normal call, otherwise try to
3109   get the result in TARGET, if convenient.  If ENDP is 0 return the
3110   destination pointer, if ENDP is 1 return the end pointer ala
3111   mempcpy, and if ENDP is 2 return the end pointer minus one ala
3112   stpcpy.  */
3113
3114static rtx
3115expand_movstr (tree dest, tree src, rtx target, int endp)
3116{
3117  rtx end;
3118  rtx dest_mem;
3119  rtx src_mem;
3120  rtx insn;
3121  const struct insn_data * data;
3122
3123  if (!HAVE_movstr)
3124    return 0;
3125
3126  dest_mem = get_memory_rtx (dest, NULL);
3127  src_mem = get_memory_rtx (src, NULL);
3128  if (!endp)
3129    {
3130      target = force_reg (Pmode, XEXP (dest_mem, 0));
3131      dest_mem = replace_equiv_address (dest_mem, target);
3132      end = gen_reg_rtx (Pmode);
3133    }
3134  else
3135    {
3136      if (target == 0 || target == const0_rtx)
3137	{
3138	  end = gen_reg_rtx (Pmode);
3139	  if (target == 0)
3140	    target = end;
3141	}
3142      else
3143	end = target;
3144    }
3145
3146  data = insn_data + CODE_FOR_movstr;
3147
3148  if (data->operand[0].mode != VOIDmode)
3149    end = gen_lowpart (data->operand[0].mode, end);
3150
3151  insn = data->genfun (end, dest_mem, src_mem);
3152
3153  gcc_assert (insn);
3154
3155  emit_insn (insn);
3156
3157  /* movstr is supposed to set end to the address of the NUL
3158     terminator.  If the caller requested a mempcpy-like return value,
3159     adjust it.  */
3160  if (endp == 1 && target != const0_rtx)
3161    {
3162      rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1);
3163      emit_move_insn (target, force_operand (tem, NULL_RTX));
3164    }
3165
3166  return target;
3167}
3168
3169/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
3170   if we failed the caller should emit a normal call, otherwise try to get
3171   the result in TARGET, if convenient (and in mode MODE if that's
3172   convenient).  */
3173
3174static rtx
3175expand_builtin_strcpy (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
3176{
3177  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3178    {
3179      tree result = fold_builtin_strcpy (fndecl, arglist, 0);
3180      if (result)
3181	{
3182	  while (TREE_CODE (result) == COMPOUND_EXPR)
3183	    {
3184	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3185			   EXPAND_NORMAL);
3186	      result = TREE_OPERAND (result, 1);
3187	    }
3188	  return expand_expr (result, target, mode, EXPAND_NORMAL);
3189	}
3190
3191      return expand_movstr (TREE_VALUE (arglist),
3192			    TREE_VALUE (TREE_CHAIN (arglist)),
3193			    target, /*endp=*/0);
3194    }
3195  return 0;
3196}
3197
3198/* Expand a call to the stpcpy builtin, with arguments in ARGLIST.
3199   Return 0 if we failed the caller should emit a normal call,
3200   otherwise try to get the result in TARGET, if convenient (and in
3201   mode MODE if that's convenient).  */
3202
3203static rtx
3204expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3205{
3206  tree arglist = TREE_OPERAND (exp, 1);
3207  /* If return value is ignored, transform stpcpy into strcpy.  */
3208  if (target == const0_rtx)
3209    {
3210      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
3211      if (!fn)
3212	return 0;
3213
3214      return expand_expr (build_function_call_expr (fn, arglist),
3215			  target, mode, EXPAND_NORMAL);
3216    }
3217
3218  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3219    return 0;
3220  else
3221    {
3222      tree dst, src, len, lenp1;
3223      tree narglist;
3224      rtx ret;
3225
3226      /* Ensure we get an actual string whose length can be evaluated at
3227	 compile-time, not an expression containing a string.  This is
3228	 because the latter will potentially produce pessimized code
3229	 when used to produce the return value.  */
3230      src = TREE_VALUE (TREE_CHAIN (arglist));
3231      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3232	return expand_movstr (TREE_VALUE (arglist),
3233			      TREE_VALUE (TREE_CHAIN (arglist)),
3234			      target, /*endp=*/2);
3235
3236      dst = TREE_VALUE (arglist);
3237      lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1));
3238      narglist = build_tree_list (NULL_TREE, lenp1);
3239      narglist = tree_cons (NULL_TREE, src, narglist);
3240      narglist = tree_cons (NULL_TREE, dst, narglist);
3241      ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp),
3242				    target, mode, /*endp=*/2);
3243
3244      if (ret)
3245	return ret;
3246
3247      if (TREE_CODE (len) == INTEGER_CST)
3248	{
3249	  rtx len_rtx = expand_normal (len);
3250
3251	  if (GET_CODE (len_rtx) == CONST_INT)
3252	    {
3253	      ret = expand_builtin_strcpy (get_callee_fndecl (exp),
3254					   arglist, target, mode);
3255
3256	      if (ret)
3257		{
3258		  if (! target)
3259		    {
3260		      if (mode != VOIDmode)
3261			target = gen_reg_rtx (mode);
3262		      else
3263			target = gen_reg_rtx (GET_MODE (ret));
3264		    }
3265		  if (GET_MODE (target) != GET_MODE (ret))
3266		    ret = gen_lowpart (GET_MODE (target), ret);
3267
3268		  ret = plus_constant (ret, INTVAL (len_rtx));
3269		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3270		  gcc_assert (ret);
3271
3272		  return target;
3273		}
3274	    }
3275	}
3276
3277      return expand_movstr (TREE_VALUE (arglist),
3278			    TREE_VALUE (TREE_CHAIN (arglist)),
3279			    target, /*endp=*/2);
3280    }
3281}
3282
3283/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3284   bytes from constant string DATA + OFFSET and return it as target
3285   constant.  */
3286
3287static rtx
3288builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3289			  enum machine_mode mode)
3290{
3291  const char *str = (const char *) data;
3292
3293  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3294    return const0_rtx;
3295
3296  return c_readstr (str + offset, mode);
3297}
3298
3299/* Expand expression EXP, which is a call to the strncpy builtin.  Return 0
3300   if we failed the caller should emit a normal call.  */
3301
3302static rtx
3303expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode)
3304{
3305  tree fndecl = get_callee_fndecl (exp);
3306  tree arglist = TREE_OPERAND (exp, 1);
3307  if (validate_arglist (arglist,
3308			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3309    {
3310      tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)), 1);
3311      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3312      tree result = fold_builtin_strncpy (fndecl, arglist, slen);
3313
3314      if (result)
3315	{
3316	  while (TREE_CODE (result) == COMPOUND_EXPR)
3317	    {
3318	      expand_expr (TREE_OPERAND (result, 0), const0_rtx, VOIDmode,
3319			   EXPAND_NORMAL);
3320	      result = TREE_OPERAND (result, 1);
3321	    }
3322	  return expand_expr (result, target, mode, EXPAND_NORMAL);
3323	}
3324
3325      /* We must be passed a constant len and src parameter.  */
3326      if (!host_integerp (len, 1) || !slen || !host_integerp (slen, 1))
3327	return 0;
3328
3329      slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
3330
3331      /* We're required to pad with trailing zeros if the requested
3332	 len is greater than strlen(s2)+1.  In that case try to
3333	 use store_by_pieces, if it fails, punt.  */
3334      if (tree_int_cst_lt (slen, len))
3335	{
3336	  tree dest = TREE_VALUE (arglist);
3337	  unsigned int dest_align
3338	    = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3339	  const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
3340	  rtx dest_mem;
3341
3342	  if (!p || dest_align == 0 || !host_integerp (len, 1)
3343	      || !can_store_by_pieces (tree_low_cst (len, 1),
3344				       builtin_strncpy_read_str,
3345				       (void *) p, dest_align))
3346	    return 0;
3347
3348	  dest_mem = get_memory_rtx (dest, len);
3349	  store_by_pieces (dest_mem, tree_low_cst (len, 1),
3350			   builtin_strncpy_read_str,
3351			   (void *) p, dest_align, 0);
3352	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3353	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3354	  return dest_mem;
3355	}
3356    }
3357  return 0;
3358}
3359
3360/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
3361   bytes from constant string DATA + OFFSET and return it as target
3362   constant.  */
3363
3364static rtx
3365builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3366			 enum machine_mode mode)
3367{
3368  const char *c = (const char *) data;
3369  char *p = alloca (GET_MODE_SIZE (mode));
3370
3371  memset (p, *c, GET_MODE_SIZE (mode));
3372
3373  return c_readstr (p, mode);
3374}
3375
3376/* Callback routine for store_by_pieces.  Return the RTL of a register
3377   containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3378   char value given in the RTL register data.  For example, if mode is
3379   4 bytes wide, return the RTL for 0x01010101*data.  */
3380
3381static rtx
3382builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3383			enum machine_mode mode)
3384{
3385  rtx target, coeff;
3386  size_t size;
3387  char *p;
3388
3389  size = GET_MODE_SIZE (mode);
3390  if (size == 1)
3391    return (rtx) data;
3392
3393  p = alloca (size);
3394  memset (p, 1, size);
3395  coeff = c_readstr (p, mode);
3396
3397  target = convert_to_mode (mode, (rtx) data, 1);
3398  target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3399  return force_reg (mode, target);
3400}
3401
3402/* Expand expression EXP, which is a call to the memset builtin.  Return 0
3403   if we failed the caller should emit a normal call, otherwise try to get
3404   the result in TARGET, if convenient (and in mode MODE if that's
3405   convenient).  */
3406
3407static rtx
3408expand_builtin_memset (tree arglist, rtx target, enum machine_mode mode,
3409		       tree orig_exp)
3410{
3411  if (!validate_arglist (arglist,
3412			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3413    return 0;
3414  else
3415    {
3416      tree dest = TREE_VALUE (arglist);
3417      tree val = TREE_VALUE (TREE_CHAIN (arglist));
3418      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3419      tree fndecl, fn;
3420      enum built_in_function fcode;
3421      char c;
3422      unsigned int dest_align;
3423      rtx dest_mem, dest_addr, len_rtx;
3424
3425      dest_align = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
3426
3427      /* If DEST is not a pointer type, don't do this
3428	 operation in-line.  */
3429      if (dest_align == 0)
3430	return 0;
3431
3432      /* If the LEN parameter is zero, return DEST.  */
3433      if (integer_zerop (len))
3434	{
3435	  /* Evaluate and ignore VAL in case it has side-effects.  */
3436	  expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3437	  return expand_expr (dest, target, mode, EXPAND_NORMAL);
3438	}
3439
3440      /* Stabilize the arguments in case we fail.  */
3441      dest = builtin_save_expr (dest);
3442      val = builtin_save_expr (val);
3443      len = builtin_save_expr (len);
3444
3445      len_rtx = expand_normal (len);
3446      dest_mem = get_memory_rtx (dest, len);
3447
3448      if (TREE_CODE (val) != INTEGER_CST)
3449	{
3450	  rtx val_rtx;
3451
3452	  val_rtx = expand_normal (val);
3453	  val_rtx = convert_to_mode (TYPE_MODE (unsigned_char_type_node),
3454				     val_rtx, 0);
3455
3456	  /* Assume that we can memset by pieces if we can store the
3457	   * the coefficients by pieces (in the required modes).
3458	   * We can't pass builtin_memset_gen_str as that emits RTL.  */
3459	  c = 1;
3460	  if (host_integerp (len, 1)
3461	      && !(optimize_size && tree_low_cst (len, 1) > 1)
3462	      && can_store_by_pieces (tree_low_cst (len, 1),
3463				      builtin_memset_read_str, &c, dest_align))
3464	    {
3465	      val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
3466				   val_rtx);
3467	      store_by_pieces (dest_mem, tree_low_cst (len, 1),
3468			       builtin_memset_gen_str, val_rtx, dest_align, 0);
3469	    }
3470	  else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3471					    dest_align))
3472	    goto do_libcall;
3473
3474	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3475	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3476	  return dest_mem;
3477	}
3478
3479      if (target_char_cast (val, &c))
3480	goto do_libcall;
3481
3482      if (c)
3483	{
3484	  if (host_integerp (len, 1)
3485	      && !(optimize_size && tree_low_cst (len, 1) > 1)
3486	      && can_store_by_pieces (tree_low_cst (len, 1),
3487				      builtin_memset_read_str, &c, dest_align))
3488	    store_by_pieces (dest_mem, tree_low_cst (len, 1),
3489			     builtin_memset_read_str, &c, dest_align, 0);
3490	  else if (!set_storage_via_setmem (dest_mem, len_rtx, GEN_INT (c),
3491					    dest_align))
3492	    goto do_libcall;
3493
3494	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3495	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3496	  return dest_mem;
3497	}
3498
3499      set_mem_align (dest_mem, dest_align);
3500      dest_addr = clear_storage (dest_mem, len_rtx,
3501				 CALL_EXPR_TAILCALL (orig_exp)
3502				 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL);
3503
3504      if (dest_addr == 0)
3505	{
3506	  dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3507	  dest_addr = convert_memory_address (ptr_mode, dest_addr);
3508	}
3509
3510      return dest_addr;
3511
3512    do_libcall:
3513      fndecl = get_callee_fndecl (orig_exp);
3514      fcode = DECL_FUNCTION_CODE (fndecl);
3515      gcc_assert (fcode == BUILT_IN_MEMSET || fcode == BUILT_IN_BZERO);
3516      arglist = build_tree_list (NULL_TREE, len);
3517      if (fcode == BUILT_IN_MEMSET)
3518	arglist = tree_cons (NULL_TREE, val, arglist);
3519      arglist = tree_cons (NULL_TREE, dest, arglist);
3520      fn = build_function_call_expr (fndecl, arglist);
3521      if (TREE_CODE (fn) == CALL_EXPR)
3522	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3523      return expand_call (fn, target, target == const0_rtx);
3524    }
3525}
3526
3527/* Expand expression EXP, which is a call to the bzero builtin.  Return 0
3528   if we failed the caller should emit a normal call.  */
3529
3530static rtx
3531expand_builtin_bzero (tree exp)
3532{
3533  tree arglist = TREE_OPERAND (exp, 1);
3534  tree dest, size, newarglist;
3535
3536  if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3537    return NULL_RTX;
3538
3539  dest = TREE_VALUE (arglist);
3540  size = TREE_VALUE (TREE_CHAIN (arglist));
3541
3542  /* New argument list transforming bzero(ptr x, int y) to
3543     memset(ptr x, int 0, size_t y).   This is done this way
3544     so that if it isn't expanded inline, we fallback to
3545     calling bzero instead of memset.  */
3546
3547  newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size));
3548  newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
3549  newarglist = tree_cons (NULL_TREE, dest, newarglist);
3550
3551  return expand_builtin_memset (newarglist, const0_rtx, VOIDmode, exp);
3552}
3553
3554/* Expand expression EXP, which is a call to the memcmp built-in function.
3555   ARGLIST is the argument list for this call.  Return 0 if we failed and the
3556   caller should emit a normal call, otherwise try to get the result in
3557   TARGET, if convenient (and in mode MODE, if that's convenient).  */
3558
3559static rtx
3560expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
3561		       enum machine_mode mode)
3562{
3563  if (!validate_arglist (arglist,
3564			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3565    return 0;
3566  else
3567    {
3568      tree result = fold_builtin_memcmp (arglist);
3569      if (result)
3570	return expand_expr (result, target, mode, EXPAND_NORMAL);
3571    }
3572
3573#if defined HAVE_cmpmemsi || defined HAVE_cmpstrnsi
3574  {
3575    tree arg1 = TREE_VALUE (arglist);
3576    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3577    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3578    rtx arg1_rtx, arg2_rtx, arg3_rtx;
3579    rtx result;
3580    rtx insn;
3581
3582    int arg1_align
3583      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3584    int arg2_align
3585      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3586    enum machine_mode insn_mode;
3587
3588#ifdef HAVE_cmpmemsi
3589    if (HAVE_cmpmemsi)
3590      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3591    else
3592#endif
3593#ifdef HAVE_cmpstrnsi
3594    if (HAVE_cmpstrnsi)
3595      insn_mode = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3596    else
3597#endif
3598      return 0;
3599
3600    /* If we don't have POINTER_TYPE, call the function.  */
3601    if (arg1_align == 0 || arg2_align == 0)
3602      return 0;
3603
3604    /* Make a place to write the result of the instruction.  */
3605    result = target;
3606    if (! (result != 0
3607	   && REG_P (result) && GET_MODE (result) == insn_mode
3608	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3609      result = gen_reg_rtx (insn_mode);
3610
3611    arg1_rtx = get_memory_rtx (arg1, len);
3612    arg2_rtx = get_memory_rtx (arg2, len);
3613    arg3_rtx = expand_normal (len);
3614
3615    /* Set MEM_SIZE as appropriate.  */
3616    if (GET_CODE (arg3_rtx) == CONST_INT)
3617      {
3618	set_mem_size (arg1_rtx, arg3_rtx);
3619	set_mem_size (arg2_rtx, arg3_rtx);
3620      }
3621
3622#ifdef HAVE_cmpmemsi
3623    if (HAVE_cmpmemsi)
3624      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3625			   GEN_INT (MIN (arg1_align, arg2_align)));
3626    else
3627#endif
3628#ifdef HAVE_cmpstrnsi
3629    if (HAVE_cmpstrnsi)
3630      insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3631			    GEN_INT (MIN (arg1_align, arg2_align)));
3632    else
3633#endif
3634      gcc_unreachable ();
3635
3636    if (insn)
3637      emit_insn (insn);
3638    else
3639      emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
3640			       TYPE_MODE (integer_type_node), 3,
3641			       XEXP (arg1_rtx, 0), Pmode,
3642			       XEXP (arg2_rtx, 0), Pmode,
3643			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3644						TYPE_UNSIGNED (sizetype)),
3645			       TYPE_MODE (sizetype));
3646
3647    /* Return the value in the proper mode for this function.  */
3648    mode = TYPE_MODE (TREE_TYPE (exp));
3649    if (GET_MODE (result) == mode)
3650      return result;
3651    else if (target != 0)
3652      {
3653	convert_move (target, result, 0);
3654	return target;
3655      }
3656    else
3657      return convert_to_mode (mode, result, 0);
3658  }
3659#endif
3660
3661  return 0;
3662}
3663
3664/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
3665   if we failed the caller should emit a normal call, otherwise try to get
3666   the result in TARGET, if convenient.  */
3667
3668static rtx
3669expand_builtin_strcmp (tree exp, rtx target, enum machine_mode mode)
3670{
3671  tree arglist = TREE_OPERAND (exp, 1);
3672
3673  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3674    return 0;
3675  else
3676    {
3677      tree result = fold_builtin_strcmp (arglist);
3678      if (result)
3679	return expand_expr (result, target, mode, EXPAND_NORMAL);
3680    }
3681
3682#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3683  if (cmpstr_optab[SImode] != CODE_FOR_nothing
3684      || cmpstrn_optab[SImode] != CODE_FOR_nothing)
3685    {
3686      rtx arg1_rtx, arg2_rtx;
3687      rtx result, insn = NULL_RTX;
3688      tree fndecl, fn;
3689
3690      tree arg1 = TREE_VALUE (arglist);
3691      tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3692      int arg1_align
3693	= get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3694      int arg2_align
3695	= get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3696
3697      /* If we don't have POINTER_TYPE, call the function.  */
3698      if (arg1_align == 0 || arg2_align == 0)
3699	return 0;
3700
3701      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
3702      arg1 = builtin_save_expr (arg1);
3703      arg2 = builtin_save_expr (arg2);
3704
3705      arg1_rtx = get_memory_rtx (arg1, NULL);
3706      arg2_rtx = get_memory_rtx (arg2, NULL);
3707
3708#ifdef HAVE_cmpstrsi
3709      /* Try to call cmpstrsi.  */
3710      if (HAVE_cmpstrsi)
3711	{
3712	  enum machine_mode insn_mode
3713	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3714
3715	  /* Make a place to write the result of the instruction.  */
3716	  result = target;
3717	  if (! (result != 0
3718		 && REG_P (result) && GET_MODE (result) == insn_mode
3719		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3720	    result = gen_reg_rtx (insn_mode);
3721
3722	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3723			       GEN_INT (MIN (arg1_align, arg2_align)));
3724	}
3725#endif
3726#ifdef HAVE_cmpstrnsi
3727      /* Try to determine at least one length and call cmpstrnsi.  */
3728      if (!insn && HAVE_cmpstrnsi)
3729	{
3730	  tree len;
3731	  rtx arg3_rtx;
3732
3733	  enum machine_mode insn_mode
3734	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3735	  tree len1 = c_strlen (arg1, 1);
3736	  tree len2 = c_strlen (arg2, 1);
3737
3738	  if (len1)
3739	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3740	  if (len2)
3741	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3742
3743	  /* If we don't have a constant length for the first, use the length
3744	     of the second, if we know it.  We don't require a constant for
3745	     this case; some cost analysis could be done if both are available
3746	     but neither is constant.  For now, assume they're equally cheap,
3747	     unless one has side effects.  If both strings have constant lengths,
3748	     use the smaller.  */
3749
3750	  if (!len1)
3751	    len = len2;
3752	  else if (!len2)
3753	    len = len1;
3754	  else if (TREE_SIDE_EFFECTS (len1))
3755	    len = len2;
3756	  else if (TREE_SIDE_EFFECTS (len2))
3757	    len = len1;
3758	  else if (TREE_CODE (len1) != INTEGER_CST)
3759	    len = len2;
3760	  else if (TREE_CODE (len2) != INTEGER_CST)
3761	    len = len1;
3762	  else if (tree_int_cst_lt (len1, len2))
3763	    len = len1;
3764	  else
3765	    len = len2;
3766
3767	  /* If both arguments have side effects, we cannot optimize.  */
3768	  if (!len || TREE_SIDE_EFFECTS (len))
3769	    goto do_libcall;
3770
3771	  arg3_rtx = expand_normal (len);
3772
3773	  /* Make a place to write the result of the instruction.  */
3774	  result = target;
3775	  if (! (result != 0
3776		 && REG_P (result) && GET_MODE (result) == insn_mode
3777		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3778	    result = gen_reg_rtx (insn_mode);
3779
3780	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3781				GEN_INT (MIN (arg1_align, arg2_align)));
3782	}
3783#endif
3784
3785      if (insn)
3786	{
3787	  emit_insn (insn);
3788
3789	  /* Return the value in the proper mode for this function.  */
3790	  mode = TYPE_MODE (TREE_TYPE (exp));
3791	  if (GET_MODE (result) == mode)
3792	    return result;
3793	  if (target == 0)
3794	    return convert_to_mode (mode, result, 0);
3795	  convert_move (target, result, 0);
3796	  return target;
3797	}
3798
3799      /* Expand the library call ourselves using a stabilized argument
3800	 list to avoid re-evaluating the function's arguments twice.  */
3801#ifdef HAVE_cmpstrnsi
3802    do_libcall:
3803#endif
3804      arglist = build_tree_list (NULL_TREE, arg2);
3805      arglist = tree_cons (NULL_TREE, arg1, arglist);
3806      fndecl = get_callee_fndecl (exp);
3807      fn = build_function_call_expr (fndecl, arglist);
3808      if (TREE_CODE (fn) == CALL_EXPR)
3809	CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3810      return expand_call (fn, target, target == const0_rtx);
3811    }
3812#endif
3813  return 0;
3814}
3815
3816/* Expand expression EXP, which is a call to the strncmp builtin.  Return 0
3817   if we failed the caller should emit a normal call, otherwise try to get
3818   the result in TARGET, if convenient.  */
3819
3820static rtx
3821expand_builtin_strncmp (tree exp, rtx target, enum machine_mode mode)
3822{
3823  tree arglist = TREE_OPERAND (exp, 1);
3824
3825  if (!validate_arglist (arglist,
3826			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827    return 0;
3828  else
3829    {
3830      tree result = fold_builtin_strncmp (arglist);
3831      if (result)
3832	return expand_expr (result, target, mode, EXPAND_NORMAL);
3833    }
3834
3835  /* If c_strlen can determine an expression for one of the string
3836     lengths, and it doesn't have side effects, then emit cmpstrnsi
3837     using length MIN(strlen(string)+1, arg3).  */
3838#ifdef HAVE_cmpstrnsi
3839  if (HAVE_cmpstrnsi)
3840  {
3841    tree arg1 = TREE_VALUE (arglist);
3842    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
3843    tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
3844    tree len, len1, len2;
3845    rtx arg1_rtx, arg2_rtx, arg3_rtx;
3846    rtx result, insn;
3847    tree fndecl, fn;
3848
3849    int arg1_align
3850      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3851    int arg2_align
3852      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
3853    enum machine_mode insn_mode
3854      = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3855
3856    len1 = c_strlen (arg1, 1);
3857    len2 = c_strlen (arg2, 1);
3858
3859    if (len1)
3860      len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3861    if (len2)
3862      len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3863
3864    /* If we don't have a constant length for the first, use the length
3865       of the second, if we know it.  We don't require a constant for
3866       this case; some cost analysis could be done if both are available
3867       but neither is constant.  For now, assume they're equally cheap,
3868       unless one has side effects.  If both strings have constant lengths,
3869       use the smaller.  */
3870
3871    if (!len1)
3872      len = len2;
3873    else if (!len2)
3874      len = len1;
3875    else if (TREE_SIDE_EFFECTS (len1))
3876      len = len2;
3877    else if (TREE_SIDE_EFFECTS (len2))
3878      len = len1;
3879    else if (TREE_CODE (len1) != INTEGER_CST)
3880      len = len2;
3881    else if (TREE_CODE (len2) != INTEGER_CST)
3882      len = len1;
3883    else if (tree_int_cst_lt (len1, len2))
3884      len = len1;
3885    else
3886      len = len2;
3887
3888    /* If both arguments have side effects, we cannot optimize.  */
3889    if (!len || TREE_SIDE_EFFECTS (len))
3890      return 0;
3891
3892    /* The actual new length parameter is MIN(len,arg3).  */
3893    len = fold_build2 (MIN_EXPR, TREE_TYPE (len), len,
3894		       fold_convert (TREE_TYPE (len), arg3));
3895
3896    /* If we don't have POINTER_TYPE, call the function.  */
3897    if (arg1_align == 0 || arg2_align == 0)
3898      return 0;
3899
3900    /* Make a place to write the result of the instruction.  */
3901    result = target;
3902    if (! (result != 0
3903	   && REG_P (result) && GET_MODE (result) == insn_mode
3904	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3905      result = gen_reg_rtx (insn_mode);
3906
3907    /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
3908    arg1 = builtin_save_expr (arg1);
3909    arg2 = builtin_save_expr (arg2);
3910    len = builtin_save_expr (len);
3911
3912    arg1_rtx = get_memory_rtx (arg1, len);
3913    arg2_rtx = get_memory_rtx (arg2, len);
3914    arg3_rtx = expand_normal (len);
3915    insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3916			  GEN_INT (MIN (arg1_align, arg2_align)));
3917    if (insn)
3918      {
3919	emit_insn (insn);
3920
3921	/* Return the value in the proper mode for this function.  */
3922	mode = TYPE_MODE (TREE_TYPE (exp));
3923	if (GET_MODE (result) == mode)
3924	  return result;
3925	if (target == 0)
3926	  return convert_to_mode (mode, result, 0);
3927	convert_move (target, result, 0);
3928	return target;
3929      }
3930
3931    /* Expand the library call ourselves using a stabilized argument
3932       list to avoid re-evaluating the function's arguments twice.  */
3933    arglist = build_tree_list (NULL_TREE, len);
3934    arglist = tree_cons (NULL_TREE, arg2, arglist);
3935    arglist = tree_cons (NULL_TREE, arg1, arglist);
3936    fndecl = get_callee_fndecl (exp);
3937    fn = build_function_call_expr (fndecl, arglist);
3938    if (TREE_CODE (fn) == CALL_EXPR)
3939      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3940    return expand_call (fn, target, target == const0_rtx);
3941  }
3942#endif
3943  return 0;
3944}
3945
3946/* Expand expression EXP, which is a call to the strcat builtin.
3947   Return 0 if we failed the caller should emit a normal call,
3948   otherwise try to get the result in TARGET, if convenient.  */
3949
3950static rtx
3951expand_builtin_strcat (tree fndecl, tree arglist, rtx target, enum machine_mode mode)
3952{
3953  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3954    return 0;
3955  else
3956    {
3957      tree dst = TREE_VALUE (arglist),
3958      src = TREE_VALUE (TREE_CHAIN (arglist));
3959      const char *p = c_getstr (src);
3960
3961      /* If the string length is zero, return the dst parameter.  */
3962      if (p && *p == '\0')
3963	return expand_expr (dst, target, mode, EXPAND_NORMAL);
3964
3965      if (!optimize_size)
3966	{
3967	  /* See if we can store by pieces into (dst + strlen(dst)).  */
3968	  tree newsrc, newdst,
3969	    strlen_fn = implicit_built_in_decls[BUILT_IN_STRLEN];
3970	  rtx insns;
3971
3972	  /* Stabilize the argument list.  */
3973	  newsrc = builtin_save_expr (src);
3974	  if (newsrc != src)
3975	    arglist = build_tree_list (NULL_TREE, newsrc);
3976	  else
3977	    arglist = TREE_CHAIN (arglist); /* Reusing arglist if safe.  */
3978
3979	  dst = builtin_save_expr (dst);
3980
3981	  start_sequence ();
3982
3983	  /* Create strlen (dst).  */
3984	  newdst =
3985	    build_function_call_expr (strlen_fn,
3986				      build_tree_list (NULL_TREE, dst));
3987	  /* Create (dst + (cast) strlen (dst)).  */
3988	  newdst = fold_convert (TREE_TYPE (dst), newdst);
3989	  newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
3990
3991	  newdst = builtin_save_expr (newdst);
3992	  arglist = tree_cons (NULL_TREE, newdst, arglist);
3993
3994	  if (!expand_builtin_strcpy (fndecl, arglist, target, mode))
3995	    {
3996	      end_sequence (); /* Stop sequence.  */
3997	      return 0;
3998	    }
3999
4000	  /* Output the entire sequence.  */
4001	  insns = get_insns ();
4002	  end_sequence ();
4003	  emit_insn (insns);
4004
4005	  return expand_expr (dst, target, mode, EXPAND_NORMAL);
4006	}
4007
4008      return 0;
4009    }
4010}
4011
4012/* Expand expression EXP, which is a call to the strncat builtin.
4013   Return 0 if we failed the caller should emit a normal call,
4014   otherwise try to get the result in TARGET, if convenient.  */
4015
4016static rtx
4017expand_builtin_strncat (tree arglist, rtx target, enum machine_mode mode)
4018{
4019  if (validate_arglist (arglist,
4020			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4021    {
4022      tree result = fold_builtin_strncat (arglist);
4023      if (result)
4024	return expand_expr (result, target, mode, EXPAND_NORMAL);
4025    }
4026  return 0;
4027}
4028
4029/* Expand expression EXP, which is a call to the strspn builtin.
4030   Return 0 if we failed the caller should emit a normal call,
4031   otherwise try to get the result in TARGET, if convenient.  */
4032
4033static rtx
4034expand_builtin_strspn (tree arglist, rtx target, enum machine_mode mode)
4035{
4036  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4037    {
4038      tree result = fold_builtin_strspn (arglist);
4039      if (result)
4040	return expand_expr (result, target, mode, EXPAND_NORMAL);
4041    }
4042  return 0;
4043}
4044
4045/* Expand expression EXP, which is a call to the strcspn builtin.
4046   Return 0 if we failed the caller should emit a normal call,
4047   otherwise try to get the result in TARGET, if convenient.  */
4048
4049static rtx
4050expand_builtin_strcspn (tree arglist, rtx target, enum machine_mode mode)
4051{
4052  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4053    {
4054      tree result = fold_builtin_strcspn (arglist);
4055      if (result)
4056	return expand_expr (result, target, mode, EXPAND_NORMAL);
4057    }
4058  return 0;
4059}
4060
4061/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4062   if that's convenient.  */
4063
4064rtx
4065expand_builtin_saveregs (void)
4066{
4067  rtx val, seq;
4068
4069  /* Don't do __builtin_saveregs more than once in a function.
4070     Save the result of the first call and reuse it.  */
4071  if (saveregs_value != 0)
4072    return saveregs_value;
4073
4074  /* When this function is called, it means that registers must be
4075     saved on entry to this function.  So we migrate the call to the
4076     first insn of this function.  */
4077
4078  start_sequence ();
4079
4080  /* Do whatever the machine needs done in this case.  */
4081  val = targetm.calls.expand_builtin_saveregs ();
4082
4083  seq = get_insns ();
4084  end_sequence ();
4085
4086  saveregs_value = val;
4087
4088  /* Put the insns after the NOTE that starts the function.  If this
4089     is inside a start_sequence, make the outer-level insn chain current, so
4090     the code is placed at the start of the function.  */
4091  push_topmost_sequence ();
4092  emit_insn_after (seq, entry_of_function ());
4093  pop_topmost_sequence ();
4094
4095  return val;
4096}
4097
4098/* __builtin_args_info (N) returns word N of the arg space info
4099   for the current function.  The number and meanings of words
4100   is controlled by the definition of CUMULATIVE_ARGS.  */
4101
4102static rtx
4103expand_builtin_args_info (tree arglist)
4104{
4105  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
4106  int *word_ptr = (int *) &current_function_args_info;
4107
4108  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
4109
4110  if (arglist != 0)
4111    {
4112      if (!host_integerp (TREE_VALUE (arglist), 0))
4113	error ("argument of %<__builtin_args_info%> must be constant");
4114      else
4115	{
4116	  HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
4117
4118	  if (wordnum < 0 || wordnum >= nwords)
4119	    error ("argument of %<__builtin_args_info%> out of range");
4120	  else
4121	    return GEN_INT (word_ptr[wordnum]);
4122	}
4123    }
4124  else
4125    error ("missing argument in %<__builtin_args_info%>");
4126
4127  return const0_rtx;
4128}
4129
4130/* Expand a call to __builtin_next_arg.  */
4131
4132static rtx
4133expand_builtin_next_arg (void)
4134{
4135  /* Checking arguments is already done in fold_builtin_next_arg
4136     that must be called before this function.  */
4137  return expand_binop (Pmode, add_optab,
4138		       current_function_internal_arg_pointer,
4139		       current_function_arg_offset_rtx,
4140		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4141}
4142
4143/* Make it easier for the backends by protecting the valist argument
4144   from multiple evaluations.  */
4145
4146static tree
4147stabilize_va_list (tree valist, int needs_lvalue)
4148{
4149  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4150    {
4151      if (TREE_SIDE_EFFECTS (valist))
4152	valist = save_expr (valist);
4153
4154      /* For this case, the backends will be expecting a pointer to
4155	 TREE_TYPE (va_list_type_node), but it's possible we've
4156	 actually been given an array (an actual va_list_type_node).
4157	 So fix it.  */
4158      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4159	{
4160	  tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4161	  valist = build_fold_addr_expr_with_type (valist, p1);
4162	}
4163    }
4164  else
4165    {
4166      tree pt;
4167
4168      if (! needs_lvalue)
4169	{
4170	  if (! TREE_SIDE_EFFECTS (valist))
4171	    return valist;
4172
4173	  pt = build_pointer_type (va_list_type_node);
4174	  valist = fold_build1 (ADDR_EXPR, pt, valist);
4175	  TREE_SIDE_EFFECTS (valist) = 1;
4176	}
4177
4178      if (TREE_SIDE_EFFECTS (valist))
4179	valist = save_expr (valist);
4180      valist = build_fold_indirect_ref (valist);
4181    }
4182
4183  return valist;
4184}
4185
4186/* The "standard" definition of va_list is void*.  */
4187
4188tree
4189std_build_builtin_va_list (void)
4190{
4191  return ptr_type_node;
4192}
4193
4194/* The "standard" implementation of va_start: just assign `nextarg' to
4195   the variable.  */
4196
4197void
4198std_expand_builtin_va_start (tree valist, rtx nextarg)
4199{
4200  tree t;
4201
4202  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
4203	      make_tree (ptr_type_node, nextarg));
4204  TREE_SIDE_EFFECTS (t) = 1;
4205
4206  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4207}
4208
4209/* Expand ARGLIST, from a call to __builtin_va_start.  */
4210
4211static rtx
4212expand_builtin_va_start (tree arglist)
4213{
4214  rtx nextarg;
4215  tree chain, valist;
4216
4217  chain = TREE_CHAIN (arglist);
4218
4219  if (!chain)
4220    {
4221      error ("too few arguments to function %<va_start%>");
4222      return const0_rtx;
4223    }
4224
4225  if (fold_builtin_next_arg (chain))
4226    return const0_rtx;
4227
4228  nextarg = expand_builtin_next_arg ();
4229  valist = stabilize_va_list (TREE_VALUE (arglist), 1);
4230
4231#ifdef EXPAND_BUILTIN_VA_START
4232  EXPAND_BUILTIN_VA_START (valist, nextarg);
4233#else
4234  std_expand_builtin_va_start (valist, nextarg);
4235#endif
4236
4237  return const0_rtx;
4238}
4239
4240/* The "standard" implementation of va_arg: read the value from the
4241   current (padded) address and increment by the (padded) size.  */
4242
4243tree
4244std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
4245{
4246  tree addr, t, type_size, rounded_size, valist_tmp;
4247  unsigned HOST_WIDE_INT align, boundary;
4248  bool indirect;
4249
4250#ifdef ARGS_GROW_DOWNWARD
4251  /* All of the alignment and movement below is for args-grow-up machines.
4252     As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
4253     implement their own specialized gimplify_va_arg_expr routines.  */
4254  gcc_unreachable ();
4255#endif
4256
4257  indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
4258  if (indirect)
4259    type = build_pointer_type (type);
4260
4261  align = PARM_BOUNDARY / BITS_PER_UNIT;
4262  boundary = FUNCTION_ARG_BOUNDARY (TYPE_MODE (type), type) / BITS_PER_UNIT;
4263
4264  /* Hoist the valist value into a temporary for the moment.  */
4265  valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
4266
4267  /* va_list pointer is aligned to PARM_BOUNDARY.  If argument actually
4268     requires greater alignment, we must perform dynamic alignment.  */
4269  if (boundary > align
4270      && !integer_zerop (TYPE_SIZE (type)))
4271    {
4272      t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
4273      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4274		  build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
4275      gimplify_and_add (t, pre_p);
4276
4277      t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
4278      t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
4279		  build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
4280      gimplify_and_add (t, pre_p);
4281    }
4282  else
4283    boundary = align;
4284
4285  /* If the actual alignment is less than the alignment of the type,
4286     adjust the type accordingly so that we don't assume strict alignment
4287     when deferencing the pointer.  */
4288  boundary *= BITS_PER_UNIT;
4289  if (boundary < TYPE_ALIGN (type))
4290    {
4291      type = build_variant_type_copy (type);
4292      TYPE_ALIGN (type) = boundary;
4293    }
4294
4295  /* Compute the rounded size of the type.  */
4296  type_size = size_in_bytes (type);
4297  rounded_size = round_up (type_size, align);
4298
4299  /* Reduce rounded_size so it's sharable with the postqueue.  */
4300  gimplify_expr (&rounded_size, pre_p, post_p, is_gimple_val, fb_rvalue);
4301
4302  /* Get AP.  */
4303  addr = valist_tmp;
4304  if (PAD_VARARGS_DOWN && !integer_zerop (rounded_size))
4305    {
4306      /* Small args are padded downward.  */
4307      t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
4308      t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
4309		       size_binop (MINUS_EXPR, rounded_size, type_size));
4310      t = fold_convert (TREE_TYPE (addr), t);
4311      addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
4312    }
4313
4314  /* Compute new value for AP.  */
4315  t = fold_convert (TREE_TYPE (valist), rounded_size);
4316  t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
4317  t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
4318  gimplify_and_add (t, pre_p);
4319
4320  addr = fold_convert (build_pointer_type (type), addr);
4321
4322  if (indirect)
4323    addr = build_va_arg_indirect_ref (addr);
4324
4325  return build_va_arg_indirect_ref (addr);
4326}
4327
4328/* Build an indirect-ref expression over the given TREE, which represents a
4329   piece of a va_arg() expansion.  */
4330tree
4331build_va_arg_indirect_ref (tree addr)
4332{
4333  addr = build_fold_indirect_ref (addr);
4334
4335  if (flag_mudflap) /* Don't instrument va_arg INDIRECT_REF.  */
4336    mf_mark (addr);
4337
4338  return addr;
4339}
4340
4341/* Return a dummy expression of type TYPE in order to keep going after an
4342   error.  */
4343
4344static tree
4345dummy_object (tree type)
4346{
4347  tree t = build_int_cst (build_pointer_type (type), 0);
4348  return build1 (INDIRECT_REF, type, t);
4349}
4350
4351/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
4352   builtin function, but a very special sort of operator.  */
4353
4354enum gimplify_status
4355gimplify_va_arg_expr (tree *expr_p, tree *pre_p, tree *post_p)
4356{
4357  tree promoted_type, want_va_type, have_va_type;
4358  tree valist = TREE_OPERAND (*expr_p, 0);
4359  tree type = TREE_TYPE (*expr_p);
4360  tree t;
4361
4362  /* Verify that valist is of the proper type.  */
4363  want_va_type = va_list_type_node;
4364  have_va_type = TREE_TYPE (valist);
4365
4366  if (have_va_type == error_mark_node)
4367    return GS_ERROR;
4368
4369  if (TREE_CODE (want_va_type) == ARRAY_TYPE)
4370    {
4371      /* If va_list is an array type, the argument may have decayed
4372	 to a pointer type, e.g. by being passed to another function.
4373	 In that case, unwrap both types so that we can compare the
4374	 underlying records.  */
4375      if (TREE_CODE (have_va_type) == ARRAY_TYPE
4376	  || POINTER_TYPE_P (have_va_type))
4377	{
4378	  want_va_type = TREE_TYPE (want_va_type);
4379	  have_va_type = TREE_TYPE (have_va_type);
4380	}
4381    }
4382
4383  if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
4384    {
4385      error ("first argument to %<va_arg%> not of type %<va_list%>");
4386      return GS_ERROR;
4387    }
4388
4389  /* Generate a diagnostic for requesting data of a type that cannot
4390     be passed through `...' due to type promotion at the call site.  */
4391  else if ((promoted_type = lang_hooks.types.type_promotes_to (type))
4392	   != type)
4393    {
4394      static bool gave_help;
4395
4396      /* Unfortunately, this is merely undefined, rather than a constraint
4397	 violation, so we cannot make this an error.  If this call is never
4398	 executed, the program is still strictly conforming.  */
4399      warning (0, "%qT is promoted to %qT when passed through %<...%>",
4400	       type, promoted_type);
4401      if (! gave_help)
4402	{
4403	  gave_help = true;
4404	  warning (0, "(so you should pass %qT not %qT to %<va_arg%>)",
4405		   promoted_type, type);
4406	}
4407
4408      /* We can, however, treat "undefined" any way we please.
4409	 Call abort to encourage the user to fix the program.  */
4410      inform ("if this code is reached, the program will abort");
4411      t = build_function_call_expr (implicit_built_in_decls[BUILT_IN_TRAP],
4412				    NULL);
4413      append_to_statement_list (t, pre_p);
4414
4415      /* This is dead code, but go ahead and finish so that the
4416	 mode of the result comes out right.  */
4417      *expr_p = dummy_object (type);
4418      return GS_ALL_DONE;
4419    }
4420  else
4421    {
4422      /* Make it easier for the backends by protecting the valist argument
4423	 from multiple evaluations.  */
4424      if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4425	{
4426	  /* For this case, the backends will be expecting a pointer to
4427	     TREE_TYPE (va_list_type_node), but it's possible we've
4428	     actually been given an array (an actual va_list_type_node).
4429	     So fix it.  */
4430	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4431	    {
4432	      tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
4433	      valist = build_fold_addr_expr_with_type (valist, p1);
4434	    }
4435	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
4436	}
4437      else
4438	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
4439
4440      if (!targetm.gimplify_va_arg_expr)
4441	/* FIXME:Once most targets are converted we should merely
4442	   assert this is non-null.  */
4443	return GS_ALL_DONE;
4444
4445      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
4446      return GS_OK;
4447    }
4448}
4449
4450/* Expand ARGLIST, from a call to __builtin_va_end.  */
4451
4452static rtx
4453expand_builtin_va_end (tree arglist)
4454{
4455  tree valist = TREE_VALUE (arglist);
4456
4457  /* Evaluate for side effects, if needed.  I hate macros that don't
4458     do that.  */
4459  if (TREE_SIDE_EFFECTS (valist))
4460    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4461
4462  return const0_rtx;
4463}
4464
4465/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a
4466   builtin rather than just as an assignment in stdarg.h because of the
4467   nastiness of array-type va_list types.  */
4468
4469static rtx
4470expand_builtin_va_copy (tree arglist)
4471{
4472  tree dst, src, t;
4473
4474  dst = TREE_VALUE (arglist);
4475  src = TREE_VALUE (TREE_CHAIN (arglist));
4476
4477  dst = stabilize_va_list (dst, 1);
4478  src = stabilize_va_list (src, 0);
4479
4480  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
4481    {
4482      t = build2 (MODIFY_EXPR, va_list_type_node, dst, src);
4483      TREE_SIDE_EFFECTS (t) = 1;
4484      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4485    }
4486  else
4487    {
4488      rtx dstb, srcb, size;
4489
4490      /* Evaluate to pointers.  */
4491      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4492      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4493      size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
4494			  VOIDmode, EXPAND_NORMAL);
4495
4496      dstb = convert_memory_address (Pmode, dstb);
4497      srcb = convert_memory_address (Pmode, srcb);
4498
4499      /* "Dereference" to BLKmode memories.  */
4500      dstb = gen_rtx_MEM (BLKmode, dstb);
4501      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4502      set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
4503      srcb = gen_rtx_MEM (BLKmode, srcb);
4504      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4505      set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
4506
4507      /* Copy.  */
4508      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4509    }
4510
4511  return const0_rtx;
4512}
4513
4514/* Expand a call to one of the builtin functions __builtin_frame_address or
4515   __builtin_return_address.  */
4516
4517static rtx
4518expand_builtin_frame_address (tree fndecl, tree arglist)
4519{
4520  /* The argument must be a nonnegative integer constant.
4521     It counts the number of frames to scan up the stack.
4522     The value is the return address saved in that frame.  */
4523  if (arglist == 0)
4524    /* Warning about missing arg was already issued.  */
4525    return const0_rtx;
4526  else if (! host_integerp (TREE_VALUE (arglist), 1))
4527    {
4528      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4529	error ("invalid argument to %<__builtin_frame_address%>");
4530      else
4531	error ("invalid argument to %<__builtin_return_address%>");
4532      return const0_rtx;
4533    }
4534  else
4535    {
4536      rtx tem
4537	= expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4538				      tree_low_cst (TREE_VALUE (arglist), 1));
4539
4540      /* Some ports cannot access arbitrary stack frames.  */
4541      if (tem == NULL)
4542	{
4543	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4544	    warning (0, "unsupported argument to %<__builtin_frame_address%>");
4545	  else
4546	    warning (0, "unsupported argument to %<__builtin_return_address%>");
4547	  return const0_rtx;
4548	}
4549
4550      /* For __builtin_frame_address, return what we've got.  */
4551      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4552	return tem;
4553
4554      if (!REG_P (tem)
4555	  && ! CONSTANT_P (tem))
4556	tem = copy_to_mode_reg (Pmode, tem);
4557      return tem;
4558    }
4559}
4560
4561/* Expand a call to the alloca builtin, with arguments ARGLIST.  Return 0 if
4562   we failed and the caller should emit a normal call, otherwise try to get
4563   the result in TARGET, if convenient.  */
4564
4565static rtx
4566expand_builtin_alloca (tree arglist, rtx target)
4567{
4568  rtx op0;
4569  rtx result;
4570
4571  /* In -fmudflap-instrumented code, alloca() and __builtin_alloca()
4572     should always expand to function calls.  These can be intercepted
4573     in libmudflap.  */
4574  if (flag_mudflap)
4575    return 0;
4576
4577  if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4578    return 0;
4579
4580  /* Compute the argument.  */
4581  op0 = expand_normal (TREE_VALUE (arglist));
4582
4583  /* Allocate the desired space.  */
4584  result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
4585  result = convert_memory_address (ptr_mode, result);
4586
4587  return result;
4588}
4589
4590/* Expand a call to a bswap builtin.  The arguments are in ARGLIST.  MODE
4591   is the mode to expand with.  */
4592
4593static rtx
4594expand_builtin_bswap (tree arglist, rtx target, rtx subtarget)
4595{
4596  enum machine_mode mode;
4597  tree arg;
4598  rtx op0;
4599
4600  if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4601    return 0;
4602
4603  arg = TREE_VALUE (arglist);
4604  mode = TYPE_MODE (TREE_TYPE (arg));
4605  op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4606
4607  target = expand_unop (mode, bswap_optab, op0, target, 1);
4608
4609  gcc_assert (target);
4610
4611  return convert_to_mode (mode, target, 0);
4612}
4613
4614/* Expand a call to a unary builtin.  The arguments are in ARGLIST.
4615   Return 0 if a normal call should be emitted rather than expanding the
4616   function in-line.  If convenient, the result should be placed in TARGET.
4617   SUBTARGET may be used as the target for computing one of EXP's operands.  */
4618
4619static rtx
4620expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
4621		     rtx subtarget, optab op_optab)
4622{
4623  rtx op0;
4624  if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
4625    return 0;
4626
4627  /* Compute the argument.  */
4628  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
4629  /* Compute op, into TARGET if possible.
4630     Set TARGET to wherever the result comes back.  */
4631  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
4632			op_optab, op0, target, 1);
4633  gcc_assert (target);
4634
4635  return convert_to_mode (target_mode, target, 0);
4636}
4637
4638/* If the string passed to fputs is a constant and is one character
4639   long, we attempt to transform this call into __builtin_fputc().  */
4640
4641static rtx
4642expand_builtin_fputs (tree arglist, rtx target, bool unlocked)
4643{
4644  /* Verify the arguments in the original call.  */
4645  if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4646    {
4647      tree result = fold_builtin_fputs (arglist, (target == const0_rtx),
4648					unlocked, NULL_TREE);
4649      if (result)
4650	return expand_expr (result, target, VOIDmode, EXPAND_NORMAL);
4651    }
4652  return 0;
4653}
4654
4655/* Expand a call to __builtin_expect.  We return our argument and emit a
4656   NOTE_INSN_EXPECTED_VALUE note.  This is the expansion of __builtin_expect in
4657   a non-jump context.  */
4658
4659static rtx
4660expand_builtin_expect (tree arglist, rtx target)
4661{
4662  tree exp, c;
4663  rtx note, rtx_c;
4664
4665  if (arglist == NULL_TREE
4666      || TREE_CHAIN (arglist) == NULL_TREE)
4667    return const0_rtx;
4668  exp = TREE_VALUE (arglist);
4669  c = TREE_VALUE (TREE_CHAIN (arglist));
4670
4671  if (TREE_CODE (c) != INTEGER_CST)
4672    {
4673      error ("second argument to %<__builtin_expect%> must be a constant");
4674      c = integer_zero_node;
4675    }
4676
4677  target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
4678
4679  /* Don't bother with expected value notes for integral constants.  */
4680  if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
4681    {
4682      /* We do need to force this into a register so that we can be
4683	 moderately sure to be able to correctly interpret the branch
4684	 condition later.  */
4685      target = force_reg (GET_MODE (target), target);
4686
4687      rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
4688
4689      note = emit_note (NOTE_INSN_EXPECTED_VALUE);
4690      NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
4691    }
4692
4693  return target;
4694}
4695
4696/* Like expand_builtin_expect, except do this in a jump context.  This is
4697   called from do_jump if the conditional is a __builtin_expect.  Return either
4698   a list of insns to emit the jump or NULL if we cannot optimize
4699   __builtin_expect.  We need to optimize this at jump time so that machines
4700   like the PowerPC don't turn the test into a SCC operation, and then jump
4701   based on the test being 0/1.  */
4702
4703rtx
4704expand_builtin_expect_jump (tree exp, rtx if_false_label, rtx if_true_label)
4705{
4706  tree arglist = TREE_OPERAND (exp, 1);
4707  tree arg0 = TREE_VALUE (arglist);
4708  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4709  rtx ret = NULL_RTX;
4710
4711  /* Only handle __builtin_expect (test, 0) and
4712     __builtin_expect (test, 1).  */
4713  if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
4714      && (integer_zerop (arg1) || integer_onep (arg1)))
4715    {
4716      rtx insn, drop_through_label, temp;
4717
4718      /* Expand the jump insns.  */
4719      start_sequence ();
4720      do_jump (arg0, if_false_label, if_true_label);
4721      ret = get_insns ();
4722
4723      drop_through_label = get_last_insn ();
4724      if (drop_through_label && NOTE_P (drop_through_label))
4725	drop_through_label = prev_nonnote_insn (drop_through_label);
4726      if (drop_through_label && !LABEL_P (drop_through_label))
4727	drop_through_label = NULL_RTX;
4728      end_sequence ();
4729
4730      if (! if_true_label)
4731	if_true_label = drop_through_label;
4732      if (! if_false_label)
4733	if_false_label = drop_through_label;
4734
4735      /* Go through and add the expect's to each of the conditional jumps.  */
4736      insn = ret;
4737      while (insn != NULL_RTX)
4738	{
4739	  rtx next = NEXT_INSN (insn);
4740
4741	  if (JUMP_P (insn) && any_condjump_p (insn))
4742	    {
4743	      rtx ifelse = SET_SRC (pc_set (insn));
4744	      rtx then_dest = XEXP (ifelse, 1);
4745	      rtx else_dest = XEXP (ifelse, 2);
4746	      int taken = -1;
4747
4748	      /* First check if we recognize any of the labels.  */
4749	      if (GET_CODE (then_dest) == LABEL_REF
4750		  && XEXP (then_dest, 0) == if_true_label)
4751		taken = 1;
4752	      else if (GET_CODE (then_dest) == LABEL_REF
4753		       && XEXP (then_dest, 0) == if_false_label)
4754		taken = 0;
4755	      else if (GET_CODE (else_dest) == LABEL_REF
4756		       && XEXP (else_dest, 0) == if_false_label)
4757		taken = 1;
4758	      else if (GET_CODE (else_dest) == LABEL_REF
4759		       && XEXP (else_dest, 0) == if_true_label)
4760		taken = 0;
4761	      /* Otherwise check where we drop through.  */
4762	      else if (else_dest == pc_rtx)
4763		{
4764		  if (next && NOTE_P (next))
4765		    next = next_nonnote_insn (next);
4766
4767		  if (next && JUMP_P (next)
4768		      && any_uncondjump_p (next))
4769		    temp = XEXP (SET_SRC (pc_set (next)), 0);
4770		  else
4771		    temp = next;
4772
4773		  /* TEMP is either a CODE_LABEL, NULL_RTX or something
4774		     else that can't possibly match either target label.  */
4775		  if (temp == if_false_label)
4776		    taken = 1;
4777		  else if (temp == if_true_label)
4778		    taken = 0;
4779		}
4780	      else if (then_dest == pc_rtx)
4781		{
4782		  if (next && NOTE_P (next))
4783		    next = next_nonnote_insn (next);
4784
4785		  if (next && JUMP_P (next)
4786		      && any_uncondjump_p (next))
4787		    temp = XEXP (SET_SRC (pc_set (next)), 0);
4788		  else
4789		    temp = next;
4790
4791		  if (temp == if_false_label)
4792		    taken = 0;
4793		  else if (temp == if_true_label)
4794		    taken = 1;
4795		}
4796
4797	      if (taken != -1)
4798		{
4799		  /* If the test is expected to fail, reverse the
4800		     probabilities.  */
4801		  if (integer_zerop (arg1))
4802		    taken = 1 - taken;
4803		  predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
4804		}
4805	    }
4806
4807	  insn = next;
4808	}
4809    }
4810
4811  return ret;
4812}
4813
4814void
4815expand_builtin_trap (void)
4816{
4817#ifdef HAVE_trap
4818  if (HAVE_trap)
4819    emit_insn (gen_trap ());
4820  else
4821#endif
4822    emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4823  emit_barrier ();
4824}
4825
4826/* Expand a call to fabs, fabsf or fabsl with arguments ARGLIST.
4827   Return 0 if a normal call should be emitted rather than expanding
4828   the function inline.  If convenient, the result should be placed
4829   in TARGET.  SUBTARGET may be used as the target for computing
4830   the operand.  */
4831
4832static rtx
4833expand_builtin_fabs (tree arglist, rtx target, rtx subtarget)
4834{
4835  enum machine_mode mode;
4836  tree arg;
4837  rtx op0;
4838
4839  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
4840    return 0;
4841
4842  arg = TREE_VALUE (arglist);
4843  mode = TYPE_MODE (TREE_TYPE (arg));
4844  op0 = expand_expr (arg, subtarget, VOIDmode, 0);
4845  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4846}
4847
4848/* Expand a call to copysign, copysignf, or copysignl with arguments ARGLIST.
4849   Return NULL is a normal call should be emitted rather than expanding the
4850   function inline.  If convenient, the result should be placed in TARGET.
4851   SUBTARGET may be used as the target for computing the operand.  */
4852
4853static rtx
4854expand_builtin_copysign (tree arglist, rtx target, rtx subtarget)
4855{
4856  rtx op0, op1;
4857  tree arg;
4858
4859  if (!validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4860    return 0;
4861
4862  arg = TREE_VALUE (arglist);
4863  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4864
4865  arg = TREE_VALUE (TREE_CHAIN (arglist));
4866  op1 = expand_normal (arg);
4867
4868  return expand_copysign (op0, op1, target);
4869}
4870
4871/* Create a new constant string literal and return a char* pointer to it.
4872   The STRING_CST value is the LEN characters at STR.  */
4873tree
4874build_string_literal (int len, const char *str)
4875{
4876  tree t, elem, index, type;
4877
4878  t = build_string (len, str);
4879  elem = build_type_variant (char_type_node, 1, 0);
4880  index = build_index_type (build_int_cst (NULL_TREE, len - 1));
4881  type = build_array_type (elem, index);
4882  TREE_TYPE (t) = type;
4883  TREE_CONSTANT (t) = 1;
4884  TREE_INVARIANT (t) = 1;
4885  TREE_READONLY (t) = 1;
4886  TREE_STATIC (t) = 1;
4887
4888  type = build_pointer_type (type);
4889  t = build1 (ADDR_EXPR, type, t);
4890
4891  type = build_pointer_type (elem);
4892  t = build1 (NOP_EXPR, type, t);
4893  return t;
4894}
4895
4896/* Expand EXP, a call to printf or printf_unlocked.
4897   Return 0 if a normal call should be emitted rather than transforming
4898   the function inline.  If convenient, the result should be placed in
4899   TARGET with mode MODE.  UNLOCKED indicates this is a printf_unlocked
4900   call.  */
4901static rtx
4902expand_builtin_printf (tree exp, rtx target, enum machine_mode mode,
4903		       bool unlocked)
4904{
4905  tree arglist = TREE_OPERAND (exp, 1);
4906  /* If we're using an unlocked function, assume the other unlocked
4907     functions exist explicitly.  */
4908  tree const fn_putchar = unlocked ? built_in_decls[BUILT_IN_PUTCHAR_UNLOCKED]
4909    : implicit_built_in_decls[BUILT_IN_PUTCHAR];
4910  tree const fn_puts = unlocked ? built_in_decls[BUILT_IN_PUTS_UNLOCKED]
4911    : implicit_built_in_decls[BUILT_IN_PUTS];
4912  const char *fmt_str;
4913  tree fn, fmt, arg;
4914
4915  /* If the return value is used, don't do the transformation.  */
4916  if (target != const0_rtx)
4917    return 0;
4918
4919  /* Verify the required arguments in the original call.  */
4920  if (! arglist)
4921    return 0;
4922  fmt = TREE_VALUE (arglist);
4923  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
4924    return 0;
4925  arglist = TREE_CHAIN (arglist);
4926
4927  /* Check whether the format is a literal string constant.  */
4928  fmt_str = c_getstr (fmt);
4929  if (fmt_str == NULL)
4930    return 0;
4931
4932  if (!init_target_chars())
4933    return 0;
4934
4935  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
4936  if (strcmp (fmt_str, target_percent_s_newline) == 0)
4937    {
4938      if (! arglist
4939	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
4940	  || TREE_CHAIN (arglist))
4941	return 0;
4942      fn = fn_puts;
4943    }
4944  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
4945  else if (strcmp (fmt_str, target_percent_c) == 0)
4946    {
4947      if (! arglist
4948	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
4949	  || TREE_CHAIN (arglist))
4950	return 0;
4951      fn = fn_putchar;
4952    }
4953  else
4954    {
4955      /* We can't handle anything else with % args or %% ... yet.  */
4956      if (strchr (fmt_str, target_percent))
4957	return 0;
4958
4959      if (arglist)
4960	return 0;
4961
4962      /* If the format specifier was "", printf does nothing.  */
4963      if (fmt_str[0] == '\0')
4964	return const0_rtx;
4965      /* If the format specifier has length of 1, call putchar.  */
4966      if (fmt_str[1] == '\0')
4967	{
4968	  /* Given printf("c"), (where c is any one character,)
4969	     convert "c"[0] to an int and pass that to the replacement
4970	     function.  */
4971	  arg = build_int_cst (NULL_TREE, fmt_str[0]);
4972	  arglist = build_tree_list (NULL_TREE, arg);
4973	  fn = fn_putchar;
4974	}
4975      else
4976	{
4977	  /* If the format specifier was "string\n", call puts("string").  */
4978	  size_t len = strlen (fmt_str);
4979	  if ((unsigned char)fmt_str[len - 1] == target_newline)
4980	    {
4981	      /* Create a NUL-terminated string that's one char shorter
4982		 than the original, stripping off the trailing '\n'.  */
4983	      char *newstr = alloca (len);
4984	      memcpy (newstr, fmt_str, len - 1);
4985	      newstr[len - 1] = 0;
4986
4987	      arg = build_string_literal (len, newstr);
4988	      arglist = build_tree_list (NULL_TREE, arg);
4989	      fn = fn_puts;
4990	    }
4991	  else
4992	    /* We'd like to arrange to call fputs(string,stdout) here,
4993	       but we need stdout and don't have a way to get it yet.  */
4994	    return 0;
4995	}
4996    }
4997
4998  if (!fn)
4999    return 0;
5000  fn = build_function_call_expr (fn, arglist);
5001  if (TREE_CODE (fn) == CALL_EXPR)
5002    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5003  return expand_expr (fn, target, mode, EXPAND_NORMAL);
5004}
5005
5006/* Expand EXP, a call to fprintf or fprintf_unlocked.
5007   Return 0 if a normal call should be emitted rather than transforming
5008   the function inline.  If convenient, the result should be placed in
5009   TARGET with mode MODE.  UNLOCKED indicates this is a fprintf_unlocked
5010   call.  */
5011static rtx
5012expand_builtin_fprintf (tree exp, rtx target, enum machine_mode mode,
5013			bool unlocked)
5014{
5015  tree arglist = TREE_OPERAND (exp, 1);
5016  /* If we're using an unlocked function, assume the other unlocked
5017     functions exist explicitly.  */
5018  tree const fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
5019    : implicit_built_in_decls[BUILT_IN_FPUTC];
5020  tree const fn_fputs = unlocked ? built_in_decls[BUILT_IN_FPUTS_UNLOCKED]
5021    : implicit_built_in_decls[BUILT_IN_FPUTS];
5022  const char *fmt_str;
5023  tree fn, fmt, fp, arg;
5024
5025  /* If the return value is used, don't do the transformation.  */
5026  if (target != const0_rtx)
5027    return 0;
5028
5029  /* Verify the required arguments in the original call.  */
5030  if (! arglist)
5031    return 0;
5032  fp = TREE_VALUE (arglist);
5033  if (! POINTER_TYPE_P (TREE_TYPE (fp)))
5034    return 0;
5035  arglist = TREE_CHAIN (arglist);
5036  if (! arglist)
5037    return 0;
5038  fmt = TREE_VALUE (arglist);
5039  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5040    return 0;
5041  arglist = TREE_CHAIN (arglist);
5042
5043  /* Check whether the format is a literal string constant.  */
5044  fmt_str = c_getstr (fmt);
5045  if (fmt_str == NULL)
5046    return 0;
5047
5048  if (!init_target_chars())
5049    return 0;
5050
5051  /* If the format specifier was "%s", call __builtin_fputs(arg,fp).  */
5052  if (strcmp (fmt_str, target_percent_s) == 0)
5053    {
5054      if (! arglist
5055	  || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
5056	  || TREE_CHAIN (arglist))
5057	return 0;
5058      arg = TREE_VALUE (arglist);
5059      arglist = build_tree_list (NULL_TREE, fp);
5060      arglist = tree_cons (NULL_TREE, arg, arglist);
5061      fn = fn_fputs;
5062    }
5063  /* If the format specifier was "%c", call __builtin_fputc(arg,fp).  */
5064  else if (strcmp (fmt_str, target_percent_c) == 0)
5065    {
5066      if (! arglist
5067	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE
5068	  || TREE_CHAIN (arglist))
5069	return 0;
5070      arg = TREE_VALUE (arglist);
5071      arglist = build_tree_list (NULL_TREE, fp);
5072      arglist = tree_cons (NULL_TREE, arg, arglist);
5073      fn = fn_fputc;
5074    }
5075  else
5076    {
5077      /* We can't handle anything else with % args or %% ... yet.  */
5078      if (strchr (fmt_str, target_percent))
5079	return 0;
5080
5081      if (arglist)
5082	return 0;
5083
5084      /* If the format specifier was "", fprintf does nothing.  */
5085      if (fmt_str[0] == '\0')
5086	{
5087	  /* Evaluate and ignore FILE* argument for side-effects.  */
5088	  expand_expr (fp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5089	  return const0_rtx;
5090	}
5091
5092      /* When "string" doesn't contain %, replace all cases of
5093	 fprintf(stream,string) with fputs(string,stream).  The fputs
5094	 builtin will take care of special cases like length == 1.  */
5095      arglist = build_tree_list (NULL_TREE, fp);
5096      arglist = tree_cons (NULL_TREE, fmt, arglist);
5097      fn = fn_fputs;
5098    }
5099
5100  if (!fn)
5101    return 0;
5102  fn = build_function_call_expr (fn, arglist);
5103  if (TREE_CODE (fn) == CALL_EXPR)
5104    CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5105  return expand_expr (fn, target, mode, EXPAND_NORMAL);
5106}
5107
5108/* Expand a call to sprintf with argument list ARGLIST.  Return 0 if
5109   a normal call should be emitted rather than expanding the function
5110   inline.  If convenient, the result should be placed in TARGET with
5111   mode MODE.  */
5112
5113static rtx
5114expand_builtin_sprintf (tree arglist, rtx target, enum machine_mode mode)
5115{
5116  tree orig_arglist, dest, fmt;
5117  const char *fmt_str;
5118
5119  orig_arglist = arglist;
5120
5121  /* Verify the required arguments in the original call.  */
5122  if (! arglist)
5123    return 0;
5124  dest = TREE_VALUE (arglist);
5125  if (! POINTER_TYPE_P (TREE_TYPE (dest)))
5126    return 0;
5127  arglist = TREE_CHAIN (arglist);
5128  if (! arglist)
5129    return 0;
5130  fmt = TREE_VALUE (arglist);
5131  if (! POINTER_TYPE_P (TREE_TYPE (fmt)))
5132    return 0;
5133  arglist = TREE_CHAIN (arglist);
5134
5135  /* Check whether the format is a literal string constant.  */
5136  fmt_str = c_getstr (fmt);
5137  if (fmt_str == NULL)
5138    return 0;
5139
5140  if (!init_target_chars())
5141    return 0;
5142
5143  /* If the format doesn't contain % args or %%, use strcpy.  */
5144  if (strchr (fmt_str, target_percent) == 0)
5145    {
5146      tree fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5147      tree exp;
5148
5149      if (arglist || ! fn)
5150	return 0;
5151      expand_expr (build_function_call_expr (fn, orig_arglist),
5152		   const0_rtx, VOIDmode, EXPAND_NORMAL);
5153      if (target == const0_rtx)
5154	return const0_rtx;
5155      exp = build_int_cst (NULL_TREE, strlen (fmt_str));
5156      return expand_expr (exp, target, mode, EXPAND_NORMAL);
5157    }
5158  /* If the format is "%s", use strcpy if the result isn't used.  */
5159  else if (strcmp (fmt_str, target_percent_s) == 0)
5160    {
5161      tree fn, arg, len;
5162      fn = implicit_built_in_decls[BUILT_IN_STRCPY];
5163
5164      if (! fn)
5165	return 0;
5166
5167      if (! arglist || TREE_CHAIN (arglist))
5168	return 0;
5169      arg = TREE_VALUE (arglist);
5170      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
5171	return 0;
5172
5173      if (target != const0_rtx)
5174	{
5175	  len = c_strlen (arg, 1);
5176	  if (! len || TREE_CODE (len) != INTEGER_CST)
5177	    return 0;
5178	}
5179      else
5180	len = NULL_TREE;
5181
5182      arglist = build_tree_list (NULL_TREE, arg);
5183      arglist = tree_cons (NULL_TREE, dest, arglist);
5184      expand_expr (build_function_call_expr (fn, arglist),
5185		   const0_rtx, VOIDmode, EXPAND_NORMAL);
5186
5187      if (target == const0_rtx)
5188	return const0_rtx;
5189      return expand_expr (len, target, mode, EXPAND_NORMAL);
5190    }
5191
5192  return 0;
5193}
5194
5195/* Expand a call to either the entry or exit function profiler.  */
5196
5197static rtx
5198expand_builtin_profile_func (bool exitp)
5199{
5200  rtx this, which;
5201
5202  this = DECL_RTL (current_function_decl);
5203  gcc_assert (MEM_P (this));
5204  this = XEXP (this, 0);
5205
5206  if (exitp)
5207    which = profile_function_exit_libfunc;
5208  else
5209    which = profile_function_entry_libfunc;
5210
5211  emit_library_call (which, LCT_NORMAL, VOIDmode, 2, this, Pmode,
5212		     expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
5213						 0),
5214		     Pmode);
5215
5216  return const0_rtx;
5217}
5218
5219/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5220
5221static rtx
5222round_trampoline_addr (rtx tramp)
5223{
5224  rtx temp, addend, mask;
5225
5226  /* If we don't need too much alignment, we'll have been guaranteed
5227     proper alignment by get_trampoline_type.  */
5228  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5229    return tramp;
5230
5231  /* Round address up to desired boundary.  */
5232  temp = gen_reg_rtx (Pmode);
5233  addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5234  mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5235
5236  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5237			       temp, 0, OPTAB_LIB_WIDEN);
5238  tramp = expand_simple_binop (Pmode, AND, temp, mask,
5239			       temp, 0, OPTAB_LIB_WIDEN);
5240
5241  return tramp;
5242}
5243
5244static rtx
5245expand_builtin_init_trampoline (tree arglist)
5246{
5247  tree t_tramp, t_func, t_chain;
5248  rtx r_tramp, r_func, r_chain;
5249#ifdef TRAMPOLINE_TEMPLATE
5250  rtx blktramp;
5251#endif
5252
5253  if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE,
5254			 POINTER_TYPE, VOID_TYPE))
5255    return NULL_RTX;
5256
5257  t_tramp = TREE_VALUE (arglist);
5258  arglist = TREE_CHAIN (arglist);
5259  t_func = TREE_VALUE (arglist);
5260  arglist = TREE_CHAIN (arglist);
5261  t_chain = TREE_VALUE (arglist);
5262
5263  r_tramp = expand_normal (t_tramp);
5264  r_func = expand_normal (t_func);
5265  r_chain = expand_normal (t_chain);
5266
5267  /* Generate insns to initialize the trampoline.  */
5268  r_tramp = round_trampoline_addr (r_tramp);
5269#ifdef TRAMPOLINE_TEMPLATE
5270  blktramp = gen_rtx_MEM (BLKmode, r_tramp);
5271  set_mem_align (blktramp, TRAMPOLINE_ALIGNMENT);
5272  emit_block_move (blktramp, assemble_trampoline_template (),
5273		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
5274#endif
5275  trampolines_created = 1;
5276  INITIALIZE_TRAMPOLINE (r_tramp, r_func, r_chain);
5277
5278  return const0_rtx;
5279}
5280
5281static rtx
5282expand_builtin_adjust_trampoline (tree arglist)
5283{
5284  rtx tramp;
5285
5286  if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
5287    return NULL_RTX;
5288
5289  tramp = expand_normal (TREE_VALUE (arglist));
5290  tramp = round_trampoline_addr (tramp);
5291#ifdef TRAMPOLINE_ADJUST_ADDRESS
5292  TRAMPOLINE_ADJUST_ADDRESS (tramp);
5293#endif
5294
5295  return tramp;
5296}
5297
5298/* Expand a call to the built-in signbit, signbitf or signbitl function.
5299   Return NULL_RTX if a normal call should be emitted rather than expanding
5300   the function in-line.  EXP is the expression that is a call to the builtin
5301   function; if convenient, the result should be placed in TARGET.  */
5302
5303static rtx
5304expand_builtin_signbit (tree exp, rtx target)
5305{
5306  const struct real_format *fmt;
5307  enum machine_mode fmode, imode, rmode;
5308  HOST_WIDE_INT hi, lo;
5309  tree arg, arglist;
5310  int word, bitpos;
5311  rtx temp;
5312
5313  arglist = TREE_OPERAND (exp, 1);
5314  if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
5315    return 0;
5316
5317  arg = TREE_VALUE (arglist);
5318  fmode = TYPE_MODE (TREE_TYPE (arg));
5319  rmode = TYPE_MODE (TREE_TYPE (exp));
5320  fmt = REAL_MODE_FORMAT (fmode);
5321
5322  /* For floating point formats without a sign bit, implement signbit
5323     as "ARG < 0.0".  */
5324  bitpos = fmt->signbit_ro;
5325  if (bitpos < 0)
5326  {
5327    /* But we can't do this if the format supports signed zero.  */
5328    if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
5329      return 0;
5330
5331    arg = fold_build2 (LT_EXPR, TREE_TYPE (exp), arg,
5332		       build_real (TREE_TYPE (arg), dconst0));
5333    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5334  }
5335
5336  temp = expand_normal (arg);
5337  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5338    {
5339      imode = int_mode_for_mode (fmode);
5340      if (imode == BLKmode)
5341	return 0;
5342      temp = gen_lowpart (imode, temp);
5343    }
5344  else
5345    {
5346      imode = word_mode;
5347      /* Handle targets with different FP word orders.  */
5348      if (FLOAT_WORDS_BIG_ENDIAN)
5349	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5350      else
5351	word = bitpos / BITS_PER_WORD;
5352      temp = operand_subword_force (temp, word, fmode);
5353      bitpos = bitpos % BITS_PER_WORD;
5354    }
5355
5356  /* Force the intermediate word_mode (or narrower) result into a
5357     register.  This avoids attempting to create paradoxical SUBREGs
5358     of floating point modes below.  */
5359  temp = force_reg (imode, temp);
5360
5361  /* If the bitpos is within the "result mode" lowpart, the operation
5362     can be implement with a single bitwise AND.  Otherwise, we need
5363     a right shift and an AND.  */
5364
5365  if (bitpos < GET_MODE_BITSIZE (rmode))
5366    {
5367      if (bitpos < HOST_BITS_PER_WIDE_INT)
5368	{
5369	  hi = 0;
5370	  lo = (HOST_WIDE_INT) 1 << bitpos;
5371	}
5372      else
5373	{
5374	  hi = (HOST_WIDE_INT) 1 << (bitpos - HOST_BITS_PER_WIDE_INT);
5375	  lo = 0;
5376	}
5377
5378      if (imode != rmode)
5379	temp = gen_lowpart (rmode, temp);
5380      temp = expand_binop (rmode, and_optab, temp,
5381			   immed_double_const (lo, hi, rmode),
5382			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5383    }
5384  else
5385    {
5386      /* Perform a logical right shift to place the signbit in the least
5387	 significant bit, then truncate the result to the desired mode
5388	 and mask just this bit.  */
5389      temp = expand_shift (RSHIFT_EXPR, imode, temp,
5390			   build_int_cst (NULL_TREE, bitpos), NULL_RTX, 1);
5391      temp = gen_lowpart (rmode, temp);
5392      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5393			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5394    }
5395
5396  return temp;
5397}
5398
5399/* Expand fork or exec calls.  TARGET is the desired target of the
5400   call.  ARGLIST is the list of arguments of the call.  FN is the
5401   identificator of the actual function.  IGNORE is nonzero if the
5402   value is to be ignored.  */
5403
5404static rtx
5405expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
5406{
5407  tree id, decl;
5408  tree call;
5409
5410  /* If we are not profiling, just call the function.  */
5411  if (!profile_arc_flag)
5412    return NULL_RTX;
5413
5414  /* Otherwise call the wrapper.  This should be equivalent for the rest of
5415     compiler, so the code does not diverge, and the wrapper may run the
5416     code necessary for keeping the profiling sane.  */
5417
5418  switch (DECL_FUNCTION_CODE (fn))
5419    {
5420    case BUILT_IN_FORK:
5421      id = get_identifier ("__gcov_fork");
5422      break;
5423
5424    case BUILT_IN_EXECL:
5425      id = get_identifier ("__gcov_execl");
5426      break;
5427
5428    case BUILT_IN_EXECV:
5429      id = get_identifier ("__gcov_execv");
5430      break;
5431
5432    case BUILT_IN_EXECLP:
5433      id = get_identifier ("__gcov_execlp");
5434      break;
5435
5436    case BUILT_IN_EXECLE:
5437      id = get_identifier ("__gcov_execle");
5438      break;
5439
5440    case BUILT_IN_EXECVP:
5441      id = get_identifier ("__gcov_execvp");
5442      break;
5443
5444    case BUILT_IN_EXECVE:
5445      id = get_identifier ("__gcov_execve");
5446      break;
5447
5448    default:
5449      gcc_unreachable ();
5450    }
5451
5452  decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
5453  DECL_EXTERNAL (decl) = 1;
5454  TREE_PUBLIC (decl) = 1;
5455  DECL_ARTIFICIAL (decl) = 1;
5456  TREE_NOTHROW (decl) = 1;
5457  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5458  DECL_VISIBILITY_SPECIFIED (decl) = 1;
5459  call = build_function_call_expr (decl, arglist);
5460
5461  return expand_call (call, target, ignore);
5462}
5463
5464
5465/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5466   the pointer in these functions is void*, the tree optimizers may remove
5467   casts.  The mode computed in expand_builtin isn't reliable either, due
5468   to __sync_bool_compare_and_swap.
5469
5470   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5471   group of builtins.  This gives us log2 of the mode size.  */
5472
5473static inline enum machine_mode
5474get_builtin_sync_mode (int fcode_diff)
5475{
5476  /* The size is not negotiable, so ask not to get BLKmode in return
5477     if the target indicates that a smaller size would be better.  */
5478  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5479}
5480
5481/* Expand the memory expression LOC and return the appropriate memory operand
5482   for the builtin_sync operations.  */
5483
5484static rtx
5485get_builtin_sync_mem (tree loc, enum machine_mode mode)
5486{
5487  rtx addr, mem;
5488
5489  addr = expand_expr (loc, NULL, Pmode, EXPAND_SUM);
5490
5491  /* Note that we explicitly do not want any alias information for this
5492     memory, so that we kill all other live memories.  Otherwise we don't
5493     satisfy the full barrier semantics of the intrinsic.  */
5494  mem = validize_mem (gen_rtx_MEM (mode, addr));
5495
5496  set_mem_align (mem, get_pointer_alignment (loc, BIGGEST_ALIGNMENT));
5497  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5498  MEM_VOLATILE_P (mem) = 1;
5499
5500  return mem;
5501}
5502
5503/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5504   ARGLIST is the operands list to the function.  CODE is the rtx code
5505   that corresponds to the arithmetic or logical operation from the name;
5506   an exception here is that NOT actually means NAND.  TARGET is an optional
5507   place for us to store the results; AFTER is true if this is the
5508   fetch_and_xxx form.  IGNORE is true if we don't actually care about
5509   the result of the operation at all.  */
5510
5511static rtx
5512expand_builtin_sync_operation (enum machine_mode mode, tree arglist,
5513			       enum rtx_code code, bool after,
5514			       rtx target, bool ignore)
5515{
5516  rtx val, mem;
5517  enum machine_mode old_mode;
5518
5519  /* Expand the operands.  */
5520  mem = get_builtin_sync_mem (TREE_VALUE (arglist), mode);
5521
5522  arglist = TREE_CHAIN (arglist);
5523  val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5524  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5525     of CONST_INTs, where we know the old_mode only from the call argument.  */
5526  old_mode = GET_MODE (val);
5527  if (old_mode == VOIDmode)
5528    old_mode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
5529  val = convert_modes (mode, old_mode, val, 1);
5530
5531  if (ignore)
5532    return expand_sync_operation (mem, val, code);
5533  else
5534    return expand_sync_fetch_operation (mem, val, code, after, target);
5535}
5536
5537/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5538   intrinsics.  ARGLIST is the operands list to the function.  IS_BOOL is
5539   true if this is the boolean form.  TARGET is a place for us to store the
5540   results; this is NOT optional if IS_BOOL is true.  */
5541
5542static rtx
5543expand_builtin_compare_and_swap (enum machine_mode mode, tree arglist,
5544				 bool is_bool, rtx target)
5545{
5546  rtx old_val, new_val, mem;
5547  enum machine_mode old_mode;
5548
5549  /* Expand the operands.  */
5550  mem = get_builtin_sync_mem (TREE_VALUE (arglist), mode);
5551
5552  arglist = TREE_CHAIN (arglist);
5553  old_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5554  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5555     of CONST_INTs, where we know the old_mode only from the call argument.  */
5556  old_mode = GET_MODE (old_val);
5557  if (old_mode == VOIDmode)
5558    old_mode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
5559  old_val = convert_modes (mode, old_mode, old_val, 1);
5560
5561  arglist = TREE_CHAIN (arglist);
5562  new_val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5563  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5564     of CONST_INTs, where we know the old_mode only from the call argument.  */
5565  old_mode = GET_MODE (new_val);
5566  if (old_mode == VOIDmode)
5567    old_mode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
5568  new_val = convert_modes (mode, old_mode, new_val, 1);
5569
5570  if (is_bool)
5571    return expand_bool_compare_and_swap (mem, old_val, new_val, target);
5572  else
5573    return expand_val_compare_and_swap (mem, old_val, new_val, target);
5574}
5575
5576/* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5577   general form is actually an atomic exchange, and some targets only
5578   support a reduced form with the second argument being a constant 1.
5579   ARGLIST is the operands list to the function; TARGET is an optional
5580   place for us to store the results.  */
5581
5582static rtx
5583expand_builtin_lock_test_and_set (enum machine_mode mode, tree arglist,
5584				  rtx target)
5585{
5586  rtx val, mem;
5587  enum machine_mode old_mode;
5588
5589  /* Expand the operands.  */
5590  mem = get_builtin_sync_mem (TREE_VALUE (arglist), mode);
5591
5592  arglist = TREE_CHAIN (arglist);
5593  val = expand_expr (TREE_VALUE (arglist), NULL, mode, EXPAND_NORMAL);
5594  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5595     of CONST_INTs, where we know the old_mode only from the call argument.  */
5596  old_mode = GET_MODE (val);
5597  if (old_mode == VOIDmode)
5598    old_mode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
5599  val = convert_modes (mode, old_mode, val, 1);
5600
5601  return expand_sync_lock_test_and_set (mem, val, target);
5602}
5603
5604/* Expand the __sync_synchronize intrinsic.  */
5605
5606static void
5607expand_builtin_synchronize (void)
5608{
5609  tree x;
5610
5611#ifdef HAVE_memory_barrier
5612  if (HAVE_memory_barrier)
5613    {
5614      emit_insn (gen_memory_barrier ());
5615      return;
5616    }
5617#endif
5618
5619  /* If no explicit memory barrier instruction is available, create an
5620     empty asm stmt with a memory clobber.  */
5621  x = build4 (ASM_EXPR, void_type_node, build_string (0, ""), NULL, NULL,
5622	      tree_cons (NULL, build_string (6, "memory"), NULL));
5623  ASM_VOLATILE_P (x) = 1;
5624  expand_asm_expr (x);
5625}
5626
5627/* Expand the __sync_lock_release intrinsic.  ARGLIST is the operands list
5628   to the function.  */
5629
5630static void
5631expand_builtin_lock_release (enum machine_mode mode, tree arglist)
5632{
5633  enum insn_code icode;
5634  rtx mem, insn;
5635  rtx val = const0_rtx;
5636
5637  /* Expand the operands.  */
5638  mem = get_builtin_sync_mem (TREE_VALUE (arglist), mode);
5639
5640  /* If there is an explicit operation in the md file, use it.  */
5641  icode = sync_lock_release[mode];
5642  if (icode != CODE_FOR_nothing)
5643    {
5644      if (!insn_data[icode].operand[1].predicate (val, mode))
5645	val = force_reg (mode, val);
5646
5647      insn = GEN_FCN (icode) (mem, val);
5648      if (insn)
5649	{
5650	  emit_insn (insn);
5651	  return;
5652	}
5653    }
5654
5655  /* Otherwise we can implement this operation by emitting a barrier
5656     followed by a store of zero.  */
5657  expand_builtin_synchronize ();
5658  emit_move_insn (mem, val);
5659}
5660
5661/* Expand an expression EXP that calls a built-in function,
5662   with result going to TARGET if that's convenient
5663   (and in mode MODE if that's convenient).
5664   SUBTARGET may be used as the target for computing one of EXP's operands.
5665   IGNORE is nonzero if the value is to be ignored.  */
5666
5667rtx
5668expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5669		int ignore)
5670{
5671  tree fndecl = get_callee_fndecl (exp);
5672  tree arglist = TREE_OPERAND (exp, 1);
5673  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5674  enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5675
5676  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5677    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5678
5679  /* When not optimizing, generate calls to library functions for a certain
5680     set of builtins.  */
5681  if (!optimize
5682      && !called_as_built_in (fndecl)
5683      && DECL_ASSEMBLER_NAME_SET_P (fndecl)
5684      && fcode != BUILT_IN_ALLOCA)
5685    return expand_call (exp, target, ignore);
5686
5687  /* The built-in function expanders test for target == const0_rtx
5688     to determine whether the function's result will be ignored.  */
5689  if (ignore)
5690    target = const0_rtx;
5691
5692  /* If the result of a pure or const built-in function is ignored, and
5693     none of its arguments are volatile, we can avoid expanding the
5694     built-in call and just evaluate the arguments for side-effects.  */
5695  if (target == const0_rtx
5696      && (DECL_IS_PURE (fndecl) || TREE_READONLY (fndecl)))
5697    {
5698      bool volatilep = false;
5699      tree arg;
5700
5701      for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5702	if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
5703	  {
5704	    volatilep = true;
5705	    break;
5706	  }
5707
5708      if (! volatilep)
5709	{
5710	  for (arg = arglist; arg; arg = TREE_CHAIN (arg))
5711	    expand_expr (TREE_VALUE (arg), const0_rtx,
5712			 VOIDmode, EXPAND_NORMAL);
5713	  return const0_rtx;
5714	}
5715    }
5716
5717  switch (fcode)
5718    {
5719    CASE_FLT_FN (BUILT_IN_FABS):
5720      target = expand_builtin_fabs (arglist, target, subtarget);
5721      if (target)
5722	return target;
5723      break;
5724
5725    CASE_FLT_FN (BUILT_IN_COPYSIGN):
5726      target = expand_builtin_copysign (arglist, target, subtarget);
5727      if (target)
5728	return target;
5729      break;
5730
5731      /* Just do a normal library call if we were unable to fold
5732	 the values.  */
5733    CASE_FLT_FN (BUILT_IN_CABS):
5734      break;
5735
5736    CASE_FLT_FN (BUILT_IN_EXP):
5737    CASE_FLT_FN (BUILT_IN_EXP10):
5738    CASE_FLT_FN (BUILT_IN_POW10):
5739    CASE_FLT_FN (BUILT_IN_EXP2):
5740    CASE_FLT_FN (BUILT_IN_EXPM1):
5741    CASE_FLT_FN (BUILT_IN_LOGB):
5742    CASE_FLT_FN (BUILT_IN_ILOGB):
5743    CASE_FLT_FN (BUILT_IN_LOG):
5744    CASE_FLT_FN (BUILT_IN_LOG10):
5745    CASE_FLT_FN (BUILT_IN_LOG2):
5746    CASE_FLT_FN (BUILT_IN_LOG1P):
5747    CASE_FLT_FN (BUILT_IN_TAN):
5748    CASE_FLT_FN (BUILT_IN_ASIN):
5749    CASE_FLT_FN (BUILT_IN_ACOS):
5750    CASE_FLT_FN (BUILT_IN_ATAN):
5751      /* Treat these like sqrt only if unsafe math optimizations are allowed,
5752	 because of possible accuracy problems.  */
5753      if (! flag_unsafe_math_optimizations)
5754	break;
5755    CASE_FLT_FN (BUILT_IN_SQRT):
5756    CASE_FLT_FN (BUILT_IN_FLOOR):
5757    CASE_FLT_FN (BUILT_IN_CEIL):
5758    CASE_FLT_FN (BUILT_IN_TRUNC):
5759    CASE_FLT_FN (BUILT_IN_ROUND):
5760    CASE_FLT_FN (BUILT_IN_NEARBYINT):
5761    CASE_FLT_FN (BUILT_IN_RINT):
5762    CASE_FLT_FN (BUILT_IN_LRINT):
5763    CASE_FLT_FN (BUILT_IN_LLRINT):
5764      target = expand_builtin_mathfn (exp, target, subtarget);
5765      if (target)
5766	return target;
5767      break;
5768
5769    CASE_FLT_FN (BUILT_IN_LCEIL):
5770    CASE_FLT_FN (BUILT_IN_LLCEIL):
5771    CASE_FLT_FN (BUILT_IN_LFLOOR):
5772    CASE_FLT_FN (BUILT_IN_LLFLOOR):
5773      target = expand_builtin_int_roundingfn (exp, target, subtarget);
5774      if (target)
5775	return target;
5776      break;
5777
5778    CASE_FLT_FN (BUILT_IN_POW):
5779      target = expand_builtin_pow (exp, target, subtarget);
5780      if (target)
5781	return target;
5782      break;
5783
5784    CASE_FLT_FN (BUILT_IN_POWI):
5785      target = expand_builtin_powi (exp, target, subtarget);
5786      if (target)
5787	return target;
5788      break;
5789
5790    CASE_FLT_FN (BUILT_IN_ATAN2):
5791    CASE_FLT_FN (BUILT_IN_LDEXP):
5792    CASE_FLT_FN (BUILT_IN_FMOD):
5793    CASE_FLT_FN (BUILT_IN_DREM):
5794      if (! flag_unsafe_math_optimizations)
5795	break;
5796      target = expand_builtin_mathfn_2 (exp, target, subtarget);
5797      if (target)
5798	return target;
5799      break;
5800
5801    CASE_FLT_FN (BUILT_IN_SIN):
5802    CASE_FLT_FN (BUILT_IN_COS):
5803      if (! flag_unsafe_math_optimizations)
5804	break;
5805      target = expand_builtin_mathfn_3 (exp, target, subtarget);
5806      if (target)
5807	return target;
5808      break;
5809
5810    CASE_FLT_FN (BUILT_IN_SINCOS):
5811      if (! flag_unsafe_math_optimizations)
5812	break;
5813      target = expand_builtin_sincos (exp);
5814      if (target)
5815	return target;
5816      break;
5817
5818    case BUILT_IN_APPLY_ARGS:
5819      return expand_builtin_apply_args ();
5820
5821      /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5822	 FUNCTION with a copy of the parameters described by
5823	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
5824	 allocated on the stack into which is stored all the registers
5825	 that might possibly be used for returning the result of a
5826	 function.  ARGUMENTS is the value returned by
5827	 __builtin_apply_args.  ARGSIZE is the number of bytes of
5828	 arguments that must be copied.  ??? How should this value be
5829	 computed?  We'll also need a safe worst case value for varargs
5830	 functions.  */
5831    case BUILT_IN_APPLY:
5832      if (!validate_arglist (arglist, POINTER_TYPE,
5833			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5834	  && !validate_arglist (arglist, REFERENCE_TYPE,
5835				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5836	return const0_rtx;
5837      else
5838	{
5839	  int i;
5840	  tree t;
5841	  rtx ops[3];
5842
5843	  for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5844	    ops[i] = expand_normal (TREE_VALUE (t));
5845
5846	  return expand_builtin_apply (ops[0], ops[1], ops[2