/* Expand builtin functions.
Copyright (C) 1988-2020 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
/* Legacy warning! Please add no further builtin simplifications here
(apart from pure constant folding) - builtin simplifications should go
to match.pd or gimple-fold.c instead. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename() */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes. */
const char *const built_in_class_names[BUILT_IN_LAST]
= {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
/* Set up an array of builtin_info_type, making sure each element's decl is
initialized to NULL_TREE. */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away. */
bool force_folding_builtin_constant_p;
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
rtx target, tree exp,
memop_ret retmode,
bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);
static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);
static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_ or __sync_. */
static bool
is_builtin_name (const char *name)
{
if (strncmp (name, "__builtin_", 10) == 0)
return true;
if (strncmp (name, "__sync_", 7) == 0)
return true;
if (strncmp (name, "__atomic_", 9) == 0)
return true;
return false;
}
/* Return true if NODE should be considered for inline expansion regardless
of the optimization level. This means whenever a function is invoked with
its "internal" name, which normally contains the prefix "__builtin". */
bool
called_as_built_in (tree node)
{
/* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
we want the name used to call the function, not the name it
will have. */
const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
that N < M. If these numbers can be determined, store M in *ALIGNP and N in
*BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
*ALIGNP and any bit-offset to *BITPOSP.
Note that the address (and thus the alignment) computed here is based
on the address to which a symbol resolves, whereas DECL_ALIGN is based
on the address at which an object is actually located. These two
addresses are not always the same. For example, on ARM targets,
the address &foo of a Thumb function foo() has the lowest bit set,
whereas foo() itself starts on an even address.
If ADDR_P is true we are taking the address of the memory reference EXP
and thus cannot rely on the access taking place. */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
poly_int64 bitsize, bitpos;
tree offset;
machine_mode mode;
int unsignedp, reversep, volatilep;
unsigned int align = BITS_PER_UNIT;
bool known_alignment = false;
/* Get the innermost object and the constant (bitpos) and possibly
variable (offset) offset of the access. */
exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &reversep, &volatilep);
/* Extract alignment information from the innermost object and
possibly adjust bitpos and offset. */
if (TREE_CODE (exp) == FUNCTION_DECL)
{
/* Function addresses can encode extra information besides their
alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
allows the low bit to be used as a virtual bit, we know
that the address itself must be at least 2-byte aligned. */
if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
align = 2 * BITS_PER_UNIT;
}
else if (TREE_CODE (exp) == LABEL_DECL)
;
else if (TREE_CODE (exp) == CONST_DECL)
{
/* The alignment of a CONST_DECL is determined by its initializer. */
exp = DECL_INITIAL (exp);
align = TYPE_ALIGN (TREE_TYPE (exp));
if (CONSTANT_CLASS_P (exp))
align = targetm.constant_alignment (exp, align);
known_alignment = true;
}
else if (DECL_P (exp))
{
align = DECL_ALIGN (exp);
known_alignment = true;
}
else if (TREE_CODE (exp) == INDIRECT_REF
|| TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
{
tree addr = TREE_OPERAND (exp, 0);
unsigned ptr_align;
unsigned HOST_WIDE_INT ptr_bitpos;
unsigned HOST_WIDE_INT ptr_bitmask = ~0;
/* If the address is explicitly aligned, handle that. */
if (TREE_CODE (addr) == BIT_AND_EXPR
&& TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
{
ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
ptr_bitmask *= BITS_PER_UNIT;
align = least_bit_hwi (ptr_bitmask);
addr = TREE_OPERAND (addr, 0);
}
known_alignment
= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
align = MAX (ptr_align, align);
/* Re-apply explicit alignment to the bitpos. */
ptr_bitpos &= ptr_bitmask;
/* The alignment of the pointer operand in a TARGET_MEM_REF
has to take the variable offset parts into account. */
if (TREE_CODE (exp) == TARGET_MEM_REF)
{
if (TMR_INDEX (exp))
{
unsigned HOST_WIDE_INT step = 1;
if (TMR_STEP (exp))
step = TREE_INT_CST_LOW (TMR_STEP (exp));
align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
}
if (TMR_INDEX2 (exp))
align = BITS_PER_UNIT;
known_alignment = false;
}
/* When EXP is an actual memory reference then we can use
TYPE_ALIGN of a pointer indirection to derive alignment.
Do so only if get_pointer_alignment_1 did not reveal absolute
alignment knowledge and if using that alignment would
improve the situation. */
unsigned int talign;
if (!addr_p && !known_alignment
&& (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
&& talign > align)
align = talign;
else
{
/* Else adjust bitpos accordingly. */
bitpos += ptr_bitpos;
if (TREE_CODE (exp) == MEM_REF
|| TREE_CODE (exp) == TARGET_MEM_REF)
bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
}
}
else if (TREE_CODE (exp) == STRING_CST)
{
/* STRING_CST are the only constant objects we allow to be not
wrapped inside a CONST_DECL. */
align = TYPE_ALIGN (TREE_TYPE (exp));
if (CONSTANT_CLASS_P (exp))
align = targetm.constant_alignment (exp, align);
known_alignment = true;
}
/* If there is a non-constant offset part extract the maximum
alignment that can prevail. */
if (offset)
{
unsigned int trailing_zeros = tree_ctz (offset);
if (trailing_zeros < HOST_BITS_PER_INT)
{
unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
if (inner)
align = MIN (align, inner);
}
}
/* Account for the alignment of runtime coefficients, so that the constant
bitpos is guaranteed to be accurate. */
unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
if (alt_align != 0 && alt_align < align)
{
align = alt_align;
known_alignment = false;
}
*alignp = align;
*bitposp = bitpos.coeffs[0] & (align - 1);
return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
divides (&EXP - N) and such that N < M. If these numbers can be determined,
store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
unsigned HOST_WIDE_INT *bitposp)
{
return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object. */
unsigned int
get_object_alignment (tree exp)
{
unsigned HOST_WIDE_INT bitpos = 0;
unsigned int align;
get_object_alignment_1 (exp, &align, &bitpos);
/* align and bitpos now specify known low bits of the pointer.
ptr & (align - 1) == bitpos. */
if (bitpos != 0)
align = least_bit_hwi (bitpos);
return align;
}
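/* Illustrative sketch, not part of GCC: the (ALIGN, BITPOS) pair computed
above encodes ADDR % ALIGN == BITPOS, so the largest power-of-two alignment
actually guaranteed for ADDR is the lowest set bit of BITPOS when BITPOS is
nonzero, and ALIGN itself otherwise -- the same reduction
get_object_alignment performs with least_bit_hwi. Kept under #if 0 so it is
never compiled. */
#if 0
static unsigned int
example_alignment_from_pair (unsigned int align, unsigned long bitpos)
{
  if (bitpos != 0)
    return (unsigned int) (bitpos & -bitpos);  /* lowest set bit */
  return align;
}
#endif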
/* For a pointer valued expression EXP compute values M and N such that M
divides (EXP - N) and such that N < M. If these numbers can be determined,
store M in *ALIGNP and N in *BITPOSP and return true. Return false if
the results are just a conservative approximation.
If EXP is not a pointer, false is returned too. */
bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
unsigned HOST_WIDE_INT *bitposp)
{
STRIP_NOPS (exp);
if (TREE_CODE (exp) == ADDR_EXPR)
return get_object_alignment_2 (TREE_OPERAND (exp, 0),
alignp, bitposp, true);
else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
{
unsigned int align;
unsigned HOST_WIDE_INT bitpos;
bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
&align, &bitpos);
if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
else
{
unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
if (trailing_zeros < HOST_BITS_PER_INT)
{
unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
if (inner)
align = MIN (align, inner);
}
}
*alignp = align;
*bitposp = bitpos & (align - 1);
return res;
}
else if (TREE_CODE (exp) == SSA_NAME
&& POINTER_TYPE_P (TREE_TYPE (exp)))
{
unsigned int ptr_align, ptr_misalign;
struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
{
*bitposp = ptr_misalign * BITS_PER_UNIT;
*alignp = ptr_align * BITS_PER_UNIT;
/* Make sure to return a sensible alignment when the multiplication
by BITS_PER_UNIT overflowed. */
if (*alignp == 0)
*alignp = 1u << (HOST_BITS_PER_INT - 1);
/* We cannot really tell whether this result is an approximation. */
return false;
}
else
{
*bitposp = 0;
*alignp = BITS_PER_UNIT;
return false;
}
}
else if (TREE_CODE (exp) == INTEGER_CST)
{
*alignp = BIGGEST_ALIGNMENT;
*bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
& (BIGGEST_ALIGNMENT - 1));
return true;
}
*bitposp = 0;
*alignp = BITS_PER_UNIT;
return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
The alignment returned is, by default, the alignment of the thing that
EXP points to. If it is not a POINTER_TYPE, 0 is returned.
Otherwise, look at the expression to see if we can do better, i.e., if the
expression is actually pointing at an object whose alignment is tighter. */
unsigned int
get_pointer_alignment (tree exp)
{
unsigned HOST_WIDE_INT bitpos = 0;
unsigned int align;
get_pointer_alignment_1 (exp, &align, &bitpos);
/* align and bitpos now specify known low bits of the pointer.
ptr & (align - 1) == bitpos. */
if (bitpos != 0)
align = least_bit_hwi (bitpos);
return align;
}
/* Return the number of leading non-zero elements in the sequence
[ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
unsigned n;
if (eltsize == 1)
{
/* Optimize the common case of plain char. */
for (n = 0; n < maxelts; n++)
{
const char *elt = (const char*) ptr + n;
if (!*elt)
break;
}
}
else
{
for (n = 0; n < maxelts; n++)
{
const char *elt = (const char*) ptr + n * eltsize;
if (!memcmp (elt, "\0\0\0\0", eltsize))
break;
}
}
return n;
}
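/* Worked example (illustration only): for the byte sequence
{ 'a', 0, 'b', 0, 0, 0 } with ELTSIZE == 2 and MAXELTS == 3, the third
element is the first all-zero one, so string_length returns 2; with
ELTSIZE == 1 and MAXELTS == 6 the first zero byte is at index 1, so it
returns 1. */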
/* For a call at LOC to a function FN that expects a string in the argument
ARG, issue a diagnostic due to it being called with an argument
declared at NONSTR that is a character array with no terminating NUL. */
void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
if (TREE_NO_WARNING (arg))
return;
loc = expansion_point_location_if_in_system_header (loc);
if (warning_at (loc, OPT_Wstringop_overflow_,
"%qs argument missing terminating nul", fn))
{
inform (DECL_SOURCE_LOCATION (decl),
"referenced argument declared here");
TREE_NO_WARNING (arg) = 1;
}
}
/* For a call EXPR (which may be null) that expects a string argument
and SRC as the argument, returns false if SRC is a character array
with no terminating NUL. When nonnull, BOUND is the number of
characters in which to expect the terminating NUL.
When EXPR is nonnull also issues a warning. */
bool
check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
{
tree size;
bool exact;
tree nonstr = unterminated_array (src, &size, &exact);
if (!nonstr)
return true;
/* NONSTR refers to the non-nul terminated constant array and SIZE
is the constant size of the array in bytes. EXACT is true when
SIZE is exact. */
if (bound)
{
wide_int min, max;
if (TREE_CODE (bound) == INTEGER_CST)
min = max = wi::to_wide (bound);
else
{
value_range_kind rng = get_range_info (bound, &min, &max);
if (rng != VR_RANGE)
return true;
}
if (wi::leu_p (min, wi::to_wide (size)))
return true;
}
if (expr && !TREE_NO_WARNING (expr))
{
tree fndecl = get_callee_fndecl (expr);
const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
}
return false;
}
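/* Source-level illustration (hypothetical user code, not GCC internals):
the declaration below leaves no room for a terminating NUL, so passing A
to a string built-in that expects a NUL-terminated argument makes
check_nul_terminated_array return false and warn_string_no_nul report
"argument missing terminating nul" for the call, with a note at the
declaration. Kept under #if 0 so it is never compiled. */
#if 0
#include <string.h>
static const char a[4] = "abcd";  /* all four bytes used, no trailing '\0' */
static size_t
example_unterminated_use (void)
{
  return strlen (a);  /* would draw the missing-nul warning */
}
#endif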
/* If EXP refers to an unterminated constant character array return
the declaration of the object of which the array is a member or
element and if SIZE is not null, set *SIZE to the size of
the unterminated array and set *EXACT if the size is exact or
clear it otherwise. Otherwise return null. */
tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
/* C_STRLEN will return NULL and set DECL in the info
structure if EXP references an unterminated array. */
c_strlen_data lendata = { };
tree len = c_strlen (exp, 1, &lendata);
if (len == NULL_TREE && lendata.minlen && lendata.decl)
{
if (size)
{
len = lendata.minlen;
if (lendata.off)
{
/* Constant offsets are already accounted for in LENDATA.MINLEN,
but not in a SSA_NAME + CST expression. */
if (TREE_CODE (lendata.off) == INTEGER_CST)
*exact = true;
else if (TREE_CODE (lendata.off) == PLUS_EXPR
&& TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
{
/* Subtract the offset from the size of the array. */
*exact = false;
tree temp = TREE_OPERAND (lendata.off, 1);
temp = fold_convert (ssizetype, temp);
len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
}
else
*exact = false;
}
else
*exact = true;
*size = len;
}
return lendata.decl;
}
return NULL_TREE;
}
/* Compute the length of a null-terminated character string or wide
character string handling character sizes of 1, 2, and 4 bytes.
TREE_STRING_LENGTH is not the right way because it evaluates to
the size of the character array in bytes (as opposed to characters)
and because it can contain a zero byte in the middle.
ONLY_VALUE should be nonzero if the result is not going to be emitted
into the instruction stream and zero if it is going to be expanded.
E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
is returned, otherwise NULL, since
len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
evaluate the side-effects.
If ONLY_VALUE is two then we do not emit warnings about out-of-bound
accesses. Note that this implies the result is not going to be emitted
into the instruction stream.
Additional information about the string accessed may be recorded
in DATA. For example, if ARG references an unterminated string,
then the declaration will be stored in the DECL field. If the
length of the unterminated string can be determined, it'll be
stored in the LEN field. Note this length could well be different
than what a C strlen call would return.
ELTSIZE is 1 for normal single byte character strings, and 2 or
4 for wide character strings. ELTSIZE is by default 1.
The value returned is of type `ssizetype'. */
tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
/* If we were not passed a DATA pointer, then get one to a local
structure. That avoids having to check DATA for NULL before
each time we want to use it. */
c_strlen_data local_strlen_data = { };
if (!data)
data = &local_strlen_data;
gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
tree src = STRIP_NOPS (arg);
if (TREE_CODE (src) == COND_EXPR
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
{
tree len1, len2;
len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
if (tree_int_cst_equal (len1, len2))
return len1;
}
if (TREE_CODE (src) == COMPOUND_EXPR
&& (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
location_t loc = EXPR_LOC_OR_LOC (src, input_location);
/* Offset from the beginning of the string in bytes. */
tree byteoff;
tree memsize;
tree decl;
src = string_constant (src, &byteoff, &memsize, &decl);
if (src == 0)
return NULL_TREE;
/* Determine the size of the string element. */
if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
return NULL_TREE;
/* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
in case the latter is less than the size of the array, such as when
SRC refers to a short string literal used to initialize a large array.
In that case, the elements of the array after the terminating NUL are
all NUL. */
HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
strelts = strelts / eltsize;
if (!tree_fits_uhwi_p (memsize))
return NULL_TREE;
HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
/* PTR can point to the byte representation of any string type, including
char* and wchar_t*. */
const char *ptr = TREE_STRING_POINTER (src);
if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
{
/* The code below works only for single byte character types. */
if (eltsize != 1)
return NULL_TREE;
/* If the string has an internal NUL character followed by any
non-NUL characters (e.g., "foo\0bar"), we can't compute
the offset to the following NUL if we don't know where to
start searching for it. */
unsigned len = string_length (ptr, eltsize, strelts);
/* Return when an embedded null character is found or none at all.
In the latter case, set the DECL/LEN field in the DATA structure
so that callers may examine them. */
if (len + 1 < strelts)
return NULL_TREE;
else if (len >= maxelts)
{
data->decl = decl;
data->off = byteoff;
data->minlen = ssize_int (len);
return NULL_TREE;
}
/* For empty strings the result should be zero. */
if (len == 0)
return ssize_int (0);
/* We don't know the starting offset, but we do know that the string
has no internal zero bytes. If the offset falls within the bounds
of the string subtract the offset from the length of the string,
and return that. Otherwise the length is zero. Take care to
use SAVE_EXPR in case the OFFSET has side-effects. */
tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
: byteoff;
offsave = fold_convert_loc (loc, sizetype, offsave);
tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
size_int (len));
tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
offsave);
lenexp = fold_convert_loc (loc, ssizetype, lenexp);
return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
build_zero_cst (ssizetype));
}
/* Offset from the beginning of the string in elements. */
HOST_WIDE_INT eltoff;
/* We have a known offset into the string. Start searching there for
a null character if we can represent it as a single HOST_WIDE_INT. */
if (byteoff == 0)
eltoff = 0;
else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
eltoff = -1;
else
eltoff = tree_to_uhwi (byteoff) / eltsize;
/* If the offset is known to be out of bounds, warn, and call strlen at
runtime. */
if (eltoff < 0 || eltoff >= maxelts)
{
/* Suppress multiple warnings for propagated constant strings. */
if (only_value != 2
&& !TREE_NO_WARNING (arg)
&& warning_at (loc, OPT_Warray_bounds,
"offset %qwi outside bounds of constant string",
eltoff))
{
if (decl)
inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
TREE_NO_WARNING (arg) = 1;
}
return NULL_TREE;
}
/* If eltoff is larger than strelts but less than maxelts the
string length is zero, since the excess memory will be zero. */
if (eltoff > strelts)
return ssize_int (0);
/* Use strlen to search for the first zero byte. Since any strings
constructed with build_string will have nulls appended, we win even
if we get handed something like (char[4])"abcd".
Since ELTOFF is our starting index into the string, no further
calculation is needed. */
unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
strelts - eltoff);
/* Don't know what to return if there was no zero termination.
Ideally this would turn into a gcc_checking_assert over time.
Set DECL/LEN so callers can examine them. */
if (len >= maxelts - eltoff)
{
data->decl = decl;
data->off = byteoff;
data->minlen = ssize_int (len);
return NULL_TREE;
}
return ssize_int (len);
}
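/* Worked examples (illustration only), following the logic above:
c_strlen of "hello" with a constant byte offset of 2 folds to 3;
c_strlen of "foo" with an unknown offset folds to the conditional
(off <= 3 ? 3 - off : 0), since the string has no embedded NUL; and
c_strlen of "foo\0bar" with an unknown offset returns NULL_TREE because
the embedded NUL makes the result depend on where the search starts. */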
/* Return a constant integer corresponding to target reading
GET_MODE_BITSIZE (MODE) bits from string constant STR. If
NULL_TERMINATED_P, reading stops after '\0' character, all further ones
are assumed to be zero, otherwise it reads as many characters
as needed. */
rtx
c_readstr (const char *str, scalar_int_mode mode,
bool null_terminated_p/*=true*/)
{
HOST_WIDE_INT ch;
unsigned int i, j;
HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
/ HOST_BITS_PER_WIDE_INT;
gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
for (i = 0; i < len; i++)
tmp[i] = 0;
ch = 1;
for (i = 0; i < GET_MODE_SIZE (mode); i++)
{
j = i;
if (WORDS_BIG_ENDIAN)
j = GET_MODE_SIZE (mode) - i - 1;
if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
j *= BITS_PER_UNIT;
if (ch || !null_terminated_p)
ch = (unsigned char) str[i];
tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
}
wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
return immed_wide_int_const (c, mode);
}
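/* Stand-alone illustration, not GCC code: pack a NUL-terminated string into
a 32-bit constant the way c_readstr does for a 4-byte integer mode on a
little-endian target; bytes past the terminating NUL read as zero, so the
result for "ab" is 0x6261. Kept under #if 0 so it is never compiled. */
#if 0
#include <stdint.h>
static uint32_t
example_readstr_le32 (const char *str)
{
  uint32_t val = 0;
  int ch = 1;
  for (int i = 0; i < 4; i++)
    {
      if (ch)
	ch = (unsigned char) str[i];
      val |= (uint32_t) ch << (i * 8);
    }
  return val;
}
#endif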
/* Cast a target constant CST to target CHAR and if that value fits into
host char type, return zero and put that value into variable pointed to by
P. */
static int
target_char_cast (tree cst, char *p)
{
unsigned HOST_WIDE_INT val, hostval;
if (TREE_CODE (cst) != INTEGER_CST
|| CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
return 1;
/* Do not care if it fits or not right here. */
val = TREE_INT_CST_LOW (cst);
if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
hostval = val;
if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
if (val != hostval)
return 1;
*p = hostval;
return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
in between the multiple evaluations. In particular, we assume that a
non-addressable local variable will not be modified. */
static tree
builtin_save_expr (tree exp)
{
if (TREE_CODE (exp) == SSA_NAME
|| (TREE_ADDRESSABLE (exp) == 0
&& (TREE_CODE (exp) == PARM_DECL
|| (VAR_P (exp) && !TREE_STATIC (exp)))))
return exp;
return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
times to get the address of either a higher stack frame, or a return
address located within it (depending on FNDECL_CODE). */
static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
int i;
rtx tem = INITIAL_FRAME_ADDRESS_RTX;
if (tem == NULL_RTX)
{
/* For a zero count with __builtin_return_address, we don't care what
frame address we return, because target-specific definitions will
override us. Therefore frame pointer elimination is OK, and using
the soft frame pointer is OK.
For a nonzero count, or a zero count with __builtin_frame_address,
we require a stable offset from the current frame pointer to the
previous one, so we must use the hard frame pointer, and
we must disable frame pointer elimination. */
if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
tem = frame_pointer_rtx;
else
{
tem = hard_frame_pointer_rtx;
/* Tell reload not to eliminate the frame pointer. */
crtl->accesses_prior_frames = 1;
}
}
if (count > 0)
SETUP_FRAME_ADDRESSES ();
/* On the SPARC, the return address is not in the frame, it is in a
register. There is no way to access it off of the current frame
pointer, but it can be accessed off the previous frame pointer by
reading the value from the register window save area. */
if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
count--;
/* Scan back COUNT frames to the specified frame. */
for (i = 0; i < count; i++)
{
/* Assume the dynamic chain pointer is in the word that the
frame address points to, unless otherwise specified. */
tem = DYNAMIC_CHAIN_ADDRESS (tem);
tem = memory_address (Pmode, tem);
tem = gen_frame_mem (Pmode, tem);
tem = copy_to_reg (tem);
}
/* For __builtin_frame_address, return what we've got. But, on
the SPARC for example, we may have to add a bias. */
if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
return FRAME_ADDR_RTX (tem);
/* For __builtin_return_address, get the return address from that frame. */
#ifdef RETURN_ADDR_RTX
tem = RETURN_ADDR_RTX (count, tem);
#else
tem = memory_address (Pmode,
plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
tem = gen_frame_mem (Pmode, tem);