[src/trunk]: src/gnu/dist/gcc/gcc apply the changes in PR#22489.
details: https://anonhg.NetBSD.org/src/rev/223aae70fb3c
branches: trunk
changeset: 565376:223aae70fb3c
user: mrg <mrg@NetBSD.org>
date: Mon Apr 05 14:32:56 2004 +0000
description:
apply the changes in PR#22489.

2003-07-10  Kazu Hirata  <kazu@cs.umass.edu>

	PR c/11449
	* fold-const.c (sign_bit_p): Return EXP if VAL is the sign bit
	of HOST_WIDE_INT.
	(fold_single_bit_test): If sign_bit_p() fails, assume that the
	bit being tested is not a sign bit.

2003-07-04  Jeff Law  <law@redhat.com>

	PR c/11428
	* expr.c (do_store_flag): Pass in the correct result type
	when calling fold_single_bit_test.
	* fold-const.c (fold_single_bit_test): Use result_type for the
	result when folding a sign bit test.

2003-07-02  Jeff Law  <law@redhat.com>

	* expr.c (do_store_flag): Remove special case folding for
	single bit tests.  Instead call back into the commonized folder
	routine.
	* fold-const.c (fold_single_bit_test): New function, mostly
	extracted from do_store_flag, with an additional case extracted
	from fold.
	(fold): Call fold_single_bit_test appropriately.
	* tree.h (fold_single_bit_test): Prototype.
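
In source terms, the rewrite that these changes centralize in
fold_single_bit_test is: a test of a single power-of-two bit becomes a
shift, an optional XOR, and a mask, while a test of the sign bit becomes
a signed comparison against zero. The standalone C sketch below is
illustrative only, not GCC code (the helper names are invented); it
checks the equivalences the folder relies on:

#include <assert.h>
#include <limits.h>

/* (a & c) != 0, with c == 1u << bitnum, folds to ((a >> bitnum) & 1).  */
static unsigned bit_test_ne (unsigned a, int bitnum)
{
  return (a >> bitnum) & 1;
}

/* (a & c) == 0 additionally XORs the shifted bit with 1.  */
static unsigned bit_test_eq (unsigned a, int bitnum)
{
  return ((a >> bitnum) ^ 1) & 1;
}

int main (void)
{
  unsigned a;
  int b;

  for (a = 0; a < 1024; a++)
    for (b = 0; b < 10; b++)
      {
	assert (((a & (1u << b)) != 0) == bit_test_ne (a, b));
	assert (((a & (1u << b)) == 0) == bit_test_eq (a, b));
      }

  /* The sign-bit case (PR c/11428, c/11449) folds to a signed
     comparison against zero instead of a shift and mask.  */
  {
    int s = -5;
    unsigned sign_bit = 1u << (sizeof (int) * CHAR_BIT - 1);
    assert ((((unsigned) s & sign_bit) != 0) == (s < 0));
  }
  return 0;
}

As the comment retained in the patch notes, the folded form emits the
AND last so that it can combine with subsequent operations during RTL
generation.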
diffstat:
gnu/dist/gcc/gcc/ChangeLog | 27 +++++++
gnu/dist/gcc/gcc/expr.c | 61 ++--------------
gnu/dist/gcc/gcc/fold-const.c | 151 ++++++++++++++++++++++++++++++++++++-----
gnu/dist/gcc/gcc/tree.h | 2 +
4 files changed, 168 insertions(+), 73 deletions(-)
diffs (truncated from 318 to 300 lines):
diff -r 68f86b2a44ec -r 223aae70fb3c gnu/dist/gcc/gcc/ChangeLog
--- a/gnu/dist/gcc/gcc/ChangeLog Mon Apr 05 13:06:23 2004 +0000
+++ b/gnu/dist/gcc/gcc/ChangeLog Mon Apr 05 14:32:56 2004 +0000
@@ -1,3 +1,30 @@
+2003-07-10 Kazu Hirata <kazu@cs.umass.edu>
+
+ PR c/11449
+ * fold-const.c (sign_bit_p): Return EXP if VAL is the sign bit
+ of HOST_WIDE_INT.
+ (fold_single_bit_test): If sign_bit_p() fails, assume that the
+ bit being tested is not a sign bit.
+
+2003-07-04 Jeff Law <law@redhat.com>
+
+ PR c/11428
+ * expr.c (do_store_flag): Pass in the correct result type
+ when calling fold_single_bit_test.
+ * fold-const.c (fold_single_bit_test): Use result_type for the
+ result when folding a sign bit test.
+
+2003-07-02 Jeff Law <law@redhat.com>
+
+ * expr.c (do_store_flag): Remove special case folding for
+ single bit tests. Instead call back into the commonized folder
+ routine.
+ * fold-const.c (fold_single_bit_test): New function, mostly
+ extracted from do_store_flag, with an additional case extracted
+ from fold.
+ (fold): Call fold_single_bit_test appropriately.
+ * tree.h (fold_single_bit_test): Prototype.
+
2004-01-27 Eric Botcazou <ebotcazou@libertysurf.fr>
PR target/10904
diff -r 68f86b2a44ec -r 223aae70fb3c gnu/dist/gcc/gcc/expr.c
--- a/gnu/dist/gcc/gcc/expr.c Mon Apr 05 13:06:23 2004 +0000
+++ b/gnu/dist/gcc/gcc/expr.c Mon Apr 05 14:32:56 2004 +0000
@@ -10879,64 +10879,19 @@
do this by shifting the bit being tested to the low-order bit and
masking the result with the constant 1. If the condition was EQ,
we xor it with 1. This does not require an scc insn and is faster
- than an scc insn even if we have it. */
+ than an scc insn even if we have it.
+
+ The code to make this transformation was moved into fold_single_bit_test,
+ so we just call into the folder and expand its result. */
if ((code == NE || code == EQ)
&& TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
&& integer_pow2p (TREE_OPERAND (arg0, 1)))
{
- tree inner = TREE_OPERAND (arg0, 0);
- int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
- int ops_unsignedp;
-
- /* If INNER is a right shift of a constant and it plus BITNUM does
- not overflow, adjust BITNUM and INNER. */
-
- if (TREE_CODE (inner) == RSHIFT_EXPR
- && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
- && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
- && bitnum < TYPE_PRECISION (type)
- && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
- bitnum - TYPE_PRECISION (type)))
- {
- bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
- inner = TREE_OPERAND (inner, 0);
- }
-
- /* If we are going to be able to omit the AND below, we must do our
- operations as unsigned. If we must use the AND, we have a choice.
- Normally unsigned is faster, but for some machines signed is. */
- ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
-#ifdef LOAD_EXTEND_OP
- : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
-#else
- : 1
-#endif
- );
-
- if (! get_subtarget (subtarget)
- || GET_MODE (subtarget) != operand_mode
- || ! safe_from_p (subtarget, inner, 1))
- subtarget = 0;
-
- op0 = expand_expr (inner, subtarget, VOIDmode, 0);
-
- if (bitnum != 0)
- op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
- size_int (bitnum), subtarget, ops_unsignedp);
-
- if (GET_MODE (op0) != mode)
- op0 = convert_to_mode (mode, op0, ops_unsignedp);
-
- if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
- ops_unsignedp, OPTAB_LIB_WIDEN);
-
- /* Put the AND last so it can combine with more things. */
- if (bitnum != TYPE_PRECISION (type) - 1)
- op0 = expand_and (mode, op0, const1_rtx, subtarget);
-
- return op0;
+ tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
+ return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
+ arg0, arg1, type),
+ target, VOIDmode, EXPAND_NORMAL);
}
/* Now see if we are likely to be able to do this. Return if not. */
diff -r 68f86b2a44ec -r 223aae70fb3c gnu/dist/gcc/gcc/fold-const.c
--- a/gnu/dist/gcc/gcc/fold-const.c Mon Apr 05 13:06:23 2004 +0000
+++ b/gnu/dist/gcc/gcc/fold-const.c Mon Apr 05 14:32:56 2004 +0000
@@ -2709,8 +2709,8 @@
tree exp;
tree val;
{
- unsigned HOST_WIDE_INT lo;
- HOST_WIDE_INT hi;
+ unsigned HOST_WIDE_INT mask_lo, lo;
+ HOST_WIDE_INT mask_hi, hi;
int width;
tree t;
@@ -2729,14 +2729,25 @@
{
hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
lo = 0;
+
+ mask_hi = ((unsigned HOST_WIDE_INT) -1
+ >> (2 * HOST_BITS_PER_WIDE_INT - width));
+ mask_lo = -1;
}
else
{
hi = 0;
lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
+
+ mask_hi = 0;
+ mask_lo = ((unsigned HOST_WIDE_INT) -1
+ >> (HOST_BITS_PER_WIDE_INT - width));
}
- if (TREE_INT_CST_HIGH (val) == hi && TREE_INT_CST_LOW (val) == lo)
+ /* We mask off those bits beyond TREE_TYPE (exp) so that we can
+ treat VAL as if it were unsigned. */
+ if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
+ && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
return exp;
/* Handle extension from a narrower type. */
@@ -4585,6 +4596,112 @@
}
+/* If CODE with arguments ARG0 and ARG1 represents a single bit
+ equality/inequality test, then return a simplified form of
+ the test using shifts and logical operations. Otherwise return
+ NULL. TYPE is the desired result type. */
+
+tree
+fold_single_bit_test (code, arg0, arg1, result_type)
+ enum tree_code code;
+ tree arg0;
+ tree arg1;
+ tree result_type;
+{
+ /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
+ operand 0. */
+ if (code == TRUTH_NOT_EXPR)
+ {
+ code = TREE_CODE (arg0);
+ if (code != NE_EXPR && code != EQ_EXPR)
+ return NULL_TREE;
+
+ /* Extract the arguments of the EQ/NE. */
+ arg1 = TREE_OPERAND (arg0, 1);
+ arg0 = TREE_OPERAND (arg0, 0);
+
+ /* This requires us to invert the code. */
+ code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
+ }
+
+ /* If this is testing a single bit, we can optimize the test. */
+ if ((code == NE_EXPR || code == EQ_EXPR)
+ && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
+ && integer_pow2p (TREE_OPERAND (arg0, 1)))
+ {
+ tree inner = TREE_OPERAND (arg0, 0);
+ tree type = TREE_TYPE (arg0);
+ int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
+ enum machine_mode operand_mode = TYPE_MODE (type);
+ int ops_unsigned;
+ tree signed_type, unsigned_type;
+ tree arg00;
+
+ /* If we have (A & C) != 0 where C is the sign bit of A, convert
+ this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
+ arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
+ if (arg00 != NULL_TREE)
+ {
+ tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
+ return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
+ convert (stype, arg00),
+ convert (stype, integer_zero_node)));
+ }
+
+ /* At this point, we know that arg0 is not testing the sign bit. */
+ if (TYPE_PRECISION (type) - 1 == bitnum)
+ abort ();
+
+ /* Otherwise we have (A & C) != 0 where C is a single bit,
+ convert that into ((A >> C2) & 1). Where C2 = log2(C).
+ Similarly for (A & C) == 0. */
+
+ /* If INNER is a right shift of a constant and it plus BITNUM does
+ not overflow, adjust BITNUM and INNER. */
+ if (TREE_CODE (inner) == RSHIFT_EXPR
+ && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
+ && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
+ && bitnum < TYPE_PRECISION (type)
+ && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
+ bitnum - TYPE_PRECISION (type)))
+ {
+ bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+ inner = TREE_OPERAND (inner, 0);
+ }
+
+ /* If we are going to be able to omit the AND below, we must do our
+ operations as unsigned. If we must use the AND, we have a choice.
+ Normally unsigned is faster, but for some machines signed is. */
+#ifdef LOAD_EXTEND_OP
+ ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
+#else
+ ops_unsigned = 1;
+#endif
+
+ signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
+ unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
+
+ if (bitnum != 0)
+ inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
+ inner, size_int (bitnum));
+
+ if (code == EQ_EXPR)
+ inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
+ inner, integer_one_node);
+
+ /* Put the AND last so it can combine with more things. */
+ inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
+ inner, integer_one_node);
+
+ /* Make sure to return the proper type. */
+ if (TREE_TYPE (inner) != result_type)
+ inner = convert (result_type, inner);
+
+ return inner;
+ }
+ return NULL_TREE;
+}
+
/* Perform constant folding and related simplification of EXPR.
The related simplifications include x*1 => x, x*0 => 0, etc.,
and application of the associative law.
@@ -5826,7 +5943,12 @@
tem = invert_truthvalue (arg0);
/* Avoid infinite recursion. */
if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
- return t;
+ {
+ tem = fold_single_bit_test (code, arg0, arg1, type);
+ if (tem)
+ return tem;
+ return t;
+ }
return convert (type, tem);
case TRUTH_ANDIF_EXPR:
@@ -6456,22 +6578,11 @@
return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
arg0, integer_zero_node));
- /* If we have (A & C) != 0 where C is the sign bit of A, convert
- this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
- if ((code == EQ_EXPR || code == NE_EXPR)
- && TREE_CODE (arg0) == BIT_AND_EXPR
- && integer_zerop (arg1))
- {
- tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0),
- TREE_OPERAND (arg0, 1));
- if (arg00 != NULL_TREE)
- {
- tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
- return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
- convert (stype, arg00),
- convert (stype, integer_zero_node)));
- }
- }
+ /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
+ 2, then fold the expression into shifts and logical operations. */
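
The fold-const.c hunk above also extends sign_bit_p() so that bits of
VAL beyond the precision of TREE_TYPE (exp) are masked off before the
comparison; without the mask, a sign-extended constant such as -32768 in
a 16-bit type was not recognized as the sign bit (the root of
PR c/11449). Below is a minimal standalone sketch of that masking logic,
not GCC code, with 32-bit words standing in for HOST_WIDE_INT and a
boolean result in place of returning EXP:

#include <stdint.h>
#include <stdio.h>

#define WIDE_BITS 32	/* illustrative stand-in for HOST_BITS_PER_WIDE_INT */

/* Return nonzero if the two-word constant VAL_HI:VAL_LO is the sign
   bit of a type WIDTH bits wide, ignoring bits beyond that width.  */
static int
is_sign_bit (uint32_t val_hi, uint32_t val_lo, int width)
{
  uint32_t hi, lo, mask_hi, mask_lo;

  if (width > WIDE_BITS)
    {
      /* The sign bit lives in the high word.  */
      hi = (uint32_t) 1 << (width - WIDE_BITS - 1);
      lo = 0;
      mask_hi = (uint32_t) -1 >> (2 * WIDE_BITS - width);
      mask_lo = (uint32_t) -1;
    }
  else
    {
      /* The sign bit lives in the low word; mask the high word away.  */
      hi = 0;
      lo = (uint32_t) 1 << (width - 1);
      mask_hi = 0;
      mask_lo = (uint32_t) -1 >> (WIDE_BITS - width);
    }

  /* Mask off bits beyond the type so VAL is treated as unsigned.  */
  return (val_hi & mask_hi) == hi && (val_lo & mask_lo) == lo;
}

int
main (void)
{
  /* 0x8000 is the sign bit of a 16-bit type...  */
  printf ("%d\n", is_sign_bit (0, 0x8000, 16));		    /* 1 */
  /* ...and so is sign-extended -32768 once the mask is applied;
     the pre-patch exact comparison rejected this form.  */
  printf ("%d\n", is_sign_bit (0xffffffff, 0xffff8000, 16)); /* 1 */
  printf ("%d\n", is_sign_bit (0, 0x4000, 16));		    /* 0 */
  return 0;
}

The two arms mirror the patch: when the precision exceeds one word, the
sign bit and mask straddle the high word; otherwise the high word is
masked away entirely.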