author     Roger Sayle <roger@nextmovesoftware.com>    2020-08-25 10:50:48 +0100
committer  Roger Sayle <roger@nextmovesoftware.com>    2020-08-25 10:52:06 +0100
commit     a0b4e42af26a85da2698e573ac8e32fa0a5709d6 (patch)
tree       596101b46ace339575d90d008a761964e7e71e1e
parent     68e605c93d57c17f07edd50f7e1c80f9216befd2 (diff)
middle-end: PR tree-optimization/21137: STRIP_NOPS avoids missed optimization.
PR tree-optimization/21137 is now an old enhancement request pointing out
that an optimization I added back in 2006, which simplifies "((x>>31)&64) != 0"
to "x < 0", doesn't fire in the presence of unanticipated type conversions.
The fix is to call STRIP_NOPS at the appropriate point.
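To see the equivalence the fold relies on: for a 32-bit int, "x >> 31" is either
0 or -1, since GCC defines right-shifting a negative signed value as an arithmetic
(sign-extending) shift, so masking the shifted value with any nonzero constant is
nonzero exactly when x is negative. A minimal self-checking sketch of that claim
(illustration only, not part of the patch; the helper names are invented):

#include <assert.h>

/* Illustration: ((x >> 31) & 64) != 0 behaves like x < 0 for a 32-bit
   int, assuming arithmetic right shift of signed values as GCC
   provides.  Both helpers are made up for this sketch.  */
static int shift_mask_test (int x) { return ((x >> 31) & 64) != 0; }
static int sign_test       (int x) { return x < 0; }

int
main (void)
{
  int samples[] = { 0, 1, -1, 42, -42, 2147483647, -2147483647 - 1 };
  for (unsigned i = 0; i < sizeof samples / sizeof samples[0]; i++)
    /* x >> 31 is 0 for non-negative x and -1 (all bits set) otherwise,
       so the masked value is nonzero exactly when x is negative.  */
    assert (shift_mask_test (samples[i]) == sign_test (samples[i]));
  return 0;
}

The old matcher required the RSHIFT_EXPR to sit directly under the BIT_AND_EXPR,
so a conversion wrapped around the shift defeated the pattern; calling STRIP_NOPS
on that operand first lets the fold look through the conversion.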
2020-08-25 Roger Sayle <roger@nextmovesoftware.com>
gcc/ChangeLog
PR tree-optimization/21137
* fold-const.c (fold_binary_loc) [NE_EXPR/EQ_EXPR]: Call
STRIP_NOPS when checking whether to simplify ((x>>C1)&C2) != 0.
gcc/testsuite/ChangeLog
PR tree-optimization/21137
* gcc.dg/pr21137.c: New test.
-rw-r--r--  gcc/fold-const.c               | 70
-rw-r--r--  gcc/testsuite/gcc.dg/pr21137.c | 20
2 files changed, 59 insertions, 31 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 78f72f0cfa3..1f861630225 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -11609,45 +11609,53 @@ fold_binary_loc (location_t loc, enum tree_code code, tree type,
          C1 is a valid shift constant, and C2 is a power of two, i.e.
          a single bit.  */
       if (TREE_CODE (arg0) == BIT_AND_EXPR
-          && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
-          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
-             == INTEGER_CST
           && integer_pow2p (TREE_OPERAND (arg0, 1))
           && integer_zerop (arg1))
         {
-          tree itype = TREE_TYPE (arg0);
-          tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
-          prec = TYPE_PRECISION (itype);
-
-          /* Check for a valid shift count.  */
-          if (wi::ltu_p (wi::to_wide (arg001), prec))
+          tree arg00 = TREE_OPERAND (arg0, 0);
+          STRIP_NOPS (arg00);
+          if (TREE_CODE (arg00) == RSHIFT_EXPR
+              && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
             {
-              tree arg01 = TREE_OPERAND (arg0, 1);
-              tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
-              unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
-              /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
-                 can be rewritten as (X & (C2 << C1)) != 0.  */
-              if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
+              tree itype = TREE_TYPE (arg00);
+              tree arg001 = TREE_OPERAND (arg00, 1);
+              prec = TYPE_PRECISION (itype);
+
+              /* Check for a valid shift count.  */
+              if (wi::ltu_p (wi::to_wide (arg001), prec))
                 {
-                  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
-                  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
-                  return fold_build2_loc (loc, code, type, tem,
-                                          fold_convert_loc (loc, itype, arg1));
-                }
-              /* Otherwise, for signed (arithmetic) shifts,
-                 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
-                 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
-              else if (!TYPE_UNSIGNED (itype))
-                return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
-                                    arg000, build_int_cst (itype, 0));
-              /* Otherwise, of unsigned (logical) shifts,
-                 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
-                 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
-              else
-                return omit_one_operand_loc (loc, type,
+                  tree arg01 = TREE_OPERAND (arg0, 1);
+                  tree arg000 = TREE_OPERAND (arg00, 0);
+                  unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
+                  /* If (C2 << C1) doesn't overflow, then
+                     ((X >> C1) & C2) != 0 can be rewritten as
+                     (X & (C2 << C1)) != 0.  */
+                  if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
+                    {
+                      tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
+                                             arg01, arg001);
+                      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
+                                             arg000, tem);
+                      return fold_build2_loc (loc, code, type, tem,
+                                              fold_convert_loc (loc, itype, arg1));
+                    }
+                  /* Otherwise, for signed (arithmetic) shifts,
+                     ((X >> C1) & C2) != 0 is rewritten as X < 0, and
+                     ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
+                  else if (!TYPE_UNSIGNED (itype))
+                    return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
+                                                                 : LT_EXPR,
+                                            type, arg000,
+                                            build_int_cst (itype, 0));
+                  /* Otherwise, of unsigned (logical) shifts,
+                     ((X >> C1) & C2) != 0 is rewritten as (X,false), and
+                     ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
+                  else
+                    return omit_one_operand_loc (loc, type,
                                          code == EQ_EXPR ? integer_one_node
                                                          : integer_zero_node,
                                          arg000);
+                }
             }
         }

diff --git a/gcc/testsuite/gcc.dg/pr21137.c b/gcc/testsuite/gcc.dg/pr21137.c
new file mode 100644
index 00000000000..6d73deaee6c
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/pr21137.c
@@ -0,0 +1,20 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+
+void foo();
+
+void test5_1(int e)
+{
+  if ((e >> 31) & 64)
+    foo();
+}
+
+typedef int myint;
+
+void test5_2(myint e)
+{
+  if ((e >> 31) & 64)
+    foo();
+}
+
+/* { dg-final { scan-tree-dump-times " < 0" 2 "optimized" } } */
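As a reading aid for the hunk above: the rewrite logic itself is unchanged, it has
simply moved one nesting level deeper so that it runs on the stripped operand. The
following stand-alone sketch (plain C, not GCC internals; the classify helper and
its enum are invented for illustration) restates the three-way decision it makes
for the != 0 form, where C2 == 1 << LOG2:

#include <assert.h>
#include <limits.h>

enum rewrite { MASK_TEST, SIGN_TEST, ALWAYS_FALSE };

/* Mirrors the case analysis in the hunk for ((x >> C1) & C2) != 0:
   if C2 << C1 still fits in the precision, test the shifted mask on x
   directly; otherwise the tested bit is a copy of the sign bit for
   arithmetic (signed) shifts and always zero for logical (unsigned)
   shifts.  */
static enum rewrite
classify (unsigned log2, unsigned c1, unsigned prec, int is_signed)
{
  if (log2 + c1 < prec)
    return MASK_TEST;                  /* (x & (C2 << C1)) != 0 */
  return is_signed ? SIGN_TEST         /* x < 0 */
                   : ALWAYS_FALSE;     /* folded to constant false */
}

int
main (void)
{
  const unsigned prec = sizeof (int) * CHAR_BIT;  /* typically 32 */

  /* ((x >> 2) & 64) != 0: 64 << 2 == 256 still fits, so it becomes a
     plain mask test.  */
  assert (classify (6, 2, prec, 1) == MASK_TEST);

  /* ((x >> 31) & 64) != 0, the PR's example: 6 + 31 >= 32, so for a
     signed x it is exactly the sign test, and for an unsigned x it can
     never be true.  */
  assert (classify (6, 31, prec, 1) == SIGN_TEST);
  assert (classify (6, 31, prec, 0) == ALWAYS_FALSE);
  return 0;
}

The new test covers both the plain-int and the typedef'd form of the PR's example,
and the dg-final directive requires the optimized tree dump to contain a " < 0"
comparison twice, i.e. both functions must now be folded to the sign test.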