/* Mirror of https://forge.sourceware.org/marek/gcc.git,
   synced 2026-02-21 19:35:36 -05:00.  */
/* Preamble and helpers for the autogenerated gimple-match.cc file.

   Copyright (C) 2014-2026 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
|
|
#include "system.h"
|
|
#include "coretypes.h"
|
|
#include "backend.h"
|
|
#include "target.h"
|
|
#include "rtl.h"
|
|
#include "tree.h"
|
|
#include "gimple.h"
|
|
#include "ssa.h"
|
|
#include "cgraph.h"
|
|
#include "vec-perm-indices.h"
|
|
#include "fold-const.h"
|
|
#include "fold-const-call.h"
|
|
#include "stor-layout.h"
|
|
#include "gimple-iterator.h"
|
|
#include "gimple-fold.h"
|
|
#include "calls.h"
|
|
#include "tree-dfa.h"
|
|
#include "builtins.h"
|
|
#include "gimple-match.h"
|
|
#include "tree-pass.h"
|
|
#include "internal-fn.h"
|
|
#include "case-cfn-macros.h"
|
|
#include "gimplify.h"
|
|
#include "memmodel.h"
|
|
#include "optabs.h"
|
|
#include "optabs-tree.h"
|
|
#include "tree-eh.h"
|
|
#include "dbgcnt.h"
|
|
#include "tm.h"
|
|
#include "gimple-range.h"
|
|
#include "langhooks.h"
|
|
#include "attribs.h"
|
|
#include "asan.h"
|
|
|
|
tree do_valueize (tree, tree (*)(tree), bool &);
|
|
tree do_valueize (tree (*)(tree), tree);
|
|
|
|
/* Helper for the autogenerated code, get at the definition of NAME when
|
|
VALUEIZE allows that. */
|
|
|
|
inline gimple *
|
|
get_def (tree (*valueize)(tree), tree name)
|
|
{
|
|
if (valueize && ! valueize (name))
|
|
return NULL;
|
|
return SSA_NAME_DEF_STMT (name);
|
|
}
|
|
|
|
/* Routine to determine if the types T1 and T2 are effectively
|
|
the same for GIMPLE. If T1 or T2 is not a type, the test
|
|
applies to their TREE_TYPE. */
|
|
|
|
static inline bool
|
|
types_match (tree t1, tree t2)
|
|
{
|
|
if (!TYPE_P (t1))
|
|
t1 = TREE_TYPE (t1);
|
|
if (!TYPE_P (t2))
|
|
t2 = TREE_TYPE (t2);
|
|
|
|
return types_compatible_p (t1, t2);
|
|
}
|
|
|
|
/* Routine to determine if the types T1, T2 and T3 are effectively
|
|
the same for GIMPLE. If T1, T2 or T2 is not a type, the test
|
|
applies to their TREE_TYPE. */
|
|
|
|
static inline bool
|
|
types_match (tree t1, tree t2, tree t3)
|
|
{
|
|
return types_match (t1, t2) && types_match (t2, t3);
|
|
}
|
|
|
|
/* Return if T has a single use. For GIMPLE, we also allow any
|
|
non-SSA_NAME (ie constants) and zero uses to cope with uses
|
|
that aren't linked up yet. */
|
|
|
|
static bool
|
|
single_use (const_tree) ATTRIBUTE_PURE;
|
|
|
|
static bool
|
|
single_use (const_tree t)
|
|
{
|
|
if (TREE_CODE (t) != SSA_NAME)
|
|
return true;
|
|
|
|
/* Inline return has_zero_uses (t) || has_single_use (t); */
|
|
const ssa_use_operand_t *const head = &(SSA_NAME_IMM_USE_NODE (t));
|
|
const ssa_use_operand_t *ptr;
|
|
bool single = false;
|
|
|
|
for (ptr = head->next; ptr != head; ptr = ptr->next)
|
|
if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
|
|
{
|
|
if (single)
|
|
return false;
|
|
single = true;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
/* Return true if math operations should be canonicalized,
|
|
e.g. sqrt(sqrt(x)) -> pow(x, 0.25). */
|
|
|
|
static inline bool
|
|
canonicalize_math_p ()
|
|
{
|
|
return !cfun || (cfun->curr_properties & PROP_gimple_opt_math) == 0;
|
|
}
|
|
|
|
/* Return true if math operations that are beneficial only after
|
|
vectorization should be canonicalized. */
|
|
|
|
static inline bool
|
|
canonicalize_math_after_vectorization_p ()
|
|
{
|
|
return !cfun || (cfun->curr_properties & PROP_gimple_lvec) != 0;
|
|
}
|
|
|
|
/* Return true if we can still perform transformations that may introduce
|
|
vector operations that are not supported by the target. Vector lowering
|
|
normally handles those, but after that pass, it becomes unsafe. */
|
|
|
|
static inline bool
|
|
optimize_vectors_before_lowering_p ()
|
|
{
|
|
return !cfun || (cfun->curr_properties & PROP_gimple_lvec) == 0;
|
|
}
|
|
|
|
/* Returns true if the expression T has no side effects
   including not trapping.  */

static inline bool
expr_no_side_effects_p (tree t)
{
  /* For gimple, there should only be gimple val's here.  A gimple
     value (SSA name or invariant) never has side effects and never
     traps, so the answer is trivially true.  */
  gcc_assert (is_gimple_val (t));
  return true;
}
|
|
|
|
/* Return true if pow(cst, x) should be optimized into exp(log(cst) * x).
   As a workaround for SPEC CPU2017 628.pop2_s, don't do it if arg0
   is an exact integer, arg1 = phi_res +/- cst1 and phi_res = PHI <cst2, ...>
   where cst2 +/- cst1 is an exact integer, because then pow (arg0, arg1)
   will likely be exact, while exp (log (arg0) * arg1) might be not.
   Also don't do it if arg1 is phi_res above and cst2 is an exact integer.  */

static bool
optimize_pow_to_exp (tree arg0, tree arg1)
{
  gcc_assert (TREE_CODE (arg0) == REAL_CST);
  /* If the base is not an exact integer the workaround is moot:
     always allow the transformation.  */
  if (!real_isinteger (TREE_REAL_CST_PTR (arg0), TYPE_MODE (TREE_TYPE (arg0))))
    return true;

  /* Only SSA names can match the phi_res +/- cst1 shape below.  */
  if (TREE_CODE (arg1) != SSA_NAME)
    return true;

  gimple *def = SSA_NAME_DEF_STMT (arg1);
  gphi *phi = dyn_cast <gphi *> (def);
  tree cst1 = NULL_TREE;
  enum tree_code code = ERROR_MARK;
  if (!phi)
    {
      /* ARG1 is not directly a PHI result; check for
	 arg1 = phi_res +/- REAL_CST.  */
      if (!is_gimple_assign (def))
	return true;
      code = gimple_assign_rhs_code (def);
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  break;
	default:
	  return true;
	}
      if (TREE_CODE (gimple_assign_rhs1 (def)) != SSA_NAME
	  || TREE_CODE (gimple_assign_rhs2 (def)) != REAL_CST)
	return true;

      /* CST1 is the +/- adjustment applied to the PHI result.  */
      cst1 = gimple_assign_rhs2 (def);

      phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def)));
      if (!phi)
	return true;
    }

  /* Find the unique REAL_CST among the PHI arguments (if any); give
     up (i.e. allow the transformation) on two different constants.  */
  tree cst2 = NULL_TREE;
  int n = gimple_phi_num_args (phi);
  for (int i = 0; i < n; i++)
    {
      tree arg = PHI_ARG_DEF (phi, i);
      if (TREE_CODE (arg) != REAL_CST)
	continue;
      else if (cst2 == NULL_TREE)
	cst2 = arg;
      else if (!operand_equal_p (cst2, arg, 0))
	return true;
    }

  /* Fold in the +/- CST1 adjustment so CST2 reflects a value ARG1
     can actually take.  */
  if (cst1 && cst2)
    cst2 = const_binop (code, TREE_TYPE (cst2), cst2, cst1);
  /* If that value is an exact integer, pow would likely be exact
     while exp(log*...) might not be — refuse the transformation.  */
  if (cst2
      && TREE_CODE (cst2) == REAL_CST
      && real_isinteger (TREE_REAL_CST_PTR (cst2),
			 TYPE_MODE (TREE_TYPE (cst2))))
    return false;
  return true;
}
|
|
|
|
/* Return true if a division INNER_DIV / DIVISOR where INNER_DIV
|
|
is another division can be optimized. Don't optimize if INNER_DIV
|
|
is used in a TRUNC_MOD_EXPR with DIVISOR as second operand. */
|
|
|
|
static bool
|
|
optimize_successive_divisions_p (tree divisor, tree inner_div)
|
|
{
|
|
if (!gimple_in_ssa_p (cfun))
|
|
return false;
|
|
|
|
imm_use_iterator imm_iter;
|
|
use_operand_p use_p;
|
|
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, inner_div)
|
|
{
|
|
gimple *use_stmt = USE_STMT (use_p);
|
|
if (!is_gimple_assign (use_stmt)
|
|
|| gimple_assign_rhs_code (use_stmt) != TRUNC_MOD_EXPR
|
|
|| !operand_equal_p (gimple_assign_rhs2 (use_stmt), divisor, 0))
|
|
continue;
|
|
return false;
|
|
}
|
|
return true;
|
|
}
|
|
|
|
/* Return true if EXPR1 and EXPR2 have the same value, but not necessarily
|
|
same type. The types can differ through nop conversions. */
|
|
#define bitwise_equal_p(expr1, expr2) \
|
|
gimple_bitwise_equal_p (expr1, expr2, valueize)
|
|
|
|
bool gimple_nop_convert (tree, tree *, tree (*) (tree));
|
|
bool gimple_maybe_truncate (tree, tree *, tree (*) (tree));
|
|
|
|
/* Helper function for bitwise_equal_p macro. */
|
|
|
|
static inline bool
|
|
gimple_bitwise_equal_p (tree expr1, tree expr2, tree (*valueize) (tree))
|
|
{
|
|
if (expr1 == expr2)
|
|
return true;
|
|
if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
|
|
return false;
|
|
if (TREE_CODE (expr1) == INTEGER_CST && TREE_CODE (expr2) == INTEGER_CST)
|
|
return wi::to_wide (expr1) == wi::to_wide (expr2);
|
|
if (operand_equal_p (expr1, expr2, 0))
|
|
return true;
|
|
tree expr3, expr4;
|
|
if (!gimple_nop_convert (expr1, &expr3, valueize))
|
|
expr3 = expr1;
|
|
if (!gimple_nop_convert (expr2, &expr4, valueize))
|
|
expr4 = expr2;
|
|
if (expr1 != expr3)
|
|
{
|
|
if (operand_equal_p (expr3, expr2, 0))
|
|
return true;
|
|
if (expr2 != expr4 && operand_equal_p (expr3, expr4, 0))
|
|
return true;
|
|
}
|
|
if (expr2 != expr4 && operand_equal_p (expr1, expr4, 0))
|
|
return true;
|
|
if (gimple_maybe_truncate (expr3, &expr3, valueize)
|
|
&& gimple_maybe_truncate (expr4, &expr4, valueize)
|
|
&& operand_equal_p (expr3, expr4, 0))
|
|
return true;
|
|
return false;
|
|
}
|
|
|
|
/* Return true if EXPR1 and EXPR2 have the bitwise opposite value,
|
|
but not necessarily same type.
|
|
The types can differ through nop conversions. */
|
|
#define bitwise_inverted_equal_p(expr1, expr2, wascmp) \
|
|
gimple_bitwise_inverted_equal_p (expr1, expr2, wascmp, valueize)
|
|
|
|
|
|
bool gimple_bit_not_with_nop (tree, tree *, tree (*) (tree));
|
|
bool gimple_maybe_cmp (tree, tree *, tree (*) (tree));
|
|
bool gimple_bit_xor_cst (tree, tree *, tree (*) (tree));
|
|
|
|
/* Helper function for bitwise_inverted_equal_p macro.  Return true if
   EXPR1 is the bitwise inverse of EXPR2 (modulo nop conversions).
   WASCMP is set to true when the match was found via complementary
   comparisons rather than an explicit BIT_NOT/XOR.  */

static inline bool
gimple_bitwise_inverted_equal_p (tree expr1, tree expr2, bool &wascmp, tree (*valueize) (tree))
{
  wascmp = false;
  /* An expression is never the inverse of itself.  */
  if (expr1 == expr2)
    return false;
  if (!tree_nop_conversion_p (TREE_TYPE (expr1), TREE_TYPE (expr2)))
    return false;
  /* Two (uniform) constants invert iff one is the bitwise-not of the
     other.  */
  tree cst1 = uniform_integer_cst_p (expr1);
  tree cst2 = uniform_integer_cst_p (expr2);
  if (cst1 && cst2)
    return wi::to_wide (cst1) == ~wi::to_wide (cst2);
  if (operand_equal_p (expr1, expr2, 0))
    return false;

  tree xor1[2];
  tree xor2[2];
  /* `X ^ CST` and `X ^ ~CST` match for ~.  */
  if (gimple_bit_xor_cst (expr1, xor1, valueize)
      && gimple_bit_xor_cst (expr2, xor2, valueize))
    {
      if (operand_equal_p (xor1[0], xor2[0], 0)
	  && (wi::to_wide (uniform_integer_cst_p (xor1[1]))
	      == ~wi::to_wide (uniform_integer_cst_p (xor2[1]))))
	return true;
    }

  tree other;
  /* Try if EXPR1 was defined as ~EXPR2.  */
  if (gimple_bit_not_with_nop (expr1, &other, valueize))
    {
      if (gimple_bitwise_equal_p (other, expr2, valueize))
	return true;
    }
  /* Try if EXPR2 was defined as ~EXPR1.  */
  if (gimple_bit_not_with_nop (expr2, &other, valueize))
    {
      if (gimple_bitwise_equal_p (other, expr1, valueize))
	return true;
    }

  /* If neither are defined by BIT_NOT, try to see if
     both are defined by comparisons and see if they are
     complementary (inversion) of each other.  */
  tree newexpr1, newexpr2;
  if (!gimple_maybe_cmp (expr1, &newexpr1, valueize))
    return false;
  if (!gimple_maybe_cmp (expr2, &newexpr2, valueize))
    return false;

  /* NOTE(review): the dyn_casts below are not null-checked before the
     gimple_assign_rhs* accesses — presumably gimple_maybe_cmp only
     succeeds for names defined by comparison assignments; confirm
     against the match.pd definition of gimple_maybe_cmp.  */
  gimple *d1 = get_def (valueize, newexpr1);
  gassign *a1 = dyn_cast <gassign *> (d1);
  gimple *d2 = get_def (valueize, newexpr2);
  gassign *a2 = dyn_cast <gassign *> (d2);
  /* Both comparisons must be over the same (valueized) operands.  */
  tree op10 = do_valueize (valueize, gimple_assign_rhs1 (a1));
  tree op20 = do_valueize (valueize, gimple_assign_rhs1 (a2));
  if (!operand_equal_p (op10, op20))
    return false;
  tree op11 = do_valueize (valueize, gimple_assign_rhs2 (a1));
  tree op21 = do_valueize (valueize, gimple_assign_rhs2 (a2));
  if (!operand_equal_p (op11, op21))
    return false;
  wascmp = true;
  tree_code ac1 = gimple_assign_rhs_code (a1);
  tree_code ac2 = gimple_assign_rhs_code (a2);
  /* Match `^` against `==` but this should only
     happen when the type is a 1bit precision integer.  */
  if (ac1 == BIT_XOR_EXPR)
    {
      tree type = TREE_TYPE (newexpr1);
      gcc_assert (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1);
      return ac2 == EQ_EXPR;
    }
  if (ac2 == BIT_XOR_EXPR)
    {
      tree type = TREE_TYPE (newexpr1);
      gcc_assert (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1);
      return ac1 == EQ_EXPR;
    }
  /* Otherwise the codes must be exact inverses (honoring NaNs for
     floating-point operands).  */
  if (invert_tree_comparison (ac1, HONOR_NANS (op10)) == ac2)
    return true;
  return false;
}
|
|
|
|
/*
 * Return the relevant gcond * of the given phi, as well as the true
 * and false TREE args of the phi.  Or return nullptr.
 *
 * If matched the gcond *, the output argument TREE true_arg and false_arg
 * will be updated to the relevant args of phi.
 *
 * If failed to match, nullptr gcond * will be returned, as well as the output
 * arguments will be set to NULL_TREE.
 */

static inline gcond *
match_cond_with_binary_phi (gphi *phi, tree *true_arg, tree *false_arg)
{
  *true_arg = *false_arg = NULL_TREE;

  /* Only a two-argument PHI can come from a simple condition.  */
  if (gimple_phi_num_args (phi) != 2)
    return nullptr;

  basic_block pred_b0 = EDGE_PRED (gimple_bb (phi), 0)->src;
  basic_block pred_b1 = EDGE_PRED (gimple_bb (phi), 1)->src;
  /* The edge leaving the condition block that corresponds to PHI
     argument 0; remains null when no supported CFG shape matches.  */
  edge edge_for_pred_0 = nullptr;

  if (EDGE_COUNT (pred_b0->succs) == 2
      && EDGE_COUNT (pred_b1->succs) == 1
      && EDGE_COUNT (pred_b1->preds) == 1
      && pred_b0 == EDGE_PRED (pred_b1, 0)->src)
    /* Shape 1: pred 0 is the condition block; pred 1 is the
       intermediate block on the other arm.
     *
     * +------+
     * | b0:  |
     * | def  |       +-----+
     * | ...  |       | b1: |
     * | cond |------>| def |
     * +------+       | ... |
     *    |           +-----+
     *    #              |
     *    |              |
     *    v              |
     * +-----+           |
     * | b2: |           |
     * | def |<----------+
     * +-----+
     * #: edge_for_pred_0.
     */
    edge_for_pred_0 = EDGE_PRED (gimple_bb (phi), 0);
  else if (EDGE_COUNT (pred_b1->succs) == 2
	   && EDGE_COUNT (pred_b0->succs) == 1
	   && EDGE_COUNT (pred_b0->preds) == 1
	   && pred_b1 == EDGE_PRED (pred_b0, 0)->src)
    /* Shape 2: the mirror image — pred 1 is the condition block and
       pred 0 is the intermediate block.
     *
     *                 +------+
     *                 | b1:  |
     * +-----+         | def  |
     * | b0: |         | ...  |
     * | def |<---#----| cond |
     * | ... |         +------+
     * +-----+            |
     *    |               |
     *    |               |
     *    v               |
     * +-----+            |
     * | b2: |            |
     * | def |<-----------+
     * +-----+
     * #: edge_for_pred_0.
     */
    edge_for_pred_0 = EDGE_PRED (pred_b0, 0);
  else if (EDGE_COUNT (pred_b0->succs) == 1
	   && EDGE_COUNT (pred_b1->succs) == 1
	   && EDGE_COUNT (pred_b0->preds) == 1
	   && EDGE_COUNT (pred_b1->preds) == 1
	   && EDGE_COUNT (EDGE_PRED (pred_b0, 0)->src->succs) == 2
	   && EDGE_PRED (pred_b0, 0)->src == EDGE_PRED (pred_b1, 0)->src)
    /* Shape 3: both predecessors are intermediate blocks fed by a
       common condition block (a diamond).
     *
     * +------+
     * | b0:  |
     * | ...  |       +-----+
     * | cond |------>| b2: |
     * +------+       | ... |
     *    |           +-----+
     *    #              |
     *    |              |
     *    v              |
     * +-----+           |
     * | b1: |           |
     * | ... |           |
     * +-----+           |
     *    |              |
     *    |              |
     *    v              |
     * +-----+           |
     * | b3: |<----------+
     * | ... |
     * +-----+
     * #: edge_for_pred_0.
     */
    edge_for_pred_0 = EDGE_PRED (pred_b0, 0);

  if (!edge_for_pred_0)
    return nullptr;

  /* The condition block must end in a gcond.  */
  gcond *cond = safe_dyn_cast <gcond *> (*gsi_last_bb (edge_for_pred_0->src));

  if (!cond)
    return nullptr;

  /* Map the PHI arguments to the true/false arms depending on which
     way the edge for PHI argument 0 leaves the condition.  */
  if (edge_for_pred_0->flags & EDGE_TRUE_VALUE)
    {
      *true_arg = gimple_phi_arg_def (phi, 0);
      *false_arg = gimple_phi_arg_def (phi, 1);
    }
  else /* Aka edge_for_pred_0->flags & EDGE_FALSE_VALUE  */
    {
      *false_arg = gimple_phi_arg_def (phi, 0);
      *true_arg = gimple_phi_arg_def (phi, 1);
    }

  return cond;
}
|
|
|
|
/* If OP is a SSA_NAME with SSA_NAME_DEF_STMT in the IL, return that
|
|
stmt, otherwise NULL. For use in range_of_expr calls. */
|
|
|
|
static inline gimple *
|
|
gimple_match_ctx (tree op)
|
|
{
|
|
if (TREE_CODE (op) == SSA_NAME
|
|
&& SSA_NAME_DEF_STMT (op)
|
|
&& gimple_bb (SSA_NAME_DEF_STMT (op)))
|
|
return SSA_NAME_DEF_STMT (op);
|
|
return NULL;
|
|
}
|
|
|
|
/* Helper to shorten range queries in match.pd. R is the range to
|
|
be queried, OP tree on which it should be queried and CTX is some
|
|
capture on which gimple_match_ctx should be called, or NULL for
|
|
global range. */
|
|
|
|
static inline bool
|
|
gimple_match_range_of_expr (vrange &r, tree op, tree ctx = NULL_TREE)
|
|
{
|
|
if (!get_range_query (cfun)->range_of_expr (r, op,
|
|
ctx ? gimple_match_ctx (ctx)
|
|
: NULL))
|
|
return false;
|
|
return !r.undefined_p ();
|
|
}
|