@@ -1,3 +1,13 @@
+2014-11-08 Kugan Vivekanandarajah <kuganv@linaro.org>
+
+ * Makefile.in (OBJS): Add tree-type-prmtn.o.
+ * common.opt (ftree-type-prmt): New flag.
+ * opts.c (OPT_ftree_type_prmt): New option added.
+ * passes.def: New pass included.
+ * tree-pass.h: Likewise.
+ * timevar.def (TV_TREE_TYPE_PRMT): New timevar.
+ * tree-type-prmtn.c: New file.
+
2014-11-08 Richard Sandiford <richard.sandiford@arm.com>
* config/aarch64/aarch64.c: Include rtl-iter.h.
@@ -1463,6 +1463,7 @@ OBJS = \
tree-vect-slp.o \
tree-vectorizer.o \
tree-vrp.o \
+ tree-type-prmtn.o \
tree.o \
valtrack.o \
value-prof.o \
@@ -2304,6 +2304,10 @@ ftree-vrp
Common Report Var(flag_tree_vrp) Init(0) Optimization
Perform Value Range Propagation on trees
+ftree-type-prmt
+Common Report Var(flag_tree_type_prmt) Init(0) Optimization
+Perform Type Promotion on trees
+
funit-at-a-time
Common Report Var(flag_unit_at_a_time) Init(1) Optimization
Compile whole compilation unit at a time
@@ -500,6 +500,7 @@ static const struct default_options default_options_table[] =
{ OPT_LEVELS_2_PLUS, OPT_fipa_icf, NULL, 1 },
{ OPT_LEVELS_2_PLUS, OPT_fisolate_erroneous_paths_dereference, NULL, 1 },
{ OPT_LEVELS_2_PLUS, OPT_fuse_caller_save, NULL, 1 },
+ { OPT_LEVELS_2_PLUS, OPT_ftree_type_prmt, NULL, 1 },
/* -O3 optimizations. */
{ OPT_LEVELS_3_PLUS, OPT_ftree_loop_distribute_patterns, NULL, 1 },
@@ -264,6 +264,7 @@ along with GCC; see the file COPYING3. If not see
PUSH_INSERT_PASSES_WITHIN (pass_tree_no_loop)
NEXT_PASS (pass_slp_vectorize);
POP_INSERT_PASSES ()
+ NEXT_PASS (pass_type_promotion);
NEXT_PASS (pass_lower_vector_ssa);
NEXT_PASS (pass_cse_reciprocals);
NEXT_PASS (pass_reassoc);
@@ -266,6 +266,7 @@ DEFTIMEVAR (TV_PLUGIN_RUN , "plugin execution")
DEFTIMEVAR (TV_GIMPLE_SLSR , "straight-line strength reduction")
DEFTIMEVAR (TV_VTABLE_VERIFICATION , "vtable verification")
DEFTIMEVAR (TV_TREE_UBSAN , "tree ubsan")
+DEFTIMEVAR (TV_TREE_TYPE_PRMT , "tree type promotion")
/* Everything else in rest_of_compilation not included above. */
DEFTIMEVAR (TV_EARLY_LOCAL , "early local passes")
@@ -424,6 +424,7 @@ extern gimple_opt_pass *make_pass_fre (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_check_data_deps (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_copy_prop (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_isolate_erroneous_paths (gcc::context *ctxt);
+extern gimple_opt_pass *make_pass_type_promotion (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_vrp (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_uncprop (gcc::context *ctxt);
extern gimple_opt_pass *make_pass_return_slot (gcc::context *ctxt);
new file mode 100644
@@ -0,0 +1,1103 @@
+/* Type promotion of SSA names to minimise redundant zero/sign extension.
+ Copyright (C) 2014 Free Software Foundation, Inc.
+
+This file is part of GCC.
+
+GCC is free software; you can redistribute it and/or modify
+it under the terms of the GNU General Public License as published by
+the Free Software Foundation; either version 3, or (at your option)
+any later version.
+
+GCC is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
+
+#include "config.h"
+#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
+#include "tree.h"
+#include "stor-layout.h"
+#include "calls.h"
+#include "predict.h"
+#include "machmode.h"
+#include "hard-reg-set.h"
+#include "function.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "basic-block.h"
+#include "tree-ssa-alias.h"
+#include "gimple-fold.h"
+#include "tree-eh.h"
+#include "gimple-expr.h"
+#include "is-a.h"
+#include "gimple.h"
+#include "gimple-iterator.h"
+#include "gimple-ssa.h"
+#include "tree-cfg.h"
+#include "tree-phinodes.h"
+#include "ssa-iterators.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "tree-ssa.h"
+#include "tree-pass.h"
+#include "gimple-pretty-print.h"
+#include "langhooks.h"
+#include "sbitmap.h"
+#include "domwalk.h"
+
+/* This pass applies type promotion to SSA names in the function and
+   inserts appropriate truncations.  The idea of this pass is to promote
+   operations in such a way that we can minimise the generation of subregs
+   in RTL, which in turn results in the removal of redundant zero/sign
+   extensions.  This pass runs prior to VRP and DOM so that they are able
+   to optimise redundant truncations and extensions.  This is based on the
+   discussion at https://gcc.gnu.org/ml/gcc-patches/2014-09/msg00472.html.  */
+
+static unsigned n_ssa_val;
+static sbitmap ssa_not_safe_bitmap;
+static sbitmap ssa_to_be_promoted_bitmap;
+static sbitmap ssa_sets_higher_bits_bitmap;
+
+/* Return the promoted type for TYPE as defined by PROMOTE_MODE of the
+ target. */
+static tree
+get_promoted_type (tree type)
+{
+#ifdef PROMOTE_MODE
+ tree promoted_type;
+ enum machine_mode mode = TYPE_MODE (type);
+ int uns = TYPE_SIGN (type);
+
+ if (POINTER_TYPE_P (type)
+ || TYPE_PRECISION (type) == 1
+ || !INTEGRAL_TYPE_P (type))
+ return type;
+
+ PROMOTE_MODE (mode, uns, type);
+ uns = TYPE_SIGN (type);
+ promoted_type = lang_hooks.types.type_for_mode (mode, uns);
+
+ if (TYPE_PRECISION (promoted_type) > TYPE_PRECISION (type))
+ {
+ promoted_type = build_type_attribute_qual_variant (promoted_type,
+ TYPE_ATTRIBUTES (type),
+ TYPE_QUALS (type));
+ type = promoted_type;
+ }
+#endif
+ return type;
+}
+
+/* Predicate that tells if promoting computation with ssa NAME is safe. */
+static bool
+promotion_safe_p (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ gimple stmt = SSA_NAME_DEF_STMT (name);
+ unsigned int index = SSA_NAME_VERSION (name);
+
+ if (gimple_vdef (stmt) != NULL_TREE
+ || gimple_vuse (stmt) != NULL_TREE)
+ return false;
+ if (index < n_ssa_val)
+ return !bitmap_bit_p (ssa_not_safe_bitmap, index);
+ }
+ return false;
+}
+
+/* Return true if ssa NAME is already considered for promotion. */
+static bool
+ssa_promoted_p (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ unsigned int index = SSA_NAME_VERSION (name);
+ if (index < n_ssa_val)
+ return !bitmap_bit_p (ssa_to_be_promoted_bitmap, index);
+ }
+ return true;
+}
+
+/* Return true if ssa NAME will be considered for promotion. */
+static bool
+ssa_tobe_promoted_p (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ unsigned int index = SSA_NAME_VERSION (name);
+ if (index < n_ssa_val)
+ return bitmap_bit_p (ssa_to_be_promoted_bitmap, index);
+ }
+ return false;
+}
+
+/* Set ssa NAME to be already considered for promotion. */
+static void
+set_ssa_promoted (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ unsigned int index = SSA_NAME_VERSION (name);
+ if (index < n_ssa_val)
+ bitmap_clear_bit (ssa_to_be_promoted_bitmap, index);
+ }
+}
+
+/* Set ssa NAME will have higher bits if promoted. */
+static void
+set_ssa_overflows (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ unsigned int index = SSA_NAME_VERSION (name);
+ if (index < n_ssa_val)
+ bitmap_set_bit (ssa_sets_higher_bits_bitmap, index);
+ }
+}
+
+/* Return true if ssa NAME will have higher bits if promoted. */
+static bool
+ssa_overflows_p (tree name)
+{
+ if (TREE_CODE (name) == SSA_NAME)
+ {
+ unsigned int index = SSA_NAME_VERSION (name);
+ if (index < n_ssa_val)
+ return bitmap_bit_p (ssa_sets_higher_bits_bitmap, index);
+ }
+ return false;
+}
+
+/* Create an ssa with TYPE to copy ssa VAR. */
+static tree
+make_promoted_copy (tree var, gimple def_stmt, tree type)
+{
+ tree new_lhs = make_ssa_name (type, def_stmt);
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (var))
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs) = 1;
+ return new_lhs;
+}
+
+/* Return the single non-EH successor basic block of BB.  If there is more
+   than one such successor, or none at all, return NULL.  */
+static basic_block
+get_next_bb (basic_block bb)
+{
+  edge e, res = NULL;
+  edge_iterator ei;
+
+  FOR_EACH_EDGE (e, ei, bb->succs)
+    if (!(e->flags & EDGE_EH))
+      {
+	if (res)
+	  return NULL;
+	res = e;
+      }
+  return res ? res->dest : NULL;
+}
+
+/* Insert COPY_STMT after STMT when STMT can throw. Create a new basic block
+ between basic block containing STMT and its successor. */
+static void
+insert_next_bb (gimple stmt, gimple copy_stmt)
+{
+ edge_iterator ei;
+ edge e, edge = NULL;
+ gimple_stmt_iterator gsi;
+ basic_block bb = gimple_bb (stmt);
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (!(e->flags & EDGE_EH))
+ {
+ gcc_assert (edge == NULL);
+ edge = e;
+ }
+
+ gcc_assert (edge);
+ basic_block new_bb = split_edge (edge);
+ gsi = gsi_after_labels (new_bb);
+ gsi_insert_after (&gsi, copy_stmt, GSI_NEW_STMT);
+}
+
+
+/* Return false if rhs type cannot be promoted in the stmt. Return true
+ otherwise. */
+static bool
+assign_rhs_promotable_p (gimple stmt, tree promoted_type)
+{
+ gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ tree rhs2 = gimple_assign_rhs2 (stmt);
+
+ /* If the OP is part of condition in COND_EXPR, it can be promoted only if
+ higher_bits for both the operands are not set. */
+ if (TREE_CODE_CLASS (code) == tcc_comparison)
+ {
+ /* LHS and RHS can be promoted without changing the results of
+ comparison. */
+ if (((ssa_tobe_promoted_p (rhs1)
+ && promotion_safe_p (rhs1)
+ && !ssa_overflows_p (rhs1))
+ || (TREE_CODE (rhs1) == INTEGER_CST))
+ && ((ssa_tobe_promoted_p (rhs2)
+ && promotion_safe_p (rhs2)
+ && !ssa_overflows_p (rhs2))
+ || (TREE_CODE (rhs2) == INTEGER_CST)))
+ return true;
+ /* LHS or RHS of the comparison is already promoted. */
+ else if ((TYPE_PRECISION (TREE_TYPE (rhs1))
+ == TYPE_PRECISION (promoted_type))
+ || (TYPE_PRECISION (TREE_TYPE (rhs2))
+ == TYPE_PRECISION (promoted_type)))
+ return true;
+ else
+ return false;
+ }
+
+ if (TREE_CODE_CLASS (code) == tcc_reference
+ || code == VIEW_CONVERT_EXPR
+ || code == COMPLEX_EXPR
+ || code == ASM_EXPR
+ || code == OBJ_TYPE_REF
+ || gimple_vdef (stmt)
+ || VECTOR_TYPE_P (TREE_TYPE (lhs)))
+ return false;
+ return true;
+}
+
+/* Promote the integer constants in STMT to TYPE.  */
+static void
+promote_cst_in_stmt (gimple stmt, tree type)
+{
+  tree op;
+  ssa_op_iter iter;
+  use_operand_p oprnd;
+  int index;
+
+  switch (gimple_code (stmt))
+    {
+    case GIMPLE_ASSIGN:
+      op = gimple_assign_rhs1 (stmt);
+      if (op && TREE_CODE (op) == INTEGER_CST)
+	gimple_assign_set_rhs1 (stmt, fold_convert (type, op));
+      op = gimple_assign_rhs2 (stmt);
+      if (op && TREE_CODE (op) == INTEGER_CST)
+	gimple_assign_set_rhs2 (stmt, fold_convert (type, op));
+      break;
+
+    case GIMPLE_PHI:
+      FOR_EACH_PHI_ARG (oprnd, stmt, iter, SSA_OP_USE)
+	{
+	  op = USE_FROM_PTR (oprnd);
+	  index = PHI_ARG_INDEX_FROM_USE (oprnd);
+	  if (TREE_CODE (op) == INTEGER_CST)
+	    SET_PHI_ARG_DEF (stmt, index, fold_convert (type, op));
+	}
+      break;
+
+    case GIMPLE_COND:
+      op = gimple_cond_lhs (stmt);
+      if (op && TREE_CODE (op) == INTEGER_CST)
+	gimple_cond_set_lhs (stmt, fold_convert (type, op));
+      op = gimple_cond_rhs (stmt);
+      if (op && TREE_CODE (op) == INTEGER_CST)
+	gimple_cond_set_rhs (stmt, fold_convert (type, op));
+      break;
+    default:
+      break;
+    }
+}
+
+/* Promote use in an assignment. Depending on the gimple_assign_rhs_code,
+ values in NEW_USE might have to be truncated to the type of USE. */
+static void
+promote_assign_stmt_use (gimple stmt,
+ tree use,
+ imm_use_iterator *ui,
+ tree new_use,
+ tree copy_of_use,
+ tree promoted_type)
+{
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+ tree rhs2 = gimple_assign_rhs2 (stmt);
+ gimple_stmt_iterator gsi;
+ use_operand_p op;
+ tree type;
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ /* If promoted and fix up is to be performed, fix is true. */
+ bool fix = false;
+ /* If stmt-code-specific fix up is performed, done is true. */
+ bool done = false;
+
+ switch (code)
+ {
+ CASE_CONVERT:
+ /* If this is where precision is lost, just replace the use with
+ new_use. */
+ if (TYPE_PRECISION (TREE_TYPE (lhs)) < TYPE_PRECISION (TREE_TYPE (rhs1)))
+ {
+ done = true;
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, new_use);
+ update_stmt (stmt);
+ }
+ break;
+
+ case RSHIFT_EXPR:
+ case LSHIFT_EXPR:
+ case WIDEN_LSHIFT_EXPR:
+ if (use == rhs2
+ && ssa_overflows_p (use))
+ fix = true;
+ break;
+
+ case TRUNC_MOD_EXPR:
+ case CEIL_MOD_EXPR:
+ case FLOOR_MOD_EXPR:
+ case ROUND_MOD_EXPR:
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case RDIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ if (ssa_overflows_p (use))
+ fix = true;
+ break;
+
+ default:
+ break;
+ }
+
+ if (fix && !done)
+ {
+ if (promotion_safe_p (lhs))
+ {
+ tree temp = make_promoted_copy (use, NULL, promoted_type);
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ }
+ else
+ {
+ tree temp;
+ if (copy_of_use)
+ temp = copy_of_use;
+ else
+ {
+ temp = make_promoted_copy (use, NULL, TREE_TYPE (use));
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ }
+ }
+ else if (!done)
+ {
+ if (assign_rhs_promotable_p (stmt, promoted_type)
+ && (promotion_safe_p (lhs)
+ || (TREE_CODE_CLASS (code) == tcc_comparison)))
+ {
+ type = promoted_type;
+ if (TYPE_PRECISION (TREE_TYPE (use))
+ < TYPE_PRECISION (promoted_type))
+ promote_cst_in_stmt (stmt, promoted_type);
+ }
+ else
+ type = TREE_TYPE (use);
+
+ if ((type != TREE_TYPE (new_use)
+ && type != TREE_TYPE (use))
+ || (type == TREE_TYPE (use)
+ && !copy_of_use))
+ {
+ tree temp = make_promoted_copy (use, NULL, type);
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ }
+ else if (use != new_use)
+ {
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ if (type == TREE_TYPE (new_use))
+ SET_USE (op, new_use);
+ else
+ SET_USE (op, copy_of_use);
+ update_stmt (stmt);
+ }
+ }
+}
+
+/* Promote ssa USE in phi STMT to PROMOTED_TYPE. */
+static void
+promote_phi_stmt_use (gimple stmt,
+ tree use,
+ imm_use_iterator *ui,
+ tree new_use,
+ tree copy_of_use,
+ tree promoted_type)
+{
+ tree lhs = PHI_RESULT (stmt);
+ tree type;
+ tree temp;
+ gimple_stmt_iterator gsi;
+ use_operand_p op;
+
+ if (ssa_tobe_promoted_p (lhs)
+ && promotion_safe_p (lhs))
+ type = promoted_type;
+ else
+ type = TREE_TYPE (lhs);
+
+ /* Check if we need a convert stmt to get the required type. */
+ if ((type != TREE_TYPE (new_use) && type != TREE_TYPE (use))
+ || (type == TREE_TYPE (use) && !copy_of_use))
+ {
+ temp = make_promoted_copy (use, NULL, type);
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ if (gimple_code (SSA_NAME_DEF_STMT (new_use)) == GIMPLE_NOP)
+ {
+ basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+ bb = get_next_bb (bb);
+ gcc_assert (bb);
+ gsi = gsi_after_labels (bb);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+ else if (gimple_code (SSA_NAME_DEF_STMT (new_use))
+ != GIMPLE_PHI)
+ {
+ gsi = gsi_for_stmt (SSA_NAME_DEF_STMT (new_use));
+ if (lookup_stmt_eh_lp (SSA_NAME_DEF_STMT (new_use)) > 0)
+ insert_next_bb (SSA_NAME_DEF_STMT (new_use), copy_stmt);
+ else
+ gsi_insert_after (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+ else
+ {
+ gsi = gsi_after_labels
+ (gimple_bb (SSA_NAME_DEF_STMT (new_use)));
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+ }
+ else if (type == TREE_TYPE (new_use))
+ temp = new_use;
+ else
+ temp = copy_of_use;
+
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+}
+
+/* Promote ssa USE in STMT to PROMOTED_TYPE. */
+static void
+promote_cond_stmt_use (gimple stmt,
+ tree use,
+ imm_use_iterator *ui,
+ tree new_use,
+ tree copy_of_use,
+ tree promoted_type)
+{
+ bool promote;
+ tree lhs = gimple_cond_lhs (stmt);
+ tree rhs = gimple_cond_rhs (stmt);
+ gimple_stmt_iterator gsi;
+ use_operand_p op;
+
+ /* Check if LHS and RHS can be promoted without changing the results
+ of comparison. */
+ if (((ssa_tobe_promoted_p (lhs)
+ && promotion_safe_p (lhs)
+ && !ssa_overflows_p (lhs))
+ || (TREE_CODE (lhs) == INTEGER_CST))
+ && (( ssa_tobe_promoted_p (rhs)
+ && promotion_safe_p (rhs)
+ && !ssa_overflows_p (rhs))
+ || (TREE_CODE (rhs) == INTEGER_CST)))
+ promote = true;
+ /* LHS or RHS of the comparison is already promoted. */
+ else if ((TYPE_PRECISION (TREE_TYPE (lhs))
+ == TYPE_PRECISION (promoted_type))
+ || (TYPE_PRECISION (TREE_TYPE (rhs))
+ == TYPE_PRECISION (promoted_type)))
+ promote = true;
+ else
+ promote = false;
+
+ if (promote)
+ {
+ /* Comparison will happen in promoted type. */
+ tree temp;
+ if (TREE_TYPE (use) != TREE_TYPE (new_use))
+ temp = new_use;
+ else
+ {
+ temp = make_promoted_copy (use, NULL, promoted_type);
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ promote_cst_in_stmt (stmt, promoted_type);
+ }
+ else if (TREE_TYPE (use) != TREE_TYPE (new_use))
+ {
+ /* Comparison will happen in original type. */
+ tree temp;
+ if (copy_of_use)
+ temp = copy_of_use;
+ else
+ {
+ temp = make_promoted_copy (use, NULL, TREE_TYPE (use));
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+
+ FOR_EACH_IMM_USE_ON_STMT (op, *ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ }
+}
+
+/* Promote definition DEF to PROMOTED_TYPE.  If DEF is replaced and has to
+   be released, set RELEASE_DEF.  Also return COPY_OF_DEF with the original
+   type for any use statement that needs truncation.  */
+static tree
+promote_definition (tree def,
+		    tree promoted_type,
+		    tree *copy_of_def,
+		    bool *release_def)
+{
+  gimple def_stmt = SSA_NAME_DEF_STMT (def);
+  gimple copy_stmt = NULL;
+  tree new_def;
+  basic_block bb;
+  gimple_stmt_iterator gsi;
+  gcc_assert (release_def);
+  *release_def = false;
+
+  switch (gimple_code (def_stmt))
+    {
+    case GIMPLE_PHI:
+      new_def = make_promoted_copy (def, def_stmt, promoted_type);
+      *copy_of_def = NULL;
+      gimple_phi_set_result (def_stmt, new_def);
+      /* The replaced result DEF is released by the caller.  */
+      *release_def = true;
+      update_stmt (def_stmt);
+      promote_cst_in_stmt (def_stmt, promoted_type);
+      break;
+
+    case GIMPLE_NOP:
+      /* Create a promoted type copy of parameters.  */
+      bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
+      bb = get_next_bb (bb);
+      gcc_assert (bb);
+      gsi = gsi_after_labels (bb);
+      new_def = make_promoted_copy (def, NULL, promoted_type);
+      copy_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_def,
+						def, NULL_TREE);
+      gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+      *copy_of_def = def;
+      break;
+
+    case GIMPLE_ASSIGN:
+      new_def = make_promoted_copy (def, def_stmt, promoted_type);
+      gimple_assign_set_lhs (def_stmt, new_def);
+      update_stmt (def_stmt);
+      if (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt))
+	  != tcc_comparison)
+	promote_cst_in_stmt (def_stmt, promoted_type);
+      *release_def = true;
+      *copy_of_def = NULL;
+      break;
+
+    default:
+      new_def = make_promoted_copy (def, NULL, promoted_type);
+      copy_stmt = gimple_build_assign_with_ops (CONVERT_EXPR, new_def,
+						def, NULL_TREE);
+      gsi = gsi_for_stmt (def_stmt);
+      if (lookup_stmt_eh_lp (def_stmt) > 0)
+	insert_next_bb (def_stmt, copy_stmt);
+      else
+	gsi_insert_after (&gsi, copy_stmt, GSI_NEW_STMT);
+      update_stmt (copy_stmt);
+      *copy_of_def = def;
+      break;
+    }
+  return new_def;
+}
+
+
+/* Promote all the USE with NEW_USE. */
+static unsigned int
+promote_all_uses (tree use, tree new_use, tree copy_of_use, tree promoted_type)
+{
+ gimple stmt;
+ imm_use_iterator ui;
+ gimple_stmt_iterator gsi;
+ use_operand_p op;
+
+ /* Replace all the use with the promoted variable. */
+ FOR_EACH_IMM_USE_STMT (stmt, ui, use)
+ {
+ if (stmt == SSA_NAME_DEF_STMT (new_use))
+ continue;
+
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_ASSIGN:
+ promote_assign_stmt_use (stmt, use, &ui, new_use,
+ copy_of_use, promoted_type);
+ break;
+ case GIMPLE_PHI:
+ promote_phi_stmt_use (stmt, use, &ui, new_use,
+ copy_of_use, promoted_type);
+ break;
+ case GIMPLE_COND:
+ promote_cond_stmt_use (stmt, use, &ui, new_use,
+ copy_of_use, promoted_type);
+ break;
+ case GIMPLE_DEBUG:
+ if (TREE_TYPE (use) != TREE_TYPE (new_use))
+ {
+ gsi = gsi_for_stmt (stmt);
+ gsi_remove (&gsi, true);
+ }
+ break;
+ case GIMPLE_RETURN:
+ default:
+ if (TREE_TYPE (use) != TREE_TYPE (new_use))
+ {
+ tree temp;
+ if (copy_of_use)
+ temp = copy_of_use;
+ else
+ {
+ temp = make_promoted_copy (use, NULL, TREE_TYPE (use));
+ gimple copy_stmt
+ = gimple_build_assign_with_ops (CONVERT_EXPR, temp,
+ new_use, NULL_TREE);
+ gsi = gsi_for_stmt (stmt);
+ gsi_insert_before (&gsi, copy_stmt, GSI_NEW_STMT);
+ }
+
+ FOR_EACH_IMM_USE_ON_STMT (op, ui)
+ SET_USE (op, temp);
+ update_stmt (stmt);
+ }
+ break;
+ }
+ }
+ return 0;
+}
+
+/* Promote the definition of NAME and all of its uses.  */
+static unsigned int
+promote_def_and_uses (tree name)
+{
+  tree type, new_name, copy_of_name;
+  bool release_def = false;
+
+  if (TREE_CODE (name) != SSA_NAME
+      || POINTER_TYPE_P (TREE_TYPE (name))
+      || !INTEGRAL_TYPE_P (TREE_TYPE (name))
+      || VECTOR_TYPE_P (TREE_TYPE (name))
+      || ssa_promoted_p (name)
+      || (type = get_promoted_type (TREE_TYPE (name))) == TREE_TYPE (name))
+    return 0;
+
+  if (promotion_safe_p (name))
+    {
+      new_name = promote_definition (name, type, &copy_of_name,
+				     &release_def);
+      promote_all_uses (name, new_name, copy_of_name, type);
+    }
+  else
+    promote_all_uses (name, name, name, type);
+  set_ssa_promoted (name);
+
+  if (release_def)
+    release_ssa_name (name);
+  return 0;
+}
+
+/* Mark the candidates. */
+static void
+set_ssa_to_be_promoted_flag (gimple stmt)
+{
+ ssa_op_iter i;
+ tree def;
+ use_operand_p op;
+
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_PHI:
+ def = PHI_RESULT (stmt);
+ bitmap_set_bit (ssa_to_be_promoted_bitmap, SSA_NAME_VERSION (def));
+ FOR_EACH_PHI_ARG (op, stmt, i, SSA_OP_USE)
+ {
+ def = USE_FROM_PTR (op);
+ if (TREE_CODE (def) == SSA_NAME)
+ bitmap_set_bit (ssa_to_be_promoted_bitmap, SSA_NAME_VERSION (def));
+ }
+ break;
+
+ default:
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_USE | SSA_OP_DEF)
+ {
+ if (TREE_CODE (def) == SSA_NAME)
+ bitmap_set_bit (ssa_to_be_promoted_bitmap, SSA_NAME_VERSION (def));
+ }
+ break;
+ }
+}
+
+/* Visit PHI stmt and record if variables might have higher bits set if
+ promoted. */
+static bool
+record_visit_phi_node (gimple phi)
+{
+ tree def;
+ ssa_op_iter i;
+ use_operand_p op;
+ bool high_bits_set = false;
+ tree lhs = PHI_RESULT (phi);
+
+ if (TREE_CODE (lhs) != SSA_NAME
+ || POINTER_TYPE_P (TREE_TYPE (lhs))
+ || !INTEGRAL_TYPE_P (TREE_TYPE (lhs))
+ || ssa_overflows_p (lhs))
+ return false;
+
+ FOR_EACH_PHI_ARG (op, phi, i, SSA_OP_USE)
+ {
+ def = USE_FROM_PTR (op);
+ if (ssa_overflows_p (def))
+ high_bits_set = true;
+ }
+
+ if (high_bits_set)
+ {
+ set_ssa_overflows (lhs);
+ return true;
+ }
+ else
+ return false;
+}
+
+/* Visit STMT and record if variables might have higher bits set if
+ promoted. */
+static bool
+record_visit_stmt (gimple stmt)
+{
+ tree def;
+ ssa_op_iter i;
+ bool changed = false;
+ gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+ enum tree_code code = gimple_assign_rhs_code (stmt);
+ tree lhs = gimple_assign_lhs (stmt);
+ tree rhs1 = gimple_assign_rhs1 (stmt);
+
+ if (TREE_CODE (lhs) != SSA_NAME
+ || POINTER_TYPE_P (TREE_TYPE (lhs))
+ || !INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
+ return false;
+
+ switch (code)
+ {
+ /* Conversion expressions that may need to be preserved. */
+ CASE_CONVERT:
+ bitmap_set_bit (ssa_not_safe_bitmap, SSA_NAME_VERSION (lhs));
+ break;
+
+ case SSA_NAME:
+ if (!ssa_overflows_p (lhs)
+ && ssa_overflows_p (rhs1))
+ {
+ set_ssa_overflows (lhs);
+ changed = true;
+ }
+ break;
+
+ case NE_EXPR:
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ case EQ_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ case LTGT_EXPR:
+ case RSHIFT_EXPR:
+ case LSHIFT_EXPR:
+ case WIDEN_LSHIFT_EXPR:
+ break;
+
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+ case FLOOR_DIV_EXPR:
+ case RDIV_EXPR:
+ case ROUND_DIV_EXPR:
+ case EXACT_DIV_EXPR:
+ if (!ssa_overflows_p (lhs))
+ {
+ set_ssa_overflows (lhs);
+ changed = true;
+ }
+ break;
+
+ /* Expressions which may produce results that will have higher bits if
+ computed in promoted type. (i.e. results may overflow) */
+ case MULT_HIGHPART_EXPR:
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ case BIT_XOR_EXPR:
+ case BIT_NOT_EXPR:
+ case WIDEN_MULT_EXPR:
+ case WIDEN_MULT_PLUS_EXPR:
+ case WIDEN_MULT_MINUS_EXPR:
+ case WIDEN_SUM_EXPR:
+ case BIT_IOR_EXPR:
+ case BIT_AND_EXPR:
+ if (!ssa_overflows_p (lhs))
+ {
+ set_ssa_overflows (lhs);
+ changed = true;
+ }
+ break;
+
+ /* Expressions for which operation has to be performed in original
+ types if promoted operands may have higher bits. */
+ case MIN_EXPR:
+ case MAX_EXPR:
+ case RANGE_EXPR:
+ case ABS_EXPR:
+ case NEGATE_EXPR:
+ case TRUNC_MOD_EXPR:
+ case CEIL_MOD_EXPR:
+ case FLOOR_MOD_EXPR:
+ case ROUND_MOD_EXPR:
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_USE)
+ {
+ if (ssa_overflows_p (def))
+ bitmap_set_bit (ssa_not_safe_bitmap, SSA_NAME_VERSION (lhs));
+ }
+ break;
+
+ /* Expressions that has to be done in original types. */
+ case LROTATE_EXPR:
+ case RROTATE_EXPR:
+ bitmap_set_bit (ssa_not_safe_bitmap, SSA_NAME_VERSION (lhs));
+ break;
+
+ /* To be safe, all other have to be done in original types. */
+ default:
+ bitmap_set_bit (ssa_not_safe_bitmap, SSA_NAME_VERSION (lhs));
+ break;
+ }
+ return changed;
+}
+
+
+/* Promote all the stmts in the basic block. */
+static void
+promote_all_stmts (basic_block bb)
+{
+ gimple_stmt_iterator gsi;
+ ssa_op_iter iter;
+ tree def;
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ use_operand_p op;
+
+ def = PHI_RESULT (stmt);
+ promote_def_and_uses (def);
+ FOR_EACH_PHI_ARG (op, stmt, iter, SSA_OP_USE)
+ {
+ def = USE_FROM_PTR (op);
+ promote_def_and_uses (def);
+ }
+ }
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_USE | SSA_OP_DEF)
+ promote_def_and_uses (def);
+ }
+}
+
+/* Compute which SSA names are unsafe to promote and which may have their
+   higher bits set when promoted, iterating to a fixed point.  */
+static void
+process_all_stmts_for_unsafe_promotion ()
+{
+  basic_block bb;
+  gimple_stmt_iterator gsi;
+  auto_vec<gimple> work_list;
+
+  FOR_EACH_BB_FN (bb, cfun)
+    {
+      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+	{
+	  gimple phi = gsi_stmt (gsi);
+	  set_ssa_to_be_promoted_flag (phi);
+	  work_list.safe_push (phi);
+	}
+
+      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+	{
+	  gimple stmt = gsi_stmt (gsi);
+	  set_ssa_to_be_promoted_flag (stmt);
+	  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+	    work_list.safe_push (stmt);
+	}
+    }
+  while (work_list.length () > 0)
+    {
+      bool changed = false;
+      gimple stmt = work_list.pop ();
+      tree lhs = NULL_TREE;
+
+      switch (gimple_code (stmt))
+	{
+	case GIMPLE_ASSIGN:
+	  changed = record_visit_stmt (stmt);
+	  lhs = gimple_assign_lhs (stmt);
+	  break;
+	case GIMPLE_PHI:
+	  changed = record_visit_phi_node (stmt);
+	  lhs = PHI_RESULT (stmt);
+	  break;
+	default:
+	  gcc_unreachable ();
+	}
+
+      if (changed)
+	{
+	  gimple use_stmt;
+	  imm_use_iterator ui;
+	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
+	    {
+	      if (gimple_code (use_stmt) == GIMPLE_ASSIGN
+		  || gimple_code (use_stmt) == GIMPLE_PHI)
+		work_list.safe_push (use_stmt);
+	    }
+	}
+    }
+}
+
+class type_promotion_dom_walker : public dom_walker
+{
+public:
+ type_promotion_dom_walker (cdi_direction direction)
+ : dom_walker (direction) {}
+ virtual void before_dom_children (basic_block bb)
+ {
+ promote_all_stmts (bb);
+ }
+};
+
+/* Main entry point to the pass. */
+static unsigned int
+execute_type_promotion (void)
+{
+ n_ssa_val = num_ssa_names;
+ ssa_not_safe_bitmap = sbitmap_alloc (n_ssa_val);
+ bitmap_clear (ssa_not_safe_bitmap);
+ ssa_to_be_promoted_bitmap = sbitmap_alloc (n_ssa_val);
+ bitmap_clear (ssa_to_be_promoted_bitmap);
+ ssa_sets_higher_bits_bitmap = sbitmap_alloc (n_ssa_val);
+ bitmap_clear (ssa_sets_higher_bits_bitmap);
+
+ process_all_stmts_for_unsafe_promotion ();
+ /* Walk the CFG in dominator order. */
+ type_promotion_dom_walker (CDI_DOMINATORS)
+ .walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+
+ sbitmap_free (ssa_not_safe_bitmap);
+ sbitmap_free (ssa_to_be_promoted_bitmap);
+ sbitmap_free (ssa_sets_higher_bits_bitmap);
+ return 0;
+}
+
+namespace {
+const pass_data pass_data_type_promotion =
+{
+ GIMPLE_PASS, /* type */
+ "promotion", /* name */
+ OPTGROUP_NONE, /* optinfo_flags */
+ TV_TREE_TYPE_PRMT, /* tv_id */
+ PROP_ssa, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ (TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all),
+};
+
+class pass_type_promotion : public gimple_opt_pass
+{
+public:
+ pass_type_promotion (gcc::context *ctxt)
+ : gimple_opt_pass (pass_data_type_promotion, ctxt)
+ {}
+
+ /* opt_pass methods: */
+ opt_pass * clone () { return new pass_type_promotion (m_ctxt); }
+ virtual bool gate (function *) { return flag_tree_type_prmt != 0; }
+ virtual unsigned int execute (function *)
+ {
+ return execute_type_promotion ();
+ }
+
+}; // class pass_type_promotion
+
+} // anon namespace
+
+gimple_opt_pass *
+make_pass_type_promotion (gcc::context *ctxt)
+{
+ return new pass_type_promotion (ctxt);
+}
+