Index: gcc/expmed.h
===================================================================
@@ -719,8 +719,7 @@ extern rtx expand_divmod (int, enum tree
#endif
extern void store_bit_field (rtx, poly_uint64, poly_uint64,
- unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
+ poly_uint64, poly_uint64,
machine_mode, rtx, bool);
extern rtx extract_bit_field (rtx, poly_uint64, poly_uint64, int, rtx,
machine_mode, machine_mode, bool, rtx *);
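
A note for readers following the series: a poly_uint64 is a runtime-polynomial value c0 + c1 * X, where the indeterminate X is a runtime invariant known only to be a non-negative integer (for SVE, a measure of the vector length).  Comparisons on such values therefore come in "may" (holds for some X) and "must"/"known" (holds for every X) forms.  The sketch below is a minimal single-indeterminate model of the semantics the following hunks rely on; it is an illustration only, not the real poly-int.h, and poly_model is an invented name.

    #include <cassert>

    /* A minimal model of poly_uint64 with one indeterminate X:
       value = coeff0 + coeff1 * X, where X is known only to be a
       non-negative integer.  */
    struct poly_model
    {
      unsigned long coeff0;   /* constant term */
      unsigned long coeff1;   /* multiple of the indeterminate X */
    };

    /* may_lt: A < B for at least one admissible X.  If A's constant
       term is smaller, X == 0 is a witness; otherwise a large enough
       X is, provided B grows faster.  */
    static bool
    may_lt (poly_model a, poly_model b)
    {
      return a.coeff0 < b.coeff0 || a.coeff1 < b.coeff1;
    }

    /* must_lt: A < B for every admissible X.  */
    static bool
    must_lt (poly_model a, poly_model b)
    {
      return a.coeff0 < b.coeff0 && a.coeff1 <= b.coeff1;
    }

    int
    main ()
    {
      poly_model two_x = { 0, 2 };     /* 2 * X  */
      poly_model x_plus_4 = { 4, 1 };  /* X + 4  */
      assert (may_lt (two_x, x_plus_4));    /* witness X == 0: 0 < 4  */
      assert (!must_lt (two_x, x_plus_4));  /* fails at X == 4: 8 < 8 */
      return 0;
    }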
Index: gcc/expmed.c
===================================================================
@@ -49,14 +49,12 @@ struct target_expmed *this_target_expmed
static bool store_integral_bit_field (rtx, opt_scalar_int_mode,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
+ poly_uint64, poly_uint64,
machine_mode, rtx, bool, bool);
static void store_fixed_bit_field (rtx, opt_scalar_int_mode,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
+ poly_uint64, poly_uint64,
rtx, scalar_int_mode, bool);
static void store_fixed_bit_field_1 (rtx, scalar_int_mode,
unsigned HOST_WIDE_INT,
@@ -65,8 +63,7 @@ static void store_fixed_bit_field_1 (rtx
static void store_split_bit_field (rtx, opt_scalar_int_mode,
unsigned HOST_WIDE_INT,
unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT,
+ poly_uint64, poly_uint64,
rtx, scalar_int_mode, bool);
static rtx extract_integral_bit_field (rtx, opt_scalar_int_mode,
unsigned HOST_WIDE_INT,
@@ -471,8 +468,8 @@ narrow_bit_field_mem (rtx mem, opt_scala
adjust_bit_field_mem_for_reg (enum extraction_pattern pattern,
rtx op0, HOST_WIDE_INT bitsize,
HOST_WIDE_INT bitnum,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start,
+ poly_uint64 bitregion_end,
machine_mode fieldmode,
unsigned HOST_WIDE_INT *new_bitnum)
{
@@ -536,8 +533,8 @@ lowpart_bit_field_p (poly_uint64 bitnum,
strict_volatile_bitfield_p (rtx op0, unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
scalar_int_mode fieldmode,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end)
+ poly_uint64 bitregion_start,
+ poly_uint64 bitregion_end)
{
unsigned HOST_WIDE_INT modesize = GET_MODE_BITSIZE (fieldmode);
@@ -564,9 +561,10 @@ strict_volatile_bitfield_p (rtx op0, uns
return false;
/* Check for cases where the C++ memory model applies. */
- if (bitregion_end != 0
- && (bitnum - bitnum % modesize < bitregion_start
- || bitnum - bitnum % modesize + modesize - 1 > bitregion_end))
+ if (maybe_nonzero (bitregion_end)
+ && (may_lt (bitnum - bitnum % modesize, bitregion_start)
+ || may_gt (bitnum - bitnum % modesize + modesize - 1,
+ bitregion_end)))
return false;
return true;
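
The direction of the new comparisons is the interesting part: strict_volatile_bitfield_p returns false, the conservative answer, whenever the access might leave the bit region, so the old != 0, < and > become maybe_nonzero, may_lt and may_gt rather than the known_/must_ forms.  Extending the poly_model sketch above (known_zero is the dual, used later in the stor-layout.c hunks):

    /* maybe_nonzero: nonzero for some X, i.e. not zero for every X.  */
    static bool
    maybe_nonzero (poly_model a)
    {
      return a.coeff0 != 0 || a.coeff1 != 0;
    }

    /* known_zero: zero for every X.  */
    static bool
    known_zero (poly_model a)
    {
      return a.coeff0 == 0 && a.coeff1 == 0;
    }

    /* may_gt: A > B for at least one admissible X.  */
    static bool
    may_gt (poly_model a, poly_model b)
    {
      return may_lt (b, a);
    }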
@@ -730,8 +728,7 @@ store_bit_field_using_insv (const extrac
static bool
store_bit_field_1 (rtx str_rtx, poly_uint64 bitsize, poly_uint64 bitnum,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
machine_mode fieldmode,
rtx value, bool reverse, bool fallback_p)
{
@@ -858,8 +855,8 @@ store_bit_field_1 (rtx str_rtx, poly_uin
store_integral_bit_field (rtx op0, opt_scalar_int_mode op0_mode,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start,
+ poly_uint64 bitregion_end,
machine_mode fieldmode,
rtx value, bool reverse, bool fallback_p)
{
@@ -1085,8 +1082,7 @@ store_integral_bit_field (rtx op0, opt_s
void
store_bit_field (rtx str_rtx, poly_uint64 bitsize, poly_uint64 bitnum,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
machine_mode fieldmode,
rtx value, bool reverse)
{
@@ -1133,15 +1129,12 @@ store_bit_field (rtx str_rtx, poly_uint6
/* Under the C++0x memory model, we must not touch bits outside the
bit region. Adjust the address to start at the beginning of the
bit region. */
- if (MEM_P (str_rtx) && bitregion_start > 0)
+ if (MEM_P (str_rtx) && maybe_nonzero (bitregion_start))
{
scalar_int_mode best_mode;
machine_mode addr_mode = VOIDmode;
- HOST_WIDE_INT offset;
-
- gcc_assert ((bitregion_start % BITS_PER_UNIT) == 0);
- offset = bitregion_start / BITS_PER_UNIT;
+ poly_uint64 offset = exact_div (bitregion_start, BITS_PER_UNIT);
bitnum -= bitregion_start;
poly_int64 size = bits_to_bytes_round_up (bitnum + bitsize);
bitregion_end -= bitregion_start;
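
Also in this hunk, the gcc_assert plus division pair collapses into exact_div, which carries the assertion that the division leaves no remainder.  For a polynomial value exactness must hold coefficient by coefficient, so that the quotient is again a polynomial in X.  In the model (plain assert standing in for gcc_assert):

    #include <cassert>

    /* exact_div: division asserted to leave no remainder, checked
       coefficient by coefficient so the quotient is again a
       polynomial in X.  */
    static poly_model
    exact_div (poly_model a, unsigned long b)
    {
      assert (a.coeff0 % b == 0 && a.coeff1 % b == 0);
      return poly_model { a.coeff0 / b, a.coeff1 / b };
    }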
@@ -1174,8 +1167,7 @@ store_bit_field (rtx str_rtx, poly_uint6
store_fixed_bit_field (rtx op0, opt_scalar_int_mode op0_mode,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitnum,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
rtx value, scalar_int_mode value_mode, bool reverse)
{
/* There is a case not handled here:
@@ -1330,8 +1322,7 @@ store_fixed_bit_field_1 (rtx op0, scalar
store_split_bit_field (rtx op0, opt_scalar_int_mode op0_mode,
unsigned HOST_WIDE_INT bitsize,
unsigned HOST_WIDE_INT bitpos,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
rtx value, scalar_int_mode value_mode, bool reverse)
{
unsigned int unit, total_bits, bitsdone = 0;
@@ -1379,9 +1370,9 @@ store_split_bit_field (rtx op0, opt_scal
UNIT close to the end of the region as needed. If op0 is a REG
or SUBREG of REG, don't do this, as there can't be data races
on a register and we can expand shorter code in some cases. */
- if (bitregion_end
+ if (maybe_nonzero (bitregion_end)
&& unit > BITS_PER_UNIT
- && bitpos + bitsdone - thispos + unit > bitregion_end + 1
+ && may_gt (bitpos + bitsdone - thispos + unit, bitregion_end + 1)
&& !REG_P (op0)
&& (GET_CODE (op0) != SUBREG || !REG_P (SUBREG_REG (op0))))
{
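
Note the mixed operands in the may_gt call above: bitpos + bitsdone - thispos + unit is still plain HOST_WIDE_INT arithmetic, while bitregion_end + 1 is a poly value.  poly-int.h supplies overloads for such mixes; in the model they amount to promoting the scalar to a constant polynomial, with scalar addition touching only the constant coefficient:

    /* Promote a scalar to a constant polynomial (coeff1 == 0).  */
    static poly_model
    to_poly (unsigned long c)
    {
      return poly_model { c, 0 };
    }

    /* Scalar-versus-poly comparison, as in the may_gt call above.  */
    static bool
    may_gt (unsigned long a, poly_model b)
    {
      return may_lt (b, to_poly (a));
    }

    /* Adding a scalar leaves the X-coefficient unchanged, so
       bitregion_end + 1 grows at the same rate as bitregion_end.  */
    static poly_model
    operator+ (poly_model a, unsigned long b)
    {
      return poly_model { a.coeff0 + b, a.coeff1 };
    }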
Index: gcc/expr.c
===================================================================
@@ -79,13 +79,9 @@ static void emit_block_move_via_loop (rt
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
-static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
- HOST_WIDE_INT, unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT, machine_mode,
- tree, int, alias_set_type, bool);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
- unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
+ poly_uint64, poly_uint64,
machine_mode, tree, alias_set_type, bool, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
@@ -4611,10 +4607,10 @@ get_subtarget (rtx x)
and there's nothing else to do. */
static bool
-optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
- unsigned HOST_WIDE_INT bitpos,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+optimize_bitfield_assignment_op (poly_uint64 pbitsize,
+ poly_uint64 pbitpos,
+ poly_uint64 pbitregion_start,
+ poly_uint64 pbitregion_end,
machine_mode mode1, rtx str_rtx,
tree to, tree src, bool reverse)
{
@@ -4626,7 +4622,12 @@ optimize_bitfield_assignment_op (unsigne
gimple *srcstmt;
enum tree_code code;
+ unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
if (mode1 != VOIDmode
+ || !pbitsize.is_constant (&bitsize)
+ || !pbitpos.is_constant (&bitpos)
+ || !pbitregion_start.is_constant (&bitregion_start)
+ || !pbitregion_end.is_constant (&bitregion_end)
|| bitsize >= BITS_PER_WORD
|| str_bitsize > BITS_PER_WORD
|| TREE_SIDE_EFFECTS (to)
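
optimize_bitfield_assignment_op only makes sense for compile-time-constant positions, so rather than converting its body the patch renames the parameters with a p prefix and threads is_constant calls into the existing early-exit condition: the first non-constant operand short-circuits the rest and the caller falls back to the generic path.  In the model, written as a free function where GCC's is a member of poly_int:

    /* is_constant: extraction succeeds only when the X-coefficient
       is zero, i.e. the value is the same for every X.  */
    static bool
    is_constant (poly_model a, unsigned long *out)
    {
      if (a.coeff1 != 0)
        return false;
      *out = a.coeff0;
      return true;
    }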
@@ -6082,8 +6083,8 @@ all_zeros_p (const_tree exp)
static void
store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
HOST_WIDE_INT bitpos,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start,
+ poly_uint64 bitregion_end,
machine_mode mode,
tree exp, int cleared,
alias_set_type alias_set, bool reverse)
@@ -6762,8 +6763,7 @@ store_constructor (tree exp, rtx target,
static rtx
store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
machine_mode mode, tree exp,
alias_set_type alias_set, bool nontemporal, bool reverse)
{
Index: gcc/machmode.h
===================================================================
@@ -760,7 +760,7 @@ mode_for_int_vector (machine_mode mode)
{
public:
bit_field_mode_iterator (HOST_WIDE_INT, HOST_WIDE_INT,
- HOST_WIDE_INT, HOST_WIDE_INT,
+ poly_int64, poly_int64,
unsigned int, bool);
bool next_mode (scalar_int_mode *);
bool prefer_smaller_modes ();
@@ -771,8 +771,8 @@ mode_for_int_vector (machine_mode mode)
for invalid input such as gcc.dg/pr48335-8.c. */
HOST_WIDE_INT m_bitsize;
HOST_WIDE_INT m_bitpos;
- HOST_WIDE_INT m_bitregion_start;
- HOST_WIDE_INT m_bitregion_end;
+ poly_int64 m_bitregion_start;
+ poly_int64 m_bitregion_end;
unsigned int m_align;
bool m_volatilep;
int m_count;
@@ -780,8 +780,7 @@ mode_for_int_vector (machine_mode mode)
/* Find the best mode to use to access a bit field. */
-extern bool get_best_mode (int, int, unsigned HOST_WIDE_INT,
- unsigned HOST_WIDE_INT, unsigned int,
+extern bool get_best_mode (int, int, poly_uint64, poly_uint64, unsigned int,
unsigned HOST_WIDE_INT, bool, scalar_int_mode *);
/* Determine alignment, 1<=result<=BIGGEST_ALIGNMENT. */
Index: gcc/stor-layout.c
===================================================================
@@ -2747,15 +2747,15 @@ fixup_unsigned_type (tree type)
bit_field_mode_iterator
::bit_field_mode_iterator (HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
- HOST_WIDE_INT bitregion_start,
- HOST_WIDE_INT bitregion_end,
+ poly_int64 bitregion_start,
+ poly_int64 bitregion_end,
unsigned int align, bool volatilep)
: m_mode (NARROWEST_INT_MODE), m_bitsize (bitsize),
m_bitpos (bitpos), m_bitregion_start (bitregion_start),
m_bitregion_end (bitregion_end), m_align (align),
m_volatilep (volatilep), m_count (0)
{
- if (!m_bitregion_end)
+ if (known_zero (m_bitregion_end))
{
/* We can assume that any aligned chunk of ALIGN bits that overlaps
the bitfield is mapped and won't trap, provided that ALIGN isn't
@@ -2765,8 +2765,8 @@ fixup_unsigned_type (tree type)
= MIN (align, MAX (BIGGEST_ALIGNMENT, BITS_PER_WORD));
if (bitsize <= 0)
bitsize = 1;
- m_bitregion_end = bitpos + bitsize + units - 1;
- m_bitregion_end -= m_bitregion_end % units + 1;
+ HOST_WIDE_INT end = bitpos + bitsize + units - 1;
+ m_bitregion_end = end - end % units - 1;
}
}
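
The rewrite of the region-end rounding keeps the arithmetic, which is entirely constant in this branch, in a HOST_WIDE_INT temporary and assigns the poly member once.  The value computed is round_up (bitpos + bitsize, units) - 1, i.e. the last bit of the smallest aligned chunk covering the field.  Checked on concrete numbers below; region_last_bit is a hypothetical helper that just mirrors the two statements above:

    #include <cassert>

    /* Hypothetical helper mirroring the constructor's rounding:
       returns round_up (bitpos + bitsize, units) - 1.  */
    static long
    region_last_bit (long bitpos, long bitsize, long units)
    {
      long end = bitpos + bitsize + units - 1;
      return end - end % units - 1;
    }

    int
    main ()
    {
      /* A 3-bit field at bit 17 with 8-bit units sits in chunk 16..23.  */
      assert (region_last_bit (17, 3, 8) == 23);
      /* An already-aligned byte, bits 8..15: the chunk ends at bit 15.  */
      assert (region_last_bit (8, 8, 8) == 15);
      return 0;
    }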
@@ -2803,10 +2803,11 @@ bit_field_mode_iterator::next_mode (scal
/* Stop if the mode goes outside the bitregion. */
HOST_WIDE_INT start = m_bitpos - substart;
- if (m_bitregion_start && start < m_bitregion_start)
+ if (maybe_nonzero (m_bitregion_start)
+ && may_lt (start, m_bitregion_start))
break;
HOST_WIDE_INT end = start + unit;
- if (end > m_bitregion_end + 1)
+ if (may_gt (end, m_bitregion_end + 1))
break;
/* Stop if the mode requires too much alignment. */
@@ -2862,8 +2863,7 @@ bit_field_mode_iterator::prefer_smaller_
bool
get_best_mode (int bitsize, int bitpos,
- unsigned HOST_WIDE_INT bitregion_start,
- unsigned HOST_WIDE_INT bitregion_end,
+ poly_uint64 bitregion_start, poly_uint64 bitregion_end,
unsigned int align,
unsigned HOST_WIDE_INT largest_mode_bitsize, bool volatilep,
scalar_int_mode *best_mode)