===================================================================
@@ -1852,6 +1852,8 @@ #define TYPE_MODE_RAW(NODE) (TYPE_CHECK
#define TYPE_MODE(NODE) \
(VECTOR_TYPE_P (TYPE_CHECK (NODE)) \
? vector_type_mode (NODE) : (NODE)->type_common.mode)
+#define SCALAR_TYPE_MODE(NODE) \
+ (as_a <scalar_mode> (TYPE_CHECK (NODE)->type_common.mode))
#define SCALAR_INT_TYPE_MODE(NODE) \
(as_a <scalar_int_mode> (TYPE_CHECK (NODE)->type_common.mode))
#define SCALAR_FLOAT_TYPE_MODE(NODE) \
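The new macro parallels the existing SCALAR_INT_TYPE_MODE and SCALAR_FLOAT_TYPE_MODE: it reads the type's stored mode and views it through as_a <scalar_mode>, so a caller that already knows a type is scalar gets a statically typed scalar_mode instead of a bare machine_mode, and (in checked builds) an unexpected vector or complex mode trips an assertion rather than propagating silently. Below is a minimal standalone sketch of that checked-cast idiom; the class and helper names are invented stand-ins, not GCC's real wrapper classes from machmode.h.

// Simplified model of the typed-mode idiom behind SCALAR_TYPE_MODE.
// mode_kind, machine_mode_t, scalar_mode_t and this as_a are hypothetical
// stand-ins for illustration only.
#include <cassert>

enum mode_kind { MODE_INT, MODE_FLOAT, MODE_VECTOR_INT, MODE_VECTOR_FLOAT };

struct machine_mode_t            /* any mode at all */
{
  mode_kind kind;
};

struct scalar_mode_t             /* statically known to be scalar */
{
  mode_kind kind;
  static bool includes_p (machine_mode_t m)
  { return m.kind == MODE_INT || m.kind == MODE_FLOAT; }
};

template <typename T>
T as_a (machine_mode_t m)        /* checked downcast */
{
  assert (T::includes_p (m));
  return T { m.kind };
}

int main ()
{
  machine_mode_t stored = { MODE_INT };             /* a type's stored mode */
  scalar_mode_t s = as_a <scalar_mode_t> (stored);  /* fine: integer modes are scalar */
  (void) s;

  machine_mode_t vec = { MODE_VECTOR_INT };
  /* as_a <scalar_mode_t> (vec) would assert: the caller's "this type has
     a scalar mode" assumption is checked instead of silently ignored.  */
  (void) vec;
  return 0;
}

The remaining hunks apply the same pattern: wherever the result of TYPE_MODE is already known to be scalar, the lookup is replaced by SCALAR_TYPE_MODE so the stronger type is established once, at the point the mode is extracted.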
===================================================================
@@ -7758,7 +7758,7 @@ expand_expr_addr_expr_1 (tree exp, rtx t
The expression is therefore always offset by the size of the
scalar type. */
offset = 0;
- bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
+ bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
inner = TREE_OPERAND (exp, 0);
break;
@@ -9436,7 +9436,7 @@ #define REDUCE_BIT_FIELD(expr) (reduce_b
{
tree sel_type = TREE_TYPE (treeop2);
machine_mode vmode
- = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
+ = mode_for_vector (SCALAR_TYPE_MODE (TREE_TYPE (sel_type)),
TYPE_VECTOR_SUBPARTS (sel_type));
gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
===================================================================
@@ -2058,8 +2058,8 @@ fold_convert_const_fixed_from_fixed (tre
tree t;
bool overflow_p;
- overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
- TYPE_SATURATING (type));
+ overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
+ &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
t = build_fixed (type, value);
/* Propagate overflow flags. */
@@ -2087,7 +2087,7 @@ fold_convert_const_fixed_from_int (tree
else
di.high = TREE_INT_CST_ELT (arg1, 1);
- overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
+ overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
TYPE_UNSIGNED (TREE_TYPE (arg1)),
TYPE_SATURATING (type));
t = build_fixed (type, value);
@@ -2108,7 +2108,7 @@ fold_convert_const_fixed_from_real (tree
tree t;
bool overflow_p;
- overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
+ overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
&TREE_REAL_CST (arg1),
TYPE_SATURATING (type));
t = build_fixed (type, value);
@@ -7115,7 +7115,7 @@ native_encode_int (const_tree expr, unsi
native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
{
tree type = TREE_TYPE (expr);
- machine_mode mode = TYPE_MODE (type);
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
int total_bytes = GET_MODE_SIZE (mode);
FIXED_VALUE_TYPE value;
tree i_value, i_type;
@@ -7217,7 +7217,7 @@ native_encode_complex (const_tree expr,
return 0;
part = TREE_IMAGPART (expr);
if (off != -1)
- off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
+ off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
if (off == -1
&& isize != rsize)
@@ -7241,7 +7241,7 @@ native_encode_vector (const_tree expr, u
offset = 0;
count = VECTOR_CST_NELTS (expr);
itype = TREE_TYPE (TREE_TYPE (expr));
- size = GET_MODE_SIZE (TYPE_MODE (itype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
for (i = 0; i < count; i++)
{
if (off >= size)
@@ -7369,7 +7369,8 @@ native_interpret_int (tree type, const u
static tree
native_interpret_fixed (tree type, const unsigned char *ptr, int len)
{
- int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ int total_bytes = GET_MODE_SIZE (mode);
double_int result;
FIXED_VALUE_TYPE fixed_value;
@@ -7378,7 +7379,7 @@ native_interpret_fixed (tree type, const
return NULL_TREE;
result = double_int::from_buffer (ptr, total_bytes);
- fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
+ fixed_value = fixed_from_double_int (result, mode);
return build_fixed (type, fixed_value);
}
@@ -7400,7 +7401,6 @@ native_interpret_real (tree type, const
REAL_VALUE_TYPE r;
long tmp[6];
- total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
if (total_bytes > len || total_bytes > 24)
return NULL_TREE;
int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
@@ -7455,7 +7455,7 @@ native_interpret_complex (tree type, con
int size;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
if (size * 2 > len)
return NULL_TREE;
rpart = native_interpret_expr (etype, ptr, size);
@@ -7480,7 +7480,7 @@ native_interpret_vector (tree type, cons
tree *elements;
etype = TREE_TYPE (type);
- size = GET_MODE_SIZE (TYPE_MODE (etype));
+ size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
count = TYPE_VECTOR_SUBPARTS (type);
if (size * count > len)
return NULL_TREE;
===================================================================
@@ -495,7 +495,7 @@ simd_clone_adjust_return_type (struct cg
veclen = node->simdclone->vecsize_int;
else
veclen = node->simdclone->vecsize_float;
- veclen /= GET_MODE_BITSIZE (TYPE_MODE (t));
+ veclen /= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (t));
if (veclen > node->simdclone->simdlen)
veclen = node->simdclone->simdlen;
if (POINTER_TYPE_P (t))
@@ -605,7 +605,7 @@ simd_clone_adjust_argument_types (struct
veclen = sc->vecsize_int;
else
veclen = sc->vecsize_float;
- veclen /= GET_MODE_BITSIZE (TYPE_MODE (parm_type));
+ veclen /= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (parm_type));
if (veclen > sc->simdlen)
veclen = sc->simdlen;
adj.arg_prefix = "simd";
@@ -649,7 +649,7 @@ simd_clone_adjust_argument_types (struct
veclen = sc->vecsize_int;
else
veclen = sc->vecsize_float;
- veclen /= GET_MODE_BITSIZE (TYPE_MODE (base_type));
+ veclen /= GET_MODE_BITSIZE (SCALAR_TYPE_MODE (base_type));
if (veclen > sc->simdlen)
veclen = sc->simdlen;
if (sc->mask_mode != VOIDmode)
@@ -791,8 +791,8 @@ simd_clone_init_simd_arrays (struct cgra
arg = DECL_CHAIN (arg);
j++;
}
- elemsize
- = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))));
+ tree elemtype = TREE_TYPE (TREE_TYPE (arg));
+ elemsize = GET_MODE_SIZE (SCALAR_TYPE_MODE (elemtype));
tree t = build2 (MEM_REF, TREE_TYPE (arg), ptr,
build_int_cst (ptype, k * elemsize));
t = build2 (MODIFY_EXPR, TREE_TYPE (t), t, arg);
@@ -1225,7 +1225,7 @@ simd_clone_adjust (struct cgraph_node *n
mask_array, iter1, NULL, NULL);
g = gimple_build_assign (mask, aref);
gsi_insert_after (&gsi, g, GSI_CONTINUE_LINKING);
- int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (aref)));
+ int bitsize = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (aref)));
if (!INTEGRAL_TYPE_P (TREE_TYPE (aref)))
{
aref = build1 (VIEW_CONVERT_EXPR,
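For reference, the arithmetic in the veclen hunks above, with hypothetical numbers (the vecsize_int and simdlen values are invented for illustration): a 256-bit integer vector holding 32-bit elements gives 256 / 32 = 8 lanes, which is then clamped to the clone's simdlen.

// Worked example of the veclen computation, using invented numbers;
// the variables mirror the fields used above but are plain stand-ins.
#include <cstdio>

int main ()
{
  unsigned vecsize_int = 256; /* stand-in for simdclone->vecsize_int (bits)  */
  unsigned elem_bits = 32;    /* GET_MODE_BITSIZE (SCALAR_TYPE_MODE (t))     */
  unsigned simdlen = 4;       /* stand-in for simdclone->simdlen             */

  unsigned veclen = vecsize_int / elem_bits;  /* 8 lanes fit in the register */
  if (veclen > simdlen)                       /* but never exceed simdlen    */
    veclen = simdlen;

  std::printf ("veclen = %u\n", veclen);      /* prints 4 */
  return 0;
}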
===================================================================
@@ -2155,10 +2155,13 @@ layout_type (tree type)
}
case FIXED_POINT_TYPE:
- /* TYPE_MODE (type) has been set already. */
- TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
- TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
- break;
+ {
+ /* TYPE_MODE (type) has been set already. */
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (mode));
+ TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
+ break;
+ }
case COMPLEX_TYPE:
TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
@@ -2179,7 +2182,8 @@ layout_type (tree type)
/* Find an appropriate mode for the vector type. */
if (TYPE_MODE (type) == VOIDmode)
SET_TYPE_MODE (type,
- mode_for_vector (TYPE_MODE (innertype), nunits));
+ mode_for_vector (SCALAR_TYPE_MODE (innertype),
+ nunits));
TYPE_SATURATING (type) = TYPE_SATURATING (TREE_TYPE (type));
TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
===================================================================
@@ -2128,8 +2128,9 @@ build_minus_one_cst (tree type)
case FIXED_POINT_TYPE:
/* We can only generate 1 for accum types. */
gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
- return build_fixed (type, fixed_from_double_int (double_int_minus_one,
- TYPE_MODE (type)));
+ return build_fixed (type,
+ fixed_from_double_int (double_int_minus_one,
+ SCALAR_TYPE_MODE (type)));
case VECTOR_TYPE:
{
===================================================================
@@ -4190,7 +4190,7 @@ verify_gimple_assign_ternary (gassign *s
if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
|| GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type)))
- != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
+ != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type))))
{
error ("invalid mask type in vector permute expression");
debug_generic_expr (lhs_type);
===================================================================
@@ -3871,7 +3871,7 @@ estimate_move_cost (tree type, bool ARG_
if (TREE_CODE (type) == VECTOR_TYPE)
{
- machine_mode inner = TYPE_MODE (TREE_TYPE (type));
+ scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (type));
machine_mode simd
= targetm.vectorize.preferred_simd_mode (inner);
int simd_mode_size = GET_MODE_SIZE (simd);
===================================================================
@@ -3227,7 +3227,8 @@ convert_plusminus_to_widen (gimple_stmt_
optab this_optab;
enum tree_code wmult_code;
enum insn_code handler;
- machine_mode to_mode, from_mode, actual_mode;
+ scalar_mode to_mode, from_mode;
+ machine_mode actual_mode;
location_t loc = gimple_location (stmt);
int actual_precision;
bool from_unsigned1, from_unsigned2;
@@ -3323,8 +3324,8 @@ convert_plusminus_to_widen (gimple_stmt_
else
return false;
- to_mode = TYPE_MODE (type);
- from_mode = TYPE_MODE (type1);
+ to_mode = SCALAR_TYPE_MODE (type);
+ from_mode = SCALAR_TYPE_MODE (type1);
from_unsigned1 = TYPE_UNSIGNED (type1);
from_unsigned2 = TYPE_UNSIGNED (type2);
optype = type1;
===================================================================
@@ -4745,7 +4745,8 @@ vect_create_epilog_for_reduction (vec<tr
(index_vec_type);
/* Get an unsigned integer version of the type of the data vector. */
- int scalar_precision = GET_MODE_PRECISION (TYPE_MODE (scalar_type));
+ int scalar_precision
+ = GET_MODE_PRECISION (SCALAR_TYPE_MODE (scalar_type));
tree scalar_type_unsigned = make_unsigned_type (scalar_precision);
tree vectype_unsigned = build_vector_type
(scalar_type_unsigned, TYPE_VECTOR_SUBPARTS (vectype));
@@ -6142,7 +6143,8 @@ vectorizable_reduction (gimple *stmt, gi
}
else
{
- int scalar_precision = GET_MODE_PRECISION (TYPE_MODE (scalar_type));
+ int scalar_precision
+ = GET_MODE_PRECISION (SCALAR_TYPE_MODE (scalar_type));
cr_index_scalar_type = make_unsigned_type (scalar_precision);
cr_index_vector_type = build_vector_type
(cr_index_scalar_type, TYPE_VECTOR_SUBPARTS (vectype_out));
===================================================================
@@ -939,8 +939,8 @@ vect_recog_widen_mult_pattern (vec<gimpl
tree itype = type;
if (TYPE_PRECISION (type) > TYPE_PRECISION (half_type0) * 2)
itype = build_nonstandard_integer_type
- (GET_MODE_BITSIZE (TYPE_MODE (half_type0)) * 2,
- TYPE_UNSIGNED (type));
+ (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (half_type0)) * 2,
+ TYPE_UNSIGNED (type));
/* Pattern detected. */
if (dump_enabled_p ())
@@ -3083,7 +3083,7 @@ vect_recog_mixed_size_cond_pattern (vec<
TYPE_UNSIGNED (type));
if (itype == NULL_TREE
- || GET_MODE_BITSIZE (TYPE_MODE (itype)) != cmp_mode_size)
+ || GET_MODE_BITSIZE (SCALAR_TYPE_MODE (itype)) != cmp_mode_size)
return NULL;
vecitype = get_vectype_for_scalar_type (itype);
@@ -3200,7 +3200,7 @@ check_bool_pattern (tree var, vec_info *
if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE)
{
- machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
+ scalar_mode mode = SCALAR_TYPE_MODE (TREE_TYPE (rhs1));
tree itype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
vecitype = get_vectype_for_scalar_type (itype);
@@ -3322,7 +3322,7 @@ adjust_bool_pattern (tree var, tree out_
irhs1 = *defs.get (rhs1);
tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
if (TYPE_PRECISION (TREE_TYPE (irhs1))
- == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
+ == GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (def_rhs1))))
{
rhs_code = def_rhs_code;
rhs1 = def_rhs1;
@@ -3341,7 +3341,7 @@ adjust_bool_pattern (tree var, tree out_
irhs2 = *defs.get (rhs2);
tree def_rhs1 = gimple_assign_rhs1 (def_stmt);
if (TYPE_PRECISION (TREE_TYPE (irhs2))
- == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (def_rhs1))))
+ == GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (def_rhs1))))
{
rhs_code = def_rhs_code;
rhs1 = def_rhs1;
@@ -3391,7 +3391,7 @@ adjust_bool_pattern (tree var, tree out_
|| (TYPE_PRECISION (TREE_TYPE (rhs1))
!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1)))))
{
- machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
+ scalar_mode mode = SCALAR_TYPE_MODE (TREE_TYPE (rhs1));
itype
= build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
}
@@ -3544,7 +3544,7 @@ search_type_for_mask_1 (tree var, vec_in
if (TREE_CODE (TREE_TYPE (rhs1)) != INTEGER_TYPE
|| !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
{
- machine_mode mode = TYPE_MODE (TREE_TYPE (rhs1));
+ scalar_mode mode = SCALAR_TYPE_MODE (TREE_TYPE (rhs1));
res = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode), 1);
}
else
===================================================================
@@ -3685,7 +3685,7 @@ vect_schedule_slp_instance (slp_tree nod
v1 = SLP_TREE_VEC_STMTS (node).copy ();
SLP_TREE_VEC_STMTS (node).truncate (0);
tree meltype = build_nonstandard_integer_type
- (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (vectype))), 1);
+ (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (vectype))), 1);
tree mvectype = get_same_sized_vectype (meltype, vectype);
unsigned k = 0, l;
for (j = 0; j < v0.length (); ++j)
===================================================================
@@ -4051,7 +4051,6 @@ vectorizable_conversion (gimple *stmt, g
vec<tree> interm_types = vNULL;
tree last_oprnd, intermediate_type, cvt_type = NULL_TREE;
int op_type;
- machine_mode rhs_mode;
unsigned short fltsz;
/* Is STMT a vectorizable conversion? */
@@ -4196,6 +4195,9 @@ vectorizable_conversion (gimple *stmt, g
needs to be generated. */
gcc_assert (ncopies >= 1);
+ machine_mode lhs_mode = SCALAR_TYPE_MODE (lhs_type);
+ machine_mode rhs_mode = SCALAR_TYPE_MODE (rhs_type);
+
/* Supportable by target? */
switch (modifier)
{
@@ -4224,12 +4226,11 @@ vectorizable_conversion (gimple *stmt, g
}
if (code != FLOAT_EXPR
- || (GET_MODE_SIZE (TYPE_MODE (lhs_type))
- <= GET_MODE_SIZE (TYPE_MODE (rhs_type))))
+ || GET_MODE_SIZE (lhs_mode) <= GET_MODE_SIZE (rhs_mode))
goto unsupported;
- fltsz = GET_MODE_SIZE (TYPE_MODE (lhs_type));
- FOR_EACH_2XWIDER_MODE (rhs_mode, TYPE_MODE (rhs_type))
+ fltsz = GET_MODE_SIZE (lhs_mode);
+ FOR_EACH_2XWIDER_MODE (rhs_mode, rhs_mode)
{
if (GET_MODE_SIZE (rhs_mode) > fltsz)
break;
@@ -4281,11 +4282,9 @@ vectorizable_conversion (gimple *stmt, g
break;
if (code != FIX_TRUNC_EXPR
- || (GET_MODE_SIZE (TYPE_MODE (lhs_type))
- >= GET_MODE_SIZE (TYPE_MODE (rhs_type))))
+ || GET_MODE_SIZE (lhs_mode) >= GET_MODE_SIZE (rhs_mode))
goto unsupported;
- rhs_mode = TYPE_MODE (rhs_type);
cvt_type
= build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
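Hoisting lhs_mode and rhs_mode above the switch lets the conversion cases compare element sizes directly and lets the widening search reuse rhs_mode as both its starting point and its iteration variable. A rough standalone sketch of that search shape, assuming the iterator visits successively doubled widths (the helpers here are invented, not GCC's mode iterators):

// Rough sketch of the widening search above: start from the source
// element width and keep doubling, stopping once the candidate exceeds
// the destination width.  All names and numbers are illustrative only.
#include <cstdio>

int main ()
{
  unsigned fltsz = 8;     /* stand-in for GET_MODE_SIZE (lhs_mode), bytes */
  unsigned rhs_size = 2;  /* stand-in for GET_MODE_SIZE (rhs_mode), bytes */

  /* Models FOR_EACH_2XWIDER_MODE (rhs_mode, rhs_mode): each step tries
     a candidate twice as wide as the previous one.  */
  for (unsigned size = rhs_size * 2; ; size *= 2)
    {
      if (size > fltsz)
        break;                                   /* no usable intermediate */
      std::printf ("try %u-byte intermediate\n", size);
      /* The real code stops early once the target supports the two-step
         conversion through this intermediate type.  */
    }
  return 0;
}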
===================================================================
@@ -120,7 +120,8 @@ decl_for_type_insert (tree type, tree de
ubsan_encode_value (tree t, enum ubsan_encode_value_phase phase)
{
tree type = TREE_TYPE (t);
- const unsigned int bitsize = GET_MODE_BITSIZE (TYPE_MODE (type));
+ scalar_mode mode = SCALAR_TYPE_MODE (type);
+ const unsigned int bitsize = GET_MODE_BITSIZE (mode);
if (bitsize <= POINTER_SIZE)
switch (TREE_CODE (type))
{
@@ -157,10 +158,8 @@ ubsan_encode_value (tree t, enum ubsan_e
}
if (phase == UBSAN_ENCODE_VALUE_RTL)
{
- rtx mem
- = assign_stack_temp_for_type (TYPE_MODE (type),
- GET_MODE_SIZE (TYPE_MODE (type)),
- type);
+ rtx mem = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
+ type);
SET_DECL_RTL (var, mem);
expand_assignment (var, t, false);
return build_fold_addr_expr (var);
===================================================================
@@ -4837,7 +4837,7 @@ output_constant (tree exp, unsigned HOST
break;
case VECTOR_CST:
{
- machine_mode inner = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
+ scalar_mode inner = SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
unsigned int nalign = MIN (align, GET_MODE_ALIGNMENT (inner));
int elt_size = GET_MODE_SIZE (inner);
output_constant (VECTOR_CST_ELT (exp, 0), elt_size, align,
===================================================================
@@ -1109,7 +1109,7 @@ interpret_fixed (const cpp_token *token,
memcpy (copy, token->val.str.text, copylen);
copy[copylen] = '\0';
- fixed_from_string (&fixed, copy, TYPE_MODE (type));
+ fixed_from_string (&fixed, copy, SCALAR_TYPE_MODE (type));
/* Create a node with determined type and value. */
value = build_fixed (type, fixed);
===================================================================
@@ -1031,8 +1031,8 @@ c_build_vec_perm_expr (location_t loc, t
return error_mark_node;
}
- if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (v0))))
- != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (mask)))))
+ if (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (v0))))
+ != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (TREE_TYPE (mask)))))
{
if (complain)
error_at (loc, "__builtin_shuffle argument vector(s) inner type "
===================================================================
@@ -810,11 +810,11 @@ c_common_type (tree t1, tree t2)
if (code1 == FIXED_POINT_TYPE || code2 == FIXED_POINT_TYPE)
{
unsigned int unsignedp = 0, satp = 0;
- machine_mode m1, m2;
+ scalar_mode m1, m2;
unsigned int fbit1, ibit1, fbit2, ibit2, max_fbit, max_ibit;
- m1 = TYPE_MODE (t1);
- m2 = TYPE_MODE (t2);
+ m1 = SCALAR_TYPE_MODE (t1);
+ m2 = SCALAR_TYPE_MODE (t2);
/* If one input type is saturating, the result type is saturating. */
if (TYPE_SATURATING (t1) || TYPE_SATURATING (t2))
@@ -846,7 +846,8 @@ c_common_type (tree t1, tree t2)
mclass = MODE_ACCUM;
else
gcc_unreachable ();
- m1 = mode_for_size (GET_MODE_PRECISION (m1), mclass, 0);
+ m1 = as_a <scalar_mode>
+ (mode_for_size (GET_MODE_PRECISION (m1), mclass, 0));
}
if (code2 == FIXED_POINT_TYPE && TYPE_UNSIGNED (t2))
{
@@ -857,7 +858,8 @@ c_common_type (tree t1, tree t2)
mclass = MODE_ACCUM;
else
gcc_unreachable ();
- m2 = mode_for_size (GET_MODE_PRECISION (m2), mclass, 0);
+ m2 = as_a <scalar_mode>
+ (mode_for_size (GET_MODE_PRECISION (m2), mclass, 0));
}
}
@@ -11306,7 +11308,8 @@ build_binary_op (location_t location, en
/* Always construct signed integer vector type. */
intt = c_common_type_for_size (GET_MODE_BITSIZE
- (TYPE_MODE (TREE_TYPE (type0))), 0);
+ (SCALAR_TYPE_MODE
+ (TREE_TYPE (type0))), 0);
result_type = build_opaque_vector_type (intt,
TYPE_VECTOR_SUBPARTS (type0));
converted = 1;
@@ -11465,7 +11468,8 @@ build_binary_op (location_t location, en
/* Always construct signed integer vector type. */
intt = c_common_type_for_size (GET_MODE_BITSIZE
- (TYPE_MODE (TREE_TYPE (type0))), 0);
+ (SCALAR_TYPE_MODE
+ (TREE_TYPE (type0))), 0);
result_type = build_opaque_vector_type (intt,
TYPE_VECTOR_SUBPARTS (type0));
converted = 1;
===================================================================
@@ -4950,8 +4950,8 @@ cp_build_binary_op (location_t location,
}
/* Always construct signed integer vector type. */
- intt = c_common_type_for_size (GET_MODE_BITSIZE
- (TYPE_MODE (TREE_TYPE (type0))), 0);
+ intt = c_common_type_for_size
+ (GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (type0))), 0);
if (!intt)
{
if (complain & tf_error)