===================================================================
@@ -44,8 +44,7 @@ extern tree follow_single_use_edges (tre
 extern tree gimple_fold_stmt_to_constant_1 (gimple *, tree (*) (tree),
					     tree (*) (tree) = no_follow_ssa_edges);
 extern tree gimple_fold_stmt_to_constant (gimple *, tree (*) (tree));
-extern tree fold_ctor_reference (tree, tree, unsigned HOST_WIDE_INT,
-				 unsigned HOST_WIDE_INT, tree);
+extern tree fold_ctor_reference (tree, tree, poly_uint64, poly_uint64, tree);
 extern tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));
 extern tree fold_const_aggregate_ref (tree);
 extern tree gimple_get_virt_method_for_binfo (HOST_WIDE_INT, tree,
===================================================================
@@ -6365,20 +6365,25 @@ fold_nonarray_ctor_reference (tree type,
   return build_zero_cst (type);
 }
 
-/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
-   to the memory at bit OFFSET.  */
+/* CTOR is value initializing memory, fold reference of type TYPE and
+   size POLY_SIZE to the memory at bit POLY_OFFSET.  */
 
 tree
-fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
-		     unsigned HOST_WIDE_INT size, tree from_decl)
+fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
+		     poly_uint64 poly_size, tree from_decl)
 {
   tree ret;
 
   /* We found the field with exact match.  */
   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
-      && !offset)
+      && known_zero (poly_offset))
     return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
 
+  /* The remaining optimizations need a constant size and offset.  */
+  unsigned HOST_WIDE_INT size, offset;
+  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
+    return NULL_TREE;
+
   /* We are at the end of walk, see if we can view convert the
      result.  */
   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
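
For context on the pattern the patch uses: the new known_zero check can run
before the is_constant bail-out because comparisons of a poly_uint64 against
a constant are decidable even when the value itself is not known at compile
time; an offset is known to be zero only if every coefficient is zero.  The
stand-alone sketch below models that calling convention with a hypothetical
two-coefficient type.  fake_poly_uint64, its coeff0/coeff1 fields, and the
main driver are all invented for illustration; they are a simplified stand-in
for the real implementation in poly-int.h, not GCC code.

  #include <cstdint>
  #include <cstdio>

  /* Hypothetical stand-in for GCC's poly_uint64: the value modelled is
     coeff0 + coeff1 * N, where N is a runtime quantity such as the SVE
     vector-length multiple.  Simplified illustration only.  */
  struct fake_poly_uint64
  {
    uint64_t coeff0;  /* part known at compile time */
    uint64_t coeff1;  /* part scaled by the runtime length N */

    /* Mirror poly_int's is_constant: succeed, and store the value
       through VALUE, only when nothing depends on N.  */
    bool is_constant (uint64_t *value) const
    {
      if (coeff1 != 0)
	return false;
      *value = coeff0;
      return true;
    }
  };

  /* known_zero asks whether X is zero for every possible N, which is
     decidable even when X is not a compile-time constant.  */
  static bool
  known_zero (const fake_poly_uint64 &x)
  {
    return x.coeff0 == 0 && x.coeff1 == 0;
  }

  int
  main ()
  {
    fake_poly_uint64 fixed = { 16, 0 };    /* always 16 bits */
    fake_poly_uint64 scaled = { 0, 128 };  /* 128 bits per unit of N */

    uint64_t size;
    if (fixed.is_constant (&size))
      printf ("constant size: %llu bits\n", (unsigned long long) size);
    if (!scaled.is_constant (&size))
      printf ("variable size: bail out (the patch returns NULL_TREE)\n");
    printf ("known_zero (scaled) = %d\n", known_zero (scaled));
    return 0;
  }

A caller with a fixed-length mode effectively passes { bits, 0 } and keeps
all the old fold paths; a caller passing a runtime-scaled size falls through
to the NULL_TREE "don't know" result, which is the conservatively correct
answer for the folding code above.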