@@ -1102,7 +1102,13 @@ void tcg_gen_movcond_i32(TCGCond cond, TCGv_i32 ret, TCGv_i32 c1,
 void tcg_gen_add2_i32(TCGv_i32 rl, TCGv_i32 rh, TCGv_i32 al,
                       TCGv_i32 ah, TCGv_i32 bl, TCGv_i32 bh)
 {
-    if (TCG_TARGET_HAS_add2_i32) {
+    if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_I32, 0)) {
+        TCGv_i32 t0 = tcg_temp_ebb_new_i32();
+        tcg_gen_op3_i32(INDEX_op_addco, t0, al, bl);
+        tcg_gen_op3_i32(INDEX_op_addci, rh, ah, bh);
+        tcg_gen_mov_i32(rl, t0);
+        tcg_temp_free_i32(t0);
+    } else if (TCG_TARGET_HAS_add2_i32) {
         tcg_gen_op6_i32(INDEX_op_add2_i32, rl, rh, al, ah, bl, bh);
     } else {
         TCGv_i32 t0 = tcg_temp_ebb_new_i32();
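
The i32 expansion above chains two additions through the carry flag: addco
produces the low sum and a carry-out, addci folds that carry into the high
sum; the low result is held in a temporary, presumably because rl may alias
ah or bh. As a reference point only, here is a minimal C sketch of the same
double-word addition (add2_u32 is a hypothetical helper for illustration,
not TCG code):

    #include <stdint.h>

    /* Hypothetical helper, not TCG code: double-word add, low part first. */
    static void add2_u32(uint32_t *rl, uint32_t *rh,
                         uint32_t al, uint32_t ah,
                         uint32_t bl, uint32_t bh)
    {
        uint32_t lo = al + bl;     /* like addco: low sum */
        uint32_t carry = lo < al;  /* carry out of the low addition */

        *rh = ah + bh + carry;     /* like addci: high sum plus carry-in */
        *rl = lo;                  /* written last, mirroring tcg_gen_mov_i32 */
    }
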
@@ -2822,7 +2828,26 @@ void tcg_gen_movcond_i64(TCGCond cond, TCGv_i64 ret, TCGv_i64 c1,
 void tcg_gen_add2_i64(TCGv_i64 rl, TCGv_i64 rh, TCGv_i64 al,
                       TCGv_i64 ah, TCGv_i64 bl, TCGv_i64 bh)
 {
-    if (TCG_TARGET_HAS_add2_i64) {
+    if (tcg_op_supported(INDEX_op_addci, TCG_TYPE_REG, 0)) {
+        TCGv_i64 t0 = tcg_temp_ebb_new_i64();
+
+        if (TCG_TARGET_REG_BITS == 32) {
+            tcg_gen_op3_i32(INDEX_op_addco, TCGV_LOW(t0),
+                            TCGV_LOW(al), TCGV_LOW(bl));
+            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_HIGH(t0),
+                            TCGV_HIGH(al), TCGV_HIGH(bl));
+            tcg_gen_op3_i32(INDEX_op_addcio, TCGV_LOW(rh),
+                            TCGV_LOW(ah), TCGV_LOW(bh));
+            tcg_gen_op3_i32(INDEX_op_addci, TCGV_HIGH(rh),
+                            TCGV_HIGH(ah), TCGV_HIGH(bh));
+        } else {
+            tcg_gen_op3_i64(INDEX_op_addco, t0, al, bl);
+            tcg_gen_op3_i64(INDEX_op_addci, rh, ah, bh);
+        }
+
+        tcg_gen_mov_i64(rl, t0);
+        tcg_temp_free_i64(t0);
+    } else if (TCG_TARGET_HAS_add2_i64) {
         tcg_gen_op6_i64(INDEX_op_add2_i64, rl, rh, al, ah, bl, bh);
     } else {
         TCGv_i64 t0 = tcg_temp_ebb_new_i64();
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg-op.c | 29 +++++++++++++++++++++++++++--
 1 file changed, 27 insertions(+), 2 deletions(-)
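
On a 32-bit host the i64 hunk splits each 64-bit operand into TCGV_LOW/HIGH
halves and runs a four-step carry chain: addco starts it, two addcio steps
consume and produce carry, and addci ends it. A minimal C sketch of that
chain under the same assumptions (the *_u32 helpers and the 128-bit wrapper
are hypothetical, for illustration only):

    #include <stdint.h>

    /* Hypothetical helpers modelling the carry-flag ops on a 32-bit host. */
    static uint32_t addco_u32(uint32_t a, uint32_t b, unsigned *carry)
    {
        uint32_t r = a + b;          /* start the chain, produce carry-out */
        *carry = r < a;
        return r;
    }

    static uint32_t addcio_u32(uint32_t a, uint32_t b, unsigned *carry)
    {
        uint32_t r = a + b + *carry; /* consume and produce carry */
        *carry = *carry ? r <= a : r < a;
        return r;
    }

    static uint32_t addci_u32(uint32_t a, uint32_t b, unsigned carry)
    {
        return a + b + carry;        /* consume carry, end the chain */
    }

    /* 128-bit add built from 64-bit inputs split into 32-bit halves. */
    static void add2_u64_on_32bit_host(uint64_t *rl, uint64_t *rh,
                                       uint64_t al, uint64_t ah,
                                       uint64_t bl, uint64_t bh)
    {
        unsigned c;
        uint32_t lo0 = addco_u32((uint32_t)al, (uint32_t)bl, &c);
        uint32_t lo1 = addcio_u32((uint32_t)(al >> 32), (uint32_t)(bl >> 32), &c);
        uint32_t hi0 = addcio_u32((uint32_t)ah, (uint32_t)bh, &c);
        uint32_t hi1 = addci_u32((uint32_t)(ah >> 32), (uint32_t)(bh >> 32), c);

        *rl = (uint64_t)lo1 << 32 | lo0;
        *rh = (uint64_t)hi1 << 32 | hi0;
    }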