@@ -37,7 +37,8 @@ DEF(br, 0, 0, 1, TCG_OPF_BB_END | TCG_OPF_NOT_PRESENT)
DEF(mb, 0, 0, 1, TCG_OPF_NOT_PRESENT)
-DEF(mov_i32, 1, 1, 0, TCG_OPF_NOT_PRESENT)
+DEF(mov, 1, 1, 0, TCG_OPF_INT | TCG_OPF_NOT_PRESENT)
+
DEF(setcond_i32, 1, 2, 1, 0)
DEF(negsetcond_i32, 1, 2, 1, 0)
DEF(movcond_i32, 1, 4, 1, 0)
@@ -98,7 +99,6 @@ DEF(clz_i32, 1, 2, 0, 0)
DEF(ctz_i32, 1, 2, 0, 0)
DEF(ctpop_i32, 1, 1, 0, 0)
-DEF(mov_i64, 1, 1, 0, TCG_OPF_NOT_PRESENT)
DEF(setcond_i64, 1, 2, 1, 0)
DEF(negsetcond_i64, 1, 2, 1, 0)
DEF(movcond_i64, 1, 4, 1, 0)
@@ -741,6 +741,8 @@ enum {
/* Instruction has side effects: it cannot be removed if its outputs
are not used, and might trigger exceptions. */
TCG_OPF_SIDE_EFFECTS = 0x08,
+ /* Instruction operands may be I32 or I64 */
+ TCG_OPF_INT = 0x10,
/* Instruction is optional and not implemented by the host, or insn
is generic and should not be implemented by the host. */
TCG_OPF_NOT_PRESENT = 0x20,
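
Note: TCG_OPF_INT marks opcodes whose operand width is carried by the op's
type rather than encoded in an _i32/_i64 name suffix. As a hedged sketch of
how a consumer could test the flag (the helper name is hypothetical, not part
of this patch):

    /* Hypothetical helper: true when the opcode takes its width from
     * TCGOP_TYPE(op) instead of from its name. */
    static inline bool opcode_is_width_agnostic(const TCGOpDef *def)
    {
        return (def->flags & TCG_OPF_INT) != 0;
    }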
@@ -363,10 +363,8 @@ static bool tcg_opt_gen_mov(OptContext *ctx, TCGOp *op, TCGArg dst, TCGArg src)
switch (ctx->type) {
case TCG_TYPE_I32:
- new_op = INDEX_op_mov_i32;
- break;
case TCG_TYPE_I64:
- new_op = INDEX_op_mov_i64;
+ new_op = INDEX_op_mov;
break;
case TCG_TYPE_V64:
case TCG_TYPE_V128:
@@ -2921,7 +2919,8 @@ void tcg_optimize(TCGContext *s)
case INDEX_op_mb:
done = fold_mb(&ctx, op);
break;
- CASE_OP_32_64_VEC(mov):
+ case INDEX_op_mov:
+ case INDEX_op_mov_vec:
done = fold_mov(&ctx, op);
break;
CASE_OP_32_64(movcond):
@@ -351,7 +351,7 @@ void tcg_gen_discard_i32(TCGv_i32 arg)
void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
{
if (ret != arg) {
- tcg_gen_op2_i32(INDEX_op_mov_i32, ret, arg);
+ tcg_gen_op2_i32(INDEX_op_mov, ret, arg);
}
}
@@ -1411,7 +1411,7 @@ void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
return;
}
if (TCG_TARGET_REG_BITS == 64) {
- tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
+ tcg_gen_op2_i64(INDEX_op_mov, ret, arg);
} else {
TCGTemp *ts = tcgv_i64_temp(arg);
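
For context, the TCG_TARGET_REG_BITS == 32 branch continues past this hunk:
an i64 temporary is backed by a pair of i32 halves, so the move is performed
per half. A minimal sketch of the non-constant case, assuming the usual
TCGV_LOW/TCGV_HIGH accessors:

    /* Sketch: on a 32-bit host, move an i64 as two i32 halves. */
    tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
    tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));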
@@ -2187,7 +2187,9 @@ bool tcg_op_supported(TCGOpcode op, TCGType type, unsigned flags)
case INDEX_op_qemu_st_i128:
return TCG_TARGET_HAS_qemu_ldst_i128;
- case INDEX_op_mov_i32:
+ case INDEX_op_mov:
+ return has_type;
+
case INDEX_op_setcond_i32:
case INDEX_op_brcond_i32:
case INDEX_op_movcond_i32:
@@ -2269,7 +2271,6 @@ bool tcg_op_supported(TCGOpcode op, TCGType type, unsigned flags)
case INDEX_op_setcond2_i32:
return TCG_TARGET_REG_BITS == 32;
- case INDEX_op_mov_i64:
case INDEX_op_setcond_i64:
case INDEX_op_brcond_i64:
case INDEX_op_movcond_i64:
@@ -2840,18 +2841,23 @@ void tcg_dump_ops(TCGContext *s, FILE *f, bool have_prefs)
col += ne_fprintf(f, ",%s", t);
}
} else {
- col += ne_fprintf(f, " %s ", def->name);
+ if (def->flags & TCG_OPF_INT) {
+ col += ne_fprintf(f, " %s_i%d ",
+ def->name,
+ 8 * tcg_type_size(TCGOP_TYPE(op)));
+ } else if (def->flags & TCG_OPF_VECTOR) {
+ col += ne_fprintf(f, "%s v%d,e%d,",
+ def->name,
+ 8 * tcg_type_size(TCGOP_TYPE(op)),
+ 8 << TCGOP_VECE(op));
+ } else {
+ col += ne_fprintf(f, " %s ", def->name);
+ }
nb_oargs = def->nb_oargs;
nb_iargs = def->nb_iargs;
nb_cargs = def->nb_cargs;
- if (def->flags & TCG_OPF_VECTOR) {
- col += ne_fprintf(f, "v%d,e%d,",
- 8 * tcg_type_size(TCGOP_TYPE(op)),
- 8 << TCGOP_VECE(op));
- }
-
k = 0;
for (i = 0; i < nb_oargs; i++) {
const char *sep = k ? "," : "";
@@ -4144,8 +4150,7 @@ liveness_pass_1(TCGContext *s)
/* Incorporate constraints for this operand. */
switch (opc) {
- case INDEX_op_mov_i32:
- case INDEX_op_mov_i64:
+ case INDEX_op_mov:
/* Note that these are TCG_OPF_NOT_PRESENT and do not
have proper constraints. That said, special case
moves to propagate preferences backward. */
@@ -4303,7 +4308,7 @@ liveness_pass_2(TCGContext *s)
}
/* Outputs become available. */
- if (opc == INDEX_op_mov_i32 || opc == INDEX_op_mov_i64) {
+ if (opc == INDEX_op_mov) {
arg_ts = arg_temp(op->args[0]);
dir_ts = arg_ts->state_ptr;
if (dir_ts) {
@@ -6429,8 +6434,7 @@ int tcg_gen_code(TCGContext *s, TranslationBlock *tb, uint64_t pc_start)
TCGOpcode opc = op->opc;
switch (opc) {
- case INDEX_op_mov_i32:
- case INDEX_op_mov_i64:
+ case INDEX_op_mov:
case INDEX_op_mov_vec:
tcg_reg_alloc_mov(s, op);
break;
@@ -463,7 +463,7 @@ uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
regs[r0] = regs[tmp32 ? r3 : r4];
break;
#endif
- CASE_32_64(mov)
+ case INDEX_op_mov:
tci_args_rr(insn, &r0, &r1);
regs[r0] = regs[r1];
break;
@@ -1063,8 +1063,7 @@ int print_insn_tci(bfd_vma addr, disassemble_info *info)
op_name, str_r(r0), str_r(r1), s2);
break;
- case INDEX_op_mov_i32:
- case INDEX_op_mov_i64:
+ case INDEX_op_mov:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
case INDEX_op_bswap16_i32:
@@ -391,10 +391,10 @@ Misc
.. list-table::
-   * - mov_i32/i64 *t0*, *t1*
+   * - mov *t0*, *t1*

     - | *t0* = *t1*
-       | Move *t1* to *t0* (both operands must have the same type).
+       | Move *t1* to *t0*.

   * - bswap16_i32/i64 *t0*, *t1*, *flags*
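
Frontend code is unchanged by the merge: the typed generators pick the width,
which now travels with the op rather than with the opcode name. A hedged
usage sketch (temp names arbitrary):

    TCGv_i64 dst = tcg_temp_new_i64();
    TCGv_i64 src = tcg_temp_new_i64();
    /* Emits INDEX_op_mov; the I64 width is recorded on the op itself. */
    tcg_gen_mov_i64(dst, src);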
@@ -2488,8 +2488,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType ext,
tcg_out_mb(s, a0);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -2115,7 +2115,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
tcg_out_mb(s, args[0]);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -3011,8 +3011,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
case INDEX_op_mb:
tcg_out_mb(s, a0);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -1690,8 +1690,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
tcg_out_qemu_ldst_i128(s, a0, a1, a2, a3, false);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -2101,8 +2101,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
case INDEX_op_mb:
tcg_out_mb(s, a0);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -3468,8 +3468,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
tcg_out_mb(s, args[0]);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -2380,8 +2380,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
}
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -2776,8 +2776,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
}
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -1512,8 +1512,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
tcg_out_arithi(s, a0, a1, a2, SHIFT_SRA);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
@@ -483,18 +483,7 @@ static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg val, TCGReg base,
static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
- switch (type) {
- case TCG_TYPE_I32:
- tcg_out_op_rr(s, INDEX_op_mov_i32, ret, arg);
- break;
-#if TCG_TARGET_REG_BITS == 64
- case TCG_TYPE_I64:
- tcg_out_op_rr(s, INDEX_op_mov_i64, ret, arg);
- break;
-#endif
- default:
- g_assert_not_reached();
- }
+ tcg_out_op_rr(s, INDEX_op_mov, ret, arg);
return true;
}
@@ -799,8 +788,6 @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc, TCGType type,
tcg_out_op_v(s, opc);
break;
- case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
- case INDEX_op_mov_i64:
case INDEX_op_call: /* Always emitted via tcg_out_call. */
case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */