git.xonotic.org Git - xonotic/gmqcc.git/blobdiff - ir.c
Fix fold_superfluous
[xonotic/gmqcc.git] / ir.c
diff --git a/ir.c b/ir.c
index a7dd5acbe2a9c1565fd228a0826ff97653a866da..f33e48c54ee64d601cdbbb9f17eb1cc0c48eb126 100644 (file)
--- a/ir.c
+++ b/ir.c
@@ -1878,16 +1878,20 @@ ir_value* ir_block_create_unary(ir_block *self, lex_ctx_t ctx,
         case INSTR_NOT_V:
         case INSTR_NOT_S:
         case INSTR_NOT_ENT:
-        case INSTR_NOT_FNC:
-#if 0
-        case INSTR_NOT_I:
-#endif
+        case INSTR_NOT_FNC: /*
+        case INSTR_NOT_I:   */
             ot = TYPE_FLOAT;
             break;
-        /* QC doesn't have other unary operations. We expect extensions to fill
-         * the above list, otherwise we assume out-type = in-type, eg for an
-         * unary minus
+
+        /*
+         * Negation for virtual instructions is emulated with 0-value. Thankfully
+         * the operand for 0 already exists so we just source it from here.
          */
+        case VINSTR_NEG_F:
+            return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_F, NULL, operand, ot);
+        case VINSTR_NEG_V:
+            return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_V, NULL, operand, ot);
+
         default:
             ot = operand->vtype;
             break;
@@ -2799,10 +2803,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
     {
         instr = block->instr[i];
 
-        /* Ignore NOP instruction */
-        if (instr->opcode == VINSTR_NOP)
-            continue;
-
         if (instr->opcode == VINSTR_PHI) {
             irerror(block->context, "cannot generate virtual instruction (phi)");
             return false;
@@ -3178,32 +3178,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
         if (instr->_ops[2])
             stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
 
-        if (stmt.opcode == INSTR_NOT_F) {
-            /*
-             * We can optimize for superfluous cases of not.
-             */
-            if (i + 4 <= vec_size(block->instr)) {
-                for (j = 0; j < 2; j++) {
-                    if (ir_value_code_addr(block->instr[i+j]->_ops[0]) != ir_value_code_addr(block->instr[i+j]->_ops[1]))
-                        break;
-                }
-                if (--j && block->instr[i+2]->_ops[0] && block->instr[i+2]->_ops[1]
-                        && block->instr[i+3]->_ops[0] && block->instr[i+2]->_ops[1]
-                        && ir_value_code_addr(block->instr[i+2]->_ops[1]) == ir_value_code_addr(block->instr[i+3]->_ops[0])
-                        && ir_value_code_addr(block->instr[i+2]->_ops[0]) == ir_value_code_addr(block->instr[i+3]->_ops[1]))
-                {
-                    code_push_statement(code, &stmt, instr->context);
-                    code_push_statement(code, &stmt, instr->context);
-                    for (j = 1; j < 4; j++)
-                        block->instr[i+j]->opcode = VINSTR_NOP;
-                    ++opts_optimizationcount[OPTIM_PEEPHOLE];
-                    continue;
-                }
-            }
-            code_push_statement(code, &stmt, instr->context);
-            continue;
-        }
-
         if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
         {
             stmt.o1.u1 = stmt.o3.u1;
@@ -3228,7 +3202,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
                 continue;
             }
         }
-
         code_push_statement(code, &stmt, instr->context);
     }
     return true;