git.xonotic.org Git - xonotic/gmqcc.git/commitdiff
Optimize for superfluous cases of NOT, i.e. !!!!x can be simplified to !!x.
author Dale Weiler <killfieldengine@gmail.com>
Sat, 28 Sep 2013 09:34:53 +0000 (05:34 -0400)
committer Dale Weiler <killfieldengine@gmail.com>
Sat, 28 Sep 2013 09:34:53 +0000 (05:34 -0400)
gmqcc.h
ir.c
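
The change adds a small peephole to the code generator: when four consecutive NOT_F instructions form a chain over the same temporaries (the instruction stream produced for !!!!x), only two of them need to be emitted, since the first pair already normalizes the operand to 0 or 1 and every further pair is redundant. The standalone sketch below illustrates the idea only; toy_instr, TOY_NOT_F and TOY_NOP are made-up stand-ins for gmqcc's IR, it generalizes to runs of any length, and it skips the operand-address checks the real patch performs via ir_value_code_addr().

/* Hypothetical sketch only: these types are not gmqcc's real IR. */
#include <stdio.h>
#include <stddef.h>

enum { TOY_NOT_F, TOY_NOP, TOY_STORE };

typedef struct { int opcode; } toy_instr;

/*
 * Collapse each run of chained NOTs down to one (odd run) or two (even
 * run) instructions: after the first NOT the value is already 0 or 1,
 * so only the parity of the remaining NOTs matters.  Surplus
 * instructions are retagged as NOPs rather than removed.
 */
static size_t collapse_not_runs(toy_instr *in, size_t n) {
    size_t removed = 0;
    for (size_t i = 0; i < n; ) {
        if (in[i].opcode != TOY_NOT_F) { i++; continue; }
        size_t run = 0;
        while (i + run < n && in[i + run].opcode == TOY_NOT_F)
            run++;
        size_t keep = (run % 2) ? 1 : 2;
        for (size_t j = keep; j < run; j++) {
            in[i + j].opcode = TOY_NOP;
            removed++;
        }
        i += run;
    }
    return removed;
}

int main(void) {
    /* Models the stream for "y = !!!!x;": four chained NOTs, then a store. */
    toy_instr code[] = {
        { TOY_NOT_F }, { TOY_NOT_F }, { TOY_NOT_F }, { TOY_NOT_F }, { TOY_STORE }
    };
    printf("NOTs retagged as NOPs: %zu\n",
           collapse_not_runs(code, sizeof(code) / sizeof(code[0]))); /* prints 2 */
    return 0;
}

In the real patch the redundant instructions are not deleted from the block; they are merely retagged as VINSTR_NOP so the emitter skips them, which is what the gmqcc.h hunk below introduces.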

diff --git a/gmqcc.h b/gmqcc.h
index d8ad37465ef0525ea2298a9eb484f7a6e9220f04..6f42d440f1c783227ae977278b0bce4b93a4af5d 100644 (file)
--- a/gmqcc.h
+++ b/gmqcc.h
@@ -726,7 +726,13 @@ enum {
     VINSTR_BITXOR,
     VINSTR_BITXOR_V,
     VINSTR_BITXOR_VF,
-    VINSTR_CROSS
+    VINSTR_CROSS,
+
+    /*
+     * An instruction that is never emitted; useful for marking an ir_instr
+     * as not to be generated (just set its ->opcode member to VINSTR_NOP).
+     */
+    VINSTR_NOP
 };
 
 /* TODO: elide */
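
As the new comment in gmqcc.h says, VINSTR_NOP is never emitted: a pass that wants to drop an already-built ir_instr simply overwrites its opcode, and the generator (see the first ir.c hunk below) skips it. A minimal sketch of that convention, using a hypothetical trimmed-down struct rather than the real ir_instr:

/* Hypothetical stand-ins only; gmqcc's real ir_instr has many more members. */
#include <stdio.h>

enum { TOY_INSTR_NOT_F = 5, TOY_VINSTR_NOP = 1000 };

typedef struct { int opcode; } toy_instr;

/* The emitter skips anything retagged as a NOP, mirroring the
 * "if (instr->opcode == VINSTR_NOP) continue;" check added in ir.c. */
static void toy_emit(const toy_instr *instrs, int count) {
    for (int i = 0; i < count; i++) {
        if (instrs[i].opcode == TOY_VINSTR_NOP)
            continue;
        printf("emit opcode %d\n", instrs[i].opcode);
    }
}

int main(void) {
    toy_instr chain[4] = {
        { TOY_INSTR_NOT_F }, { TOY_INSTR_NOT_F },
        { TOY_INSTR_NOT_F }, { TOY_INSTR_NOT_F }
    };
    /* Retire the redundant half of a !!!!x chain by overwriting the opcode;
     * only two NOT_F instructions reach the output. */
    chain[2].opcode = TOY_VINSTR_NOP;
    chain[3].opcode = TOY_VINSTR_NOP;
    toy_emit(chain, 4);
    return 0;
}

Retagging instead of deleting keeps the block's instruction vector and its indices stable, which matters here because the peephole runs inside the same loop that is still iterating over that vector.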
diff --git a/ir.c b/ir.c
index 0411c921d1cce4f58295c4baee2309edcd756c84..e911b026acaae35af153d69d8e54128cf08e27f8 100644 (file)
--- a/ir.c
+++ b/ir.c
@@ -2799,6 +2799,10 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
     {
         instr = block->instr[i];
 
+        /* Ignore NOP instruction */
+        if (instr->opcode == VINSTR_NOP)
+            continue;
+
         if (instr->opcode == VINSTR_PHI) {
             irerror(block->context, "cannot generate virtual instruction (phi)");
             return false;
@@ -3174,6 +3178,32 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
         if (instr->_ops[2])
             stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
 
+        if (stmt.opcode == INSTR_NOT_F) {
+            /*
+             * Peephole: collapse superfluous chains of NOT, e.g. !!!!x emits as !!x.
+             */
+            if (i + 4 <= vec_size(block->instr)) {
+                for (j = 0; j < 2; j++) {
+                    if (ir_value_code_addr(block->instr[i+j]->_ops[0]) != ir_value_code_addr(block->instr[i+j]->_ops[1]))
+                        break;
+                }
+                if (--j && block->instr[i+2]->_ops[0] && block->instr[i+2]->_ops[1]
+                        && block->instr[i+3]->_ops[0] && block->instr[i+3]->_ops[1]
+                        && ir_value_code_addr(block->instr[i+2]->_ops[1]) == ir_value_code_addr(block->instr[i+3]->_ops[0])
+                        && ir_value_code_addr(block->instr[i+2]->_ops[0]) == ir_value_code_addr(block->instr[i+3]->_ops[1]))
+                {
+                    code_push_statement(code, &stmt, instr->context);
+                    code_push_statement(code, &stmt, instr->context);
+                    for (j = 1; j < 4; j++)
+                        block->instr[i+j]->opcode = VINSTR_NOP;
+                    ++opts_optimizationcount[OPTIM_PEEPHOLE];
+                    continue;
+                }
+            }
+            code_push_statement(code, &stmt, instr->context);
+            continue;
+        }
+
         if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
         {
             stmt.o1.u1 = stmt.o3.u1;