X-Git-Url: https://git.xonotic.org/?p=xonotic%2Fgmqcc.git;a=blobdiff_plain;f=ir.c;h=7df8bdbff6d7a51b33561a80edb4c96097dc167c;hp=a7dd5acbe2a9c1565fd228a0826ff97653a866da;hb=337d7ddbf4c330087e9923b580c757ec0f33fcb1;hpb=c4e92df106026d8c225b3edec89ddef49f1caa43

diff --git a/ir.c b/ir.c
index a7dd5ac..7df8bdb 100644
--- a/ir.c
+++ b/ir.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013
+ * Copyright (C) 2012, 2013, 2014
  *     Wolfgang Bumiller
  *     Dale Weiler
  *
@@ -355,6 +355,8 @@ ir_builder* ir_builder_new(const char *modulename)
     }
     self->reserved_va_count = NULL;
+    self->coverage_func = NULL;
+
     self->code = code_init();
 
     return self;
@@ -602,6 +604,10 @@ ir_block* ir_function_create_block(lex_ctx_t ctx, ir_function *self, const char
     ir_block* bn = ir_block_new(self, label);
     bn->context = ctx;
     vec_push(self->blocks, bn);
+
+    if ((self->flags & IR_FLAG_BLOCK_COVERAGE) && self->owner->coverage_func)
+        (void)ir_block_create_call(bn, ctx, NULL, self->owner->coverage_func, false);
+
     return bn;
 }
 
@@ -613,7 +619,7 @@ static bool instr_is_operation(uint16_t op)
             (op >= INSTR_NOT_F  && op <= INSTR_NOT_FNC) ||
             (op >= INSTR_AND    && op <= INSTR_BITOR) ||
             (op >= INSTR_CALL0  && op <= INSTR_CALL8) ||
-            (op >= VINSTR_BITAND_V && op <= VINSTR_CROSS) );
+            (op >= VINSTR_BITAND_V && op <= VINSTR_NEG_V) );
 }
 
 static bool ir_function_pass_peephole(ir_function *self)
@@ -1583,7 +1589,9 @@ bool ir_block_create_return(ir_block *self, lex_ctx_t ctx, ir_value *v)
     ir_instr *in;
     if (!ir_check_unreachable(self))
         return false;
+
     self->final = true;
+
+    self->is_return = true;
     in = ir_instr_new(ctx, self, INSTR_RETURN);
     if (!in)
@@ -1878,16 +1886,20 @@ ir_value* ir_block_create_unary(ir_block *self, lex_ctx_t ctx,
     case INSTR_NOT_V:
     case INSTR_NOT_S:
     case INSTR_NOT_ENT:
-    case INSTR_NOT_FNC:
-#if 0
-    case INSTR_NOT_I:
-#endif
+    case INSTR_NOT_FNC: /*
+    case INSTR_NOT_I: */
         ot = TYPE_FLOAT;
         break;
-    /* QC doesn't have other unary operations. We expect extensions to fill
-     * the above list, otherwise we assume out-type = in-type, eg for an
-     * unary minus
+
+    /*
+     * Negation for virtual instructions is emulated with 0-value. Thankfully
+     * the operand for 0 already exists so we just source it from here.
      */
+    case VINSTR_NEG_F:
+        return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_F, NULL, operand, ot);
+    case VINSTR_NEG_V:
+        return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_V, NULL, operand, TYPE_VECTOR);
+
     default:
         ot = operand->vtype;
         break;
@@ -2799,10 +2811,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
     {
         instr = block->instr[i];
 
-        /* Ignore NOP instruction */
-        if (instr->opcode == VINSTR_NOP)
-            continue;
-
         if (instr->opcode == VINSTR_PHI) {
             irerror(block->context, "cannot generate virtual instruction (phi)");
             return false;
@@ -3178,32 +3186,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
         if (instr->_ops[2])
             stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
 
-        if (stmt.opcode == INSTR_NOT_F) {
-            /*
-             * We can optimize for superfluous cases of not.
-             */
-            if (i + 4 <= vec_size(block->instr)) {
-                for (j = 0; j < 2; j++) {
-                    if (ir_value_code_addr(block->instr[i+j]->_ops[0]) != ir_value_code_addr(block->instr[i+j]->_ops[1]))
-                        break;
-                }
-                if (--j && block->instr[i+2]->_ops[0] && block->instr[i+2]->_ops[1]
-                        && block->instr[i+3]->_ops[0] && block->instr[i+2]->_ops[1]
-                        && ir_value_code_addr(block->instr[i+2]->_ops[1]) == ir_value_code_addr(block->instr[i+3]->_ops[0])
-                        && ir_value_code_addr(block->instr[i+2]->_ops[0]) == ir_value_code_addr(block->instr[i+3]->_ops[1]))
-                {
-                    code_push_statement(code, &stmt, instr->context);
-                    code_push_statement(code, &stmt, instr->context);
-                    for (j = 1; j < 4; j++)
-                        block->instr[i+j]->opcode = VINSTR_NOP;
-                    ++opts_optimizationcount[OPTIM_PEEPHOLE];
-                    continue;
-                }
-            }
-            code_push_statement(code, &stmt, instr->context);
-            continue;
-        }
-
         if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
         {
             stmt.o1.u1 = stmt.o3.u1;
@@ -3228,7 +3210,6 @@ static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *bloc
                 continue;
             }
         }
-
         code_push_statement(code, &stmt, instr->context);
     }
     return true;
@@ -4004,10 +3985,6 @@ bool ir_builder_generate(ir_builder *self, const char *filename)
 
 #define IND_BUFSZ 1024
 
-#ifdef _MSC_VER
-#   define strncat(dst, src, sz) strncat_s(dst, sz, src, _TRUNCATE)
-#endif
-
 static const char *qc_opname(int op)
 {
     if (op < 0) return "";
@@ -4026,6 +4003,8 @@ static const char *qc_opname(int op)
         case VINSTR_BITOR_VF:  return "BITOR_VF";
         case VINSTR_BITXOR_VF: return "BITXOR_VF";
         case VINSTR_CROSS:     return "CROSS";
+        case VINSTR_NEG_F:     return "NEG_F";
+        case VINSTR_NEG_V:     return "NEG_V";
         default:               return "";
     }
 }
@@ -4064,7 +4043,7 @@ void ir_function_dump(ir_function *f, char *ind,
         return;
     }
     oprintf("%sfunction %s\n", ind, f->name);
-    strncat(ind, "\t", IND_BUFSZ-1);
+    util_strncat(ind, "\t", IND_BUFSZ-1);
     if (vec_size(f->locals))
     {
         oprintf("%s%i locals:\n", ind, (int)vec_size(f->locals));
@@ -4160,7 +4139,7 @@ void ir_block_dump(ir_block* b, char *ind,
 {
     size_t i;
     oprintf("%s:%s\n", ind, b->label);
-    strncat(ind, "\t", IND_BUFSZ-1);
+    util_strncat(ind, "\t", IND_BUFSZ-1);
 
     if (b->instr && b->instr[0])
         oprintf("%s (%i) [entry]\n", ind, (int)(b->instr[0]->eid-1));
@@ -4194,7 +4173,7 @@ void ir_instr_dump(ir_instr *in, char *ind,
         return;
     }
 
-    strncat(ind, "\t", IND_BUFSZ-1);
+    util_strncat(ind, "\t", IND_BUFSZ-1);
 
     if (in->_ops[0] && (in->_ops[1] || in->_ops[2])) {
         ir_value_dump(in->_ops[0], oprintf);
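
A minimal standalone sketch of the lowering described by the VINSTR_NEG_F/VINSTR_NEG_V hunk above: unary negation is emitted as a subtraction from the existing zero operand, so the ordinary SUB instructions do the work and no dedicated negate opcode is needed. This is illustration only, not gmqcc code; the helper names and the vec3_t type below are hypothetical.

/* Sketch only: hypothetical helpers, not gmqcc's API. Demonstrates the
 * "negation as subtraction from zero" idea behind VINSTR_NEG_F / VINSTR_NEG_V. */
#include <stdio.h>

typedef struct { float x, y, z; } vec3_t;

/* float negation lowered to a SUB_F-style subtraction with a zero left operand */
static float emulate_neg_f(float operand) {
    const float zero = 0.0f;    /* stands in for the 0 constant the IR already has */
    return zero - operand;
}

/* vector negation lowered to a SUB_V-style subtraction with a zero-vector operand */
static vec3_t emulate_neg_v(vec3_t operand) {
    const vec3_t zero = { 0.0f, 0.0f, 0.0f };
    vec3_t out = { zero.x - operand.x, zero.y - operand.y, zero.z - operand.z };
    return out;
}

int main(void) {
    vec3_t v = { 1.0f, -2.0f, 3.0f };
    vec3_t n = emulate_neg_v(v);
    printf("NEG_F(1.5) = %g\n", emulate_neg_f(1.5f));
    printf("NEG_V(1, -2, 3) = (%g, %g, %g)\n", n.x, n.y, n.z);
    return 0;
}

In the diff itself this shows up as ir_block_create_unary forwarding VINSTR_NEG_F and VINSTR_NEG_V to ir_block_create_general_instr with INSTR_SUB_F/INSTR_SUB_V and a NULL left operand, which is later resolved to the already-existing 0 value as the hunk's comment notes.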