+}
+
+/***********************************************************************
+ * IR Code-Generation
+ *
+ * Since the IR has the convention of putting 'write' operands
+ * at the beginning, we have to rotate the operands of instructions
+ * properly in order to generate valid QCVM code.
+ *
+ * Having destinations at a fixed position is more convenient. In QC
+ * this is *mostly* OPC, but FTE adds at least 2 instructions which
+ * read from OPA, and store to OPB rather than OPC. Which is
+ * partially the reason why the implementation of these instructions
+ * in darkplaces has been delayed for so long.
+ *
+ * Breaking conventions is annoying...
+ */
+static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal);
+
+/* Emit the global-space data backing a field-typed global.
+ *
+ * For a constant field global, the referenced field's value (its
+ * entity-field offset) is copied out of code_globals; for a
+ * non-constant one a zero-initialized slot is reserved. Vector
+ * fields occupy three consecutive slots (x, y, z components).
+ *
+ * Returns false on a missing field target, on a required-but-
+ * unimplemented relocation, or an invalid resulting address.
+ */
+static bool gen_global_field(ir_value *global)
+{
+    if (global->isconst)
+    {
+        ir_value *fld = global->constval.vpointer;
+        if (!fld) {
+            irerror(global->context, "Invalid field constant with no field: %s", global->name);
+            return false;
+        }
+
+        /* Now, in this case, a relocation would be impossible to code
+         * since it looks like this:
+         * .vector v = origin; <- parse error, wtf is 'origin'?
+         * .vector origin;
+         *
+         * But we will need a general relocation support later anyway
+         * for functions... might as well support that here.
+         */
+        if (!fld->code.globaladdr) {
+            irerror(global->context, "FIXME: Relocation support");
+            return false;
+        }
+
+        /* copy the field's value */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        vec_push(code_globals, code_globals[fld->code.globaladdr]);
+        if (global->fieldtype == TYPE_VECTOR) {
+            /* y and z live at the next two entity-field offsets */
+            vec_push(code_globals, code_globals[fld->code.globaladdr]+1);
+            vec_push(code_globals, code_globals[fld->code.globaladdr]+2);
+        }
+    }
+    else
+    {
+        /* non-constant: just reserve zeroed storage */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        vec_push(code_globals, 0);
+        if (global->fieldtype == TYPE_VECTOR) {
+            vec_push(code_globals, 0);
+            vec_push(code_globals, 0);
+        }
+    }
+    if (global->code.globaladdr < 0)
+        return false;
+    return true;
+}
+
+/* Emit the global-space data backing a pointer-typed global.
+ * A constant pointer stores its target's global address; a
+ * non-constant pointer just reserves a zero-initialized slot.
+ */
+static bool gen_global_pointer(ir_value *global)
+{
+    ir_value *target;
+
+    if (!global->isconst) {
+        /* plain pointer variable: reserve a zeroed slot */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        vec_push(code_globals, 0);
+        return global->code.globaladdr >= 0;
+    }
+
+    target = global->constval.vpointer;
+    if (!target) {
+        irerror(global->context, "Invalid pointer constant: %s", global->name);
+        /* NULL pointers are pointing to the NULL constant, which also
+         * sits at address 0, but still has an ir_value for itself.
+         */
+        return false;
+    }
+
+    /* Here, relocations ARE possible - in fteqcc-enhanced-qc:
+     * void() foo; <- proto
+     * void() *fooptr = &foo;
+     * void() foo = { code }
+     */
+    if (!target->code.globaladdr) {
+        /* FIXME: Check for the constant nullptr ir_value!
+         * because then code.globaladdr being 0 is valid.
+         */
+        irerror(global->context, "FIXME: Relocation support");
+        return false;
+    }
+
+    ir_value_code_setaddr(global, vec_size(code_globals));
+    vec_push(code_globals, target->code.globaladdr);
+    return global->code.globaladdr >= 0;
+}
+
+/* Translate one basic block (and, via the 'tailcall' label, any
+ * not-yet-generated successor) into QCVM statements.
+ *
+ * Virtual instructions are lowered here:
+ *  - VINSTR_PHI must have been eliminated earlier (hard error)
+ *  - VINSTR_JUMP becomes INSTR_GOTO, or simply falls through when
+ *    the target block was not generated yet
+ *  - VINSTR_COND becomes INSTR_IF/INSTR_IFNOT with jump fixups
+ * CALL* copies parameters to OFS_PARM* and stores OFS_RETURN back
+ * when the result outlives the call. Since the IR keeps the 'write'
+ * operand first while QCVM mostly expects it in OPC, operands are
+ * rotated at the end for plain instructions.
+ *
+ * BUGFIX: the OFS_RETURN store for a TYPE_FIELD result previously
+ * indexed field_store_instr with retvalue->vtype (always TYPE_FIELD)
+ * instead of retvalue->fieldtype, inconsistent with the parameter
+ * path which correctly uses param->fieldtype.
+ */
+static bool gen_blocks_recursive(ir_function *func, ir_block *block)
+{
+    prog_section_statement stmt;
+    ir_instr *instr;
+    ir_block *target;
+    ir_block *ontrue;
+    ir_block *onfalse;
+    size_t stidx;
+    size_t i;
+
+tailcall:
+    block->generated = true;
+    block->code_start = vec_size(code_statements);
+    for (i = 0; i < vec_size(block->instr); ++i)
+    {
+        instr = block->instr[i];
+
+        if (instr->opcode == VINSTR_PHI) {
+            irerror(block->context, "cannot generate virtual instruction (phi)");
+            return false;
+        }
+
+        if (instr->opcode == VINSTR_JUMP) {
+            target = instr->bops[0];
+            /* for unconditional jumps, if the target hasn't been generated
+             * yet, we generate them right here.
+             */
+            if (!target->generated) {
+                block = target;
+                goto tailcall;
+            }
+
+            /* otherwise we generate a jump instruction */
+            stmt.opcode = INSTR_GOTO;
+            stmt.o1.s1 = (target->code_start) - vec_size(code_statements);
+            stmt.o2.s1 = 0;
+            stmt.o3.s1 = 0;
+            vec_push(code_statements, stmt);
+
+            /* no further instructions can be in this block */
+            return true;
+        }
+
+        if (instr->opcode == VINSTR_COND) {
+            ontrue = instr->bops[0];
+            onfalse = instr->bops[1];
+            /* TODO: have the AST signal which block should
+             * come first: eg. optimize IFs without ELSE...
+             */
+
+            stmt.o1.u1 = ir_value_code_addr(instr->_ops[0]);
+            stmt.o2.u1 = 0;
+            stmt.o3.s1 = 0;
+
+            if (ontrue->generated) {
+                stmt.opcode = INSTR_IF;
+                stmt.o2.s1 = (ontrue->code_start) - vec_size(code_statements);
+                vec_push(code_statements, stmt);
+            }
+            if (onfalse->generated) {
+                stmt.opcode = INSTR_IFNOT;
+                stmt.o2.s1 = (onfalse->code_start) - vec_size(code_statements);
+                vec_push(code_statements, stmt);
+            }
+            if (!ontrue->generated) {
+                if (onfalse->generated) {
+                    block = ontrue;
+                    goto tailcall;
+                }
+            }
+            if (!onfalse->generated) {
+                if (ontrue->generated) {
+                    block = onfalse;
+                    goto tailcall;
+                }
+            }
+            /* neither ontrue nor onfalse exist */
+            stmt.opcode = INSTR_IFNOT;
+            stidx = vec_size(code_statements);
+            vec_push(code_statements, stmt);
+            /* on false we jump, so add ontrue-path */
+            if (!gen_blocks_recursive(func, ontrue))
+                return false;
+            /* fixup the jump address */
+            code_statements[stidx].o2.s1 = vec_size(code_statements) - stidx;
+            /* generate onfalse path */
+            if (onfalse->generated) {
+                /* fixup the jump address */
+                code_statements[stidx].o2.s1 = (onfalse->code_start) - (stidx);
+                /* may have been generated in the previous recursive call */
+                stmt.opcode = INSTR_GOTO;
+                stmt.o1.s1 = (onfalse->code_start) - vec_size(code_statements);
+                stmt.o2.s1 = 0;
+                stmt.o3.s1 = 0;
+                vec_push(code_statements, stmt);
+                return true;
+            }
+            /* if not, generate now */
+            block = onfalse;
+            goto tailcall;
+        }
+
+        if (instr->opcode >= INSTR_CALL0 && instr->opcode <= INSTR_CALL8) {
+            /* Trivial call translation:
+             * copy all params to OFS_PARM*
+             * if the output's storetype is not store_return,
+             * append a STORE instruction!
+             *
+             * NOTES on how to do it better without much trouble:
+             * -) The liferanges!
+             *    Simply check the liferange of all parameters for
+             *    other CALLs. For each param with no CALL in its
+             *    liferange, we can store it in an OFS_PARM at
+             *    generation already. This would even include later
+             *    reuse.... probably... :)
+             */
+            size_t p;
+            ir_value *retvalue;
+
+            for (p = 0; p < vec_size(instr->params); ++p)
+            {
+                ir_value *param = instr->params[p];
+
+                stmt.opcode = INSTR_STORE_F;
+                stmt.o3.u1 = 0;
+
+                /* fields store by their content type, not TYPE_FIELD */
+                if (param->vtype == TYPE_FIELD)
+                    stmt.opcode = field_store_instr[param->fieldtype];
+                else
+                    stmt.opcode = type_store_instr[param->vtype];
+                stmt.o1.u1 = ir_value_code_addr(param);
+                stmt.o2.u1 = OFS_PARM0 + 3 * p;
+                vec_push(code_statements, stmt);
+            }
+            stmt.opcode = INSTR_CALL0 + vec_size(instr->params);
+            if (stmt.opcode > INSTR_CALL8)
+                stmt.opcode = INSTR_CALL8;
+            stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
+            stmt.o2.u1 = 0;
+            stmt.o3.u1 = 0;
+            vec_push(code_statements, stmt);
+
+            retvalue = instr->_ops[0];
+            if (retvalue && retvalue->store != store_return && vec_size(retvalue->life))
+            {
+                /* not to be kept in OFS_RETURN */
+                if (retvalue->vtype == TYPE_FIELD)
+                    stmt.opcode = field_store_instr[retvalue->fieldtype];
+                else
+                    stmt.opcode = type_store_instr[retvalue->vtype];
+                stmt.o1.u1 = OFS_RETURN;
+                stmt.o2.u1 = ir_value_code_addr(retvalue);
+                stmt.o3.u1 = 0;
+                vec_push(code_statements, stmt);
+            }
+            continue;
+        }
+
+        if (instr->opcode == INSTR_STATE) {
+            irerror(block->context, "TODO: state instruction");
+            return false;
+        }
+
+        stmt.opcode = instr->opcode;
+        stmt.o1.u1 = 0;
+        stmt.o2.u1 = 0;
+        stmt.o3.u1 = 0;
+
+        /* This is the general order of operands */
+        if (instr->_ops[0])
+            stmt.o3.u1 = ir_value_code_addr(instr->_ops[0]);
+
+        if (instr->_ops[1])
+            stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
+
+        if (instr->_ops[2])
+            stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
+
+        if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
+        {
+            /* RETURN reads its operand from OPA */
+            stmt.o1.u1 = stmt.o3.u1;
+            stmt.o3.u1 = 0;
+        }
+        else if ((stmt.opcode >= INSTR_STORE_F &&
+                  stmt.opcode <= INSTR_STORE_FNC) ||
+                 (stmt.opcode >= INSTR_STOREP_F &&
+                  stmt.opcode <= INSTR_STOREP_FNC))
+        {
+            /* 2-operand instructions with A -> B */
+            stmt.o2.u1 = stmt.o3.u1;
+            stmt.o3.u1 = 0;
+        }
+
+        vec_push(code_statements, stmt);
+    }
+    return true;
+}
+
+/* Generate the statement stream for a whole function, starting at
+ * its entry block, and terminate it with AINSTR_END.
+ */
+static bool gen_function_code(ir_function *self)
+{
+    prog_section_statement stmt;
+    ir_block *entry;
+
+    /* Starting from entry point, we generate blocks "as they come"
+     * for now. Dead blocks will not be translated obviously.
+     */
+    if (!vec_size(self->blocks)) {
+        irerror(self->context, "Function '%s' declared without body.", self->name);
+        return false;
+    }
+
+    entry = self->blocks[0];
+    if (entry->generated)
+        return true;
+
+    if (!gen_blocks_recursive(self, entry)) {
+        irerror(self->context, "failed to generate blocks for '%s'", self->name);
+        return false;
+    }
+
+    /* otherwise code_write crashes since it debug-prints functions until AINSTR_END */
+    stmt.opcode = AINSTR_END;
+    stmt.o1.u1 = stmt.o2.u1 = stmt.o3.u1 = 0;
+    vec_push(code_statements, stmt);
+    return true;
+}
+
+/* Return the string-table index for a source filename, interning it
+ * on first use and caching the mapping on the builder.
+ */
+static qcint ir_builder_filestring(ir_builder *ir, const char *filename)
+{
+    /* NOTE: filename pointers are copied, we never strdup them,
+     * thus we can use pointer-comparison to find the string.
+     */
+    size_t idx;
+    qcint interned;
+
+    for (idx = 0; idx != vec_size(ir->filenames); ++idx) {
+        if (filename == ir->filenames[idx])
+            return ir->filestrings[idx];
+    }
+
+    interned = code_genstring(filename);
+    vec_push(ir->filenames, filename);
+    vec_push(ir->filestrings, interned);
+    return interned;
+}
+
+/* Emit the prog_section_function entry for a function global and
+ * allocate its local/temp storage in code_globals.
+ *
+ * Layout: the declared locals are generated first (starting at
+ * fun.firstlocal), then SSA temporaries are addressed right after
+ * the last declared local via their relative code.local offsets,
+ * and 'allocated_locals' zero slots are pushed to back them.
+ * Builtins store their builtin number as entry; real functions get
+ * a provisional entry that gen_global_function_code patches later
+ * through code_function_def.
+ */
+static bool gen_global_function(ir_builder *ir, ir_value *global)
+{
+    prog_section_function fun;
+    ir_function *irfun;
+
+    size_t i;
+    size_t local_var_end;
+
+    if (!global->isconst || (!global->constval.vfunc))
+    {
+        irerror(global->context, "Invalid state of function-global: not constant: %s", global->name);
+        return false;
+    }
+
+    irfun = global->constval.vfunc;
+
+    fun.name = global->code.name;
+    fun.file = ir_builder_filestring(ir, global->context.file);
+    fun.profile = 0; /* always 0 */
+    fun.nargs = vec_size(irfun->params);
+
+    /* QCVM functions carry at most 8 argsize entries; params[]
+     * presumably holds type enums indexing type_sizeof - TODO confirm
+     */
+    for (i = 0;i < 8; ++i) {
+        if (i >= fun.nargs)
+            fun.argsize[i] = 0;
+        else
+            fun.argsize[i] = type_sizeof[irfun->params[i]];
+    }
+
+    fun.firstlocal = vec_size(code_globals);
+
+    /* declared locals push their own storage via ir_builder_gen_global */
+    local_var_end = fun.firstlocal;
+    for (i = 0; i < vec_size(irfun->locals); ++i) {
+        if (!ir_builder_gen_global(ir, irfun->locals[i], true)) {
+            irerror(irfun->locals[i]->context, "Failed to generate local %s", irfun->locals[i]->name);
+            return false;
+        }
+    }
+    /* SSA temporaries start right after the last declared local */
+    if (vec_size(irfun->locals)) {
+        ir_value *last = vec_last(irfun->locals);
+        local_var_end = last->code.globaladdr;
+        local_var_end += type_sizeof[last->vtype];
+    }
+    for (i = 0; i < vec_size(irfun->values); ++i)
+    {
+        /* generate code.globaladdr for ssa values */
+        ir_value *v = irfun->values[i];
+        ir_value_code_setaddr(v, local_var_end + v->code.local);
+    }
+    for (i = 0; i < irfun->allocated_locals; ++i) {
+        /* fill the locals with zeros */
+        vec_push(code_globals, 0);
+    }
+
+    fun.locals = vec_size(code_globals) - fun.firstlocal;
+
+    if (irfun->builtin)
+        fun.entry = irfun->builtin;
+    else {
+        /* remember where our function-def sits so the real entry
+         * point can be patched in once statements are generated
+         */
+        irfun->code_function_def = vec_size(code_functions);
+        fun.entry = vec_size(code_statements);
+    }
+
+    vec_push(code_functions, fun);
+    return true;
+}
+
+/* Emit the statement stream for a function global and patch the
+ * entry point of its previously-emitted function-def. Builtins and
+ * bodiless function pointers produce no code.
+ */
+static bool gen_global_function_code(ir_builder *ir, ir_value *global)
+{
+    ir_function *irfun = global->constval.vfunc;
+
+    if (!irfun) {
+        irwarning(global->context, WARN_IMPLICIT_FUNCTION_POINTER,
+                  "function `%s` has no body and in QC implicitly becomes a function-pointer", global->name);
+        /* this was a function pointer, don't generate code for those */
+        return true;
+    }
+
+    if (irfun->builtin)
+        return true;
+
+    if (irfun->code_function_def < 0) {
+        irerror(irfun->context, "`%s`: IR global wasn't generated, failed to access function-def", irfun->name);
+        return false;
+    }
+
+    /* the real entry point is wherever the statements start */
+    code_functions[irfun->code_function_def].entry = vec_size(code_statements);
+
+    if (!gen_function_code(irfun)) {
+        irerror(irfun->context, "Failed to generate code for function %s", irfun->name);
+        return false;
+    }
+    return true;
+}
+
+/* Generate global-space storage and a def entry for one global, or
+ * for a function-local when 'islocal' is set.
+ *
+ * Non-constant globals of savable types get DEF_SAVEGLOBAL; locals
+ * never do. Constants are written directly into code_globals.
+ */
+static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal)
+{
+    size_t i;
+    int32_t *iptr;
+    prog_section_def def;
+
+    def.type = global->vtype;
+    def.offset = vec_size(code_globals);
+
+    if (global->name) {
+        /* '#'-prefixed names are compiler immediates; they all share
+         * a single interned "IMMEDIATE" name entry
+         */
+        if (global->name[0] == '#') {
+            if (!self->str_immediate)
+                self->str_immediate = code_genstring("IMMEDIATE");
+            def.name = global->code.name = self->str_immediate;
+        }
+        else
+            def.name = global->code.name = code_genstring(global->name);
+    }
+    else
+        def.name = 0;
+
+    switch (global->vtype)
+    {
+    case TYPE_VOID:
+        /* NOTE(review): global->name can apparently be NULL (guarded
+         * above) - strcmp on NULL is undefined; confirm void globals
+         * are always named.
+         */
+        if (!strcmp(global->name, "end_sys_globals")) {
+            /* TODO: remember this point... all the defs before this one
+             * should be checksummed and added to progdefs.h when we generate it.
+             */
+        }
+        else if (!strcmp(global->name, "end_sys_fields")) {
+            /* TODO: same as above but for entity-fields rather than globals
+             */
+        }
+        else
+            irwarning(global->context, WARN_VOID_VARIABLES, "unrecognized variable of type void `%s`",
+                      global->name);
+        /* I'd argue setting it to 0 is sufficient, but maybe some depend on knowing how far
+         * the system fields actually go? Though the engine knows this anyway...
+         * Maybe this could be an -foption
+         * fteqcc creates data for end_sys_* - of size 1, so let's do the same
+         */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        vec_push(code_globals, 0);
+        /* Add the def */
+        vec_push(code_defs, def);
+        return true;
+    case TYPE_POINTER:
+        vec_push(code_defs, def);
+        return gen_global_pointer(global);
+    case TYPE_FIELD:
+        vec_push(code_defs, def);
+        return gen_global_field(global);
+    case TYPE_ENTITY:
+        /* fall through */
+    case TYPE_FLOAT:
+    {
+        /* single-slot value: constants are copied bitwise via iptr */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        if (global->isconst) {
+            iptr = (int32_t*)&global->constval.ivec[0];
+            vec_push(code_globals, *iptr);
+        } else {
+            vec_push(code_globals, 0);
+            if (!islocal)
+                def.type |= DEF_SAVEGLOBAL;
+        }
+        vec_push(code_defs, def);
+
+        return global->code.globaladdr >= 0;
+    }
+    case TYPE_STRING:
+    {
+        /* constant strings are interned into the string table */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        if (global->isconst) {
+            vec_push(code_globals, code_genstring(global->constval.vstring));
+        } else {
+            vec_push(code_globals, 0);
+            if (!islocal)
+                def.type |= DEF_SAVEGLOBAL;
+        }
+        vec_push(code_defs, def);
+        return global->code.globaladdr >= 0;
+    }
+    case TYPE_VECTOR:
+    {
+        size_t d;
+        /* three consecutive slots; constants copied component-wise */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        if (global->isconst) {
+            iptr = (int32_t*)&global->constval.ivec[0];
+            vec_push(code_globals, iptr[0]);
+            if (global->code.globaladdr < 0)
+                return false;
+            for (d = 1; d < type_sizeof[global->vtype]; ++d)
+            {
+                vec_push(code_globals, iptr[d]);
+            }
+        } else {
+            vec_push(code_globals, 0);
+            if (global->code.globaladdr < 0)
+                return false;
+            for (d = 1; d < type_sizeof[global->vtype]; ++d)
+            {
+                vec_push(code_globals, 0);
+            }
+            if (!islocal)
+                def.type |= DEF_SAVEGLOBAL;
+        }
+
+        vec_push(code_defs, def);
+        return global->code.globaladdr >= 0;
+    }
+    case TYPE_FUNCTION:
+        /* the slot holds the function's index into code_functions */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        if (!global->isconst) {
+            vec_push(code_globals, 0);
+            if (global->code.globaladdr < 0)
+                return false;
+        } else {
+            vec_push(code_globals, vec_size(code_functions));
+            if (!gen_global_function(self, global))
+                return false;
+            if (!islocal)
+                def.type |= DEF_SAVEGLOBAL;
+        }
+        vec_push(code_defs, def);
+        return true;
+    case TYPE_VARIANT:
+        /* assume biggest type */
+        ir_value_code_setaddr(global, vec_size(code_globals));
+        vec_push(code_globals, 0);
+        for (i = 1; i < type_sizeof[TYPE_VARIANT]; ++i)
+            vec_push(code_globals, 0);
+        /* NOTE(review): no def is pushed for variants, unlike every
+         * other case - confirm this is intentional.
+         */
+        return true;
+    default:
+        /* refuse to create 'void' type or any other fancy business. */
+        irerror(global->context, "Invalid type for global variable `%s`: %s",
+                global->name, type_name[global->vtype]);
+        return false;
+    }
+}
+
+/* Generate the def, field entry, and global-space storage for an
+ * entity field.
+ *
+ * A global is created alongside the field whose value is the field's
+ * allocated entity offset (three consecutive offsets for vectors).
+ * Under the GMQCC standard the global is named with a '.' prefix;
+ * the field name then reuses the same string-table entry offset by
+ * one character.
+ */
+static bool ir_builder_gen_field(ir_builder *self, ir_value *field)
+{
+    prog_section_def def;
+    prog_section_field fld;
+
+    def.type = field->vtype;
+    def.offset = vec_size(code_globals);
+
+    /* create a global named the same as the field */
+    if (opts_standard == COMPILER_GMQCC) {
+        /* in our standard, the global gets a dot prefix */
+        size_t len = strlen(field->name);
+        char name[1024];
+
+        /* we really don't want to have to allocate this, and 1024
+         * bytes is more than enough for a variable/field name
+         */
+        if (len+2 >= sizeof(name)) {
+            irerror(field->context, "invalid field name size: %u", (unsigned int)len);
+            return false;
+        }
+
+        name[0] = '.';
+        memcpy(name+1, field->name, len); /* no strncpy - we used strlen above */
+        name[len+1] = 0;
+
+        def.name = code_genstring(name);
+        fld.name = def.name + 1; /* we reuse that string table entry */
+    } else {
+        /* in plain QC, there cannot be a global with the same name,
+         * and so we also name the global the same.
+         * FIXME: fteqcc should create a global as well
+         * check if it actually uses the same name. Probably does
+         */
+        def.name = code_genstring(field->name);
+        fld.name = def.name;
+    }
+
+    field->code.name = def.name;
+
+    vec_push(code_defs, def);
+
+    fld.type = field->fieldtype;
+
+    if (fld.type == TYPE_VOID) {
+        irerror(field->context, "field is missing a type: %s - don't know its size", field->name);
+        return false;
+    }
+
+    /* allocate the entity-field offset(s) */
+    fld.offset = code_alloc_field(type_sizeof[field->fieldtype]);
+
+    vec_push(code_fields, fld);
+
+    /* the global's value is the field offset (x/y/z for vectors) */
+    ir_value_code_setaddr(field, vec_size(code_globals));
+    vec_push(code_globals, fld.offset);
+    if (fld.type == TYPE_VECTOR) {
+        vec_push(code_globals, fld.offset+1);
+        vec_push(code_globals, fld.offset+2);
+    }
+
+    return field->code.globaladdr >= 0;
+}
+
+bool ir_builder_generate(ir_builder *self, const char *filename)
+{
+ prog_section_statement stmt;
+ size_t i;
+
+ code_init();
+
+ for (i = 0; i < vec_size(self->globals); ++i)
+ {
+ if (!ir_builder_gen_global(self, self->globals[i], false)) {
+ return false;
+ }
+ }
+
+ for (i = 0; i < vec_size(self->fields); ++i)
+ {
+ if (!ir_builder_gen_field(self, self->fields[i])) {
+ return false;
+ }
+ }
+
+ /* generate function code */
+ for (i = 0; i < vec_size(self->globals); ++i)
+ {
+ if (self->globals[i]->vtype == TYPE_FUNCTION) {
+ if (!gen_global_function_code(self, self->globals[i])) {
+ return false;
+ }
+ }
+ }
+
+ /* DP errors if the last instruction is not an INSTR_DONE
+ * and for debugging purposes we add an additional AINSTR_END
+ * to the end of functions, so here it goes:
+ */
+ stmt.opcode = INSTR_DONE;
+ stmt.o1.u1 = 0;
+ stmt.o2.u1 = 0;
+ stmt.o3.u1 = 0;
+ vec_push(code_statements, stmt);
+
+ printf("writing '%s'...\n", filename);
+ return code_write(filename);