1 #include <stdlib.h>
2 #include <string.h>
3
4 #include "gmqcc.h"
5 #include "ir.h"
6
7 /***********************************************************************
8  * Type sizes used at multiple points in the IR codegen
9  */
10
11 const char *type_name[TYPE_COUNT] = {
12     "void",
13     "string",
14     "float",
15     "vector",
16     "entity",
17     "field",
18     "function",
19     "pointer",
20     "integer",
21     "variant",
22     "struct",
23     "union",
24     "array",
25
26     "nil",
27     "<no-expression>"
28 };
29
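/* Sizes are in VM globals (words): a vector occupies three consecutive
 * globals, and a variant is sized like a vector so it can hold one;
 * aggregate types (struct/union/array) are presumably sized elsewhere,
 * hence 0 here.
 */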
30 static size_t type_sizeof_[TYPE_COUNT] = {
31     1, /* TYPE_VOID     */
32     1, /* TYPE_STRING   */
33     1, /* TYPE_FLOAT    */
34     3, /* TYPE_VECTOR   */
35     1, /* TYPE_ENTITY   */
36     1, /* TYPE_FIELD    */
37     1, /* TYPE_FUNCTION */
38     1, /* TYPE_POINTER  */
39     1, /* TYPE_INTEGER  */
40     3, /* TYPE_VARIANT  */
41     0, /* TYPE_STRUCT   */
42     0, /* TYPE_UNION    */
43     0, /* TYPE_ARRAY    */
44     0, /* TYPE_NIL      */
45     0, /* TYPE_NOEXPR   */
46 };
47
48 const uint16_t type_store_instr[TYPE_COUNT] = {
49     INSTR_STORE_F, /* should use I when having integer support */
50     INSTR_STORE_S,
51     INSTR_STORE_F,
52     INSTR_STORE_V,
53     INSTR_STORE_ENT,
54     INSTR_STORE_FLD,
55     INSTR_STORE_FNC,
56     INSTR_STORE_ENT, /* should use I */
57 #if 0
58     INSTR_STORE_I, /* integer type */
59 #else
60     INSTR_STORE_F,
61 #endif
62
63     INSTR_STORE_V, /* variant, should never be accessed */
64
65     VINSTR_END, /* struct */
66     VINSTR_END, /* union  */
67     VINSTR_END, /* array  */
68     VINSTR_END, /* nil    */
69     VINSTR_END, /* noexpr */
70 };
71
72 const uint16_t field_store_instr[TYPE_COUNT] = {
73     INSTR_STORE_FLD,
74     INSTR_STORE_FLD,
75     INSTR_STORE_FLD,
76     INSTR_STORE_V,
77     INSTR_STORE_FLD,
78     INSTR_STORE_FLD,
79     INSTR_STORE_FLD,
80     INSTR_STORE_FLD,
81 #if 0
82     INSTR_STORE_FLD, /* integer type */
83 #else
84     INSTR_STORE_FLD,
85 #endif
86
87     INSTR_STORE_V, /* variant, should never be accessed */
88
89     VINSTR_END, /* struct */
90     VINSTR_END, /* union  */
91     VINSTR_END, /* array  */
92     VINSTR_END, /* nil    */
93     VINSTR_END, /* noexpr */
94 };
95
96 const uint16_t type_storep_instr[TYPE_COUNT] = {
97     INSTR_STOREP_F, /* should use I when having integer support */
98     INSTR_STOREP_S,
99     INSTR_STOREP_F,
100     INSTR_STOREP_V,
101     INSTR_STOREP_ENT,
102     INSTR_STOREP_FLD,
103     INSTR_STOREP_FNC,
104     INSTR_STOREP_ENT, /* should use I */
105 #if 0
106     INSTR_STOREP_ENT, /* integer type */
107 #else
108     INSTR_STOREP_F,
109 #endif
110
111     INSTR_STOREP_V, /* variant, should never be accessed */
112
113     VINSTR_END, /* struct */
114     VINSTR_END, /* union  */
115     VINSTR_END, /* array  */
116     VINSTR_END, /* nil    */
117     VINSTR_END, /* noexpr */
118 };
119
120 const uint16_t type_eq_instr[TYPE_COUNT] = {
121     INSTR_EQ_F, /* should use I when having integer support */
122     INSTR_EQ_S,
123     INSTR_EQ_F,
124     INSTR_EQ_V,
125     INSTR_EQ_E,
126     INSTR_EQ_E, /* FLD has no comparison */
127     INSTR_EQ_FNC,
128     INSTR_EQ_E, /* should use I */
129 #if 0
130     INSTR_EQ_I,
131 #else
132     INSTR_EQ_F,
133 #endif
134
135     INSTR_EQ_V, /* variant, should never be accessed */
136
137     VINSTR_END, /* struct */
138     VINSTR_END, /* union  */
139     VINSTR_END, /* array  */
140     VINSTR_END, /* nil    */
141     VINSTR_END, /* noexpr */
142 };
143
144 const uint16_t type_ne_instr[TYPE_COUNT] = {
145     INSTR_NE_F, /* should use I when having integer support */
146     INSTR_NE_S,
147     INSTR_NE_F,
148     INSTR_NE_V,
149     INSTR_NE_E,
150     INSTR_NE_E, /* FLD has no comparison */
151     INSTR_NE_FNC,
152     INSTR_NE_E, /* should use I */
153 #if 0
154     INSTR_NE_I,
155 #else
156     INSTR_NE_F,
157 #endif
158
159     INSTR_NE_V, /* variant, should never be accessed */
160
161     VINSTR_END, /* struct */
162     VINSTR_END, /* union  */
163     VINSTR_END, /* array  */
164     VINSTR_END, /* nil    */
165     VINSTR_END, /* noexpr */
166 };
167
168 const uint16_t type_not_instr[TYPE_COUNT] = {
169     INSTR_NOT_F, /* should use I when having integer support */
170     VINSTR_END,  /* not to be used, depends on string related -f flags */
171     INSTR_NOT_F,
172     INSTR_NOT_V,
173     INSTR_NOT_ENT,
174     INSTR_NOT_ENT,
175     INSTR_NOT_FNC,
176     INSTR_NOT_ENT, /* should use I */
177 #if 0
178     INSTR_NOT_I, /* integer type */
179 #else
180     INSTR_NOT_F,
181 #endif
182
183     INSTR_NOT_V, /* variant, should never be accessed */
184
185     VINSTR_END, /* struct */
186     VINSTR_END, /* union  */
187     VINSTR_END, /* array  */
188     VINSTR_END, /* nil    */
189     VINSTR_END, /* noexpr */
190 };
191
192 /* protos */
193 static void            ir_value_dump(ir_value*, int (*oprintf)(const char*,...));
194
195 static ir_value*       ir_gen_extparam_proto(ir_builder *ir);
196 static void            ir_gen_extparam      (ir_builder *ir);
197
198 static void            ir_function_dump(ir_function*, char *ind, int (*oprintf)(const char*,...));
199
200 static ir_value*       ir_block_create_general_instr(ir_block *self, lex_ctx_t, const char *label,
201                                                      int op, ir_value *a, ir_value *b, qc_type outype);
202 static bool GMQCC_WARN ir_block_create_store(ir_block*, lex_ctx_t, ir_value *target, ir_value *what);
203 static void            ir_block_dump(ir_block*, char *ind, int (*oprintf)(const char*,...));
204
205 static bool            ir_instr_op(ir_instr*, int op, ir_value *value, bool writing);
206 static void            ir_instr_delete(ir_instr*);
207 static void            ir_instr_dump(ir_instr* in, char *ind, int (*oprintf)(const char*,...));
208 /* error functions */
209
210 static void irerror(lex_ctx_t ctx, const char *msg, ...)
211 {
212     va_list ap;
213     va_start(ap, msg);
214     con_cvprintmsg(ctx, LVL_ERROR, "internal error", msg, ap);
215     va_end(ap);
216 }
217
218 static bool GMQCC_WARN irwarning(lex_ctx_t ctx, int warntype, const char *fmt, ...)
219 {
220     bool    r;
221     va_list ap;
222     va_start(ap, fmt);
223     r = vcompile_warning(ctx, warntype, fmt, ap);
224     va_end(ap);
225     return r;
226 }
227
228 /***********************************************************************
229  * Vector utility functions
230  */
231
232 static bool GMQCC_WARN vec_ir_value_find(std::vector<ir_value *> &vec, const ir_value *what, size_t *idx)
233 {
234     for (auto &it : vec) {
235         if (it != what)
236             continue;
237         if (idx)
238             *idx = &it - &vec[0];
239         return true;
240     }
241     return false;
242 }
243
244 static bool GMQCC_WARN vec_ir_block_find(ir_block **vec, ir_block *what, size_t *idx)
245 {
246     size_t i;
247     size_t len = vec_size(vec);
248     for (i = 0; i < len; ++i) {
249         if (vec[i] == what) {
250             if (idx) *idx = i;
251             return true;
252         }
253     }
254     return false;
255 }
256
257 static bool GMQCC_WARN vec_ir_instr_find(std::vector<ir_instr *> &vec, ir_instr *what, size_t *idx)
258 {
259     for (auto &it : vec) {
260         if (it != what)
261             continue;
262         if (idx)
263             *idx = &it - &vec[0];
264         return true;
265     }
266     return false;
267 }
268
269 /***********************************************************************
270  * IR Builder
271  */
272
273 static void ir_block_delete_quick(ir_block* self);
274 static void ir_instr_delete_quick(ir_instr *self);
275 static void ir_function_delete_quick(ir_function *self);
276
277 void* ir_builder::operator new(std::size_t bytes)
278 {
279     return mem_a(bytes);
280 }
281
282 void ir_builder::operator delete(void *ptr)
283 {
284     mem_d(ptr);
285 }
286
287 ir_builder::ir_builder(const std::string& modulename)
288 : name(modulename)
289 {
290     htglobals   = util_htnew(IR_HT_SIZE);
291     htfields    = util_htnew(IR_HT_SIZE);
292     htfunctions = util_htnew(IR_HT_SIZE);
293
294     nil = new ir_value("nil", store_value, TYPE_NIL);
295     nil->cvq = CV_CONST;
296
297     for (size_t i = 0; i != IR_MAX_VINSTR_TEMPS; ++i) {
298         /* we write to them, but they're not supposed to be used outside the IR, so
299          * let's not allow the generation of ir_instrs which use these.
300          * So it's a constant noexpr.
301          */
302         vinstr_temp[i] = new ir_value("vinstr_temp", store_value, TYPE_NOEXPR);
303         vinstr_temp[i]->cvq = CV_CONST;
304     }
305
306     code = code_init();
307 }
308
309 ir_builder::~ir_builder()
310 {
311     util_htdel(htglobals);
312     util_htdel(htfields);
313     util_htdel(htfunctions);
314     for (auto& f : functions)
315         ir_function_delete_quick(f.release());
316     functions.clear(); // delete them now before deleting the rest:
317
318     delete nil;
319
320     for (size_t i = 0; i != IR_MAX_VINSTR_TEMPS; ++i) {
321         delete vinstr_temp[i];
322     }
323
324     code_cleanup(code);
325 }
326
327 static ir_function* ir_builder_get_function(ir_builder *self, const char *name)
328 {
329     return (ir_function*)util_htget(self->htfunctions, name);
330 }
331
332 ir_function* ir_builder_create_function(ir_builder *self, const std::string& name, qc_type outtype)
333 {
334     ir_function *fn = ir_builder_get_function(self, name.c_str());
335     if (fn) {
336         return nullptr;
337     }
338
339     fn = new ir_function(self, outtype);
340     fn->name = name;
341     self->functions.emplace_back(fn);
342     util_htset(self->htfunctions, name.c_str(), fn);
343
344     fn->value = ir_builder_create_global(self, fn->name, TYPE_FUNCTION);
345     if (!fn->value) {
346         delete fn;
347         return nullptr;
348     }
349
350     fn->value->hasvalue = true;
351     fn->value->outtype = outtype;
352     fn->value->constval.vfunc = fn;
353     fn->value->context = fn->context;
354
355     return fn;
356 }
357
358 static ir_value* ir_builder_get_global(ir_builder *self, const char *name)
359 {
360     return (ir_value*)util_htget(self->htglobals, name);
361 }
362
363 ir_value* ir_builder_create_global(ir_builder *self, const std::string& name, qc_type vtype)
364 {
365     ir_value *ve;
366
367     if (name[0] != '#')
368     {
369         ve = ir_builder_get_global(self, name.c_str());
370         if (ve) {
371             return nullptr;
372         }
373     }
374
375     ve = new ir_value(std::string(name), store_global, vtype);
376     self->globals.emplace_back(ve);
377     util_htset(self->htglobals, name.c_str(), ve);
378     return ve;
379 }
380
381 ir_value* ir_builder_get_va_count(ir_builder *self)
382 {
383     if (self->reserved_va_count)
384         return self->reserved_va_count;
385     return (self->reserved_va_count = ir_builder_create_global(self, "reserved:va_count", TYPE_FLOAT));
386 }
387
388 static ir_value* ir_builder_get_field(ir_builder *self, const char *name)
389 {
390     return (ir_value*)util_htget(self->htfields, name);
391 }
392
393
394 ir_value* ir_builder_create_field(ir_builder *self, const std::string& name, qc_type vtype)
395 {
396     ir_value *ve = ir_builder_get_field(self, name.c_str());
397     if (ve) {
398         return nullptr;
399     }
400
401     ve = new ir_value(std::string(name), store_global, TYPE_FIELD);
402     ve->fieldtype = vtype;
403     self->fields.emplace_back(ve);
404     util_htset(self->htfields, name.c_str(), ve);
405     return ve;
406 }
407
408 /***********************************************************************
409  * IR Function
410  */
411
412 static bool ir_function_naive_phi(ir_function*);
413 static void ir_function_enumerate(ir_function*);
414 static bool ir_function_calculate_liferanges(ir_function*);
415 static bool ir_function_allocate_locals(ir_function*);
416
417 void* ir_function::operator new(std::size_t bytes)
418 {
419     return mem_a(bytes);
420 }
421
422 void ir_function::operator delete(void *ptr)
423 {
424     mem_d(ptr);
425 }
426
427 ir_function::ir_function(ir_builder* owner_, qc_type outtype_)
428 : owner(owner_),
429   name("<@unnamed>"),
430   outtype(outtype_)
431 {
433     context.file = "<@no context>";
434     context.line = 0;
436 }
437
438 ir_function::~ir_function()
439 {
440 }
441
442 static void ir_function_delete_quick(ir_function *self)
443 {
444     for (auto& b : self->blocks)
445         ir_block_delete_quick(b.release());
446     delete self;
447 }
448
449 static void ir_function_collect_value(ir_function *self, ir_value *v)
450 {
451     self->values.emplace_back(v);
452 }
453
454 ir_block* ir_function_create_block(lex_ctx_t ctx, ir_function *self, const char *label)
455 {
456     ir_block* bn = new ir_block(self, label ? std::string(label) : std::string());
457     bn->context = ctx;
458     self->blocks.emplace_back(bn);
459
460     if ((self->flags & IR_FLAG_BLOCK_COVERAGE) && self->owner->coverage_func)
461         (void)ir_block_create_call(bn, ctx, nullptr, self->owner->coverage_func, false);
462
463     return bn;
464 }
465
466 static bool instr_is_operation(uint16_t op)
467 {
468     return ( (op >= INSTR_MUL_F  && op <= INSTR_GT) ||
469              (op >= INSTR_LOAD_F && op <= INSTR_LOAD_FNC) ||
470              (op == INSTR_ADDRESS) ||
471              (op >= INSTR_NOT_F  && op <= INSTR_NOT_FNC) ||
472              (op >= INSTR_AND    && op <= INSTR_BITOR) ||
473              (op >= INSTR_CALL0  && op <= INSTR_CALL8) ||
474              (op >= VINSTR_BITAND_V && op <= VINSTR_NEG_V) );
475 }
476
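/* Peephole pass - a sketch of the two patterns handled below:
 *   OP    %tmp <- a, b          OP    dst <- a, b
 *   STORE dst  <- %tmp    =>    (STORE removed)
 * when %tmp is an SSA temp whose only read is that STORE, and
 *   NOT  %t <- v
 *   COND %t, ontrue, onfalse  =>  COND v, onfalse, ontrue
 * when %t is only read by the COND (the NOT is removed and the branch
 * targets are swapped).
 */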
477 static bool ir_function_pass_peephole(ir_function *self)
478 {
479     for (auto& bp : self->blocks) {
480         ir_block *block = bp.get();
481         for (size_t i = 0; i < vec_size(block->instr); ++i) {
482             ir_instr *inst;
483             inst = block->instr[i];
484
485             if (i >= 1 &&
486                 (inst->opcode >= INSTR_STORE_F &&
487                  inst->opcode <= INSTR_STORE_FNC))
488             {
489                 ir_instr *store;
490                 ir_instr *oper;
491                 ir_value *value;
492
493                 store = inst;
494
495                 oper  = block->instr[i-1];
496                 if (!instr_is_operation(oper->opcode))
497                     continue;
498
499                 /* Don't change semantics of MUL_VF in engines where these may not alias. */
500                 if (OPTS_FLAG(LEGACY_VECTOR_MATHS)) {
501                     if (oper->opcode == INSTR_MUL_VF && oper->_ops[2]->memberof == oper->_ops[1])
502                         continue;
503                     if (oper->opcode == INSTR_MUL_FV && oper->_ops[1]->memberof == oper->_ops[2])
504                         continue;
505                 }
506
507                 value = oper->_ops[0];
508
509                 /* only do it for SSA values */
510                 if (value->store != store_value)
511                     continue;
512
513                 /* don't optimize out the temp if it's used later again */
514                 if (value->reads.size() != 1)
515                     continue;
516
517                 /* The very next store must use this value */
518                 if (value->reads[0] != store)
519                     continue;
520
521                 /* And of course the store must _read_ from it, so it's in
522                  * OP 1 */
523                 if (store->_ops[1] != value)
524                     continue;
525
526                 ++opts_optimizationcount[OPTIM_PEEPHOLE];
527                 (void)!ir_instr_op(oper, 0, store->_ops[0], true);
528
529                 vec_remove(block->instr, i, 1);
530                 ir_instr_delete(store);
531             }
532             else if (inst->opcode == VINSTR_COND)
533             {
534                 /* A COND on a value resulting from a NOT can drop the
535                  * NOT and swap the COND's ontrue/onfalse targets instead.
536                  */
537                 while (true) {
538                     ir_block *tmp;
539                     size_t    inotid;
540                     ir_instr *inot;
541                     ir_value *value;
542                     value = inst->_ops[0];
543
544                     if (value->store != store_value || value->reads.size() != 1 || value->reads[0] != inst)
545                         break;
546
547                     inot = value->writes[0];
548                     if (inot->_ops[0] != value ||
549                         inot->opcode < INSTR_NOT_F ||
550                         inot->opcode > INSTR_NOT_FNC ||
551                         inot->opcode == INSTR_NOT_V || /* can't do these */
552                         inot->opcode == INSTR_NOT_S)
553                     {
554                         break;
555                     }
556
557                     /* count */
558                     ++opts_optimizationcount[OPTIM_PEEPHOLE];
559                     /* change operand */
560                     (void)!ir_instr_op(inst, 0, inot->_ops[1], false);
561                     /* remove NOT */
562                     tmp = inot->owner;
563                     for (inotid = 0; inotid < vec_size(tmp->instr); ++inotid) {
564                         if (tmp->instr[inotid] == inot)
565                             break;
566                     }
567                     if (inotid >= vec_size(tmp->instr)) {
568                         compile_error(inst->context, "sanity-check failed: failed to find instruction to optimize out");
569                         return false;
570                     }
571                     vec_remove(tmp->instr, inotid, 1);
572                     ir_instr_delete(inot);
573                     /* swap ontrue/onfalse */
574                     tmp = inst->bops[0];
575                     inst->bops[0] = inst->bops[1];
576                     inst->bops[1] = tmp;
577                 }
578                 continue;
579             }
580         }
581     }
582
583     return true;
584 }
585
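/* Tail-recursion pass (sketch): a final block ending in
 *   CALL   self
 *   RETURN %r            (possibly with a STORE of the return value in between)
 * where the callee is this function itself is rewritten into stores of
 * the call's arguments into the parameter locals followed by a JUMP back
 * to the entry block.
 */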
586 static bool ir_function_pass_tailrecursion(ir_function *self)
587 {
588     size_t p;
589
590     for (auto& bp : self->blocks) {
591         ir_block *block = bp.get();
592
593         ir_value *funcval;
594         ir_instr *ret, *call, *store = nullptr;
595
596         if (!block->final || vec_size(block->instr) < 2)
597             continue;
598
599         ret = block->instr[vec_size(block->instr)-1];
600         if (ret->opcode != INSTR_DONE && ret->opcode != INSTR_RETURN)
601             continue;
602
603         call = block->instr[vec_size(block->instr)-2];
604         if (call->opcode >= INSTR_STORE_F && call->opcode <= INSTR_STORE_FNC) {
605             /* account for the unoptimized
606              * CALL
607              * STORE %return, %tmp
608              * RETURN %tmp
609              * version
610              */
611             if (vec_size(block->instr) < 3)
612                 continue;
613
614             store = call;
615             call = block->instr[vec_size(block->instr)-3];
616         }
617
618         if (call->opcode < INSTR_CALL0 || call->opcode > INSTR_CALL8)
619             continue;
620
621         if (store) {
622             /* optimize out the STORE */
623             if (ret->_ops[0]   &&
624                 ret->_ops[0]   == store->_ops[0] &&
625                 store->_ops[1] == call->_ops[0])
626             {
627                 ++opts_optimizationcount[OPTIM_PEEPHOLE];
628                 call->_ops[0] = store->_ops[0];
629                 vec_remove(block->instr, vec_size(block->instr) - 2, 1);
630                 ir_instr_delete(store);
631             }
632             else
633                 continue;
634         }
635
636         if (!call->_ops[0])
637             continue;
638
639         funcval = call->_ops[1];
640         if (!funcval)
641             continue;
642         if (funcval->vtype != TYPE_FUNCTION || funcval->constval.vfunc != self)
643             continue;
644
645         /* now we have a CALL and a RET, check if it's a tailcall */
646         if (ret->_ops[0] && call->_ops[0] != ret->_ops[0])
647             continue;
648
649         ++opts_optimizationcount[OPTIM_TAIL_RECURSION];
650         vec_shrinkby(block->instr, 2);
651
652         block->final = false; /* open it back up */
653
654         /* emit parameter-stores */
655         for (p = 0; p < call->params.size(); ++p) {
656             /* assert(call->params_count <= self->locals_count); */
657             if (!ir_block_create_store(block, call->context, self->locals[p].get(), call->params[p])) {
658                 irerror(call->context, "failed to create tailcall store instruction for parameter %i", (int)p);
659                 return false;
660             }
661         }
662         if (!ir_block_create_jump(block, call->context, self->blocks[0].get())) {
663             irerror(call->context, "failed to create tailcall jump");
664             return false;
665         }
666
667         ir_instr_delete(call);
668         ir_instr_delete(ret);
669     }
670
671     return true;
672 }
673
674 bool ir_function_finalize(ir_function *self)
675 {
676     if (self->builtin)
677         return true;
678
679     if (OPTS_OPTIMIZATION(OPTIM_PEEPHOLE)) {
680         if (!ir_function_pass_peephole(self)) {
681             irerror(self->context, "generic optimization pass broke something in `%s`", self->name.c_str());
682             return false;
683         }
684     }
685
686     if (OPTS_OPTIMIZATION(OPTIM_TAIL_RECURSION)) {
687         if (!ir_function_pass_tailrecursion(self)) {
688             irerror(self->context, "tail-recursion optimization pass broke something in `%s`", self->name.c_str());
689             return false;
690         }
691     }
692
693     if (!ir_function_naive_phi(self)) {
694         irerror(self->context, "internal error: ir_function_naive_phi failed");
695         return false;
696     }
697
698     for (auto& lp : self->locals) {
699         ir_value *v = lp.get();
700         if (v->vtype == TYPE_VECTOR ||
701             (v->vtype == TYPE_FIELD && v->outtype == TYPE_VECTOR))
702         {
703             ir_value_vector_member(v, 0);
704             ir_value_vector_member(v, 1);
705             ir_value_vector_member(v, 2);
706         }
707     }
708     for (auto& vp : self->values) {
709         ir_value *v = vp.get();
710         if (v->vtype == TYPE_VECTOR ||
711             (v->vtype == TYPE_FIELD && v->outtype == TYPE_VECTOR))
712         {
713             ir_value_vector_member(v, 0);
714             ir_value_vector_member(v, 1);
715             ir_value_vector_member(v, 2);
716         }
717     }
718
719     ir_function_enumerate(self);
720
721     if (!ir_function_calculate_liferanges(self))
722         return false;
723     if (!ir_function_allocate_locals(self))
724         return false;
725     return true;
726 }
727
728 ir_value* ir_function_create_local(ir_function *self, const std::string& name, qc_type vtype, bool param)
729 {
730     ir_value *ve;
731
732     if (param &&
733         !self->locals.empty() &&
734         self->locals.back()->store != store_param)
735     {
736         irerror(self->context, "cannot add parameters after adding locals");
737         return nullptr;
738     }
739
740     ve = new ir_value(std::string(name), (param ? store_param : store_local), vtype);
741     if (param)
742         ve->locked = true;
743     self->locals.emplace_back(ve);
744     return ve;
745 }
746
747 /***********************************************************************
748  * IR Block
749  */
750
751 void* ir_block::operator new(std::size_t bytes) {
752   return mem_a(bytes);
753 }
754
755 void ir_block::operator delete(void *data) {
756     mem_d(data);
757 }
758
759 ir_block::ir_block(ir_function* owner, const std::string& name)
760 : owner(owner),
761   label(name)
762 {
763     context.file = "<@no context>";
764     context.line = 0;
765 }
766
767 ir_block::~ir_block()
768 {
769     for (size_t i = 0; i != vec_size(instr); ++i)
770         ir_instr_delete(instr[i]);
771     vec_free(instr);
772     vec_free(entries);
773     vec_free(exits);
774 }
775
776 static void ir_block_delete_quick(ir_block* self)
777 {
778     size_t i;
779     for (i = 0; i != vec_size(self->instr); ++i)
780         ir_instr_delete_quick(self->instr[i]);
781     vec_free(self->instr);
782     delete self;
783 }
784
785 /***********************************************************************
786  * IR Instructions
787  */
788
789 static ir_instr* ir_instr_new(lex_ctx_t ctx, ir_block* owner, int op)
790 {
791     ir_instr *self = new ir_instr;
792     self->owner = owner;
793     self->context = ctx;
794     self->opcode = op;
795     self->_ops[0] = nullptr;
796     self->_ops[1] = nullptr;
797     self->_ops[2] = nullptr;
798     self->bops[0] = nullptr;
799     self->bops[1] = nullptr;
800     self->eid = 0;
801     self->likely = true;
802     return self;
803 }
804
805 static void ir_instr_delete_quick(ir_instr *self)
806 {
807     delete self;
808 }
809
810 static void ir_instr_delete(ir_instr *self)
811 {
812     /* The following calls can only delete from
813      * vectors; we still want to delete this instruction,
814      * so the return value is ignored. Because of the warn_unused_result
815      * attribute gcc still warns on a plain (void)foo(); cast,
816      * hence the (void)!foo(); idiom used below.
817      */
818     for (auto &it : self->phi) {
819         size_t idx;
820         if (vec_ir_instr_find(it.value->writes, self, &idx))
821             it.value->writes.erase(it.value->writes.begin() + idx);
822         if (vec_ir_instr_find(it.value->reads, self, &idx))
823             it.value->reads.erase(it.value->reads.begin() + idx);
824     }
825     for (auto &it : self->params) {
826         size_t idx;
827         if (vec_ir_instr_find(it->writes, self, &idx))
828             it->writes.erase(it->writes.begin() + idx);
829         if (vec_ir_instr_find(it->reads, self, &idx))
830             it->reads.erase(it->reads.begin() + idx);
831     }
832     (void)!ir_instr_op(self, 0, nullptr, false);
833     (void)!ir_instr_op(self, 1, nullptr, false);
834     (void)!ir_instr_op(self, 2, nullptr, false);
835     mem_d(self);
836 }
837
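/* Set operand `op` of an instruction to `v`, keeping the value's
 * reads/writes lists in sync; passing nullptr simply detaches the
 * current operand.
 */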
838 static bool ir_instr_op(ir_instr *self, int op, ir_value *v, bool writing)
839 {
840     if (v && v->vtype == TYPE_NOEXPR) {
841         irerror(self->context, "tried to use a NOEXPR value");
842         return false;
843     }
844
845     if (self->_ops[op]) {
846         size_t idx;
847         if (writing && vec_ir_instr_find(self->_ops[op]->writes, self, &idx))
848             self->_ops[op]->writes.erase(self->_ops[op]->writes.begin() + idx);
849         else if (vec_ir_instr_find(self->_ops[op]->reads, self, &idx))
850             self->_ops[op]->reads.erase(self->_ops[op]->reads.begin() + idx);
851     }
852     if (v) {
853         if (writing)
854             v->writes.push_back(self);
855         else
856             v->reads.push_back(self);
857     }
858     self->_ops[op] = v;
859     return true;
860 }
861
862 /***********************************************************************
863  * IR Value
864  */
865
866 static void ir_value_code_setaddr(ir_value *self, int32_t gaddr)
867 {
868     self->code.globaladdr = gaddr;
869     if (self->members[0]) self->members[0]->code.globaladdr = gaddr;
870     if (self->members[1]) self->members[1]->code.globaladdr = gaddr;
871     if (self->members[2]) self->members[2]->code.globaladdr = gaddr;
872 }
873
874 static int32_t ir_value_code_addr(const ir_value *self)
875 {
876     if (self->store == store_return)
877         return OFS_RETURN + self->code.addroffset;
878     return self->code.globaladdr + self->code.addroffset;
879 }
880
881 void* ir_value::operator new(std::size_t bytes) {
882   return mem_a(bytes);
883 }
884
885 void ir_value::operator delete(void *data) {
886     mem_d(data);
887 }
888
889 ir_value::ir_value(std::string&& name_, store_type store_, qc_type vtype_)
890 : name(move(name_)),
891   vtype(vtype_),
892   store(store_)
893 {
894     fieldtype = TYPE_VOID;
895     outtype = TYPE_VOID;
896     flags = 0;
897
898     cvq          = CV_NONE;
899     hasvalue     = false;
900     context.file = "<@no context>";
901     context.line = 0;
902
903     memset(&constval, 0, sizeof(constval));
904     memset(&code,     0, sizeof(code));
905
906     members[0] = nullptr;
907     members[1] = nullptr;
908     members[2] = nullptr;
909     memberof = nullptr;
910
911     unique_life = false;
912     locked = false;
913     callparam  = false;
914 }
915
916 ir_value::~ir_value()
917 {
918     size_t i;
919     if (hasvalue) {
920         if (vtype == TYPE_STRING)
921             mem_d((void*)constval.vstring);
922     }
923     if (!(flags & IR_FLAG_SPLIT_VECTOR)) {
924         for (i = 0; i < 3; ++i) {
925             if (members[i])
926                 delete members[i];
927         }
928     }
929 }
930
931
932 /* helper function: create a float immediate (a constant TYPE_FLOAT global) */
933 static ir_value* ir_builder_imm_float(ir_builder *self, float value, bool add_to_list) {
934     ir_value *v = new ir_value("#IMMEDIATE", store_global, TYPE_FLOAT);
935     v->flags |= IR_FLAG_ERASABLE;
936     v->hasvalue = true;
937     v->cvq = CV_CONST;
938     v->constval.vfloat = value;
939
940     self->globals.emplace_back(v);
941     if (add_to_list)
942         self->const_floats.emplace_back(v);
943     return v;
944 }
945
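/* Return (creating it on demand) the member view for component 0..2 of a
 * vector value or vector field. The member normally shares the parent's
 * global address (see ir_value_code_setaddr above), with code.addroffset
 * selecting the component.
 */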
946 ir_value* ir_value_vector_member(ir_value *self, unsigned int member)
947 {
948     std::string name;
949     ir_value *m;
950     if (member >= 3)
951         return nullptr;
952
953     if (self->members[member])
954         return self->members[member];
955
956     if (!self->name.empty()) {
957         char member_name[3] = { '_', char('x' + member), 0 };
958         name = self->name + member_name;
959     }
960
961     if (self->vtype == TYPE_VECTOR)
962     {
963         m = new ir_value(move(name), self->store, TYPE_FLOAT);
964         if (!m)
965             return nullptr;
966         m->context = self->context;
967
968         self->members[member] = m;
969         m->code.addroffset = member;
970     }
971     else if (self->vtype == TYPE_FIELD)
972     {
973         if (self->fieldtype != TYPE_VECTOR)
974             return nullptr;
975         m = new ir_value(move(name), self->store, TYPE_FIELD);
976         if (!m)
977             return nullptr;
978         m->fieldtype = TYPE_FLOAT;
979         m->context = self->context;
980
981         self->members[member] = m;
982         m->code.addroffset = member;
983     }
984     else
985     {
986         irerror(self->context, "invalid member access on %s", self->name.c_str());
987         return nullptr;
988     }
989
990     m->memberof = self;
991     return m;
992 }
993
994 static GMQCC_INLINE size_t ir_value_sizeof(const ir_value *self)
995 {
996     if (self->vtype == TYPE_FIELD && self->fieldtype == TYPE_VECTOR)
997         return type_sizeof_[TYPE_VECTOR];
998     return type_sizeof_[self->vtype];
999 }
1000
1001 static ir_value* ir_value_out(ir_function *owner, const char *name, store_type storetype, qc_type vtype)
1002 {
1003     ir_value *v = new ir_value(name ? std::string(name) : std::string(), storetype, vtype);
1004     if (!v)
1005         return nullptr;
1006     ir_function_collect_value(owner, v);
1007     return v;
1008 }
1009
1010 bool ir_value_set_float(ir_value *self, float f)
1011 {
1012     if (self->vtype != TYPE_FLOAT)
1013         return false;
1014     self->constval.vfloat = f;
1015     self->hasvalue = true;
1016     return true;
1017 }
1018
1019 bool ir_value_set_func(ir_value *self, int f)
1020 {
1021     if (self->vtype != TYPE_FUNCTION)
1022         return false;
1023     self->constval.vint = f;
1024     self->hasvalue = true;
1025     return true;
1026 }
1027
1028 bool ir_value_set_vector(ir_value *self, vec3_t v)
1029 {
1030     if (self->vtype != TYPE_VECTOR)
1031         return false;
1032     self->constval.vvec = v;
1033     self->hasvalue = true;
1034     return true;
1035 }
1036
1037 bool ir_value_set_field(ir_value *self, ir_value *fld)
1038 {
1039     if (self->vtype != TYPE_FIELD)
1040         return false;
1041     self->constval.vpointer = fld;
1042     self->hasvalue = true;
1043     return true;
1044 }
1045
1046 bool ir_value_set_string(ir_value *self, const char *str)
1047 {
1048     if (self->vtype != TYPE_STRING)
1049         return false;
1050     self->constval.vstring = util_strdupe(str);
1051     self->hasvalue = true;
1052     return true;
1053 }
1054
1055 #if 0
1056 bool ir_value_set_int(ir_value *self, int i)
1057 {
1058     if (self->vtype != TYPE_INTEGER)
1059         return false;
1060     self->constval.vint = i;
1061     self->hasvalue = true;
1062     return true;
1063 }
1064 #endif
1065
1066 bool ir_value_lives(ir_value *self, size_t at)
1067 {
1068     for (auto& l : self->life) {
1069         if (l.start <= at && at <= l.end)
1070             return true;
1071         if (l.start > at) /* since it's ordered */
1072             return false;
1073     }
1074     return false;
1075 }
1076
1077 static bool ir_value_life_insert(ir_value *self, size_t idx, ir_life_entry_t e)
1078 {
1079     self->life.insert(self->life.begin() + idx, e);
1080     return true;
1081 }
1082
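/* Merge a single instruction id `s` into the ordered life-range list:
 * extend an adjacent range, fuse the two ranges that `s` bridges, insert
 * a new single-point range, or return false if `s` is already covered.
 */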
1083 static bool ir_value_life_merge(ir_value *self, size_t s)
1084 {
1085     size_t i;
1086     const size_t vs = self->life.size();
1087     ir_life_entry_t *life_found = nullptr;
1088     ir_life_entry_t *before = nullptr;
1089     ir_life_entry_t new_entry;
1090
1091     /* Find the first range that starts after s */
1092     for (i = 0; i < vs; ++i)
1093     {
1094         before = life_found;
1095         life_found = &self->life[i];
1096         if (life_found->start > s)
1097             break;
1098     }
1099     /* nothing found? append */
1100     if (i == vs) {
1101         ir_life_entry_t e;
1102         if (life_found && life_found->end+1 == s)
1103         {
1104             /* previous life range can be merged in */
1105             life_found->end++;
1106             return true;
1107         }
1108         if (life_found && life_found->end >= s)
1109             return false;
1110         e.start = e.end = s;
1111         self->life.emplace_back(e);
1112         return true;
1113     }
1114     /* found */
1115     if (before)
1116     {
1117         if (before->end + 1 == s &&
1118             life_found->start - 1 == s)
1119         {
1120             /* merge */
1121             before->end = life_found->end;
1122             self->life.erase(self->life.begin() + i);
1123             return true;
1124         }
1125         if (before->end + 1 == s)
1126         {
1127             /* extend before */
1128             before->end++;
1129             return true;
1130         }
1131         /* already contained */
1132         if (before->end >= s)
1133             return false;
1134     }
1135     /* extend */
1136     if (life_found->start - 1 == s)
1137     {
1138         life_found->start--;
1139         return true;
1140     }
1141     /* insert a new entry */
1142     new_entry.start = new_entry.end = s;
1143     return ir_value_life_insert(self, i, new_entry);
1144 }
1145
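/* Merge all life ranges of `other` into `self`, i.e. take the union of
 * the two ordered interval lists while keeping the result ordered and
 * non-overlapping.
 */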
1146 static bool ir_value_life_merge_into(ir_value *self, const ir_value *other)
1147 {
1148     size_t i, myi;
1149
1150     if (other->life.empty())
1151         return true;
1152
1153     if (self->life.empty()) {
1154         self->life = other->life;
1155         return true;
1156     }
1157
1158     myi = 0;
1159     for (i = 0; i < other->life.size(); ++i)
1160     {
1161         const ir_life_entry_t &otherlife = other->life[i];
1162         while (true)
1163         {
1164             ir_life_entry_t *entry = &self->life[myi];
1165
1166             if (otherlife.end+1 < entry->start)
1167             {
1168                 /* adding an interval before entry */
1169                 if (!ir_value_life_insert(self, myi, otherlife))
1170                     return false;
1171                 ++myi;
1172                 break;
1173             }
1174
1175             if (otherlife.start <  entry->start &&
1176                 otherlife.end+1 >= entry->start)
1177             {
1178                 /* starts earlier and overlaps */
1179                 entry->start = otherlife.start;
1180             }
1181
1182             if (otherlife.end   >  entry->end &&
1183                 otherlife.start <= entry->end+1)
1184             {
1185                 /* ends later and overlaps */
1186                 entry->end = otherlife.end;
1187             }
1188
1189             /* see if our change combines it with the next ranges */
1190             while (myi+1 < self->life.size() &&
1191                    entry->end+1 >= self->life[1+myi].start)
1192             {
1193                 /* overlaps with (myi+1) */
1194                 if (entry->end < self->life[1+myi].end)
1195                     entry->end = self->life[1+myi].end;
1196                 self->life.erase(self->life.begin() + (myi + 1));
1197                 entry = &self->life[myi];
1198             }
1199
1200             /* see if we're after the entry */
1201             if (otherlife.start > entry->end)
1202             {
1203                 ++myi;
1204                 /* append if we're at the end */
1205                 if (myi >= self->life.size()) {
1206                     self->life.emplace_back(otherlife);
1207                     break;
1208                 }
1209                 /* otherwise check the next range */
1210                 continue;
1211             }
1212             break;
1213         }
1214     }
1215     return true;
1216 }
1217
1218 static bool ir_values_overlap(const ir_value *a, const ir_value *b)
1219 {
1220     /* For any life entry in A see if it overlaps with
1221      * any life entry in B.
1222      * Note that the life entries are ordered, so we can use a
1223      * more efficient algorithm than a naive translation of the
1224      * statement above.
1225      */
1226
1227     const ir_life_entry_t *la, *lb, *enda, *endb;
1228
1229     /* first of all, if either has no life range, they cannot clash */
1230     if (a->life.empty() || b->life.empty())
1231         return false;
1232
1233     la = &a->life.front();
1234     lb = &b->life.front();
1235     enda = &a->life.back() + 1;
1236     endb = &b->life.back() + 1;
1237     while (true)
1238     {
1239         /* check if the entries overlap, for that,
1240          * both must start before the other one ends.
1241          */
1242         if (la->start < lb->end &&
1243             lb->start < la->end)
1244         {
1245             return true;
1246         }
1247
1248         /* entries are ordered
1249          * one entry is earlier than the other
1250          * that earlier entry will be moved forward
1251          */
1252         if (la->start < lb->start)
1253         {
1254             /* order: A B, move A forward
1255              * check if we hit the end with A
1256              */
1257             if (++la == enda)
1258                 break;
1259         }
1260         else /* if (lb->start < la->start)  actually <= */
1261         {
1262             /* order: B A, move B forward
1263              * check if we hit the end with B
1264              */
1265             if (++lb == endb)
1266                 break;
1267         }
1268     }
1269     return false;
1270 }
1271
1272 /***********************************************************************
1273  * IR main operations
1274  */
1275
1276 static bool ir_check_unreachable(ir_block *self)
1277 {
1278     /* The IR should never have to deal with unreachable code */
1279     if (!self->final/* || OPTS_FLAG(ALLOW_UNREACHABLE_CODE)*/)
1280         return true;
1281     irerror(self->context, "unreachable statement (%s)", self->label.c_str());
1282     return false;
1283 }
1284
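/* Emit a store instruction. Plain stores must not target an SSA value
 * (store_value); only the STOREP_* family may, since there the SSA value
 * holds the pointer being written through rather than the destination
 * itself.
 */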
1285 bool ir_block_create_store_op(ir_block *self, lex_ctx_t ctx, int op, ir_value *target, ir_value *what)
1286 {
1287     ir_instr *in;
1288     if (!ir_check_unreachable(self))
1289         return false;
1290
1291     if (target->store == store_value &&
1292         (op < INSTR_STOREP_F || op > INSTR_STOREP_FNC))
1293     {
1294         irerror(self->context, "cannot store to an SSA value");
1295         irerror(self->context, "trying to store: %s <- %s", target->name.c_str(), what->name.c_str());
1296         irerror(self->context, "instruction: %s", util_instr_str[op]);
1297         return false;
1298     }
1299
1300     in = ir_instr_new(ctx, self, op);
1301     if (!in)
1302         return false;
1303
1304     if (!ir_instr_op(in, 0, target, (op < INSTR_STOREP_F || op > INSTR_STOREP_FNC)) ||
1305         !ir_instr_op(in, 1, what, false))
1306     {
1307         ir_instr_delete(in);
1308         return false;
1309     }
1310     vec_push(self->instr, in);
1311     return true;
1312 }
1313
1314 bool ir_block_create_state_op(ir_block *self, lex_ctx_t ctx, ir_value *frame, ir_value *think)
1315 {
1316     ir_instr *in;
1317     if (!ir_check_unreachable(self))
1318         return false;
1319
1320     in = ir_instr_new(ctx, self, INSTR_STATE);
1321     if (!in)
1322         return false;
1323
1324     if (!ir_instr_op(in, 0, frame, false) ||
1325         !ir_instr_op(in, 1, think, false))
1326     {
1327         ir_instr_delete(in);
1328         return false;
1329     }
1330     vec_push(self->instr, in);
1331     return true;
1332 }
1333
1334 static bool ir_block_create_store(ir_block *self, lex_ctx_t ctx, ir_value *target, ir_value *what)
1335 {
1336     int op = 0;
1337     qc_type vtype;
1338     if (target->vtype == TYPE_VARIANT)
1339         vtype = what->vtype;
1340     else
1341         vtype = target->vtype;
1342
1343 #if 0
1344     if      (vtype == TYPE_FLOAT   && what->vtype == TYPE_INTEGER)
1345         op = INSTR_CONV_ITOF;
1346     else if (vtype == TYPE_INTEGER && what->vtype == TYPE_FLOAT)
1347         op = INSTR_CONV_FTOI;
1348 #endif
1349         op = type_store_instr[vtype];
1350
1351     if (OPTS_FLAG(ADJUST_VECTOR_FIELDS)) {
1352         if (op == INSTR_STORE_FLD && what->fieldtype == TYPE_VECTOR)
1353             op = INSTR_STORE_V;
1354     }
1355
1356     return ir_block_create_store_op(self, ctx, op, target, what);
1357 }
1358
1359 bool ir_block_create_storep(ir_block *self, lex_ctx_t ctx, ir_value *target, ir_value *what)
1360 {
1361     int op = 0;
1362     qc_type vtype;
1363
1364     if (target->vtype != TYPE_POINTER)
1365         return false;
1366
1367     /* storing through a pointer - the target is a pointer, so the type
1368      * must be inferred from the source
1369      */
1370     vtype = what->vtype;
1371
1372     op = type_storep_instr[vtype];
1373     if (OPTS_FLAG(ADJUST_VECTOR_FIELDS)) {
1374         if (op == INSTR_STOREP_FLD && what->fieldtype == TYPE_VECTOR)
1375             op = INSTR_STOREP_V;
1376     }
1377
1378     return ir_block_create_store_op(self, ctx, op, target, what);
1379 }
1380
1381 bool ir_block_create_return(ir_block *self, lex_ctx_t ctx, ir_value *v)
1382 {
1383     ir_instr *in;
1384     if (!ir_check_unreachable(self))
1385         return false;
1386
1387     self->final = true;
1388
1389     self->is_return = true;
1390     in = ir_instr_new(ctx, self, INSTR_RETURN);
1391     if (!in)
1392         return false;
1393
1394     if (v && !ir_instr_op(in, 0, v, false)) {
1395         ir_instr_delete(in);
1396         return false;
1397     }
1398
1399     vec_push(self->instr, in);
1400     return true;
1401 }
1402
1403 bool ir_block_create_if(ir_block *self, lex_ctx_t ctx, ir_value *v,
1404                         ir_block *ontrue, ir_block *onfalse)
1405 {
1406     ir_instr *in;
1407     if (!ir_check_unreachable(self))
1408         return false;
1409     self->final = true;
1410     /*in = ir_instr_new(ctx, self, (v->vtype == TYPE_STRING ? INSTR_IF_S : INSTR_IF_F));*/
1411     in = ir_instr_new(ctx, self, VINSTR_COND);
1412     if (!in)
1413         return false;
1414
1415     if (!ir_instr_op(in, 0, v, false)) {
1416         ir_instr_delete(in);
1417         return false;
1418     }
1419
1420     in->bops[0] = ontrue;
1421     in->bops[1] = onfalse;
1422
1423     vec_push(self->instr, in);
1424
1425     vec_push(self->exits, ontrue);
1426     vec_push(self->exits, onfalse);
1427     vec_push(ontrue->entries,  self);
1428     vec_push(onfalse->entries, self);
1429     return true;
1430 }
1431
1432 bool ir_block_create_jump(ir_block *self, lex_ctx_t ctx, ir_block *to)
1433 {
1434     ir_instr *in;
1435     if (!ir_check_unreachable(self))
1436         return false;
1437     self->final = true;
1438     in = ir_instr_new(ctx, self, VINSTR_JUMP);
1439     if (!in)
1440         return false;
1441
1442     in->bops[0] = to;
1443     vec_push(self->instr, in);
1444
1445     vec_push(self->exits, to);
1446     vec_push(to->entries, self);
1447     return true;
1448 }
1449
1450 bool ir_block_create_goto(ir_block *self, lex_ctx_t ctx, ir_block *to)
1451 {
1452     self->owner->flags |= IR_FLAG_HAS_GOTO;
1453     return ir_block_create_jump(self, ctx, to);
1454 }
1455
1456 ir_instr* ir_block_create_phi(ir_block *self, lex_ctx_t ctx, const char *label, qc_type ot)
1457 {
1458     ir_value *out;
1459     ir_instr *in;
1460     if (!ir_check_unreachable(self))
1461         return nullptr;
1462     in = ir_instr_new(ctx, self, VINSTR_PHI);
1463     if (!in)
1464         return nullptr;
1465     out = ir_value_out(self->owner, label, store_value, ot);
1466     if (!out) {
1467         ir_instr_delete(in);
1468         return nullptr;
1469     }
1470     if (!ir_instr_op(in, 0, out, true)) {
1471         ir_instr_delete(in);
1472         return nullptr;
1473     }
1474     vec_push(self->instr, in);
1475     return in;
1476 }
1477
1478 ir_value* ir_phi_value(ir_instr *self)
1479 {
1480     return self->_ops[0];
1481 }
1482
1483 void ir_phi_add(ir_instr* self, ir_block *b, ir_value *v)
1484 {
1485     ir_phi_entry_t pe;
1486
1487     if (!vec_ir_block_find(self->owner->entries, b, nullptr)) {
1488         /* It must not be possible to trigger this; if it happens the AST
1489          * is doing something wrong.
1490          */
1491         irerror(self->context, "Invalid entry block for PHI");
1492         exit(EXIT_FAILURE);
1493     }
1494
1495     pe.value = v;
1496     pe.from = b;
1497     v->reads.push_back(self);
1498     self->phi.push_back(pe);
1499 }
1500
1501 /* call related code */
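/* Create a call instruction. INSTR_CALL0 here is a placeholder (the
 * virtual VINSTR_NRCALL is used for noreturn calls); the final CALLn
 * opcode is presumably chosen at code-generation time from the number of
 * parameters added via ir_call_param. A noreturn call finalizes the
 * block much like a return does.
 */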
1502 ir_instr* ir_block_create_call(ir_block *self, lex_ctx_t ctx, const char *label, ir_value *func, bool noreturn)
1503 {
1504     ir_value *out;
1505     ir_instr *in;
1506     if (!ir_check_unreachable(self))
1507         return nullptr;
1508     in = ir_instr_new(ctx, self, (noreturn ? VINSTR_NRCALL : INSTR_CALL0));
1509     if (!in)
1510         return nullptr;
1511     if (noreturn) {
1512         self->final = true;
1513         self->is_return = true;
1514     }
1515     out = ir_value_out(self->owner, label, (func->outtype == TYPE_VOID) ? store_return : store_value, func->outtype);
1516     if (!out) {
1517         ir_instr_delete(in);
1518         return nullptr;
1519     }
1520     if (!ir_instr_op(in, 0, out, true) ||
1521         !ir_instr_op(in, 1, func, false))
1522     {
1523         ir_instr_delete(in);
1524         return nullptr;
1525     }
1526     vec_push(self->instr, in);
1527     /*
1528     if (noreturn) {
1529         if (!ir_block_create_return(self, ctx, nullptr)) {
1530             compile_error(ctx, "internal error: failed to generate dummy-return instruction");
1531             ir_instr_delete(in);
1532             return nullptr;
1533         }
1534     }
1535     */
1536     return in;
1537 }
1538
1539 ir_value* ir_call_value(ir_instr *self)
1540 {
1541     return self->_ops[0];
1542 }
1543
1544 void ir_call_param(ir_instr* self, ir_value *v)
1545 {
1546     self->params.push_back(v);
1547     v->reads.push_back(self);
1548 }
1549
1550 /* binary op related code */
1551
1552 ir_value* ir_block_create_binop(ir_block *self, lex_ctx_t ctx,
1553                                 const char *label, int opcode,
1554                                 ir_value *left, ir_value *right)
1555 {
1556     qc_type ot = TYPE_VOID;
1557     switch (opcode) {
1558         case INSTR_ADD_F:
1559         case INSTR_SUB_F:
1560         case INSTR_DIV_F:
1561         case INSTR_MUL_F:
1562         case INSTR_MUL_V:
1563         case INSTR_AND:
1564         case INSTR_OR:
1565 #if 0
1566         case INSTR_AND_I:
1567         case INSTR_AND_IF:
1568         case INSTR_AND_FI:
1569         case INSTR_OR_I:
1570         case INSTR_OR_IF:
1571         case INSTR_OR_FI:
1572 #endif
1573         case INSTR_BITAND:
1574         case INSTR_BITOR:
1575         case VINSTR_BITXOR:
1576 #if 0
1577         case INSTR_SUB_S: /* -- offset of string as float */
1578         case INSTR_MUL_IF:
1579         case INSTR_MUL_FI:
1580         case INSTR_DIV_IF:
1581         case INSTR_DIV_FI:
1582         case INSTR_BITOR_IF:
1583         case INSTR_BITOR_FI:
1584         case INSTR_BITAND_FI:
1585         case INSTR_BITAND_IF:
1586         case INSTR_EQ_I:
1587         case INSTR_NE_I:
1588 #endif
1589             ot = TYPE_FLOAT;
1590             break;
1591 #if 0
1592         case INSTR_ADD_I:
1593         case INSTR_ADD_IF:
1594         case INSTR_ADD_FI:
1595         case INSTR_SUB_I:
1596         case INSTR_SUB_FI:
1597         case INSTR_SUB_IF:
1598         case INSTR_MUL_I:
1599         case INSTR_DIV_I:
1600         case INSTR_BITAND_I:
1601         case INSTR_BITOR_I:
1602         case INSTR_XOR_I:
1603         case INSTR_RSHIFT_I:
1604         case INSTR_LSHIFT_I:
1605             ot = TYPE_INTEGER;
1606             break;
1607 #endif
1608         case INSTR_ADD_V:
1609         case INSTR_SUB_V:
1610         case INSTR_MUL_VF:
1611         case INSTR_MUL_FV:
1612         case VINSTR_BITAND_V:
1613         case VINSTR_BITOR_V:
1614         case VINSTR_BITXOR_V:
1615         case VINSTR_BITAND_VF:
1616         case VINSTR_BITOR_VF:
1617         case VINSTR_BITXOR_VF:
1618         case VINSTR_CROSS:
1619 #if 0
1620         case INSTR_DIV_VF:
1621         case INSTR_MUL_IV:
1622         case INSTR_MUL_VI:
1623 #endif
1624             ot = TYPE_VECTOR;
1625             break;
1626 #if 0
1627         case INSTR_ADD_SF:
1628             ot = TYPE_POINTER;
1629             break;
1630 #endif
1631     /*
1632      * Inside the following default case, opcode can never be one of
1633      * 1..9 or 62..65 - all of those are handled by the explicit cases above.
1634      */
1635         default:
1636             /* ranges: */
1637             /* boolean operations result in floats */
1638
1639             /*
1640              * "opcode >= 10" being true means opcode is at least 10;
1641              * "opcode <= 23" being false means opcode is at least 24.
1642              */
1643             if (opcode >= INSTR_EQ_F && opcode <= INSTR_GT)
1644                 ot = TYPE_FLOAT;
1645
1646             /*
1647              * When the disabled comparisons below would be evaluated,
1648              * opcode is already known to be at least 24 and not one of
1649              * {1..9, 62..65}, so the condition "opcode <= 23" can never
1650              * be true and ot would never be set to TYPE_FLOAT there.
1651              */
1655 #if 0
1656             else if (opcode >= INSTR_LE && opcode <= INSTR_GT)
1657                 ot = TYPE_FLOAT;
1658             else if (opcode >= INSTR_LE_I && opcode <= INSTR_EQ_FI)
1659                 ot = TYPE_FLOAT;
1660 #endif
1661             break;
1662     };
1663     if (ot == TYPE_VOID) {
1664         /* The AST or parser were supposed to check this! */
1665         return nullptr;
1666     }
1667
1668     return ir_block_create_general_instr(self, ctx, label, opcode, left, right, ot);
1669 }
1670
1671 ir_value* ir_block_create_unary(ir_block *self, lex_ctx_t ctx,
1672                                 const char *label, int opcode,
1673                                 ir_value *operand)
1674 {
1675     qc_type ot = TYPE_FLOAT;
1676     switch (opcode) {
1677         case INSTR_NOT_F:
1678         case INSTR_NOT_V:
1679         case INSTR_NOT_S:
1680         case INSTR_NOT_ENT:
1681         case INSTR_NOT_FNC: /*
1682         case INSTR_NOT_I:   */
1683             ot = TYPE_FLOAT;
1684             break;
1685
1686         /*
1687          * Negation for virtual instructions is emulated with 0-value. Thankfully
1688          * the operand for 0 already exists so we just source it from here.
1689          */
1690         case VINSTR_NEG_F:
1691             return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_F, nullptr, operand, ot);
1692         case VINSTR_NEG_V:
1693             return ir_block_create_general_instr(self, ctx, label, INSTR_SUB_V, nullptr, operand, TYPE_VECTOR);
1694
1695         default:
1696             ot = operand->vtype;
1697             break;
1698     };
1699     if (ot == TYPE_VOID) {
1700         /* The AST or parser were supposed to check this! */
1701         return nullptr;
1702     }
1703
1704     /* let's use the general instruction creator and pass nullptr for OPB */
1705     return ir_block_create_general_instr(self, ctx, label, opcode, operand, nullptr, ot);
1706 }
1707
1708 static ir_value* ir_block_create_general_instr(ir_block *self, lex_ctx_t ctx, const char *label,
1709                                         int op, ir_value *a, ir_value *b, qc_type outype)
1710 {
1711     ir_instr *instr;
1712     ir_value *out;
1713
1714     out = ir_value_out(self->owner, label, store_value, outype);
1715     if (!out)
1716         return nullptr;
1717
1718     instr = ir_instr_new(ctx, self, op);
1719     if (!instr) {
1720         return nullptr;
1721     }
1722
1723     if (!ir_instr_op(instr, 0, out, true) ||
1724         !ir_instr_op(instr, 1, a, false) ||
1725         !ir_instr_op(instr, 2, b, false) )
1726     {
1727         goto on_error;
1728     }
1729
1730     vec_push(self->instr, instr);
1731
1732     return out;
1733 on_error:
1734     ir_instr_delete(instr);
1735     return nullptr;
1736 }
1737
1738 ir_value* ir_block_create_fieldaddress(ir_block *self, lex_ctx_t ctx, const char *label, ir_value *ent, ir_value *field)
1739 {
1740     ir_value *v;
1741
1742     /* TODO: support for various pointer types, if so desired */
1743     if (ent->vtype != TYPE_ENTITY)
1744         return nullptr;
1745
1746     if (field->vtype != TYPE_FIELD)
1747         return nullptr;
1748
1749     v = ir_block_create_general_instr(self, ctx, label, INSTR_ADDRESS, ent, field, TYPE_POINTER);
1750     v->fieldtype = field->fieldtype;
1751     return v;
1752 }
1753
1754 ir_value* ir_block_create_load_from_ent(ir_block *self, lex_ctx_t ctx, const char *label, ir_value *ent, ir_value *field, qc_type outype)
1755 {
1756     int op;
1757     if (ent->vtype != TYPE_ENTITY)
1758         return nullptr;
1759
1760     /* at some point we could redirect for TYPE_POINTER... but that could lead to carelessness */
1761     if (field->vtype != TYPE_FIELD)
1762         return nullptr;
1763
1764     switch (outype)
1765     {
1766         case TYPE_FLOAT:    op = INSTR_LOAD_F;   break;
1767         case TYPE_VECTOR:   op = INSTR_LOAD_V;   break;
1768         case TYPE_STRING:   op = INSTR_LOAD_S;   break;
1769         case TYPE_FIELD:    op = INSTR_LOAD_FLD; break;
1770         case TYPE_ENTITY:   op = INSTR_LOAD_ENT; break;
1771         case TYPE_FUNCTION: op = INSTR_LOAD_FNC; break;
1772 #if 0
1773         case TYPE_POINTER: op = INSTR_LOAD_I;   break;
1774         case TYPE_INTEGER: op = INSTR_LOAD_I;   break;
1775 #endif
1776         default:
1777             irerror(self->context, "invalid type for ir_block_create_load_from_ent: %s", type_name[outype]);
1778             return nullptr;
1779     }
1780
1781     return ir_block_create_general_instr(self, ctx, label, op, ent, field, outype);
1782 }
1783
1784 /* PHI resolving breaks the SSA, and must thus be the last
1785  * step before life-range calculation.
1786  */
1787
1788 static bool ir_block_naive_phi(ir_block *self);
1789 bool ir_function_naive_phi(ir_function *self)
1790 {
1791     for (auto& b : self->blocks)
1792         if (!ir_block_naive_phi(b.get()))
1793             return false;
1794     return true;
1795 }
1796
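/* "Naive" phi lowering: each PHI instruction is removed; where the
 * incoming value is an SSA temp with a single read and write, its
 * defining instruction is rewritten to produce the PHI's output directly,
 * otherwise a store into the PHI's output is inserted at the end of the
 * predecessor block, just before its terminating jump.
 */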
1797 static bool ir_block_naive_phi(ir_block *self)
1798 {
1799     size_t i;
1800     /* FIXME: optionally, create_phi can add the phis
1801      * to a list so we don't need to loop through blocks
1802      * - anyway: "don't optimize YET"
1803      */
1804     for (i = 0; i < vec_size(self->instr); ++i)
1805     {
1806         ir_instr *instr = self->instr[i];
1807         if (instr->opcode != VINSTR_PHI)
1808             continue;
1809
1810         vec_remove(self->instr, i, 1);
1811         --i; /* compensate for the loop's ++i, since the instruction at index i was removed */
1812
1813         for (auto &it : instr->phi) {
1814             ir_value *v = it.value;
1815             ir_block *b = it.from;
1816             if (v->store == store_value && v->reads.size() == 1 && v->writes.size() == 1) {
1817                 /* replace the value */
1818                 if (!ir_instr_op(v->writes[0], 0, instr->_ops[0], true))
1819                     return false;
1820             } else {
1821                 /* force a move instruction */
1822                 ir_instr *prevjump = vec_last(b->instr);
1823                 vec_pop(b->instr);
1824                 b->final = false;
1825                 instr->_ops[0]->store = store_global;
1826                 if (!ir_block_create_store(b, instr->context, instr->_ops[0], v))
1827                     return false;
1828                 instr->_ops[0]->store = store_value;
1829                 vec_push(b->instr, prevjump);
1830                 b->final = true;
1831             }
1832         }
1833         ir_instr_delete(instr);
1834     }
1835     return true;
1836 }
1837
1838 /***********************************************************************
1839  * IR Temp allocation code
1840  * Propagating value life ranges by walking through the function backwards
1841  * until no more changes are made.
1842  * In theory this should run once, plus once more for every nested
1843  * loop level, though this implementation might run an additional
1844  * time for if-nests.
1845  */
1846
1847 /* Enumerate instructions used by value's life-ranges
1848  */
1849 static void ir_block_enumerate(ir_block *self, size_t *_eid)
1850 {
1851     size_t i;
1852     size_t eid = *_eid;
1853     for (i = 0; i < vec_size(self->instr); ++i)
1854     {
1855         self->instr[i]->eid = eid++;
1856     }
1857     *_eid = eid;
1858 }
1859
1860 /* Enumerate blocks and instructions.
1861  * The block-enumeration is unordered!
1862  * We do not really use the block enumeration, however
1863  * the instruction enumeration is important for life-ranges.
1864  */
1865 void ir_function_enumerate(ir_function *self)
1866 {
1867     size_t instruction_id = 0;
1868     size_t block_eid = 0;
1869     for (auto& block : self->blocks)
1870     {
1871         /* each block now gets an additional "entry" instruction id
1872          * we can use to avoid point-life issues
1873          */
1874         block->entry_id = instruction_id;
1875         block->eid      = block_eid;
1876         ++instruction_id;
1877         ++block_eid;
1878
1879         ir_block_enumerate(block.get(), &instruction_id);
1880     }
1881 }
1882
1883 /* Local-value allocator
1884  * After finishing creating the liferange of all values used in a function
1885  * we can allocate their global-positions.
1886  * This is the counterpart to register-allocation in register machines.
1887  */
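     /* Rough sketch of the strategy implemented below (names and life
      * ranges are made up for illustration):
      *
      *     value   life range     assigned slot
      *     a       [2, 10]        slot 0
      *     b       [12, 20]       slot 0  (does not overlap a)
      *     c       [5, 15]        slot 1  (overlaps both a and b)
      *
      * A value is merged into the first existing slot whose combined
      * life range it does not overlap; otherwise a new slot is created.
      * Values with unique_life always get a slot of their own.
      */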
1888 struct function_allocator {
1889     ir_value **locals;
1890     size_t *sizes;
1891     size_t *positions;
1892     bool *unique;
1893 };
1894
1895 static bool function_allocator_alloc(function_allocator *alloc, ir_value *var)
1896 {
1897     ir_value *slot;
1898     size_t vsize = ir_value_sizeof(var);
1899
1900     var->code.local = vec_size(alloc->locals);
1901
1902     slot = new ir_value("reg", store_global, var->vtype);
1903     if (!slot)
1904         return false;
1905
1906     if (!ir_value_life_merge_into(slot, var))
1907         goto localerror;
1908
1909     vec_push(alloc->locals, slot);
1910     vec_push(alloc->sizes, vsize);
1911     vec_push(alloc->unique, var->unique_life);
1912
1913     return true;
1914
1915 localerror:
1916     delete slot;
1917     return false;
1918 }
1919
1920 static bool ir_function_allocator_assign(ir_function *self, function_allocator *alloc, ir_value *v)
1921 {
1922     size_t a;
1923     ir_value *slot;
1924
1925     if (v->unique_life)
1926         return function_allocator_alloc(alloc, v);
1927
1928     for (a = 0; a < vec_size(alloc->locals); ++a)
1929     {
1930         /* if it's reserved for a unique liferange: skip */
1931         if (alloc->unique[a])
1932             continue;
1933
1934         slot = alloc->locals[a];
1935
1936         /* never resize parameters
1937          * will be required later when overlapping temps + locals
1938          */
1939         if (a < vec_size(self->params) &&
1940             alloc->sizes[a] < ir_value_sizeof(v))
1941         {
1942             continue;
1943         }
1944
1945         if (ir_values_overlap(v, slot))
1946             continue;
1947
1948         if (!ir_value_life_merge_into(slot, v))
1949             return false;
1950
1951         /* adjust size for this slot */
1952         if (alloc->sizes[a] < ir_value_sizeof(v))
1953             alloc->sizes[a] = ir_value_sizeof(v);
1954
1955         v->code.local = a;
1956         return true;
1957     }
1958     if (a >= vec_size(alloc->locals)) {
1959         if (!function_allocator_alloc(alloc, v))
1960             return false;
1961     }
1962     return true;
1963 }
1964
1965 bool ir_function_allocate_locals(ir_function *self)
1966 {
1967     bool   retval = true;
1968     size_t pos;
1969     bool   opt_gt = OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS);
1970
1971     function_allocator lockalloc, globalloc;
1972
1973     if (self->locals.empty() && self->values.empty())
1974         return true;
1975
1976     globalloc.locals    = nullptr;
1977     globalloc.sizes     = nullptr;
1978     globalloc.positions = nullptr;
1979     globalloc.unique    = nullptr;
1980     lockalloc.locals    = nullptr;
1981     lockalloc.sizes     = nullptr;
1982     lockalloc.positions = nullptr;
1983     lockalloc.unique    = nullptr;
1984
1985     size_t i;
1986     for (i = 0; i < self->locals.size(); ++i)
1987     {
1988         ir_value *v = self->locals[i].get();
1989         if ((self->flags & IR_FLAG_MASK_NO_LOCAL_TEMPS) || !OPTS_OPTIMIZATION(OPTIM_LOCAL_TEMPS)) {
1990             v->locked      = true;
1991             v->unique_life = true;
1992         }
1993         else if (i >= vec_size(self->params))
1994             break;
1995         else
1996             v->locked = true; /* lock the parameters' locals */
1997         if (!function_allocator_alloc((v->locked || !opt_gt ? &lockalloc : &globalloc), v))
1998             goto error;
1999     }
2000     for (; i < self->locals.size(); ++i)
2001     {
2002         ir_value *v = self->locals[i].get();
2003         if (v->life.empty())
2004             continue;
2005         if (!ir_function_allocator_assign(self, (v->locked || !opt_gt ? &lockalloc : &globalloc), v))
2006             goto error;
2007     }
2008
2009     /* Allocate a slot for any value that still exists */
2010     for (i = 0; i < self->values.size(); ++i)
2011     {
2012         ir_value *v = self->values[i].get();
2013
2014         if (v->life.empty())
2015             continue;
2016
2017         /* CALL optimization:
2018          * If the value is a parameter-temp: 1 write, 1 read from a CALL
2019          * and it's not "locked", write it to the OFS_PARM directly.
2020          */
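             /* E.g. a temp written once and then read only as the third
              * argument of a CALL can be addressed directly as
              * OFS_PARM0 + 3*2 (parameter slots are 3 globals wide), so it
              * needs neither a local slot nor an extra STORE; arguments
              * past the eighth go through the extparam prototypes instead.
              */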
2021         if (OPTS_OPTIMIZATION(OPTIM_CALL_STORES) && !v->locked && !v->unique_life) {
2022             if (v->reads.size() == 1 && v->writes.size() == 1 &&
2023                 (v->reads[0]->opcode == VINSTR_NRCALL ||
2024                  (v->reads[0]->opcode >= INSTR_CALL0 && v->reads[0]->opcode <= INSTR_CALL8)
2025                 )
2026                )
2027             {
2028                 size_t param;
2029                 ir_instr *call = v->reads[0];
2030                 if (!vec_ir_value_find(call->params, v, &param)) {
2031                     irerror(call->context, "internal error: unlocked parameter %s not found", v->name.c_str());
2032                     goto error;
2033                 }
2034                 ++opts_optimizationcount[OPTIM_CALL_STORES];
2035                 v->callparam = true;
2036                 if (param < 8)
2037                     ir_value_code_setaddr(v, OFS_PARM0 + 3*param);
2038                 else {
2039                     size_t nprotos = self->owner->extparam_protos.size();
2040                     ir_value *ep;
2041                     param -= 8;
2042                     if (nprotos > param)
2043                         ep = self->owner->extparam_protos[param].get();
2044                     else
2045                     {
2046                         ep = ir_gen_extparam_proto(self->owner);
2047                         while (++nprotos <= param)
2048                             ep = ir_gen_extparam_proto(self->owner);
2049                     }
2050                     ir_instr_op(v->writes[0], 0, ep, true);
2051                     call->params[param+8] = ep;
2052                 }
2053                 continue;
2054             }
2055             if (v->writes.size() == 1 && v->writes[0]->opcode == INSTR_CALL0) {
2056                 v->store = store_return;
2057                 if (v->members[0]) v->members[0]->store = store_return;
2058                 if (v->members[1]) v->members[1]->store = store_return;
2059                 if (v->members[2]) v->members[2]->store = store_return;
2060                 ++opts_optimizationcount[OPTIM_CALL_STORES];
2061                 continue;
2062             }
2063         }
2064
2065         if (!ir_function_allocator_assign(self, (v->locked || !opt_gt ? &lockalloc : &globalloc), v))
2066             goto error;
2067     }
2068
2069     if (!lockalloc.sizes && !globalloc.sizes) {
2070         goto cleanup;
2071     }
2072     vec_push(lockalloc.positions, 0);
2073     vec_push(globalloc.positions, 0);
2074
2075     /* Adjust slot positions based on sizes */
2076     if (lockalloc.sizes) {
2077         pos = (vec_size(lockalloc.sizes) ? lockalloc.positions[0] : 0);
2078         for (i = 1; i < vec_size(lockalloc.sizes); ++i)
2079         {
2080             pos = lockalloc.positions[i-1] + lockalloc.sizes[i-1];
2081             vec_push(lockalloc.positions, pos);
2082         }
2083         self->allocated_locals = pos + vec_last(lockalloc.sizes);
2084     }
2085     if (globalloc.sizes) {
2086         pos = (vec_size(globalloc.sizes) ? globalloc.positions[0] : 0);
2087         for (i = 1; i < vec_size(globalloc.sizes); ++i)
2088         {
2089             pos = globalloc.positions[i-1] + globalloc.sizes[i-1];
2090             vec_push(globalloc.positions, pos);
2091         }
2092         self->globaltemps = pos + vec_last(globalloc.sizes);
2093     }
2094
2095     /* Locals need to know their new position */
2096     for (auto& local : self->locals) {
2097         if (local->locked || !opt_gt)
2098             local->code.local = lockalloc.positions[local->code.local];
2099         else
2100             local->code.local = globalloc.positions[local->code.local];
2101     }
2102     /* Take over the actual slot positions on values */
2103     for (auto& value : self->values) {
2104         if (value->locked || !opt_gt)
2105             value->code.local = lockalloc.positions[value->code.local];
2106         else
2107             value->code.local = globalloc.positions[value->code.local];
2108     }
2109
2110     goto cleanup;
2111
2112 error:
2113     retval = false;
2114 cleanup:
2115     for (i = 0; i < vec_size(lockalloc.locals); ++i)
2116         delete lockalloc.locals[i];
2117     for (i = 0; i < vec_size(globalloc.locals); ++i)
2118         delete globalloc.locals[i];
2119     vec_free(globalloc.unique);
2120     vec_free(globalloc.locals);
2121     vec_free(globalloc.sizes);
2122     vec_free(globalloc.positions);
2123     vec_free(lockalloc.unique);
2124     vec_free(lockalloc.locals);
2125     vec_free(lockalloc.sizes);
2126     vec_free(lockalloc.positions);
2127     return retval;
2128 }
2129
2130 /* Get bitmasks describing which of an instruction's operands
2131  * are read from and which are written to.
2132  */
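     /* For example, the default case below returns write = 1 (binary 001:
      * _ops[0] is the destination) and read = 6 (binary 110: _ops[1] and
      * _ops[2] are sources), while the STOREP_* family returns read = 7,
      * since all three operands are read and none is written.
      */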
2133 static void ir_op_read_write(int op, size_t *read, size_t *write)
2134 {
2135     switch (op)
2136     {
2137     case VINSTR_JUMP:
2138     case INSTR_GOTO:
2139         *write = 0;
2140         *read = 0;
2141         break;
2142     case INSTR_IF:
2143     case INSTR_IFNOT:
2144 #if 0
2145     case INSTR_IF_S:
2146     case INSTR_IFNOT_S:
2147 #endif
2148     case INSTR_RETURN:
2149     case VINSTR_COND:
2150         *write = 0;
2151         *read = 1;
2152         break;
2153     case INSTR_STOREP_F:
2154     case INSTR_STOREP_V:
2155     case INSTR_STOREP_S:
2156     case INSTR_STOREP_ENT:
2157     case INSTR_STOREP_FLD:
2158     case INSTR_STOREP_FNC:
2159         *write = 0;
2160         *read  = 7;
2161         break;
2162     default:
2163         *write = 1;
2164         *read = 6;
2165         break;
2166     };
2167 }
2168
2169 static bool ir_block_living_add_instr(ir_block *self, size_t eid) {
2170     bool changed = false;
2171     for (auto &it : self->living)
2172         if (ir_value_life_merge(it, eid))
2173             changed = true;
2174     return changed;
2175 }
2176
2177 static bool ir_block_living_lock(ir_block *self) {
2178     bool changed = false;
2179     for (auto &it : self->living) {
2180         if (it->locked)
2181             continue;
2182         it->locked = true;
2183         changed = true;
2184     }
2185     return changed;
2186 }
2187
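     /* Sketch of the propagation below: the 'living' set is seeded with
      * everything alive at this block's exits, then the instructions are
      * walked backwards. A write removes the value from the set (its
      * definition point), a read inserts it, and at (A) the current
      * instruction id is merged into the life range of every value still
      * in the set. The caller reruns this until a fixed point is reached.
      */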
2188 static bool ir_block_life_propagate(ir_block *self, bool *changed)
2189 {
2190     ir_instr *instr;
2191     ir_value *value;
2192     size_t i, o, p, mem;
2193     /* bitmasks which operands are read from or written to */
2194     size_t read, write;
2195     char dbg_ind[16];
2196     dbg_ind[0] = '#';
2197     dbg_ind[1] = '0';
2198     (void)dbg_ind;
2199
2200     self->living.clear();
2201
2202     p = vec_size(self->exits);
2203     for (i = 0; i < p; ++i) {
2204         ir_block *prev = self->exits[i];
2205         for (auto &it : prev->living)
2206             if (!vec_ir_value_find(self->living, it, nullptr))
2207                 self->living.push_back(it);
2208     }
2209
2210     i = vec_size(self->instr);
2211     while (i)
2212     { --i;
2213         instr = self->instr[i];
2214
2215         /* See which operands are read and write operands */
2216         ir_op_read_write(instr->opcode, &read, &write);
2217
2218         /* Go through the 3 main operands
2219          * writes first, then reads
2220          */
2221         for (o = 0; o < 3; ++o)
2222         {
2223             if (!instr->_ops[o]) /* no such operand */
2224                 continue;
2225
2226             value = instr->_ops[o];
2227
2228             /* We only care about locals */
2229             /* we also calculate parameter liferanges so that locals
2230              * can take up parameter slots */
2231             if (value->store != store_value &&
2232                 value->store != store_local &&
2233                 value->store != store_param)
2234                 continue;
2235
2236             /* write operands */
2237             /* When we write to a local, we consider it "dead" for the
2238              * remaining upper part of the function, since in SSA a value
2239              * can only be written once (== created)
2240              */
2241             if (write & (1<<o))
2242             {
2243                 size_t idx;
2244                 bool in_living = vec_ir_value_find(self->living, value, &idx);
2245                 if (!in_living)
2246                 {
2247                     /* If the value isn't alive it hasn't been read before... */
2248                     /* TODO: See if the warning can be emitted during parsing or AST processing
2249                      * otherwise have warning printed here.
2250                      * IF printing a warning here: include filecontext_t,
2251                      * and make sure it's only printed once
2252                      * since this function is run multiple times.
2253                      */
2254                     /* con_err( "Value only written %s\n", value->name); */
2255                     if (ir_value_life_merge(value, instr->eid))
2256                         *changed = true;
2257                 } else {
2258                     /* 'living' will no longer contain it after this,
2259                      * so merge the value here, because the merge at
2260                      * (A) below won't see it anymore.
2261                      */
2262                     if (ir_value_life_merge(value, instr->eid))
2263                         *changed = true;
2264                     // Then remove
2265                     self->living.erase(self->living.begin() + idx);
2266                 }
2267                 /* Removing a vector removes all members */
2268                 for (mem = 0; mem < 3; ++mem) {
2269                     if (value->members[mem] && vec_ir_value_find(self->living, value->members[mem], &idx)) {
2270                         if (ir_value_life_merge(value->members[mem], instr->eid))
2271                             *changed = true;
2272                         self->living.erase(self->living.begin() + idx);
2273                     }
2274                 }
2275                 /* Removing the last member removes the vector */
2276                 if (value->memberof) {
2277                     value = value->memberof;
2278                     for (mem = 0; mem < 3; ++mem) {
2279                         if (value->members[mem] && vec_ir_value_find(self->living, value->members[mem], nullptr))
2280                             break;
2281                     }
2282                     if (mem == 3 && vec_ir_value_find(self->living, value, &idx)) {
2283                         if (ir_value_life_merge(value, instr->eid))
2284                             *changed = true;
2285                         self->living.erase(self->living.begin() + idx);
2286                     }
2287                 }
2288             }
2289         }
2290
2291         /* These operations need a special case as they can break when using
2292          * same source and destination operand otherwise, as the engine may
2293          * read the source multiple times. */
2294         if (instr->opcode == INSTR_MUL_VF ||
2295             instr->opcode == VINSTR_BITAND_VF ||
2296             instr->opcode == VINSTR_BITOR_VF ||
2297             instr->opcode == VINSTR_BITXOR ||
2298             instr->opcode == VINSTR_BITXOR_VF ||
2299             instr->opcode == VINSTR_BITXOR_V ||
2300             instr->opcode == VINSTR_CROSS)
2301         {
2302             value = instr->_ops[2];
2303             /* this source operand gets an additional lifetime so it is not allocated on top of the destination */
2304             if (ir_value_life_merge(value, instr->eid+1))
2305                 *changed = true;
2306             if (value->memberof && ir_value_life_merge(value->memberof, instr->eid+1))
2307                 *changed = true;
2308         }
2309
2310         if (instr->opcode == INSTR_MUL_FV ||
2311             instr->opcode == INSTR_LOAD_V ||
2312             instr->opcode == VINSTR_BITXOR ||
2313             instr->opcode == VINSTR_BITXOR_VF ||
2314             instr->opcode == VINSTR_BITXOR_V ||
2315             instr->opcode == VINSTR_CROSS)
2316         {
2317             value = instr->_ops[1];
2318             /* this source operand gets an additional lifetime so it is not allocated on top of the destination */
2319             if (ir_value_life_merge(value, instr->eid+1))
2320                 *changed = true;
2321             if (value->memberof && ir_value_life_merge(value->memberof, instr->eid+1))
2322                 *changed = true;
2323         }
2324
2325         for (o = 0; o < 3; ++o)
2326         {
2327             if (!instr->_ops[o]) /* no such operand */
2328                 continue;
2329
2330             value = instr->_ops[o];
2331
2332             /* We only care about locals */
2333             /* we also calculate parameter liferanges so that locals
2334              * can take up parameter slots */
2335             if (value->store != store_value &&
2336                 value->store != store_local &&
2337                 value->store != store_param)
2338                 continue;
2339
2340             /* read operands */
2341             if (read & (1<<o))
2342             {
2343                 if (!vec_ir_value_find(self->living, value, nullptr))
2344                     self->living.push_back(value);
2345                 /* reading adds the full vector */
2346                 if (value->memberof && !vec_ir_value_find(self->living, value->memberof, nullptr))
2347                     self->living.push_back(value->memberof);
2348                 for (mem = 0; mem < 3; ++mem) {
2349                     if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], nullptr))
2350                         self->living.push_back(value->members[mem]);
2351                 }
2352             }
2353         }
2354         /* PHI operands are always read operands */
2355         for (auto &it : instr->phi) {
2356             value = it.value;
2357             if (!vec_ir_value_find(self->living, value, nullptr))
2358                 self->living.push_back(value);
2359             /* reading adds the full vector */
2360             if (value->memberof && !vec_ir_value_find(self->living, value->memberof, nullptr))
2361                 self->living.push_back(value->memberof);
2362             for (mem = 0; mem < 3; ++mem) {
2363                 if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], nullptr))
2364                     self->living.push_back(value->members[mem]);
2365             }
2366         }
2367
2368         /* on a call, all these values must be "locked" */
2369         if (instr->opcode >= INSTR_CALL0 && instr->opcode <= INSTR_CALL8) {
2370             if (ir_block_living_lock(self))
2371                 *changed = true;
2372         }
2373         /* call params are read operands too */
2374         for (auto &it : instr->params) {
2375             value = it;
2376             if (!vec_ir_value_find(self->living, value, nullptr))
2377                 self->living.push_back(value);
2378             /* reading adds the full vector */
2379             if (value->memberof && !vec_ir_value_find(self->living, value->memberof, nullptr))
2380                 self->living.push_back(value->memberof);
2381             for (mem = 0; mem < 3; ++mem) {
2382                 if (value->members[mem] && !vec_ir_value_find(self->living, value->members[mem], nullptr))
2383                     self->living.push_back(value->members[mem]);
2384             }
2385         }
2386
2387         /* (A) */
2388         if (ir_block_living_add_instr(self, instr->eid))
2389             *changed = true;
2390     }
2391     /* the "entry" instruction ID */
2392     if (ir_block_living_add_instr(self, self->entry_id))
2393         *changed = true;
2394
2395     return true;
2396 }
2397
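     /* The driver below re-propagates block liveness until no life range
      * changes anymore. Afterwards, any non-vector local still "living"
      * at the entry block must have been read before ever being written,
      * which is what triggers the WARN_USED_UNINITIALIZED diagnostics.
      */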
2398 bool ir_function_calculate_liferanges(ir_function *self)
2399 {
2400     size_t i, s;
2401     bool changed;
2402
2403     /* parameters live at 0 */
2404     for (i = 0; i < vec_size(self->params); ++i)
2405         if (!ir_value_life_merge(self->locals[i].get(), 0))
2406             compile_error(self->context, "internal error: failed value-life merging");
2407
2408     do {
2409         self->run_id++;
2410         changed = false;
2411         i = self->blocks.size();
2412         while (i--) {
2413             ir_block_life_propagate(self->blocks[i].get(), &changed);
2414         }
2415     } while (changed);
2416
2417     if (self->blocks.size()) {
2418         ir_block *block = self->blocks[0].get();
2419         for (auto &it : block->living) {
2420             ir_value *v = it;
2421             if (v->store != store_local)
2422                 continue;
2423             if (v->vtype == TYPE_VECTOR)
2424                 continue;
2425             self->flags |= IR_FLAG_HAS_UNINITIALIZED;
2426             /* find the instruction reading from it */
2427             for (s = 0; s < v->reads.size(); ++s) {
2428                 if (v->reads[s]->eid == v->life[0].end)
2429                     break;
2430             }
2431             if (s < v->reads.size()) {
2432                 if (irwarning(v->context, WARN_USED_UNINITIALIZED,
2433                               "variable `%s` may be used uninitialized in this function\n"
2434                               " -> %s:%i",
2435                               v->name.c_str(),
2436                               v->reads[s]->context.file, v->reads[s]->context.line)
2437                    )
2438                 {
2439                     return false;
2440                 }
2441                 continue;
2442             }
2443             if (v->memberof) {
2444                 ir_value *vec = v->memberof;
2445                 for (s = 0; s < vec->reads.size(); ++s) {
2446                     if (vec->reads[s]->eid == v->life[0].end)
2447                         break;
2448                 }
2449                 if (s < vec->reads.size()) {
2450                     if (irwarning(v->context, WARN_USED_UNINITIALIZED,
2451                                   "variable `%s` may be used uninitialized in this function\n"
2452                                   " -> %s:%i",
2453                                   v->name.c_str(),
2454                                   vec->reads[s]->context.file, vec->reads[s]->context.line)
2455                        )
2456                     {
2457                         return false;
2458                     }
2459                     continue;
2460                 }
2461             }
2462             if (irwarning(v->context, WARN_USED_UNINITIALIZED,
2463                           "variable `%s` may be used uninitialized in this function", v->name.c_str()))
2464             {
2465                 return false;
2466             }
2467         }
2468     }
2469     return true;
2470 }
2471
2472 /***********************************************************************
2473  * IR Code-Generation
2474  *
2475  * Since the IR has the convention of putting 'write' operands
2476  * at the beginning, we have to rotate the operands of instructions
2477  * properly in order to generate valid QCVM code.
2478  *
2479  * Having destinations at a fixed position is more convenient. In QC
2480  * this is *mostly* OPC, but FTE adds at least 2 instructions which
2481  * read from OPA and store to OPB rather than OPC, which is
2482  * partially the reason why the implementation of these instructions
2483  * in darkplaces has been delayed for so long.
2484  *
2485  * Breaking conventions is annoying...
2486  */
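     /* Concretely, an IR instruction keeps its destination in _ops[0] and
      * its sources in _ops[1] and _ops[2]; the generic case at the end of
      * gen_blocks_recursive rotates this into the QCVM layout
      *
      *     stmt.o1 = _ops[1],  stmt.o2 = _ops[2],  stmt.o3 = _ops[0]
      *
      * with extra fixups for RETURN/DONE and for the STORE/STOREP
      * families, whose destination lives in OPB rather than OPC.
      */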
2487 static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal);
2488
2489 static bool gen_global_field(code_t *code, ir_value *global)
2490 {
2491     if (global->hasvalue)
2492     {
2493         ir_value *fld = global->constval.vpointer;
2494         if (!fld) {
2495             irerror(global->context, "Invalid field constant with no field: %s", global->name.c_str());
2496             return false;
2497         }
2498
2499         /* copy the field's value */
2500         ir_value_code_setaddr(global, code->globals.size());
2501         code->globals.push_back(fld->code.fieldaddr);
2502         if (global->fieldtype == TYPE_VECTOR) {
2503             code->globals.push_back(fld->code.fieldaddr+1);
2504             code->globals.push_back(fld->code.fieldaddr+2);
2505         }
2506     }
2507     else
2508     {
2509         ir_value_code_setaddr(global, code->globals.size());
2510         code->globals.push_back(0);
2511         if (global->fieldtype == TYPE_VECTOR) {
2512             code->globals.push_back(0);
2513             code->globals.push_back(0);
2514         }
2515     }
2516     if (global->code.globaladdr < 0)
2517         return false;
2518     return true;
2519 }
2520
2521 static bool gen_global_pointer(code_t *code, ir_value *global)
2522 {
2523     if (global->hasvalue)
2524     {
2525         ir_value *target = global->constval.vpointer;
2526         if (!target) {
2527             irerror(global->context, "Invalid pointer constant: %s", global->name.c_str());
2528             /* nullptr pointers are pointing to the nullptr constant, which also
2529              * sits at address 0, but still has an ir_value for itself.
2530              */
2531             return false;
2532         }
2533
2534         /* Here, relocations ARE possible - in fteqcc-enhanced-qc:
2535          * void() foo; <- proto
2536          * void() *fooptr = &foo;
2537          * void() foo = { code }
2538          */
2539         if (!target->code.globaladdr) {
2540             /* FIXME: Check for the constant nullptr ir_value!
2541              * because then code.globaladdr being 0 is valid.
2542              */
2543             irerror(global->context, "FIXME: Relocation support");
2544             return false;
2545         }
2546
2547         ir_value_code_setaddr(global, code->globals.size());
2548         code->globals.push_back(target->code.globaladdr);
2549     }
2550     else
2551     {
2552         ir_value_code_setaddr(global, code->globals.size());
2553         code->globals.push_back(0);
2554     }
2555     if (global->code.globaladdr < 0)
2556         return false;
2557     return true;
2558 }
2559
2560 static bool gen_blocks_recursive(code_t *code, ir_function *func, ir_block *block)
2561 {
2562     prog_section_statement_t stmt;
2563     ir_instr *instr;
2564     ir_block *target;
2565     ir_block *ontrue;
2566     ir_block *onfalse;
2567     size_t    stidx;
2568     size_t    i;
2569     int       j;
2570
2571     block->generated = true;
2572     block->code_start = code->statements.size();
2573     for (i = 0; i < vec_size(block->instr); ++i)
2574     {
2575         instr = block->instr[i];
2576
2577         if (instr->opcode == VINSTR_PHI) {
2578             irerror(block->context, "cannot generate virtual instruction (phi)");
2579             return false;
2580         }
2581
2582         if (instr->opcode == VINSTR_JUMP) {
2583             target = instr->bops[0];
2584             /* for unconditional jumps, if the target hasn't been generated
2585              * yet, we generate it right here.
2586              */
2587             if (!target->generated)
2588                 return gen_blocks_recursive(code, func, target);
2589
2590             /* otherwise we generate a jump instruction */
2591             stmt.opcode = INSTR_GOTO;
2592             stmt.o1.s1 = target->code_start - code->statements.size();
2593             stmt.o2.s1 = 0;
2594             stmt.o3.s1 = 0;
2595             if (stmt.o1.s1 != 1)
2596                 code_push_statement(code, &stmt, instr->context);
2597
2598             /* no further instructions can be in this block */
2599             return true;
2600         }
2601
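             /* The virtual XOR below is lowered via the identity
              *     a ^ b == (a | b) - (a & b)
              * using the float BITOR/BITAND instructions, with the
              * intermediate (a & b) kept in vinstr_temp[0]; the *_V and
              * *_VF variants further down apply the same identity
              * per component.
              */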
2602         if (instr->opcode == VINSTR_BITXOR) {
2603             stmt.opcode = INSTR_BITOR;
2604             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2605             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2606             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2607             code_push_statement(code, &stmt, instr->context);
2608             stmt.opcode = INSTR_BITAND;
2609             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2610             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2611             stmt.o3.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]);
2612             code_push_statement(code, &stmt, instr->context);
2613             stmt.opcode = INSTR_SUB_F;
2614             stmt.o1.s1 = ir_value_code_addr(instr->_ops[0]);
2615             stmt.o2.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]);
2616             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2617             code_push_statement(code, &stmt, instr->context);
2618
2619             /* instruction generated */
2620             continue;
2621         }
2622
2623         if (instr->opcode == VINSTR_BITAND_V) {
2624             stmt.opcode = INSTR_BITAND;
2625             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2626             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2627             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2628             code_push_statement(code, &stmt, instr->context);
2629             ++stmt.o1.s1;
2630             ++stmt.o2.s1;
2631             ++stmt.o3.s1;
2632             code_push_statement(code, &stmt, instr->context);
2633             ++stmt.o1.s1;
2634             ++stmt.o2.s1;
2635             ++stmt.o3.s1;
2636             code_push_statement(code, &stmt, instr->context);
2637
2638             /* instruction generated */
2639             continue;
2640         }
2641
2642         if (instr->opcode == VINSTR_BITOR_V) {
2643             stmt.opcode = INSTR_BITOR;
2644             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2645             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2646             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2647             code_push_statement(code, &stmt, instr->context);
2648             ++stmt.o1.s1;
2649             ++stmt.o2.s1;
2650             ++stmt.o3.s1;
2651             code_push_statement(code, &stmt, instr->context);
2652             ++stmt.o1.s1;
2653             ++stmt.o2.s1;
2654             ++stmt.o3.s1;
2655             code_push_statement(code, &stmt, instr->context);
2656
2657             /* instruction generated */
2658             continue;
2659         }
2660
2661         if (instr->opcode == VINSTR_BITXOR_V) {
2662             for (j = 0; j < 3; ++j) {
2663                 stmt.opcode = INSTR_BITOR;
2664                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + j;
2665                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]) + j;
2666                 stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]) + j;
2667                 code_push_statement(code, &stmt, instr->context);
2668                 stmt.opcode = INSTR_BITAND;
2669                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + j;
2670                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]) + j;
2671                 stmt.o3.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]) + j;
2672                 code_push_statement(code, &stmt, instr->context);
2673             }
2674             stmt.opcode = INSTR_SUB_V;
2675             stmt.o1.s1 = ir_value_code_addr(instr->_ops[0]);
2676             stmt.o2.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]);
2677             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2678             code_push_statement(code, &stmt, instr->context);
2679
2680             /* instruction generated */
2681             continue;
2682         }
2683
2684         if (instr->opcode == VINSTR_BITAND_VF) {
2685             stmt.opcode = INSTR_BITAND;
2686             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2687             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2688             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2689             code_push_statement(code, &stmt, instr->context);
2690             ++stmt.o1.s1;
2691             ++stmt.o3.s1;
2692             code_push_statement(code, &stmt, instr->context);
2693             ++stmt.o1.s1;
2694             ++stmt.o3.s1;
2695             code_push_statement(code, &stmt, instr->context);
2696
2697             /* instruction generated */
2698             continue;
2699         }
2700
2701         if (instr->opcode == VINSTR_BITOR_VF) {
2702             stmt.opcode = INSTR_BITOR;
2703             stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]);
2704             stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2705             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2706             code_push_statement(code, &stmt, instr->context);
2707             ++stmt.o1.s1;
2708             ++stmt.o3.s1;
2709             code_push_statement(code, &stmt, instr->context);
2710             ++stmt.o1.s1;
2711             ++stmt.o3.s1;
2712             code_push_statement(code, &stmt, instr->context);
2713
2714             /* instruction generated */
2715             continue;
2716         }
2717
2718         if (instr->opcode == VINSTR_BITXOR_VF) {
2719             for (j = 0; j < 3; ++j) {
2720                 stmt.opcode = INSTR_BITOR;
2721                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + j;
2722                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2723                 stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]) + j;
2724                 code_push_statement(code, &stmt, instr->context);
2725                 stmt.opcode = INSTR_BITAND;
2726                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + j;
2727                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]);
2728                 stmt.o3.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]) + j;
2729                 code_push_statement(code, &stmt, instr->context);
2730             }
2731             stmt.opcode = INSTR_SUB_V;
2732             stmt.o1.s1 = ir_value_code_addr(instr->_ops[0]);
2733             stmt.o2.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]);
2734             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2735             code_push_statement(code, &stmt, instr->context);
2736
2737             /* instruction generated */
2738             continue;
2739         }
2740
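             /* The cross product below is expanded component-wise as
              *     out[j] = a[(j+1)%3] * b[(j+2)%3] - a[(j+2)%3] * b[(j+1)%3]
              * with MUL_F producing the partial products (the second set
              * goes into vinstr_temp[0]) and a single SUB_V at the end.
              */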
2741         if (instr->opcode == VINSTR_CROSS) {
2742             stmt.opcode = INSTR_MUL_F;
2743             for (j = 0; j < 3; ++j) {
2744                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + (j + 1) % 3;
2745                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]) + (j + 2) % 3;
2746                 stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]) + j;
2747                 code_push_statement(code, &stmt, instr->context);
2748                 stmt.o1.s1 = ir_value_code_addr(instr->_ops[1]) + (j + 2) % 3;
2749                 stmt.o2.s1 = ir_value_code_addr(instr->_ops[2]) + (j + 1) % 3;
2750                 stmt.o3.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]) + j;
2751                 code_push_statement(code, &stmt, instr->context);
2752             }
2753             stmt.opcode = INSTR_SUB_V;
2754             stmt.o1.s1 = ir_value_code_addr(instr->_ops[0]);
2755             stmt.o2.s1 = ir_value_code_addr(func->owner->vinstr_temp[0]);
2756             stmt.o3.s1 = ir_value_code_addr(instr->_ops[0]);
2757             code_push_statement(code, &stmt, instr->context);
2758
2759             /* instruction generated */
2760             continue;
2761         }
2762
2763         if (instr->opcode == VINSTR_COND) {
2764             ontrue  = instr->bops[0];
2765             onfalse = instr->bops[1];
2766             /* TODO: have the AST signal which block should
2767              * come first: eg. optimize IFs without ELSE...
2768              */
2769
2770             stmt.o1.u1 = ir_value_code_addr(instr->_ops[0]);
2771             stmt.o2.u1 = 0;
2772             stmt.o3.s1 = 0;
2773
2774             if (ontrue->generated) {
2775                 stmt.opcode = INSTR_IF;
2776                 stmt.o2.s1 = ontrue->code_start - code->statements.size();
2777                 if (stmt.o2.s1 != 1)
2778                     code_push_statement(code, &stmt, instr->context);
2779             }
2780             if (onfalse->generated) {
2781                 stmt.opcode = INSTR_IFNOT;
2782                 stmt.o2.s1 = onfalse->code_start - code->statements.size();
2783                 if (stmt.o2.s1 != 1)
2784                     code_push_statement(code, &stmt, instr->context);
2785             }
2786             if (!ontrue->generated) {
2787                 if (onfalse->generated)
2788                     return gen_blocks_recursive(code, func, ontrue);
2789             }
2790             if (!onfalse->generated) {
2791                 if (ontrue->generated)
2792                     return gen_blocks_recursive(code, func, onfalse);
2793             }
2794             /* neither ontrue nor onfalse have been generated yet */
2795             stmt.opcode = INSTR_IFNOT;
2796             if (!instr->likely) {
2797                 /* Honor the likelihood hint */
2798                 ir_block *tmp = onfalse;
2799                 stmt.opcode = INSTR_IF;
2800                 onfalse = ontrue;
2801                 ontrue = tmp;
2802             }
2803             stidx = code->statements.size();
2804             code_push_statement(code, &stmt, instr->context);
2805             /* on false we jump, so add ontrue-path */
2806             if (!gen_blocks_recursive(code, func, ontrue))
2807                 return false;
2808             /* fixup the jump address */
2809             code->statements[stidx].o2.s1 = code->statements.size() - stidx;
2810             /* generate onfalse path */
2811             if (onfalse->generated) {
2812                 /* fixup the jump address */
2813                 code->statements[stidx].o2.s1 = onfalse->code_start - stidx;
2814                 if (stidx+2 == code->statements.size() && code->statements[stidx].o2.s1 == 1) {
2815                     code->statements[stidx] = code->statements[stidx+1];
2816                     if (code->statements[stidx].o1.s1 < 0)
2817                         code->statements[stidx].o1.s1++;
2818                     code_pop_statement(code);
2819                 }
2820                 stmt.opcode = code->statements.back().opcode;
2821                 if (stmt.opcode == INSTR_GOTO ||
2822                     stmt.opcode == INSTR_IF ||
2823                     stmt.opcode == INSTR_IFNOT ||
2824                     stmt.opcode == INSTR_RETURN ||
2825                     stmt.opcode == INSTR_DONE)
2826                 {
2827                     /* no use jumping from here */
2828                     return true;
2829                 }
2830                 /* may have been generated in the previous recursive call */
2831                 stmt.opcode = INSTR_GOTO;
2832                 stmt.o1.s1 = onfalse->code_start - code->statements.size();
2833                 stmt.o2.s1 = 0;
2834                 stmt.o3.s1 = 0;
2835                 if (stmt.o1.s1 != 1)
2836                     code_push_statement(code, &stmt, instr->context);
2837                 return true;
2838             }
2839             else if (stidx+2 == code->statements.size() && code->statements[stidx].o2.s1 == 1) {
2840                 code->statements[stidx] = code->statements[stidx+1];
2841                 if (code->statements[stidx].o1.s1 < 0)
2842                     code->statements[stidx].o1.s1++;
2843                 code_pop_statement(code);
2844             }
2845             /* if not, generate now */
2846             return gen_blocks_recursive(code, func, onfalse);
2847         }
2848
2849         if ( (instr->opcode >= INSTR_CALL0 && instr->opcode <= INSTR_CALL8)
2850            || instr->opcode == VINSTR_NRCALL)
2851         {
2852             size_t p, first;
2853             ir_value *retvalue;
2854
2855             first = instr->params.size();
2856             if (first > 8)
2857                 first = 8;
2858             for (p = 0; p < first; ++p)
2859             {
2860                 ir_value *param = instr->params[p];
2861                 if (param->callparam)
2862                     continue;
2863
2864                 stmt.opcode = INSTR_STORE_F;
2865                 stmt.o3.u1 = 0;
2866
2867                 if (param->vtype == TYPE_FIELD)
2868                     stmt.opcode = field_store_instr[param->fieldtype];
2869                 else if (param->vtype == TYPE_NIL)
2870                     stmt.opcode = INSTR_STORE_V;
2871                 else
2872                     stmt.opcode = type_store_instr[param->vtype];
2873                 stmt.o1.u1 = ir_value_code_addr(param);
2874                 stmt.o2.u1 = OFS_PARM0 + 3 * p;
2875
2876                 if (param->vtype == TYPE_VECTOR && (param->flags & IR_FLAG_SPLIT_VECTOR)) {
2877                     /* fetch 3 separate floats */
2878                     stmt.opcode = INSTR_STORE_F;
2879                     stmt.o1.u1 = ir_value_code_addr(param->members[0]);
2880                     code_push_statement(code, &stmt, instr->context);
2881                     stmt.o2.u1++;
2882                     stmt.o1.u1 = ir_value_code_addr(param->members[1]);
2883                     code_push_statement(code, &stmt, instr->context);
2884                     stmt.o2.u1++;
2885                     stmt.o1.u1 = ir_value_code_addr(param->members[2]);
2886                     code_push_statement(code, &stmt, instr->context);
2887                 }
2888                 else
2889                     code_push_statement(code, &stmt, instr->context);
2890             }
2891             /* Now handle extparams */
2892             first = instr->params.size();
2893             for (; p < first; ++p)
2894             {
2895                 ir_builder *ir = func->owner;
2896                 ir_value *param = instr->params[p];
2897                 ir_value *targetparam;
2898
2899                 if (param->callparam)
2900                     continue;
2901
2902                 if (p-8 >= ir->extparams.size())
2903                     ir_gen_extparam(ir);
2904
2905                 targetparam = ir->extparams[p-8].get();
2906
2907                 stmt.opcode = INSTR_STORE_F;
2908                 stmt.o3.u1 = 0;
2909
2910                 if (param->vtype == TYPE_FIELD)
2911                     stmt.opcode = field_store_instr[param->fieldtype];
2912                 else if (param->vtype == TYPE_NIL)
2913                     stmt.opcode = INSTR_STORE_V;
2914                 else
2915                     stmt.opcode = type_store_instr[param->vtype];
2916                 stmt.o1.u1 = ir_value_code_addr(param);
2917                 stmt.o2.u1 = ir_value_code_addr(targetparam);
2918                 if (param->vtype == TYPE_VECTOR && (param->flags & IR_FLAG_SPLIT_VECTOR)) {
2919                     /* fetch 3 separate floats */
2920                     stmt.opcode = INSTR_STORE_F;
2921                     stmt.o1.u1 = ir_value_code_addr(param->members[0]);
2922                     code_push_statement(code, &stmt, instr->context);
2923                     stmt.o2.u1++;
2924                     stmt.o1.u1 = ir_value_code_addr(param->members[1]);
2925                     code_push_statement(code, &stmt, instr->context);
2926                     stmt.o2.u1++;
2927                     stmt.o1.u1 = ir_value_code_addr(param->members[2]);
2928                     code_push_statement(code, &stmt, instr->context);
2929                 }
2930                 else
2931                     code_push_statement(code, &stmt, instr->context);
2932             }
2933
2934             stmt.opcode = INSTR_CALL0 + instr->params.size();
2935             if (stmt.opcode > INSTR_CALL8)
2936                 stmt.opcode = INSTR_CALL8;
2937             stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
2938             stmt.o2.u1 = 0;
2939             stmt.o3.u1 = 0;
2940             code_push_statement(code, &stmt, instr->context);
2941
2942             retvalue = instr->_ops[0];
2943             if (retvalue && retvalue->store != store_return &&
2944                 (retvalue->store == store_global || retvalue->life.size()))
2945             {
2946                 /* not to be kept in OFS_RETURN */
2947                 if (retvalue->vtype == TYPE_FIELD && OPTS_FLAG(ADJUST_VECTOR_FIELDS))
2948                     stmt.opcode = field_store_instr[retvalue->fieldtype];
2949                 else
2950                     stmt.opcode = type_store_instr[retvalue->vtype];
2951                 stmt.o1.u1 = OFS_RETURN;
2952                 stmt.o2.u1 = ir_value_code_addr(retvalue);
2953                 stmt.o3.u1 = 0;
2954                 code_push_statement(code, &stmt, instr->context);
2955             }
2956             continue;
2957         }
2958
2959         if (instr->opcode == INSTR_STATE) {
2960             stmt.opcode = instr->opcode;
2961             if (instr->_ops[0])
2962                 stmt.o1.u1 = ir_value_code_addr(instr->_ops[0]);
2963             if (instr->_ops[1])
2964                 stmt.o2.u1 = ir_value_code_addr(instr->_ops[1]);
2965             stmt.o3.u1 = 0;
2966             code_push_statement(code, &stmt, instr->context);
2967             continue;
2968         }
2969
2970         stmt.opcode = instr->opcode;
2971         stmt.o1.u1 = 0;
2972         stmt.o2.u1 = 0;
2973         stmt.o3.u1 = 0;
2974
2975         /* This is the general order of operands */
2976         if (instr->_ops[0])
2977             stmt.o3.u1 = ir_value_code_addr(instr->_ops[0]);
2978
2979         if (instr->_ops[1])
2980             stmt.o1.u1 = ir_value_code_addr(instr->_ops[1]);
2981
2982         if (instr->_ops[2])
2983             stmt.o2.u1 = ir_value_code_addr(instr->_ops[2]);
2984
2985         if (stmt.opcode == INSTR_RETURN || stmt.opcode == INSTR_DONE)
2986         {
2987             stmt.o1.u1 = stmt.o3.u1;
2988             stmt.o3.u1 = 0;
2989         }
2990         else if ((stmt.opcode >= INSTR_STORE_F &&
2991                   stmt.opcode <= INSTR_STORE_FNC) ||
2992                  (stmt.opcode >= INSTR_STOREP_F &&
2993                   stmt.opcode <= INSTR_STOREP_FNC))
2994         {
2995             /* 2-operand instructions with A -> B */
2996             stmt.o2.u1 = stmt.o3.u1;
2997             stmt.o3.u1 = 0;
2998
2999             /* tiny optimization, don't output
3000              * STORE a, a
3001              */
3002             if (stmt.o2.u1 == stmt.o1.u1 &&
3003                 OPTS_OPTIMIZATION(OPTIM_PEEPHOLE))
3004             {
3005                 ++opts_optimizationcount[OPTIM_PEEPHOLE];
3006                 continue;
3007             }
3008         }
3009         code_push_statement(code, &stmt, instr->context);
3010     }
3011     return true;
3012 }
3013
3014 static bool gen_function_code(code_t *code, ir_function *self)
3015 {
3016     ir_block *block;
3017     prog_section_statement_t stmt, *retst;
3018
3019     /* Starting from entry point, we generate blocks "as they come"
3020      * for now. Dead blocks will obviously not be translated.
3021      */
3022     if (self->blocks.empty()) {
3023         irerror(self->context, "Function '%s' declared without body.", self->name.c_str());
3024         return false;
3025     }
3026
3027     block = self->blocks[0].get();
3028     if (block->generated)
3029         return true;
3030
3031     if (!gen_blocks_recursive(code, self, block)) {
3032         irerror(self->context, "failed to generate blocks for '%s'", self->name.c_str());
3033         return false;
3034     }
3035
3036     /* code_write and qcvm -disasm need to know that the function ends here */
3037     retst = &code->statements.back();
3038     if (OPTS_OPTIMIZATION(OPTIM_VOID_RETURN) &&
3039         self->outtype == TYPE_VOID &&
3040         retst->opcode == INSTR_RETURN &&
3041         !retst->o1.u1 && !retst->o2.u1 && !retst->o3.u1)
3042     {
3043         retst->opcode = INSTR_DONE;
3044         ++opts_optimizationcount[OPTIM_VOID_RETURN];
3045     } else {
3046         lex_ctx_t last;
3047
3048         stmt.opcode = INSTR_DONE;
3049         stmt.o1.u1  = 0;
3050         stmt.o2.u1  = 0;
3051         stmt.o3.u1  = 0;
3052         last.line   = code->linenums.back();
3053         last.column = code->columnnums.back();
3054
3055         code_push_statement(code, &stmt, last);
3056     }
3057     return true;
3058 }
3059
3060 static qcint_t ir_builder_filestring(ir_builder *ir, const char *filename)
3061 {
3062     /* NOTE: filename pointers are copied, we never strdup them;
3063      * the lookup below finds an existing entry by string comparison.
3064      */
3065     qcint_t  str;
3066
3067     for (size_t i = 0; i != ir->filenames.size(); ++i) {
3068         if (!strcmp(ir->filenames[i], filename))
3069             return i;
3070     }
3071
3072     str = code_genstring(ir->code, filename);
3073     ir->filenames.push_back(filename);
3074     ir->filestrings.push_back(str);
3075     return str;
3076 }
3077
3078 static bool gen_global_function(ir_builder *ir, ir_value *global)
3079 {
3080     prog_section_function_t fun;
3081     ir_function            *irfun;
3082
3083     size_t i;
3084
3085     if (!global->hasvalue || (!global->constval.vfunc)) {
3086         irerror(global->context, "Invalid state of function-global: not constant: %s", global->name.c_str());
3087         return false;
3088     }
3089
3090     irfun = global->constval.vfunc;
3091     fun.name = global->code.name;
3092     fun.file = ir_builder_filestring(ir, global->context.file);
3093     fun.profile = 0; /* always 0 */
3094     fun.nargs = vec_size(irfun->params);
3095     if (fun.nargs > 8)
3096         fun.nargs = 8;
3097
3098     for (i = 0; i < 8; ++i) {
3099         if ((int32_t)i >= fun.nargs)
3100             fun.argsize[i] = 0;
3101         else
3102             fun.argsize[i] = type_sizeof_[irfun->params[i]];
3103     }
3104
3105     fun.firstlocal = 0;
3106     fun.locals = irfun->allocated_locals;
3107
3108     if (irfun->builtin)
3109         fun.entry = irfun->builtin+1;
3110     else {
3111         irfun->code_function_def = ir->code->functions.size();
3112         fun.entry = ir->code->statements.size();
3113     }
3114
3115     ir->code->functions.push_back(fun);
3116     return true;
3117 }
3118
3119 static ir_value* ir_gen_extparam_proto(ir_builder *ir)
3120 {
3121     char      name[128];
3122
3123     util_snprintf(name, sizeof(name), "EXTPARM#%i", (int)(ir->extparam_protos.size()));
3124     ir_value *global = new ir_value(name, store_global, TYPE_VECTOR);
3125     ir->extparam_protos.emplace_back(global);
3126
3127     return global;
3128 }
3129
3130 static void ir_gen_extparam(ir_builder *ir)
3131 {
3132     prog_section_def_t def;
3133     ir_value          *global;
3134
3135     if (ir->extparam_protos.size() < ir->extparams.size()+1)
3136         global = ir_gen_extparam_proto(ir);
3137     else
3138         global = ir->extparam_protos[ir->extparams.size()].get();
3139
3140     def.name = code_genstring(ir->code, global->name.c_str());
3141     def.type = TYPE_VECTOR;
3142     def.offset = ir->code->globals.size();
3143
3144     ir->code->defs.push_back(def);
3145
3146     ir_value_code_setaddr(global, def.offset);
3147
3148     ir->code->globals.push_back(0);
3149     ir->code->globals.push_back(0);
3150     ir->code->globals.push_back(0);
3151
3152     ir->extparams.emplace_back(global);
3153 }
3154
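     /* The QCVM provides only eight parameter slots (OFS_PARM0 plus
      * seven more, each 3 globals wide). For calls with more arguments,
      * the IR allocates dedicated "EXTPARM#n" globals: the caller spills
      * the extra arguments into them, and the copy helpers below move
      * them into the callee's own locals (and vararg slots).
      */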
3155 static bool gen_function_extparam_copy(code_t *code, ir_function *self)
3156 {
3157     ir_builder *ir = self->owner;
3158
3159     size_t numparams = vec_size(self->params);
3160     if (!numparams)
3161         return true;
3162
3163     prog_section_statement_t stmt;
3164     stmt.opcode = INSTR_STORE_F;
3165     stmt.o3.s1 = 0;
3166     for (size_t i = 8; i < numparams; ++i) {
3167         size_t ext = i - 8;
3168         if (ext >= ir->extparams.size())
3169             ir_gen_extparam(ir);
3170
3171         ir_value *ep = ir->extparams[ext].get();
3172
3173         stmt.opcode = type_store_instr[self->locals[i]->vtype];
3174         if (self->locals[i]->vtype == TYPE_FIELD &&
3175             self->locals[i]->fieldtype == TYPE_VECTOR)
3176         {
3177             stmt.opcode = INSTR_STORE_V;
3178         }
3179         stmt.o1.u1 = ir_value_code_addr(ep);
3180         stmt.o2.u1 = ir_value_code_addr(self->locals[i].get());
3181         code_push_statement(code, &stmt, self->context);
3182     }
3183
3184     return true;
3185 }
3186
3187 static bool gen_function_varargs_copy(code_t *code, ir_function *self)
3188 {
3189     size_t i, ext, numparams, maxparams;
3190
3191     ir_builder *ir = self->owner;
3192     ir_value   *ep;
3193     prog_section_statement_t stmt;
3194
3195     numparams = vec_size(self->params);
3196     if (!numparams)
3197         return true;
3198
3199     stmt.opcode = INSTR_STORE_V;
3200     stmt.o3.s1 = 0;
3201     maxparams = numparams + self->max_varargs;
3202     for (i = numparams; i < maxparams; ++i) {
3203         if (i < 8) {
3204             stmt.o1.u1 = OFS_PARM0 + 3*i;
3205             stmt.o2.u1 = ir_value_code_addr(self->locals[i].get());
3206             code_push_statement(code, &stmt, self->context);
3207             continue;
3208         }
3209         ext = i - 8;
3210         while (ext >= ir->extparams.size())
3211             ir_gen_extparam(ir);
3212
3213         ep = ir->extparams[ext].get();
3214
3215         stmt.o1.u1 = ir_value_code_addr(ep);
3216         stmt.o2.u1 = ir_value_code_addr(self->locals[i].get());
3217         code_push_statement(code, &stmt, self->context);
3218     }
3219
3220     return true;
3221 }
3222
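/* Lay out a function's locals and temporaries in the globals area.
 * With OPTIM_OVERLAP_LOCALS (and neither -g nor IR_FLAG_MASK_NO_OVERLAP)
 * all functions share one block starting at first_common_local; with
 * OPTIM_GLOBAL_TEMPS, unlocked temporaries are placed in the shared
 * block at first_common_globaltemp instead of per-function slots.
 */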
3223 static bool gen_function_locals(ir_builder *ir, ir_value *global)
3224 {
3225     prog_section_function_t *def;
3226     ir_function             *irfun;
3227     uint32_t                 firstlocal, firstglobal;
3228
3229     irfun = global->constval.vfunc;
3230     def   = &ir->code->functions[irfun->code_function_def];
3231
3232     if (OPTS_OPTION_BOOL(OPTION_G) ||
3233         !OPTS_OPTIMIZATION(OPTIM_OVERLAP_LOCALS)        ||
3234         (irfun->flags & IR_FLAG_MASK_NO_OVERLAP))
3235     {
3236         firstlocal = def->firstlocal = ir->code->globals.size();
3237     } else {
3238         firstlocal = def->firstlocal = ir->first_common_local;
3239         ++opts_optimizationcount[OPTIM_OVERLAP_LOCALS];
3240     }
3241
3242     firstglobal = (OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS) ? ir->first_common_globaltemp : firstlocal);
3243
3244     for (size_t i = ir->code->globals.size(); i < firstlocal + irfun->allocated_locals; ++i)
3245         ir->code->globals.push_back(0);
3246
3247     for (auto& lp : irfun->locals) {
3248         ir_value *v = lp.get();
3249         if (v->locked || !OPTS_OPTIMIZATION(OPTIM_GLOBAL_TEMPS)) {
3250             ir_value_code_setaddr(v, firstlocal + v->code.local);
3251             if (!ir_builder_gen_global(ir, v, true)) {
3252                 irerror(v->context, "failed to generate local %s", v->name.c_str());
3253                 return false;
3254             }
3255         }
3256         else
3257             ir_value_code_setaddr(v, firstglobal + v->code.local);
3258     }
3259     for (auto& vp : irfun->values) {
3260         ir_value *v = vp.get();
3261         if (v->callparam)
3262             continue;
3263         if (v->locked)
3264             ir_value_code_setaddr(v, firstlocal + v->code.local);
3265         else
3266             ir_value_code_setaddr(v, firstglobal + v->code.local);
3267     }
3268     return true;
3269 }
3270
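/* Second pass over a function global: now that all globals are allocated,
 * patch the function-def's entry point and generate locals, extparam and
 * vararg copies, and finally the statement stream itself.
 */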
3271 static bool gen_global_function_code(ir_builder *ir, ir_value *global)
3272 {
3273     prog_section_function_t *fundef;
3274     ir_function             *irfun;
3275
3278     irfun = global->constval.vfunc;
3279     if (!irfun) {
3280         if (global->cvq == CV_NONE) {
3281             if (irwarning(global->context, WARN_IMPLICIT_FUNCTION_POINTER,
3282                           "function `%s` has no body and in QC implicitly becomes a function-pointer",
3283                           global->name.c_str()))
3284             {
3285                 /* Not bailing out just now. If this happens a lot you don't want to have
3286                  * to rerun gmqcc for each such function.
3287                  */
3288
3289                 /* return false; */
3290             }
3291         }
3292         /* this was a function pointer, don't generate code for those */
3293         return true;
3294     }
3295
3296     if (irfun->builtin)
3297         return true;
3298
3299     /*
3300      * If there is no definition and the function is erasable, we can skip
3301      * emitting it altogether.
3302      */
3303     if (global->flags & IR_FLAG_ERASABLE && irfun->code_function_def < 0) {
3304         return true;
3305     }
3306
3307     if (irfun->code_function_def < 0) {
3308         irerror(irfun->context, "`%s`: IR global wasn't generated, failed to access function-def", irfun->name.c_str());
3309         return false;
3310     }
3311     fundef = &ir->code->functions[irfun->code_function_def];
3312
3313     fundef->entry = ir->code->statements.size();
3314     if (!gen_function_locals(ir, global)) {
3315         irerror(irfun->context, "Failed to generate locals for function %s", irfun->name.c_str());
3316         return false;
3317     }
3318     if (!gen_function_extparam_copy(ir->code, irfun)) {
3319         irerror(irfun->context, "Failed to generate extparam-copy code for function %s", irfun->name.c_str());
3320         return false;
3321     }
3322     if (irfun->max_varargs && !gen_function_varargs_copy(ir->code, irfun)) {
3323         irerror(irfun->context, "Failed to generate vararg-copy code for function %s", irfun->name.c_str());
3324         return false;
3325     }
3326     if (!gen_function_code(ir->code, irfun)) {
3327         irerror(irfun->context, "Failed to generate code for function %s", irfun->name.c_str());
3328         return false;
3329     }
3330     return true;
3331 }
3332
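/* Emit the three float defs <name>_x/_y/_z that accompany a vector def,
 * unless SINGLE_VECTOR_DEFS is requested or the vector is an immediate
 * (its name starts with '#').
 */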
3333 static void gen_vector_defs(code_t *code, prog_section_def_t def, const char *name)
3334 {
3335     char  *component;
3336     size_t len, i;
3337
3338     if (!name || name[0] == '#' || OPTS_FLAG(SINGLE_VECTOR_DEFS))
3339         return;
3340
3341     def.type = TYPE_FLOAT;
3342
3343     len = strlen(name);
3344
3345     component = (char*)mem_a(len+3);
3346     memcpy(component, name, len);
3347     len += 2;
3348     component[len-0] = 0;
3349     component[len-2] = '_';
3350
3351     component[len-1] = 'x';
3352
3353     for (i = 0; i < 3; ++i) {
3354         def.name = code_genstring(code, component);
3355         code->defs.push_back(def);
3356         def.offset++;
3357         component[len-1]++;
3358     }
3359
3360     mem_d(component);
3361 }
3362
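/* Counterpart of gen_vector_defs for the field table: emit the
 * <name>_x/_y/_z float field entries of a vector field.
 */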
3363 static void gen_vector_fields(code_t *code, prog_section_field_t fld, const char *name)
3364 {
3365     char  *component;
3366     size_t len, i;
3367
3368     if (!name || OPTS_FLAG(SINGLE_VECTOR_DEFS))
3369         return;
3370
3371     fld.type = TYPE_FLOAT;
3372
3373     len = strlen(name);
3374
3375     component = (char*)mem_a(len+3);
3376     memcpy(component, name, len);
3377     len += 2;
3378     component[len-0] = 0;
3379     component[len-2] = '_';
3380
3381     component[len-1] = 'x';
3382
3383     for (i = 0; i < 3; ++i) {
3384         fld.name = code_genstring(code, component);
3385         code->fields.push_back(fld);
3386         fld.offset++;
3387         component[len-1]++;
3388     }
3389
3390     mem_d(component);
3391 }
3392
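/* Emit one global into the output: reserve space in the globals area,
 * write any constant initializer, and push a def unless the name may be
 * stripped. For locals (islocal) nothing is allocated here; at most a def
 * at the already-assigned address is emitted (only with -g).
 */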
3393 static bool ir_builder_gen_global(ir_builder *self, ir_value *global, bool islocal)
3394 {
3395     size_t             i;
3396     int32_t           *iptr;
3397     prog_section_def_t def;
3398     bool               pushdef = opts.optimizeoff;
3399
3400     /* we don't generate split-vectors */
3401     if (global->vtype == TYPE_VECTOR && (global->flags & IR_FLAG_SPLIT_VECTOR))
3402         return true;
3403
3404     def.type = global->vtype;
3405     def.offset = self->code->globals.size();
3406     def.name = 0;
3407     if (OPTS_OPTION_BOOL(OPTION_G) || !islocal)
3408     {
3409         pushdef = true;
3410
3411         /*
3412          * if the global is erasable and never read, skip
3413          * emitting it.
3414          */
3415         if (global->flags & IR_FLAG_ERASABLE && global->reads.empty()) {
3416             return true;
3417         }
3418
3419         if (OPTS_OPTIMIZATION(OPTIM_STRIP_CONSTANT_NAMES) &&
3420             !(global->flags & IR_FLAG_INCLUDE_DEF) &&
3421             (global->name[0] == '#' || global->cvq == CV_CONST))
3422         {
3423             pushdef = false;
3424         }
3425
3426         if (pushdef) {
3427             if (global->name[0] == '#') {
3428                 if (!self->str_immediate)
3429                     self->str_immediate = code_genstring(self->code, "IMMEDIATE");
3430                 def.name = global->code.name = self->str_immediate;
3431             }
3432             else
3433                 def.name = global->code.name = code_genstring(self->code, global->name.c_str());
3434         }
3435         else
3436             def.name   = 0;
3437         if (islocal) {
3438             def.offset = ir_value_code_addr(global);
3439             self->code->defs.push_back(def);
3440             if (global->vtype == TYPE_VECTOR)
3441                 gen_vector_defs(self->code, def, global->name.c_str());
3442             else if (global->vtype == TYPE_FIELD && global->fieldtype == TYPE_VECTOR)
3443                 gen_vector_defs(self->code, def, global->name.c_str());
3444             return true;
3445         }
3446     }
3447     if (islocal)
3448         return true;
3449
3450     switch (global->vtype)
3451     {
3452     case TYPE_VOID:
3453         if (0 == global->name.compare("end_sys_globals")) {
3454             // TODO: remember this point... all the defs before this one
3455             // should be checksummed and added to progdefs.h when we generate it.
3456         }
3457         else if (0 == global->name.compare("end_sys_fields")) {
3458             // TODO: same as above but for entity-fields rather than globals
3459         }
3460         else if (irwarning(global->context, WARN_VOID_VARIABLES, "unrecognized variable of type void `%s`",
3461                           global->name.c_str()))
3462         {
3463             /* Not bailing out */
3464             /* return false; */
3465         }
3466         /* I'd argue setting it to 0 is sufficient, but maybe some depend on knowing how far
3467          * the system fields actually go? Though the engine knows this anyway...
3468          * Maybe this could become an -f option.
3469          * fteqcc creates data of size 1 for end_sys_*, so we do the same.
3470          */
3471         ir_value_code_setaddr(global, self->code->globals.size());
3472         self->code->globals.push_back(0);
3473         /* Add the def */
3474         if (pushdef) self->code->defs.push_back(def);
3475         return true;
3476     case TYPE_POINTER:
3477         if (pushdef) self->code->defs.push_back(def);
3478         return gen_global_pointer(self->code, global);
3479     case TYPE_FIELD:
3480         if (pushdef) {
3481             self->code->defs.push_back(def);
3482             if (global->fieldtype == TYPE_VECTOR)
3483                 gen_vector_defs(self->code, def, global->name.c_str());
3484         }
3485         return gen_global_field(self->code, global);
3486     case TYPE_ENTITY:
3487         /* fall through */
3488     case TYPE_FLOAT:
3489     {
3490         ir_value_code_setaddr(global, self->code->globals.size());
3491         if (global->hasvalue) {
3492             iptr = (int32_t*)&global->constval.ivec[0];
3493             self->code->globals.push_back(*iptr);
3494         } else {
3495             self->code->globals.push_back(0);
3496         }
3497         if (!islocal && global->cvq != CV_CONST)
3498             def.type |= DEF_SAVEGLOBAL;
3499         if (pushdef) self->code->defs.push_back(def);
3500
3501         return global->code.globaladdr >= 0;
3502     }
3503     case TYPE_STRING:
3504     {
3505         ir_value_code_setaddr(global, self->code->globals.size());
3506         if (global->hasvalue) {
3507             uint32_t load = code_genstring(self->code, global->constval.vstring);
3508             self->code->globals.push_back(load);
3509         } else {
3510             self->code->globals.push_back(0);
3511         }
3512         if (!islocal && global->cvq != CV_CONST)
3513             def.type |= DEF_SAVEGLOBAL;
3514         if (pushdef) self->code->defs.push_back(def);
3515         return global->code.globaladdr >= 0;
3516     }
3517     case TYPE_VECTOR:
3518     {
3519         size_t d;
3520         ir_value_code_setaddr(global, self->code->globals.size());
3521         if (global->hasvalue) {
3522             iptr = (int32_t*)&global->constval.ivec[0];
3523             self->code->globals.push_back(iptr[0]);
3524             if (global->code.globaladdr < 0)
3525                 return false;
3526             for (d = 1; d < type_sizeof_[global->vtype]; ++d) {
3527                 self->code->globals.push_back(iptr[d]);
3528             }
3529         } else {
3530             self->code->globals.push_back(0);
3531             if (global->code.globaladdr < 0)
3532                 return false;
3533             for (d = 1; d < type_sizeof_[global->vtype]; ++d) {
3534                 self->code->globals.push_back(0);
3535             }
3536         }
3537         if (!islocal && global->cvq != CV_CONST)
3538             def.type |= DEF_SAVEGLOBAL;
3539
3540         if (pushdef) {
3541             self->code->defs.push_back(def);
3542             def.type &= ~DEF_SAVEGLOBAL;
3543             gen_vector_defs(self->code, def, global->name.c_str());
3544         }
3545         return global->code.globaladdr >= 0;
3546     }
3547     case TYPE_FUNCTION:
3548         ir_value_code_setaddr(global, self->code->globals.size());
3549         if (!global->hasvalue) {
3550             self->code->globals.push_back(0);
3551             if (global->code.globaladdr < 0)
3552                 return false;
3553         } else {
3554             self->code->globals.push_back(self->code->functions.size());
3555             if (!gen_global_function(self, global))
3556                 return false;
3557         }
3558         if (!islocal && global->cvq != CV_CONST)
3559             def.type |= DEF_SAVEGLOBAL;
3560         if (pushdef) self->code->defs.push_back(def);
3561         return true;
3562     case TYPE_VARIANT:
3563         /* assume biggest type */
3564         ir_value_code_setaddr(global, self->code->globals.size());
3565         self->code->globals.push_back(0);
3566         for (i = 1; i < type_sizeof_[TYPE_VARIANT]; ++i)
3567             self->code->globals.push_back(0);
3568         return true;
3569     default:
3570         /* refuse to create 'void' type or any other fancy business. */
3571         irerror(global->context, "Invalid type for global variable `%s`: %s",
3572                 global->name.c_str(), type_name[global->vtype]);
3573         return false;
3574     }
3575 }
3576
3577 static GMQCC_INLINE void ir_builder_prepare_field(code_t *code, ir_value *field)
3578 {
3579     field->code.fieldaddr = code_alloc_field(code, type_sizeof_[field->fieldtype]);
3580 }
3581
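/* Emit the def, the field entry and the field-holding global for an
 * entity field. In GMQCC mode the global is named ".<field>"; in plain
 * QC mode it reuses the field's name.
 */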
3582 static bool ir_builder_gen_field(ir_builder *self, ir_value *field)
3583 {
3584     prog_section_def_t def;
3585     prog_section_field_t fld;
3586
3589     def.type   = (uint16_t)field->vtype;
3590     def.offset = (uint16_t)self->code->globals.size();
3591
3592     /* create a global named the same as the field */
3593     if (OPTS_OPTION_U32(OPTION_STANDARD) == COMPILER_GMQCC) {
3594         /* in our standard, the global gets a dot prefix */
3595         size_t len = field->name.length();
3596         char name[1024];
3597
3598         /* we really don't want to have to allocate this, and 1024
3599          * bytes is more than enough for a variable/field name
3600          */
3601         if (len+2 >= sizeof(name)) {
3602             irerror(field->context, "invalid field name size: %u", (unsigned int)len);
3603             return false;
3604         }
3605
3606         name[0] = '.';
3607         memcpy(name+1, field->name.c_str(), len); // no strncpy needed - len comes from field->name.length() above
3608         name[len+1] = 0;
3609
3610         def.name = code_genstring(self->code, name);
3611         fld.name = def.name + 1; /* we reuse that string table entry */
3612     } else {
3613         /* in plain QC there cannot be a global with the same name as the
3614          * field, so it is safe to give the field-holding global that name.
3615          * FIXME: check whether fteqcc also creates such a global and
3616          * whether it actually uses the same name. It probably does.
3617          */
3618         def.name = code_genstring(self->code, field->name.c_str());
3619         fld.name = def.name;
3620     }
3621
3622     field->code.name = def.name;
3623
3624     self->code->defs.push_back(def);
3625
3626     fld.type = field->fieldtype;
3627
3628     if (fld.type == TYPE_VOID) {
3629         irerror(field->context, "field is missing a type: %s - don't know its size", field->name.c_str());
3630         return false;
3631     }
3632
3633     fld.offset = field->code.fieldaddr;
3634
3635     self->code->fields.push_back(fld);
3636
3637     ir_value_code_setaddr(field, self->code->globals.size());
3638     self->code->globals.push_back(fld.offset);
3639     if (fld.type == TYPE_VECTOR) {
3640         self->code->globals.push_back(fld.offset+1);
3641         self->code->globals.push_back(fld.offset+2);
3642     }
3643
3644     if (field->fieldtype == TYPE_VECTOR) {
3645         gen_vector_defs  (self->code, def, field->name.c_str());
3646         gen_vector_fields(self->code, fld, field->name.c_str());
3647     }
3648
3649     return field->code.globaladdr >= 0;
3650 }
3651
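/* Collect constant float globals (CV_CONST or '#'-named immediates) so
 * that vector splitting can reuse them instead of emitting new floats.
 */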
3652 static void ir_builder_collect_reusables(ir_builder *builder) {
3653     std::vector<ir_value*> reusables;
3654
3655     for (auto& gp : builder->globals) {
3656         ir_value *value = gp.get();
3657         if (value->vtype != TYPE_FLOAT || !value->hasvalue)
3658             continue;
3659         if (value->cvq == CV_CONST || (value->name.length() >= 1 && value->name[0] == '#'))
3660             reusables.emplace_back(value);
3661     }
3662     builder->const_floats = move(reusables);
3663 }
3664
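/* Split a constant vector immediate that is only ever read as a direct
 * call parameter into three component floats: the vector is flagged
 * IR_FLAG_SPLIT_VECTOR (and thus never emitted), the floats are recorded
 * in its .members array, and they inherit the vector's readers.
 */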
3665 static void ir_builder_split_vector(ir_builder *self, ir_value *vec) {
3666     ir_value* found[3] = { nullptr, nullptr, nullptr };
3667
3668     // must not be written to
3669     if (vec->writes.size())
3670         return;
3671     // must not be trying to access individual members
3672     if (vec->members[0] || vec->members[1] || vec->members[2])
3673         return;
3674     // should be actually used otherwise it won't be generated anyway
3675     if (vec->reads.empty())
3676         return;
3680
3681     // may only be used directly as function parameters; if we find any other instruction, cancel
3682     for (ir_instr *user : vec->reads) {
3683         // only split vectors that are used directly as parameters to a call
3684         if ((user->opcode < INSTR_CALL0 || user->opcode > INSTR_CALL8) && user->opcode != VINSTR_NRCALL)
3685             return;
3686     }
3687
3688     vec->flags |= IR_FLAG_SPLIT_VECTOR;
3689
3690     // find existing floats making up the split
3691     for (ir_value *c : self->const_floats) {
3692         if (!found[0] && c->constval.vfloat == vec->constval.vvec.x)
3693             found[0] = c;
3694         if (!found[1] && c->constval.vfloat == vec->constval.vvec.y)
3695             found[1] = c;
3696         if (!found[2] && c->constval.vfloat == vec->constval.vvec.z)
3697             found[2] = c;
3698         if (found[0] && found[1] && found[2])
3699             break;
3700     }
3701
3702     // generate floats for not yet found components
3703     if (!found[0])
3704         found[0] = ir_builder_imm_float(self, vec->constval.vvec.x, true);
3705     if (!found[1]) {
3706         if (vec->constval.vvec.y == vec->constval.vvec.x)
3707             found[1] = found[0];
3708         else
3709             found[1] = ir_builder_imm_float(self, vec->constval.vvec.y, true);
3710     }
3711     if (!found[2]) {
3712         if (vec->constval.vvec.z == vec->constval.vvec.x)
3713             found[2] = found[0];
3714         else if (vec->constval.vvec.z == vec->constval.vvec.y)
3715             found[2] = found[1];
3716         else
3717             found[2] = ir_builder_imm_float(self, vec->constval.vvec.z, true);
3718     }
3719
3720     // the .members array should be safe to use here
3721     vec->members[0] = found[0];
3722     vec->members[1] = found[1];
3723     vec->members[2] = found[2];
3724
3725     // register the readers for these floats
3726     found[0]->reads.insert(found[0]->reads.end(), vec->reads.begin(), vec->reads.end());
3727     found[1]->reads.insert(found[1]->reads.end(), vec->reads.begin(), vec->reads.end());
3728     found[2]->reads.insert(found[2]->reads.end(), vec->reads.begin(), vec->reads.end());
3729 }
3730
3731 static void ir_builder_split_vectors(ir_builder *self) {
3732     for (auto& gp : self->globals) {
3733         ir_value *v = gp.get();
3734         if (v->vtype != TYPE_VECTOR || !v->name.length() || v->name[0] != '#')
3735             continue;
3736         ir_builder_split_vector(self, v);
3737     }
3738 }
3739
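/* Top-level code generation: optionally split vector immediates, lay out
 * fields and globals, reserve the nil and vinstr temporaries as well as
 * the shared globaltemp/local blocks, generate all function bodies, and
 * finally write the progs (and optional .lno) file via code_write.
 */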
3740 bool ir_builder_generate(ir_builder *self, const char *filename)
3741 {
3742     prog_section_statement_t stmt;
3743     char  *lnofile = nullptr;
3744
3745     if (OPTS_FLAG(SPLIT_VECTOR_PARAMETERS)) {
3746         ir_builder_collect_reusables(self);
3747         if (!self->const_floats.empty())
3748             ir_builder_split_vectors(self);
3749     }
3750
3751     for (auto& fp : self->fields)
3752         ir_builder_prepare_field(self->code, fp.get());
3753
3754     for (auto& gp : self->globals) {
3755         ir_value *global = gp.get();
3756         if (!ir_builder_gen_global(self, global, false)) {
3757             return false;
3758         }
3759         if (global->vtype == TYPE_FUNCTION) {
3760             ir_function *func = global->constval.vfunc;
3761             if (func && self->max_locals < func->allocated_locals &&
3762                 !(func->flags & IR_FLAG_MASK_NO_OVERLAP))
3763             {
3764                 self->max_locals = func->allocated_locals;
3765             }
3766             if (func && self->max_globaltemps < func->globaltemps)
3767                 self->max_globaltemps = func->globaltemps;
3768         }
3769     }
3770
3771     for (auto& fp : self->fields) {
3772         if (!ir_builder_gen_field(self, fp.get()))
3773             return false;
3774     }
3775
3776     // generate nil
3777     ir_value_code_setaddr(self->nil, self->code->globals.size());
3778     self->code->globals.push_back(0);
3779     self->code->globals.push_back(0);
3780     self->code->globals.push_back(0);
3781
3782     // generate virtual-instruction temps
3783     for (size_t i = 0; i < IR_MAX_VINSTR_TEMPS; ++i) {
3784         ir_value_code_setaddr(self->vinstr_temp[i], self->code->globals.size());
3785         self->code->globals.push_back(0);
3786         self->code->globals.push_back(0);
3787         self->code->globals.push_back(0);
3788     }
3789
3790     // generate global temps
3791     self->first_common_globaltemp = self->code->globals.size();
3792     self->code->globals.insert(self->code->globals.end(), self->max_globaltemps, 0);
3793     // FIXME:DELME:
3794     //for (size_t i = 0; i < self->max_globaltemps; ++i) {
3795     //    self->code->globals.push_back(0);
3796     //}
3797     // generate common locals
3798     self->first_common_local = self->code->globals.size();
3799     self->code->globals.insert(self->code->globals.end(), self->max_locals, 0);
3800     // FIXME:DELME:
3801     //for (i = 0; i < self->max_locals; ++i) {
3802     //    self->code->globals.push_back(0);
3803     //}
3804
3805     // generate function code
3806
3807     for (auto& gp : self->globals) {
3808         ir_value *global = gp.get();
3809         if (global->vtype == TYPE_FUNCTION) {
3810             if (!gen_global_function_code(self, global)) {
3811                 return false;
3812             }
3813         }
3814     }
3815
3816     if (self->code->globals.size() >= 65536) {
3817         irerror(self->globals.back()->context,
3818             "This progs file would require more globals than the metadata can handle (%zu). Bailing out.",
3819             self->code->globals.size());
3820         return false;
3821     }
3822
3823     /* DarkPlaces (DP) errors out if the last instruction is not an INSTR_DONE. */
3824     if (self->code->statements.back().opcode != INSTR_DONE)
3825     {
3826         lex_ctx_t last;
3827
3828         stmt.opcode = INSTR_DONE;
3829         stmt.o1.u1  = 0;
3830         stmt.o2.u1  = 0;
3831         stmt.o3.u1  = 0;
3832         last.line   = self->code->linenums.back();
3833         last.column = self->code->columnnums.back();
3834
3835         code_push_statement(self->code, &stmt, last);
3836     }
3837
3838     if (OPTS_OPTION_BOOL(OPTION_PP_ONLY))
3839         return true;
3840
3841     if (self->code->statements.size() != self->code->linenums.size()) {
3842         con_err("Linecounter wrong: %lu != %lu\n",
3843                 (unsigned long)self->code->statements.size(),
3844                 (unsigned long)self->code->linenums.size());
3845     } else if (OPTS_FLAG(LNO)) {
3846         char  *dot;
3847         size_t filelen = strlen(filename);
3848
3849         memcpy(vec_add(lnofile, filelen+1), filename, filelen+1);
3850         dot = strrchr(lnofile, '.');
3851         if (!dot) {
3852             vec_pop(lnofile);
3853         } else {
3854             vec_shrinkto(lnofile, dot - lnofile);
3855         }
3856         memcpy(vec_add(lnofile, 5), ".lno", 5);
3857     }
3858
3859     if (!code_write(self->code, filename, lnofile)) {
3860         vec_free(lnofile);
3861         return false;
3862     }
3863
3864     vec_free(lnofile);
3865     return true;
3866 }
3867
3868 /***********************************************************************
3869  * IR debug dump functions
3870  */
3871
3872 #define IND_BUFSZ 1024
3873
3874 static const char *qc_opname(int op)
3875 {
3876     if (op < 0) return "<INVALID>";
3877     if (op < VINSTR_END)
3878         return util_instr_str[op];
3879     switch (op) {
3880         case VINSTR_END:       return "END";
3881         case VINSTR_PHI:       return "PHI";
3882         case VINSTR_JUMP:      return "JUMP";
3883         case VINSTR_COND:      return "COND";
3884         case VINSTR_BITXOR:    return "BITXOR";
3885         case VINSTR_BITAND_V:  return "BITAND_V";
3886         case VINSTR_BITOR_V:   return "BITOR_V";
3887         case VINSTR_BITXOR_V:  return "BITXOR_V";
3888         case VINSTR_BITAND_VF: return "BITAND_VF";
3889         case VINSTR_BITOR_VF:  return "BITOR_VF";
3890         case VINSTR_BITXOR_VF: return "BITXOR_VF";
3891         case VINSTR_CROSS:     return "CROSS";
3892         case VINSTR_NEG_F:     return "NEG_F";
3893         case VINSTR_NEG_V:     return "NEG_V";
3894         default:               return "<UNK>";
3895     }
3896 }
3897
3898 void ir_builder_dump(ir_builder *b, int (*oprintf)(const char*, ...))
3899 {
3900     size_t i;
3901     char indent[IND_BUFSZ];
3902     indent[0] = '\t';
3903     indent[1] = 0;
3904
3905     oprintf("module %s\n", b->name.c_str());
3906     for (i = 0; i < b->globals.size(); ++i)
3907     {
3908         oprintf("global ");
3909         if (b->globals[i]->hasvalue)
3910             oprintf("%s = ", b->globals[i]->name.c_str());
3911         ir_value_dump(b->globals[i].get(), oprintf);
3912         oprintf("\n");
3913     }
3914     for (i = 0; i < b->functions.size(); ++i)
3915         ir_function_dump(b->functions[i].get(), indent, oprintf);
3916     oprintf("endmodule %s\n", b->name.c_str());
3917 }
3918
3919 static const char *storenames[] = {
3920     "[global]", "[local]", "[param]", "[value]", "[return]"
3921 };
3922
3923 void ir_function_dump(ir_function *f, char *ind,
3924                       int (*oprintf)(const char*, ...))
3925 {
3926     size_t i;
3927     if (f->builtin != 0) {
3928         oprintf("%sfunction %s = builtin %i\n", ind, f->name.c_str(), -f->builtin);
3929         return;
3930     }
3931     oprintf("%sfunction %s\n", ind, f->name.c_str());
3932     util_strncat(ind, "\t", IND_BUFSZ-1);
3933     if (f->locals.size())
3934     {
3935         oprintf("%s%i locals:\n", ind, (int)f->locals.size());
3936         for (i = 0; i < f->locals.size(); ++i) {
3937             oprintf("%s\t", ind);
3938             ir_value_dump(f->locals[i].get(), oprintf);
3939             oprintf("\n");
3940         }
3941     }
3942     oprintf("%sliferanges:\n", ind);
3943     for (i = 0; i < f->locals.size(); ++i) {
3944         const char *attr = "";
3945         size_t l, m;
3946         ir_value *v = f->locals[i].get();
3947         if (v->unique_life && v->locked)
3948             attr = "unique,locked ";
3949         else if (v->unique_life)
3950             attr = "unique ";
3951         else if (v->locked)
3952             attr = "locked ";
3953         oprintf("%s\t%s: %s %s %s%s@%i ", ind, v->name.c_str(), type_name[v->vtype],
3954                 storenames[v->store],
3955                 attr, (v->callparam ? "callparam " : ""),
3956                 (int)v->code.local);
3957         if (v->life.empty())
3958             oprintf("[null]");
3959         for (l = 0; l < v->life.size(); ++l) {
3960             oprintf("[%i,%i] ", v->life[l].start, v->life[l].end);
3961         }
3962         oprintf("\n");
3963         for (m = 0; m < 3; ++m) {
3964             ir_value *vm = v->members[m];
3965             if (!vm)
3966                 continue;
3967             oprintf("%s\t%s: @%i ", ind, vm->name.c_str(), (int)vm->code.local);
3968             for (l = 0; l < vm->life.size(); ++l) {
3969                 oprintf("[%i,%i] ", vm->life[l].start, vm->life[l].end);
3970             }
3971             oprintf("\n");
3972         }
3973     }
3974     for (i = 0; i < f->values.size(); ++i) {
3975         const char *attr = "";
3976         size_t l, m;
3977         ir_value *v = f->values[i].get();
3978         if (v->unique_life && v->locked)
3979             attr = "unique,locked ";
3980         else if (v->unique_life)
3981             attr = "unique ";
3982         else if (v->locked)
3983             attr = "locked ";
3984         oprintf("%s\t%s: %s %s %s%s@%i ", ind, v->name.c_str(), type_name[v->vtype],
3985                 storenames[v->store],
3986                 attr, (v->callparam ? "callparam " : ""),
3987                 (int)v->code.local);
3988         if (v->life.empty())
3989             oprintf("[null]");
3990         for (l = 0; l < v->life.size(); ++l) {
3991             oprintf("[%i,%i] ", v->life[l].start, v->life[l].end);
3992         }
3993         oprintf("\n");
3994         for (m = 0; m < 3; ++m) {
3995             ir_value *vm = v->members[m];
3996             if (!vm)
3997                 continue;
3998             if (vm->unique_life && vm->locked)
3999                 attr = "unique,locked ";
4000             else if (vm->unique_life)
4001                 attr = "unique ";
4002             else if (vm->locked)
4003                 attr = "locked ";
4004             oprintf("%s\t%s: %s@%i ", ind, vm->name.c_str(), attr, (int)vm->code.local);
4005             for (l = 0; l < vm->life.size(); ++l) {
4006                 oprintf("[%i,%i] ", vm->life[l].start, vm->life[l].end);
4007             }
4008             oprintf("\n");
4009         }
4010     }
4011     if (f->blocks.size())
4012     {
4013         oprintf("%slife passes: %i\n", ind, (int)f->run_id);
4014         for (i = 0; i < f->blocks.size(); ++i) {
4015             ir_block_dump(f->blocks[i].get(), ind, oprintf);
4016         }
4017
4018     }
4019     ind[strlen(ind)-1] = 0;
4020     oprintf("%sendfunction %s\n", ind, f->name.c_str());
4021 }
4022
4023 void ir_block_dump(ir_block* b, char *ind,
4024                    int (*oprintf)(const char*, ...))
4025 {
4026     size_t i;
4027     oprintf("%s:%s\n", ind, b->label.c_str());
4028     util_strncat(ind, "\t", IND_BUFSZ-1);
4029
4030     if (b->instr && b->instr[0])
4031         oprintf("%s (%i) [entry]\n", ind, (int)(b->instr[0]->eid-1));
4032     for (i = 0; i < vec_size(b->instr); ++i)
4033         ir_instr_dump(b->instr[i], ind, oprintf);
4034     ind[strlen(ind)-1] = 0;
4035 }
4036
4037 static void dump_phi(ir_instr *in, int (*oprintf)(const char*, ...))
4038 {
4039     oprintf("%s <- phi ", in->_ops[0]->name.c_str());
4040     for (auto &it : in->phi) {
4041         oprintf("([%s] : %s) ", it.from->label.c_str(),
4042                                 it.value->name.c_str());
4043     }
4044     oprintf("\n");
4045 }
4046
4047 void ir_instr_dump(ir_instr *in, char *ind,
4048                        int (*oprintf)(const char*, ...))
4049 {
4050     size_t i;
4051     const char *comma = nullptr;
4052
4053     oprintf("%s (%i) ", ind, (int)in->eid);
4054
4055     if (in->opcode == VINSTR_PHI) {
4056         dump_phi(in, oprintf);
4057         return;
4058     }
4059
4060     util_strncat(ind, "\t", IND_BUFSZ-1);
4061
4062     if (in->_ops[0] && (in->_ops[1] || in->_ops[2])) {
4063         ir_value_dump(in->_ops[0], oprintf);
4064         oprintf(" <- ");
4066     }
4067     if (in->opcode == INSTR_CALL0 || in->opcode == VINSTR_NRCALL) {
4068         oprintf("CALL%i\t", (int)in->params.size());
4069     } else
4070         oprintf("%s\t", qc_opname(in->opcode));
4071
4072     if (in->_ops[0] && !(in->_ops[1] || in->_ops[2])) {
4073         ir_value_dump(in->_ops[0], oprintf);
4074         comma = ",\t";
4075     }
4076     else
4077     {
4078         for (i = 1; i != 3; ++i) {
4079             if (in->_ops[i]) {
4080                 if (comma)
4081                     oprintf(comma);
4082                 ir_value_dump(in->_ops[i], oprintf);
4083                 comma = ",\t";
4084             }
4085         }
4086     }
4087     if (in->bops[0]) {
4088         if (comma)
4089             oprintf(comma);
4090         oprintf("[%s]", in->bops[0]->label.c_str());
4091         comma = ",\t";
4092     }
4093     if (in->bops[1])
4094         oprintf("%s[%s]", comma, in->bops[1]->label.c_str());
4095     if (in->params.size()) {
4096         oprintf("\tparams: ");
4097         for (auto &it : in->params)
4098             oprintf("%s, ", it->name.c_str());
4099     }
4100     oprintf("\n");
4101     ind[strlen(ind)-1] = 0;
4102 }
4103
4104 static void ir_value_dump_string(const char *str, int (*oprintf)(const char*, ...))
4105 {
4106     oprintf("\"");
4107     for (; *str; ++str) {
4108         switch (*str) {
4109             case '\n': oprintf("\\n"); break;
4110             case '\r': oprintf("\\r"); break;
4111             case '\t': oprintf("\\t"); break;
4112             case '\v': oprintf("\\v"); break;
4113             case '\f': oprintf("\\f"); break;
4114             case '\b': oprintf("\\b"); break;
4115             case '\a': oprintf("\\a"); break;
4116             case '\\': oprintf("\\\\"); break;
4117             case '"': oprintf("\\\""); break;
4118             default: oprintf("%c", *str); break;
4119         }
4120     }
4121     oprintf("\"");
4122 }
4123
4124 void ir_value_dump(ir_value* v, int (*oprintf)(const char*, ...))
4125 {
4126     if (v->hasvalue) {
4127         switch (v->vtype) {
4128             default:
4129             case TYPE_VOID:
4130                 oprintf("(void)");
4131                 break;
4132             case TYPE_FUNCTION:
4133                 oprintf("fn:%s", v->name.c_str());
4134                 break;
4135             case TYPE_FLOAT:
4136                 oprintf("%g", v->constval.vfloat);
4137                 break;
4138             case TYPE_VECTOR:
4139                 oprintf("'%g %g %g'",
4140                         v->constval.vvec.x,
4141                         v->constval.vvec.y,
4142                         v->constval.vvec.z);
4143                 break;
4144             case TYPE_ENTITY:
4145                 oprintf("(entity)");
4146                 break;
4147             case TYPE_STRING:
4148                 ir_value_dump_string(v->constval.vstring, oprintf);
4149                 break;
4150 #if 0
4151             case TYPE_INTEGER:
4152                 oprintf("%i", v->constval.vint);
4153                 break;
4154 #endif
4155             case TYPE_POINTER:
4156                 oprintf("&%s",
4157                     v->constval.vpointer->name.c_str());
4158                 break;
4159         }
4160     } else {
4161         oprintf("%s", v->name.c_str());
4162     }
4163 }
4164
4165 void ir_value_dump_life(const ir_value *self, int (*oprintf)(const char*,...))
4166 {
4167     oprintf("Life of %12s:", self->name.c_str());
4168     for (size_t i = 0; i < self->life.size(); ++i)
4169     {
4170         oprintf(" + [%i, %i]\n", self->life[i].start, self->life[i].end);
4171     }
4172 }