//! Semantic analysis of ZIR instructions.
//! This file operates on a `Module` instance, transforming untyped ZIR
//! instructions into semantically-analyzed IR instructions. It does type
//! checking, comptime control flow, and safety-check generation. This is
//! the heart of the Zig compiler.
//! When deciding if something goes into this file or into Module, here is a
//! guiding principle: if it has to do with (untyped) ZIR instructions, it goes
//! here. If the analysis operates on typed IR instructions, it goes in Module.
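// A hedged sketch of the pipeline this file implements (the ZIR below is
// illustrative, not exact textual syntax): a source expression such as
// `a + b` arrives as an untyped ZIR `add` instruction; `analyzeInst` routes
// it to `zirArithmetic`, which resolves both operands, coerces them to a
// common type, and either computes a comptime constant or emits a typed
// runtime IR instruction.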
const std = @import("std");
const mem = std.mem;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const log = std.log.scoped(.sema);
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
const TypedValue = @import("TypedValue.zig");
const ir = @import("ir.zig");
const zir = @import("zir.zig");
const Module = @import("Module.zig");
const Inst = ir.Inst;
const Body = ir.Body;
const trace = @import("tracy.zig").trace;
const Scope = Module.Scope;
const InnerError = Module.InnerError;
const Decl = Module.Decl;
pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*Inst {
switch (old_inst.tag) {
.alloc => return zirAlloc(mod, scope, old_inst.castTag(.alloc).?),
.alloc_mut => return zirAllocMut(mod, scope, old_inst.castTag(.alloc_mut).?),
.alloc_inferred => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred).?, .inferred_alloc_const),
.alloc_inferred_mut => return zirAllocInferred(mod, scope, old_inst.castTag(.alloc_inferred_mut).?, .inferred_alloc_mut),
.arg => return zirArg(mod, scope, old_inst.castTag(.arg).?),
.bitcast_ref => return zirBitcastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
.bitcast_result_ptr => return zirBitcastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
.block => return zirBlock(mod, scope, old_inst.castTag(.block).?, false),
.block_comptime => return zirBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
.block_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
.block_comptime_flat => return zirBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
.@"break" => return zirBreak(mod, scope, old_inst.castTag(.@"break").?),
.breakpoint => return zirBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
.break_void => return zirBreakVoid(mod, scope, old_inst.castTag(.break_void).?),
.call => return zirCall(mod, scope, old_inst.castTag(.call).?),
.coerce_result_ptr => return zirCoerceResultPtr(mod, scope, old_inst.castTag(.coerce_result_ptr).?),
.compile_error => return zirCompileError(mod, scope, old_inst.castTag(.compile_error).?),
.compile_log => return zirCompileLog(mod, scope, old_inst.castTag(.compile_log).?),
.@"const" => return zirConst(mod, scope, old_inst.castTag(.@"const").?),
.dbg_stmt => return zirDbgStmt(mod, scope, old_inst.castTag(.dbg_stmt).?),
.decl_ref => return zirDeclRef(mod, scope, old_inst.castTag(.decl_ref).?),
.decl_ref_str => return zirDeclRefStr(mod, scope, old_inst.castTag(.decl_ref_str).?),
.decl_val => return zirDeclVal(mod, scope, old_inst.castTag(.decl_val).?),
.ensure_result_used => return zirEnsureResultUsed(mod, scope, old_inst.castTag(.ensure_result_used).?),
.ensure_result_non_error => return zirEnsureResultNonError(mod, scope, old_inst.castTag(.ensure_result_non_error).?),
.indexable_ptr_len => return zirIndexablePtrLen(mod, scope, old_inst.castTag(.indexable_ptr_len).?),
.ref => return zirRef(mod, scope, old_inst.castTag(.ref).?),
.resolve_inferred_alloc => return zirResolveInferredAlloc(mod, scope, old_inst.castTag(.resolve_inferred_alloc).?),
.ret_ptr => return zirRetPtr(mod, scope, old_inst.castTag(.ret_ptr).?),
.ret_type => return zirRetType(mod, scope, old_inst.castTag(.ret_type).?),
.store_to_block_ptr => return zirStoreToBlockPtr(mod, scope, old_inst.castTag(.store_to_block_ptr).?),
.store_to_inferred_ptr => return zirStoreToInferredPtr(mod, scope, old_inst.castTag(.store_to_inferred_ptr).?),
.single_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_const_ptr_type).?, false, .One),
.single_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.single_mut_ptr_type).?, true, .One),
.many_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_const_ptr_type).?, false, .Many),
.many_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.many_mut_ptr_type).?, true, .Many),
.c_const_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_const_ptr_type).?, false, .C),
.c_mut_ptr_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.c_mut_ptr_type).?, true, .C),
.const_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.const_slice_type).?, false, .Slice),
.mut_slice_type => return zirSimplePtrType(mod, scope, old_inst.castTag(.mut_slice_type).?, true, .Slice),
.ptr_type => return zirPtrType(mod, scope, old_inst.castTag(.ptr_type).?),
.store => return zirStore(mod, scope, old_inst.castTag(.store).?),
.set_eval_branch_quota => return zirSetEvalBranchQuota(mod, scope, old_inst.castTag(.set_eval_branch_quota).?),
.str => return zirStr(mod, scope, old_inst.castTag(.str).?),
.int => return zirInt(mod, scope, old_inst.castTag(.int).?),
.int_type => return zirIntType(mod, scope, old_inst.castTag(.int_type).?),
.loop => return zirLoop(mod, scope, old_inst.castTag(.loop).?),
.param_type => return zirParamType(mod, scope, old_inst.castTag(.param_type).?),
.ptrtoint => return zirPtrtoint(mod, scope, old_inst.castTag(.ptrtoint).?),
.field_ptr => return zirFieldPtr(mod, scope, old_inst.castTag(.field_ptr).?),
.field_val => return zirFieldVal(mod, scope, old_inst.castTag(.field_val).?),
.field_ptr_named => return zirFieldPtrNamed(mod, scope, old_inst.castTag(.field_ptr_named).?),
.field_val_named => return zirFieldValNamed(mod, scope, old_inst.castTag(.field_val_named).?),
.deref => return zirDeref(mod, scope, old_inst.castTag(.deref).?),
.as => return zirAs(mod, scope, old_inst.castTag(.as).?),
.@"asm" => return zirAsm(mod, scope, old_inst.castTag(.@"asm").?),
.unreachable_safe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_safe).?, true),
.unreachable_unsafe => return zirUnreachable(mod, scope, old_inst.castTag(.unreachable_unsafe).?, false),
.@"return" => return zirReturn(mod, scope, old_inst.castTag(.@"return").?),
.return_void => return zirReturnVoid(mod, scope, old_inst.castTag(.return_void).?),
.@"fn" => return zirFn(mod, scope, old_inst.castTag(.@"fn").?),
.@"export" => return zirExport(mod, scope, old_inst.castTag(.@"export").?),
.primitive => return zirPrimitive(mod, scope, old_inst.castTag(.primitive).?),
.fn_type => return zirFnType(mod, scope, old_inst.castTag(.fn_type).?, false),
.fn_type_cc => return zirFnTypeCc(mod, scope, old_inst.castTag(.fn_type_cc).?, false),
.fn_type_var_args => return zirFnType(mod, scope, old_inst.castTag(.fn_type_var_args).?, true),
.fn_type_cc_var_args => return zirFnTypeCc(mod, scope, old_inst.castTag(.fn_type_cc_var_args).?, true),
.intcast => return zirIntcast(mod, scope, old_inst.castTag(.intcast).?),
.bitcast => return zirBitcast(mod, scope, old_inst.castTag(.bitcast).?),
.floatcast => return zirFloatcast(mod, scope, old_inst.castTag(.floatcast).?),
.elem_ptr => return zirElemPtr(mod, scope, old_inst.castTag(.elem_ptr).?),
.elem_val => return zirElemVal(mod, scope, old_inst.castTag(.elem_val).?),
.add => return zirArithmetic(mod, scope, old_inst.castTag(.add).?),
.addwrap => return zirArithmetic(mod, scope, old_inst.castTag(.addwrap).?),
.sub => return zirArithmetic(mod, scope, old_inst.castTag(.sub).?),
.subwrap => return zirArithmetic(mod, scope, old_inst.castTag(.subwrap).?),
.mul => return zirArithmetic(mod, scope, old_inst.castTag(.mul).?),
.mulwrap => return zirArithmetic(mod, scope, old_inst.castTag(.mulwrap).?),
.div => return zirArithmetic(mod, scope, old_inst.castTag(.div).?),
.mod_rem => return zirArithmetic(mod, scope, old_inst.castTag(.mod_rem).?),
.array_cat => return zirArrayCat(mod, scope, old_inst.castTag(.array_cat).?),
.array_mul => return zirArrayMul(mod, scope, old_inst.castTag(.array_mul).?),
.bit_and => return zirBitwise(mod, scope, old_inst.castTag(.bit_and).?),
.bit_not => return zirBitNot(mod, scope, old_inst.castTag(.bit_not).?),
.bit_or => return zirBitwise(mod, scope, old_inst.castTag(.bit_or).?),
.xor => return zirBitwise(mod, scope, old_inst.castTag(.xor).?),
.shl => return zirShl(mod, scope, old_inst.castTag(.shl).?),
.shr => return zirShr(mod, scope, old_inst.castTag(.shr).?),
.cmp_lt => return zirCmp(mod, scope, old_inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return zirCmp(mod, scope, old_inst.castTag(.cmp_lte).?, .lte),
.cmp_eq => return zirCmp(mod, scope, old_inst.castTag(.cmp_eq).?, .eq),
.cmp_gte => return zirCmp(mod, scope, old_inst.castTag(.cmp_gte).?, .gte),
.cmp_gt => return zirCmp(mod, scope, old_inst.castTag(.cmp_gt).?, .gt),
.cmp_neq => return zirCmp(mod, scope, old_inst.castTag(.cmp_neq).?, .neq),
.condbr => return zirCondbr(mod, scope, old_inst.castTag(.condbr).?),
.is_null => return zirIsNull(mod, scope, old_inst.castTag(.is_null).?, false),
.is_non_null => return zirIsNull(mod, scope, old_inst.castTag(.is_non_null).?, true),
.is_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_null_ptr).?, false),
.is_non_null_ptr => return zirIsNullPtr(mod, scope, old_inst.castTag(.is_non_null_ptr).?, true),
.is_err => return zirIsErr(mod, scope, old_inst.castTag(.is_err).?),
.is_err_ptr => return zirIsErrPtr(mod, scope, old_inst.castTag(.is_err_ptr).?),
.bool_not => return zirBoolNot(mod, scope, old_inst.castTag(.bool_not).?),
.typeof => return zirTypeof(mod, scope, old_inst.castTag(.typeof).?),
.typeof_peer => return zirTypeofPeer(mod, scope, old_inst.castTag(.typeof_peer).?),
.optional_type => return zirOptionalType(mod, scope, old_inst.castTag(.optional_type).?),
.optional_type_from_ptr_elem => return zirOptionalTypeFromPtrElem(mod, scope, old_inst.castTag(.optional_type_from_ptr_elem).?),
.optional_payload_safe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_safe).?, true),
.optional_payload_unsafe => return zirOptionalPayload(mod, scope, old_inst.castTag(.optional_payload_unsafe).?, false),
.optional_payload_safe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_safe_ptr).?, true),
.optional_payload_unsafe_ptr => return zirOptionalPayloadPtr(mod, scope, old_inst.castTag(.optional_payload_unsafe_ptr).?, false),
.err_union_payload_safe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_safe).?, true),
.err_union_payload_unsafe => return zirErrUnionPayload(mod, scope, old_inst.castTag(.err_union_payload_unsafe).?, false),
.err_union_payload_safe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_safe_ptr).?, true),
.err_union_payload_unsafe_ptr => return zirErrUnionPayloadPtr(mod, scope, old_inst.castTag(.err_union_payload_unsafe_ptr).?, false),
.err_union_code => return zirErrUnionCode(mod, scope, old_inst.castTag(.err_union_code).?),
.err_union_code_ptr => return zirErrUnionCodePtr(mod, scope, old_inst.castTag(.err_union_code_ptr).?),
.ensure_err_payload_void => return zirEnsureErrPayloadVoid(mod, scope, old_inst.castTag(.ensure_err_payload_void).?),
.array_type => return zirArrayType(mod, scope, old_inst.castTag(.array_type).?),
.array_type_sentinel => return zirArrayTypeSentinel(mod, scope, old_inst.castTag(.array_type_sentinel).?),
.enum_literal => return zirEnumLiteral(mod, scope, old_inst.castTag(.enum_literal).?),
.merge_error_sets => return zirMergeErrorSets(mod, scope, old_inst.castTag(.merge_error_sets).?),
.error_union_type => return zirErrorUnionType(mod, scope, old_inst.castTag(.error_union_type).?),
.anyframe_type => return zirAnyframeType(mod, scope, old_inst.castTag(.anyframe_type).?),
.error_set => return zirErrorSet(mod, scope, old_inst.castTag(.error_set).?),
.error_value => return zirErrorValue(mod, scope, old_inst.castTag(.error_value).?),
.slice => return zirSlice(mod, scope, old_inst.castTag(.slice).?),
.slice_start => return zirSliceStart(mod, scope, old_inst.castTag(.slice_start).?),
.import => return zirImport(mod, scope, old_inst.castTag(.import).?),
.bool_and => return zirBoolOp(mod, scope, old_inst.castTag(.bool_and).?),
.bool_or => return zirBoolOp(mod, scope, old_inst.castTag(.bool_or).?),
.void_value => return mod.constVoid(scope, old_inst.src),
.switchbr => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr).?, false),
.switchbr_ref => return zirSwitchBr(mod, scope, old_inst.castTag(.switchbr_ref).?, true),
.switch_range => return zirSwitchRange(mod, scope, old_inst.castTag(.switch_range).?),
.@"await" => return zirAwait(mod, scope, old_inst.castTag(.@"await").?),
.nosuspend_await => return zirAwait(mod, scope, old_inst.castTag(.nosuspend_await).?),
.@"resume" => return zirResume(mod, scope, old_inst.castTag(.@"resume").?),
.@"suspend" => return zirSuspend(mod, scope, old_inst.castTag(.@"suspend").?),
.suspend_block => return zirSuspendBlock(mod, scope, old_inst.castTag(.suspend_block).?),
.container_field_named,
.container_field_typed,
.container_field,
.enum_type,
.union_type,
.struct_type,
=> return mod.fail(scope, old_inst.src, "TODO analyze container instructions", .{}),
}
}
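// The dispatch above follows one uniform pattern: `castTag` downcasts the
// generic `*zir.Inst` to the concrete payload struct for the matched tag, and
// `.?` asserts the downcast succeeded. A hypothetical new tag (`.example` is
// not a real instruction) would be wired in the same way:
//
//     .example => return zirExample(mod, scope, old_inst.castTag(.example).?),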
pub fn analyzeBody(mod: *Module, block: *Scope.Block, body: zir.Body) !void {
const tracy = trace(@src());
defer tracy.end();
for (body.instructions) |src_inst| {
const analyzed_inst = try analyzeInst(mod, &block.base, src_inst);
try block.inst_table.putNoClobber(src_inst, analyzed_inst);
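// A noreturn instruction ends the reachable portion of the body; any
// remaining ZIR instructions would be dead code, so analysis stops here.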
if (analyzed_inst.ty.zigTypeTag() == .NoReturn) {
break;
}
}
}
pub fn analyzeBodyValueAsType(
mod: *Module,
block_scope: *Scope.Block,
zir_result_inst: *zir.Inst,
body: zir.Body,
) !Type {
try analyzeBody(mod, block_scope, body);
const result_inst = block_scope.inst_table.get(zir_result_inst).?;
const val = try mod.resolveConstValue(&block_scope.base, result_inst);
return val.toType(block_scope.base.arena());
}
pub fn resolveInst(mod: *Module, scope: *Scope, zir_inst: *zir.Inst) InnerError!*Inst {
const block = scope.cast(Scope.Block).?;
return block.inst_table.get(zir_inst).?; // Instruction does not dominate all uses!
}
fn resolveConstString(mod: *Module, scope: *Scope, old_inst: *zir.Inst) ![]u8 {
const new_inst = try resolveInst(mod, scope, old_inst);
const wanted_type = Type.initTag(.const_slice_u8);
const coerced_inst = try mod.coerce(scope, wanted_type, new_inst);
const val = try mod.resolveConstValue(scope, coerced_inst);
return val.toAllocatedBytes(scope.arena());
}
fn resolveType(mod: *Module, scope: *Scope, old_inst: *zir.Inst) !Type {
const new_inst = try resolveInst(mod, scope, old_inst);
const wanted_type = Type.initTag(.@"type");
const coerced_inst = try mod.coerce(scope, wanted_type, new_inst);
const val = try mod.resolveConstValue(scope, coerced_inst);
return val.toType(scope.arena());
}
/// Appropriate to call when the coercion has already been done by result
/// location semantics. Asserts the value fits in the provided `Int` type.
/// Only supports `Int` types 64 bits or less.
fn resolveAlreadyCoercedInt(
mod: *Module,
scope: *Scope,
old_inst: *zir.Inst,
comptime Int: type,
) !Int {
comptime assert(@typeInfo(Int).Int.bits <= 64);
const new_inst = try resolveInst(mod, scope, old_inst);
const val = try mod.resolveConstValue(scope, new_inst);
switch (@typeInfo(Int).Int.signedness) {
.signed => return @intCast(Int, val.toSignedInt()),
.unsigned => return @intCast(Int, val.toUnsignedInt()),
}
}
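// For example, `zirSetEvalBranchQuota` below uses this helper to read its
// already-coerced operand as a `u32`.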
fn resolveInt(mod: *Module, scope: *Scope, old_inst: *zir.Inst, dest_type: Type) !u64 {
const new_inst = try resolveInst(mod, scope, old_inst);
const coerced = try mod.coerce(scope, dest_type, new_inst);
const val = try mod.resolveConstValue(scope, coerced);
return val.toUnsignedInt();
}
pub fn resolveInstConst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!TypedValue {
const new_inst = try resolveInst(mod, scope, old_inst);
const val = try mod.resolveConstValue(scope, new_inst);
return TypedValue{
.ty = new_inst.ty,
.val = val,
};
}
fn zirConst(mod: *Module, scope: *Scope, const_inst: *zir.Inst.Const) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// Move the TypedValue from old memory to new memory. This allows freeing the ZIR instructions
// after analysis.
const typed_value_copy = try const_inst.positionals.typed_value.copy(scope.arena());
return mod.constInst(scope, const_inst.base.src, typed_value_copy);
}
fn analyzeConstInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!TypedValue {
const new_inst = try analyzeInst(mod, scope, old_inst);
return TypedValue{
.ty = new_inst.ty,
.val = try mod.resolveConstValue(scope, new_inst),
};
}
fn zirBitcastRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastRef", .{});
}
fn zirBitcastResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inst.base.src, "TODO implement zir_sema.zirBitcastResultPtr", .{});
}
fn zirCoerceResultPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.fail(scope, inst.base.src, "TODO implement zirCoerceResultPtr", .{});
}
fn zirRetPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const ret_type = fn_ty.fnReturnType();
const ptr_type = try mod.simplePtrType(scope, inst.base.src, ret_type, true, .One);
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
fn zirRef(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
return mod.analyzeRef(scope, inst.base.src, operand);
}
fn zirRetType(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const ret_type = fn_ty.fnReturnType();
return mod.constType(scope, inst.base.src, ret_type);
}
fn zirEnsureResultUsed(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
switch (operand.ty.zigTypeTag()) {
.Void, .NoReturn => return mod.constVoid(scope, operand.src),
else => return mod.fail(scope, operand.src, "expression value is ignored", .{}),
}
}
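// For example, calling a function that returns a non-void value as a bare
// statement, `foo();`, reaches the `else` branch above and is rejected with
// "expression value is ignored".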
fn zirEnsureResultNonError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
switch (operand.ty.zigTypeTag()) {
.ErrorSet, .ErrorUnion => return mod.fail(scope, operand.src, "error is discarded", .{}),
else => return mod.constVoid(scope, operand.src),
}
}
fn zirIndexablePtrLen(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const array_ptr = try resolveInst(mod, scope, inst.positionals.operand);
const elem_ty = array_ptr.ty.elemType();
if (!elem_ty.isIndexable()) {
const msg = msg: {
const msg = try mod.errMsg(
scope,
inst.base.src,
"type '{}' does not support indexing",
.{elem_ty},
);
errdefer msg.destroy(mod.gpa);
try mod.errNote(
scope,
inst.base.src,
msg,
"for loop operand must be an array, slice, tuple, or vector",
.{},
);
break :msg msg;
};
return mod.failWithOwnedErrorMsg(scope, msg);
}
const result_ptr = try mod.namedFieldPtr(scope, inst.base.src, array_ptr, "len", inst.base.src);
return mod.analyzeDeref(scope, inst.base.src, result_ptr, result_ptr.src);
}
fn zirAlloc(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
const ptr_type = try mod.simplePtrType(scope, inst.base.src, var_type, true, .One);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
fn zirAllocMut(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const var_type = try resolveType(mod, scope, inst.positionals.operand);
try mod.validateVarType(scope, inst.base.src, var_type);
const ptr_type = try mod.simplePtrType(scope, inst.base.src, var_type, true, .One);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, ptr_type, .alloc);
}
fn zirAllocInferred(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.NoOp,
mut_tag: Type.Tag,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const val_payload = try scope.arena().create(Value.Payload.InferredAlloc);
val_payload.* = .{
.data = .{},
};
// `Module.constInst` does not add the instruction to the block because it is
// not needed in the case of constant values. However here, we plan to "downgrade"
// to a normal instruction when we hit `resolve_inferred_alloc`. So we append
// to the block even though it is currently a `.constant`.
const result = try mod.constInst(scope, inst.base.src, .{
.ty = switch (mut_tag) {
.inferred_alloc_const => Type.initTag(.inferred_alloc_const),
.inferred_alloc_mut => Type.initTag(.inferred_alloc_mut),
else => unreachable,
},
.val = Value.initPayload(&val_payload.base),
});
const block = try mod.requireFunctionBlock(scope, inst.base.src);
try block.instructions.append(mod.gpa, result);
return result;
}
fn zirResolveInferredAlloc(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.operand);
const ptr_val = ptr.castTag(.constant).?.val;
const inferred_alloc = ptr_val.castTag(.inferred_alloc).?;
const peer_inst_list = inferred_alloc.data.stored_inst_list.items;
const final_elem_ty = try mod.resolvePeerTypes(scope, peer_inst_list);
const var_is_mut = switch (ptr.ty.tag()) {
.inferred_alloc_const => false,
.inferred_alloc_mut => true,
else => unreachable,
};
if (var_is_mut) {
try mod.validateVarType(scope, inst.base.src, final_elem_ty);
}
const final_ptr_ty = try mod.simplePtrType(scope, inst.base.src, final_elem_ty, true, .One);
// Change it to a normal alloc.
ptr.ty = final_ptr_ty;
ptr.tag = .alloc;
return mod.constVoid(scope, inst.base.src);
}
fn zirStoreToBlockPtr(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.BinOp,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
// TODO detect when this store should be done at compile-time. For example,
// if expressions should force it when the condition is compile-time known.
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
}
fn zirStoreToInferredPtr(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.BinOp,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
const inferred_alloc = ptr.castTag(.constant).?.val.castTag(.inferred_alloc).?;
// Add the stored instruction to the set we will use to resolve peer types
// for the inferred allocation.
try inferred_alloc.data.stored_inst_list.append(scope.arena(), value);
// Create a runtime bitcast instruction with exactly the type the pointer wants.
const ptr_ty = try mod.simplePtrType(scope, inst.base.src, value.ty, true, .One);
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
const bitcasted_ptr = try mod.addUnOp(b, inst.base.src, ptr_ty, .bitcast, ptr);
return mod.storePtr(scope, inst.base.src, bitcasted_ptr, value);
}
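// A sketch of the inferred-allocation lifecycle for a declaration such as
// `var x = foo();` (illustrative ZIR, not exact textual syntax):
//
//     %0 = alloc_inferred_mut               ; constant holding an InferredAlloc payload
//     %1 = call ...                         ; the initializer expression
//     %2 = store_to_inferred_ptr(%0, %1)    ; appends %1 to stored_inst_list
//     %3 = resolve_inferred_alloc(%0)       ; peer-resolves the stored types and
//                                           ; rewrites %0 in place into an `alloc`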
fn zirSetEvalBranchQuota(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.UnOp,
) InnerError!*Inst {
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const quota = try resolveAlreadyCoercedInt(mod, scope, inst.positionals.operand, u32);
if (b.branch_quota.* < quota)
b.branch_quota.* = quota;
return mod.constVoid(scope, inst.base.src);
}
fn zirStore(mod: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const ptr = try resolveInst(mod, scope, inst.positionals.lhs);
const value = try resolveInst(mod, scope, inst.positionals.rhs);
return mod.storePtr(scope, inst.base.src, ptr, value);
}
fn zirParamType(mod: *Module, scope: *Scope, inst: *zir.Inst.ParamType) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const fn_inst = try resolveInst(mod, scope, inst.positionals.func);
const arg_index = inst.positionals.arg_index;
const fn_ty: Type = switch (fn_inst.ty.zigTypeTag()) {
.Fn => fn_inst.ty,
.BoundFn => {
return mod.fail(scope, fn_inst.src, "TODO implement zirParamType for method call syntax", .{});
},
else => {
return mod.fail(scope, fn_inst.src, "expected function, found '{}'", .{fn_inst.ty});
},
};
const param_count = fn_ty.fnParamLen();
if (arg_index >= param_count) {
if (fn_ty.fnIsVarArgs()) {
return mod.constType(scope, inst.base.src, Type.initTag(.var_args_param));
}
return mod.fail(scope, inst.base.src, "arg index {d} out of bounds; '{}' has {d} argument(s)", .{
arg_index,
fn_ty,
param_count,
});
}
// TODO support generic functions
const param_type = fn_ty.fnParamType(arg_index);
return mod.constType(scope, inst.base.src, param_type);
}
fn zirStr(mod: *Module, scope: *Scope, str_inst: *zir.Inst.Str) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
// The bytes reference memory inside the ZIR module, which can be deallocated
// after semantic analysis completes. The memory therefore needs to live in the new anonymous Decl's arena.
var new_decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
errdefer new_decl_arena.deinit();
const arena_bytes = try new_decl_arena.allocator.dupe(u8, str_inst.positionals.bytes);
const decl_ty = try Type.Tag.array_u8_sentinel_0.create(&new_decl_arena.allocator, arena_bytes.len);
const decl_val = try Value.Tag.bytes.create(&new_decl_arena.allocator, arena_bytes);
const new_decl = try mod.createAnonymousDecl(scope, &new_decl_arena, .{
.ty = decl_ty,
.val = decl_val,
});
return mod.analyzeDeclRef(scope, str_inst.base.src, new_decl);
}
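// A string literal therefore becomes an anonymous Decl whose type is
// `[N:0]u8` (via `array_u8_sentinel_0`) and whose bytes live in that Decl's
// own arena; the result of the instruction is a reference to that Decl.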
fn zirInt(mod: *Module, scope: *Scope, inst: *zir.Inst.Int) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.constIntBig(scope, inst.base.src, Type.initTag(.comptime_int), inst.positionals.int);
}
fn zirExport(mod: *Module, scope: *Scope, export_inst: *zir.Inst.Export) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const symbol_name = try resolveConstString(mod, scope, export_inst.positionals.symbol_name);
const exported_decl = mod.lookupDeclName(scope, export_inst.positionals.decl_name) orelse
return mod.fail(scope, export_inst.base.src, "decl '{s}' not found", .{export_inst.positionals.decl_name});
try mod.analyzeExport(scope, export_inst.base.src, symbol_name, exported_decl);
return mod.constVoid(scope, export_inst.base.src);
}
fn zirCompileError(mod: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const msg = try resolveConstString(mod, scope, inst.positionals.operand);
return mod.fail(scope, inst.base.src, "{s}", .{msg});
}
fn zirCompileLog(mod: *Module, scope: *Scope, inst: *zir.Inst.CompileLog) InnerError!*Inst {
var managed = mod.compile_log_text.toManaged(mod.gpa);
defer mod.compile_log_text = managed.moveToUnmanaged();
const writer = managed.writer();
for (inst.positionals.to_log) |arg_inst, i| {
if (i != 0) try writer.print(", ", .{});
const arg = try resolveInst(mod, scope, arg_inst);
if (arg.value()) |val| {
try writer.print("@as({}, {})", .{ arg.ty, val });
} else {
try writer.print("@as({}, [runtime value])", .{arg.ty});
}
}
try writer.print("\n", .{});
const gop = try mod.compile_log_decls.getOrPut(mod.gpa, scope.ownerDecl().?);
if (!gop.found_existing) {
gop.entry.value = .{
.file_scope = scope.getFileScope(),
.byte_offset = inst.base.src,
};
}
return mod.constVoid(scope, inst.base.src);
}
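// Given the format strings above, a call like `@compileLog(x, 42)` (with `x`
// a runtime `u32`) would append a line of the form:
//
//     @as(u32, [runtime value]), @as(comptime_int, 42)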
fn zirArg(mod: *Module, scope: *Scope, inst: *zir.Inst.Arg) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
if (b.inlining) |inlining| {
const param_index = inlining.param_index;
inlining.param_index += 1;
return inlining.casted_args[param_index];
}
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
const param_index = b.instructions.items.len;
const param_count = fn_ty.fnParamLen();
if (param_index >= param_count) {
return mod.fail(scope, inst.base.src, "parameter index {d} outside list of length {d}", .{
param_index,
param_count,
});
}
const param_type = fn_ty.fnParamType(param_index);
const name = try scope.arena().dupeZ(u8, inst.positionals.name);
return mod.addArg(b, inst.base.src, param_type, name);
}
fn zirLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
// Reserve space for a Loop instruction so that generated Break instructions can
// point to it, even if it ends up unused because the code is comptime-evaluated.
const loop_inst = try parent_block.arena.create(Inst.Loop);
loop_inst.* = .{
.base = .{
.tag = Inst.Loop.base_tag,
.ty = Type.initTag(.noreturn),
.src = inst.base.src,
},
.body = undefined,
};
var child_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.owner_decl = parent_block.owner_decl,
.src_decl = parent_block.src_decl,
.instructions = .{},
.arena = parent_block.arena,
.inlining = parent_block.inlining,
.is_comptime = parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
defer child_block.instructions.deinit(mod.gpa);
try analyzeBody(mod, &child_block, inst.positionals.body);
// Loop repetition is implied, so the last instruction may or may not be a noreturn instruction.
try parent_block.instructions.append(mod.gpa, &loop_inst.base);
loop_inst.body = .{ .instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items) };
return &loop_inst.base;
}
fn zirBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
var child_block = parent_block.makeSubBlock();
defer child_block.instructions.deinit(mod.gpa);
child_block.is_comptime = child_block.is_comptime or is_comptime;
try analyzeBody(mod, &child_block, inst.positionals.body);
// Move the analyzed instructions into the parent block arena.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
// The result of a flat block is the last instruction.
const zir_inst_list = inst.positionals.body.instructions;
const last_zir_inst = zir_inst_list[zir_inst_list.len - 1];
return resolveInst(mod, scope, last_zir_inst);
}
fn zirBlock(
mod: *Module,
scope: *Scope,
inst: *zir.Inst.Block,
is_comptime: bool,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
// Reserve space for a Block instruction so that generated Break instructions can
// point to it, even if it ends up unused because the code is comptime-evaluated.
const block_inst = try parent_block.arena.create(Inst.Block);
block_inst.* = .{
.base = .{
.tag = Inst.Block.base_tag,
.ty = undefined, // Set after analysis.
.src = inst.base.src,
},
.body = undefined,
};
var child_block: Scope.Block = .{
.parent = parent_block,
.inst_table = parent_block.inst_table,
.func = parent_block.func,
.owner_decl = parent_block.owner_decl,
.src_decl = parent_block.src_decl,
.instructions = .{},
.arena = parent_block.arena,
// TODO @as here is working around a stage1 miscompilation bug :(
.label = @as(?Scope.Block.Label, Scope.Block.Label{
.zir_block = inst,
.merges = .{
.results = .{},
.br_list = .{},
.block_inst = block_inst,
},
}),
.inlining = parent_block.inlining,
.is_comptime = is_comptime or parent_block.is_comptime,
.branch_quota = parent_block.branch_quota,
};
const merges = &child_block.label.?.merges;
defer child_block.instructions.deinit(mod.gpa);
defer merges.results.deinit(mod.gpa);
defer merges.br_list.deinit(mod.gpa);
try analyzeBody(mod, &child_block, inst.positionals.body);
return analyzeBlockBody(mod, scope, &child_block, merges);
}
fn analyzeBlockBody(
mod: *Module,
scope: *Scope,
child_block: *Scope.Block,
merges: *Scope.Block.Merges,
) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const parent_block = scope.cast(Scope.Block).?;
// Blocks must terminate with a noreturn instruction.
assert(child_block.instructions.items.len != 0);
assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn());
if (merges.results.items.len == 0) {
// No need for a block instruction. We can put the new instructions
// directly into the parent block.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return copied_instructions[copied_instructions.len - 1];
}
if (merges.results.items.len == 1) {
const last_inst_index = child_block.instructions.items.len - 1;
const last_inst = child_block.instructions.items[last_inst_index];
if (last_inst.breakBlock()) |br_block| {
if (br_block == merges.block_inst) {
// No need for a block instruction. We can put the new instructions directly
// into the parent block. Here we omit the break instruction.
const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
return merges.results.items[0];
}
}
}
// It is impossible for the number of results to be greater than 1 in a comptime scope.
assert(!child_block.is_comptime); // Should have already gotten a compile error in the condbr condition.
// Need to set the type and emit the Block instruction. This allows machine code generation
// to emit a jump instruction to after the block when it encounters the break.
try parent_block.instructions.append(mod.gpa, &merges.block_inst.base);
const resolved_ty = try mod.resolvePeerTypes(scope, merges.results.items);
merges.block_inst.base.ty = resolved_ty;
merges.block_inst.body = .{
.instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items),
};
// Now that the block has its type resolved, we need to go back into all the break
// instructions, and insert type coercion on the operands.
for (merges.br_list.items) |br| {
if (br.operand.ty.eql(resolved_ty)) {
// No type coercion needed.
continue;
}
var coerce_block = parent_block.makeSubBlock();
defer coerce_block.instructions.deinit(mod.gpa);
const coerced_operand = try mod.coerce(&coerce_block.base, resolved_ty, br.operand);
// If no instructions were produced, such as in the case of a coercion of a
// constant value to a new type, we can simply point the br operand to it.
if (coerce_block.instructions.items.len == 0) {
br.operand = coerced_operand;
continue;
}
assert(coerce_block.instructions.items[coerce_block.instructions.items.len - 1] == coerced_operand);
// Here we depend on the br instruction having been over-allocated (if necessary)
// inside analyzeBreak so that it can be converted into a br_block_flat instruction.
const br_src = br.base.src;
const br_ty = br.base.ty;
const br_block_flat = @ptrCast(*Inst.BrBlockFlat, br);
br_block_flat.* = .{
.base = .{
.src = br_src,
.ty = br_ty,
.tag = .br_block_flat,
},
.block = merges.block_inst,
.body = .{
.instructions = try parent_block.arena.dupe(*Inst, coerce_block.instructions.items),
},
};
}
return &merges.block_inst.base;
}
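// Why the in-place conversion above is safe: `analyzeBreak` (below) allocates
// every `br` with `Inst.convertable_br_align` and `Inst.convertable_br_size`,
// which are presumably sized to cover both `Inst.Br` and `Inst.BrBlockFlat`,
// so a `br` can be reinterpreted as the larger instruction without moving it.
// A minimal sketch of the shape change:
//
//     br %block, %operand
//         =>
//     br_block_flat %block, { ...coercion body..., %coerced_operand }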
fn zirBreakpoint(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const b = try mod.requireRuntimeBlock(scope, inst.base.src);
return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .breakpoint);
}
fn zirBreak(mod: *Module, scope: *Scope, inst: *zir.Inst.Break) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const operand = try resolveInst(mod, scope, inst.positionals.operand);
const block = inst.positionals.block;
return analyzeBreak(mod, scope, inst.base.src, block, operand);
}
fn zirBreakVoid(mod: *Module, scope: *Scope, inst: *zir.Inst.BreakVoid) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const block = inst.positionals.block;
const void_inst = try mod.constVoid(scope, inst.base.src);
return analyzeBreak(mod, scope, inst.base.src, block, void_inst);
}
fn analyzeBreak(
mod: *Module,
scope: *Scope,
src: usize,
zir_block: *zir.Inst.Block,
operand: *Inst,
) InnerError!*Inst {
var opt_block = scope.cast(Scope.Block);
while (opt_block) |block| {
if (block.label) |*label| {
if (label.zir_block == zir_block) {
const b = try mod.requireFunctionBlock(scope, src);
// Here we add a br instruction, but we over-allocate a little bit
// (if necessary) to make it possible to convert the instruction into
// a br_block_flat instruction later.
const br = @ptrCast(*Inst.Br, try b.arena.alignedAlloc(
u8,
Inst.convertable_br_align,
Inst.convertable_br_size,
));
br.* = .{
.base = .{
.tag = .br,
.ty = Type.initTag(.noreturn),
.src = src,
},
.operand = operand,
.block = label.merges.block_inst,
};
try b.instructions.append(mod.gpa, &br.base);
try label.merges.results.append(mod.gpa, operand);
try label.merges.br_list.append(mod.gpa, br);
return &br.base;
}
}
opt_block = block.parent;
} else unreachable;
}
fn zirDbgStmt(mod: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
if (scope.cast(Scope.Block)) |b| {
if (!b.is_comptime) {
return mod.addNoOp(b, inst.base.src, Type.initTag(.void), .dbg_stmt);
}
}
return mod.constVoid(scope, inst.base.src);
}
fn zirDeclRefStr(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRefStr) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const decl_name = try resolveConstString(mod, scope, inst.positionals.name);
return mod.analyzeDeclRefByName(scope, inst.base.src, decl_name);
}
fn zirDeclRef(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclRef) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclRef(scope, inst.base.src, inst.positionals.decl);
}
fn zirDeclVal(mod: *Module, scope: *Scope, inst: *zir.Inst.DeclVal) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
return mod.analyzeDeclVal(scope, inst.base.src, inst.positionals.decl);
}
fn zirCall(mod: *Module, scope: *Scope, inst: *zir.Inst.Call) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const func = try resolveInst(mod, scope, inst.positionals.func);
if (func.ty.zigTypeTag() != .Fn)
return mod.fail(scope, inst.positionals.func.src, "type '{}' not a function", .{func.ty});
const cc = func.ty.fnCallingConvention();
if (cc == .Naked) {
// TODO add error note: declared here
return mod.fail(
scope,
inst.positionals.func.src,
"unable to call function with naked calling convention",
.{},
);
}
const call_params_len = inst.positionals.args.len;
const fn_params_len = func.ty.fnParamLen();
if (func.ty.fnIsVarArgs()) {
assert(cc == .C);
if (call_params_len < fn_params_len) {
// TODO add error note: declared here
return mod.fail(
scope,
inst.positionals.func.src,
"expected at least {d} argument(s), found {d}",
.{ fn_params_len, call_params_len },
);
}
} else if (fn_params_len != call_params_len) {
// TODO add error note: declared here
return mod.fail(
scope,
inst.positionals.func.src,
"expected {d} argument(s), found {d}",
.{ fn_params_len, call_params_len },
);
}
if (inst.positionals.modifier == .compile_time) {
return mod.fail(scope, inst.base.src, "TODO implement comptime function calls", .{});
}
if (inst.positionals.modifier != .auto) {
return mod.fail(scope, inst.base.src, "TODO implement call with modifier {}", .{inst.positionals.modifier});
}
// TODO handle function calls of generic functions
const casted_args = try scope.arena().alloc(*Inst, call_params_len);
for (inst.positionals.args) |src_arg, i| {
// The args are already cast to the result of a param type instruction.
casted_args[i] = try resolveInst(mod, scope, src_arg);
}
const ret_type = func.ty.fnReturnType();
const b = try mod.requireFunctionBlock(scope, inst.base.src);
const is_comptime_call = b.is_comptime or inst.positionals.modifier == .compile_time;
const is_inline_call = is_comptime_call or inst.positionals.modifier == .always_inline or
func.ty.fnCallingConvention() == .Inline;
if (is_inline_call) {
const func_val = try mod.resolveConstValue(scope, func);
const module_fn = switch (func_val.tag()) {
.function => func_val.castTag(.function).?.data,
.extern_fn => return mod.fail(scope, inst.base.src, "{s} call of extern function", .{
@as([]const u8, if (is_comptime_call) "comptime" else "inline"),
}),
else => unreachable,