genzig.zig
2487 lines (2296 loc) · 112 KB
const std = @import("std");
const ArrayList = std.ArrayList;
const StringHashMap = std.StringHashMap;
const json = std.json;
const StringPool = @import("stringpool.zig").StringPool;
const path_sep = std.fs.path.sep_str;
const cameltosnake = @import("cameltosnake.zig");
const common = @import("common.zig");
const Nothing = common.Nothing;
const jsonPanic = common.jsonPanic;
const jsonPanicMsg = common.jsonPanicMsg;
const jsonEnforce = common.jsonEnforce;
const jsonEnforceMsg = common.jsonEnforceMsg;
const fmtJson = common.fmtJson;
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
const allocator = &arena.allocator;
const autogen_header = "//! NOTE: this file is autogenerated, DO NOT MODIFY\n";
var global_symbol_pool = StringPool.init(allocator);
var global_symbol_none : StringPool.Val = undefined;
var global_symbol_None : StringPool.Val = undefined;
var global_pass1: json.ObjectMap = undefined;
var global_notnull: json.ObjectMap = undefined;
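// global_pass1 and global_notnull hold the parsed contents of pass1.json and notnull.json,
// both loaded in main2 via readJson below.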
const ValueType = enum {
Byte,
UInt16,
Int32,
UInt32,
Int64,
UInt64,
Single,
Double,
String,
PropertyKey,
};
const global_value_type_map = std.ComptimeStringMap(ValueType, .{
.{ "Byte", ValueType.Byte },
.{ "UInt16", ValueType.UInt16 },
.{ "Int32", ValueType.Int32 },
.{ "UInt32", ValueType.UInt32 },
.{ "Int64", ValueType.Int64 },
.{ "UInt64", ValueType.UInt64 },
.{ "Single", ValueType.Single },
.{ "Double", ValueType.Double },
.{ "String", ValueType.String },
.{ "PropertyKey", ValueType.PropertyKey },
});
fn valueTypeToZigType(t: ValueType) []const u8 {
return switch (t) {
.Byte => "u8",
.UInt16 => "u16",
.Int32 => "i32",
.UInt32 => "u32",
.Int64 => "i64",
.UInt64 => "u64",
.Single => "f32",
.Double => "f64",
.String => "[]const u8",
.PropertyKey => @panic("cannot call valueTypeToZigType for ValueType.PropertyKey"),
};
}
const Pass1TypeKindCategory = enum { default, ptr, com };
const pass1_type_kind_info = std.ComptimeStringMap(Pass1TypeKindCategory, .{
.{ "Integral", .default },
.{ "Enum", .default },
.{ "Struct", .default },
.{ "Union", .default },
.{ "Com", .com },
.{ "Pointer", .ptr },
.{ "FunctionPointer", .ptr },
});
const NativeType = enum {
Boolean,
SByte,
Byte,
Int16,
UInt16,
Int32,
UInt32,
Int64,
UInt64,
Char,
Single,
Double,
String,
IntPtr,
UIntPtr,
Guid,
};
const global_native_type_map = std.ComptimeStringMap(NativeType, .{
.{ "Boolean", NativeType.Boolean },
.{ "SByte", NativeType.SByte },
.{ "Byte", NativeType.Byte },
.{ "Int16", NativeType.Int16 },
.{ "UInt16", NativeType.UInt16 },
.{ "Int32", NativeType.Int32 },
.{ "UInt32", NativeType.UInt32 },
.{ "Int64", NativeType.Int64 },
.{ "UInt64", NativeType.UInt64 },
.{ "Char", NativeType.Char },
.{ "Single", NativeType.Single },
.{ "Double", NativeType.Double },
.{ "String", NativeType.String },
.{ "IntPtr", NativeType.IntPtr },
.{ "UIntPtr", NativeType.UIntPtr },
.{ "Guid", NativeType.Guid },
});
fn nativeTypeToZigType(t: NativeType) []const u8 {
return switch (t) {
.Boolean => "bool",
.SByte => "i8",
.Byte => "u8",
.Int16 => "i16",
.UInt16 => "u16",
.Int32 => "i32",
.UInt32 => "u32",
.Int64 => "i64",
.UInt64 => "u64",
.Char => "u16",
.Single => "f32",
.Double => "f64",
.String => "[]const u8",
.IntPtr => "isize",
.UIntPtr => "usize",
.Guid => @panic("cannot call nativeTypeToZigType for NativeType.Guid"),
};
}
const TargetKind = enum {
Default,
FunctionPointer,
Com,
};
const target_kind_map = std.ComptimeStringMap(TargetKind, .{
.{ "Default", TargetKind.Default },
.{ "FunctionPointer", TargetKind.FunctionPointer },
.{ "Com", TargetKind.Com },
});
const Module = struct {
optional_parent: ?*Module,
name: StringPool.Val,
zig_basename: []const u8,
children: StringPool.HashMap(*Module),
file: ?SdkFile,
pub fn alloc(optional_parent: ?*Module, name: StringPool.Val) !*Module {
var module = try allocator.create(Module);
module.* = Module {
.optional_parent = optional_parent,
.name = name,
.zig_basename = try std.mem.concat(allocator, u8, &[_][]const u8 { name.slice, ".zig" }),
.children = StringPool.HashMap(*Module).init(allocator),
.file = null,
};
return module;
}
};
const import_prefix_table = &[_][]const u8 {
"",
"../",
"../../",
"../../../",
"../../../../",
};
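// A module's nesting depth below the win32 directory indexes this table to build relative
// import prefixes: a module directly inside win32/ has depth 0 and needs no prefix, while a
// module one directory deeper uses "../" (see SdkFile.getWin32DirImportPrefix below).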
const ApiImport = struct {
arches: ArchFlags,
api: StringPool.Val,
};
// TODO: this is defined in std, maybe it should be pub?
var failAllocator = std.mem.Allocator{
.allocFn = failAllocatorAlloc,
.resizeFn = std.mem.Allocator.noResize,
};
fn failAllocatorAlloc(self: *std.mem.Allocator, n: usize, alignment: u29, len_align: u29, ra: usize) std.mem.Allocator.Error![]u8 {
_ = self;
_ = n;
_ = alignment;
_ = len_align;
_ = ra;
return error.OutOfMemory;
}
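// An ObjectMap that can never allocate (it is backed by failAllocator above); used later as
// the default "no entries" value when an api has no notnull.json data.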
const empty_json_object_map = json.ObjectMap {
.ctx = .{ },
.allocator = &failAllocator,
.unmanaged = .{ },
};
const SdkFile = struct {
json_basename: []const u8,
json_name: []const u8,
zig_name: []const u8,
depth: u2,
const_exports: ArrayList(StringPool.Val),
uses_guid: bool,
top_level_api_imports: StringPool.HashMap(ApiImport),
type_exports: StringPool.HashMap(Nothing),
func_exports: StringPool.HashMap(Nothing),
// this field is only needed to workaround: https://github.com/ziglang/zig/issues/4476
tmp_func_ptr_workaround_list: ArrayList(StringPool.Val),
param_names_to_avoid_map_get_fn: *const fn(s: []const u8) ?Nothing,
not_null_funcs: json.ObjectMap,
not_null_funcs_applied: StringPool.HashMap(Nothing),
pub fn getWin32DirImportPrefix(self: SdkFile) []const u8 {
return import_prefix_table[self.depth];
}
pub fn getSrcDirImportPrefix(self: SdkFile) []const u8 {
return import_prefix_table[self.depth + 1];
}
pub fn addApiImport(self: *SdkFile, arches: ArchFlags, name: []const u8, api: []const u8, parents: json.Array) !void {
if (!std.mem.eql(u8, self.json_name, api)) {
const top_level_symbol = try global_symbol_pool.add(
if (parents.items.len == 0) name else parents.items[0].String);
const pool_api = try global_symbol_pool.add(api);
if (self.top_level_api_imports.getPtr(top_level_symbol)) |import| {
jsonEnforceMsg(pool_api.eql(import.api), "symbol conflict '{s}', api mismatch '{s}' and '{s}'", .{name, pool_api, import.api});
//jsonEnforceMsg(other.parents.len == 0, "symbol conflict '{s}', parents mismatch", .{name});
const new_flags = import.arches.flags | arches.flags;
if (new_flags != import.arches.flags) {
import.arches.flags = new_flags;
}
} else {
try self.top_level_api_imports.put(top_level_symbol, .{ .arches = arches, .api = pool_api } );
}
}
}
};
const notnull_filename: []const u8 = "notnull.json";
const Times = struct {
parse_time_millis : i64 = 0,
read_time_millis : i64 = 0,
generate_time_millis : i64 = 0,
};
var global_times = Times {};
pub fn main() !u8 {
return main2() catch |e| switch (e) {
error.AlreadyReported => return 0xff,
else => return e,
};
}
fn main2() !u8 {
const main_start_millis = std.time.milliTimestamp();
var print_time_summary = false;
defer {
if (print_time_summary) {
var total_millis = std.time.milliTimestamp() - main_start_millis;
if (total_millis == 0) total_millis = 1; // prevent divide by 0
std.debug.warn("Parse Time: {} millis ({}%)\n", .{global_times.parse_time_millis, @divTrunc(100 * global_times.parse_time_millis, total_millis)});
std.debug.warn("Read Time : {} millis ({}%)\n", .{global_times.read_time_millis , @divTrunc(100 * global_times.read_time_millis, total_millis)});
std.debug.warn("Gen Time : {} millis ({}%)\n", .{global_times.generate_time_millis , @divTrunc(100 * global_times.generate_time_millis, total_millis)});
std.debug.warn("Total Time: {} millis\n", .{total_millis});
}
}
global_symbol_none = try global_symbol_pool.add("none");
global_symbol_None = try global_symbol_pool.add("None");
var win32json_dir = try common.openWin32JsonDir(std.fs.cwd());
defer win32json_dir.close();
const cwd = std.fs.cwd();
const zigwin32_dir_name = "zigwin32";
cwd.access(zigwin32_dir_name, .{}) catch |e| switch (e) {
error.FileNotFound => {
std.debug.warn("Error: repository '{s}' does not exist, clone it with:\n", .{zigwin32_dir_name});
std.debug.warn(" git clone https://github.com/marlersoft/zigwin32 {s}" ++ path_sep ++ zigwin32_dir_name ++ "\n", .{
try common.getcwd(allocator)
});
return error.AlreadyReported;
},
else => return e,
};
global_pass1 = try readJson("pass1.json");
global_notnull = try readJson(notnull_filename);
var out_dir = try cwd.openDir(zigwin32_dir_name, .{});
defer out_dir.close();
out_dir.deleteFile("win32.zig") catch |e| switch (e) {
error.FileNotFound => {},
else => return e,
};
try cleanDir(out_dir, "win32");
var out_win32_dir = try out_dir.openDir("win32", .{});
defer out_win32_dir.close();
const src_modules = &[_][]const u8 {
"zig",
"missing",
"windowlongptr",
};
const root_module = try Module.alloc(null, try global_symbol_pool.add("win32"));
{
var api_dir = try win32json_dir.openDir("api", .{.iterate = true}) ;
defer api_dir.close();
var api_list = std.ArrayList([]const u8).init(allocator);
defer {
for (api_list.items) |api_name| {
allocator.free(api_name);
}
api_list.deinit();
}
try common.readApiList(api_dir, &api_list);
// sort the list of APIs so our api order is not dependent on the file-system ordering
std.sort.sort([]const u8, api_list.items, Nothing {}, common.asciiLessThanIgnoreCase);
std.debug.warn("-----------------------------------------------------------------------\n", .{});
std.debug.warn("loading {} api json files...\n", .{api_list.items.len});
for (api_list.items) |api_json_basename, api_index| {
const api_num = api_index + 1;
std.debug.warn("{}/{}: loading '{s}'\n", .{api_num, api_list.items.len, api_json_basename});
//
// TODO: would things run faster if I just memory mapped the file?
//
var file = try api_dir.openFile(api_json_basename, .{});
defer file.close();
try readAndGenerateApiFile(root_module, out_win32_dir, api_json_basename, file);
}
for (src_modules ++ &[_][]const u8 {
"everything",
}) |submodule_str| {
const submodule = try global_symbol_pool.add(submodule_str);
try root_module.children.put(submodule, try Module.alloc(root_module, submodule));
}
try generateContainerModules(out_dir, root_module);
try generateEverythingModule(out_win32_dir, root_module);
}
// copy zig.zig, missing.zig and windowlongptr.zig modules
{
var src_dir = try cwd.openDir("src", .{});
defer src_dir.close();
inline for (src_modules) |mod| {
try src_dir.copyFile(mod ++ ".zig", out_win32_dir, mod ++ ".zig", .{});
}
}
print_time_summary = true;
return 0;
}
fn readJson(filename: []const u8) !std.json.ObjectMap {
const content = blk: {
const file = try std.fs.cwd().openFile(filename, .{});
defer file.close();
break :blk try file.readToEndAlloc(allocator, std.math.maxInt(usize));
};
var json_tree = blk: {
var parser = json.Parser.init(allocator, false); // false is copy_strings
defer parser.deinit();
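// skip a UTF-8 byte-order mark (0xEF 0xBB 0xBF) if the file starts with one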
const start = if (std.mem.startsWith(u8, content, "\xEF\xBB\xBF")) 3 else @as(usize, 0);
const json_content = content[start..];
std.log.info("parsing '{s}'...", .{filename});
break :blk try parser.parse(json_content);
};
return json_tree.root.Object;
}
fn gatherSdkFiles(sdk_files: *ArrayList(*SdkFile), module: *Module) anyerror!void {
if (module.file) |_| {
try sdk_files.append(&module.file.?);
}
const children = try allocMapValues(allocator, *Module, module.children);
defer allocator.free(children);
std.sort.sort(*Module, children, {}, moduleLessThan); // sort so the order is predictable
for (children) |child| {
try gatherSdkFiles(sdk_files, child);
}
}
fn generateEverythingModule(out_win32_dir: std.fs.Dir, root_module: *Module) !void {
var everything_file = try out_win32_dir.createFile("everything.zig", .{});
defer everything_file.close();
const writer = everything_file.writer();
try writer.writeAll(removeCr(autogen_header ++
\\//! This module contains aliases to ALL symbols inside the Win32 SDK. It allows
\\//! an application to access any and all symbols through a single import.
\\
\\pub const L = @import("zig.zig").L;
\\
\\pub usingnamespace @import("missing.zig");
\\
));
var sdk_files = ArrayList(*SdkFile).init(allocator);
defer sdk_files.deinit();
try gatherSdkFiles(&sdk_files, root_module);
// TODO: workaround for constants/functions that are defined more than once; not sure what the right
// solution is for all of these, maybe some modules are simply not compatible with each other. This could
// also be the permanent solution: if there are conflicts, the user has to import the specific module they want.
// TODO: I think the right way to resolve conflicts in everything.zig is to have a priority order for the APIs.
// If the APIs are sorted so the more common ones come first, the current logic will work as-is.
var shared_export_map = StringPool.HashMap(*SdkFile).init(allocator);
defer shared_export_map.deinit();
// populate the shared_export_map, start with types first
// because types can be referenced within the modules (unlike consts/functions)
for (sdk_files.items) |sdk_file| {
var type_export_it = sdk_file.type_exports.iterator();
while (type_export_it.next()) |kv| {
const type_name = kv.key_ptr.*;
if (shared_export_map.get(type_name)) |_| {
//try shared_export_map.put(type_name, .{ .first_sdk_file_ptr = entry.first_sdk_file_ptr, .duplicates = entry.duplicates + 1 });
} else {
//try shared_export_map.put(type_name, .{ .first_sdk_file_ptr = sdk_file, .duplicates = 0 });
try shared_export_map.put(type_name, sdk_file);
}
}
}
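// Each non-conflicting export below becomes a re-export through the top-level win32.zig,
// e.g. (hypothetical names): pub const FOO = @import("../win32.zig").some.module.FOO;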
for (sdk_files.items) |sdk_file| {
try writer.print("// {s} exports {} constants:\n", .{sdk_file.zig_name, sdk_file.const_exports.items.len});
for (sdk_file.const_exports.items) |constant| {
if (shared_export_map.get(constant)) |other_sdk_file| {
try writer.print("// WARNING: redefinition of constant symbol '{s}' in module '{s}' (going with module '{s}')\n", .{
constant, sdk_file.zig_name, other_sdk_file.zig_name});
} else {
try writer.print("pub const {s} = @import(\"../win32.zig\").{s}.{0s};\n", .{constant, sdk_file.zig_name});
try shared_export_map.put(constant, sdk_file);
}
}
try writer.print("// {s} exports {} types:\n", .{sdk_file.zig_name, sdk_file.type_exports.count()});
var export_it = sdk_file.type_exports.iterator();
while (export_it.next()) |kv| {
const type_name = kv.key_ptr.*;
const first_type_sdk = shared_export_map.get(type_name) orelse unreachable;
if (first_type_sdk != sdk_file) {
try writer.print("// WARNING: redefinition of type symbol '{s}' from '{s}', going with '{s}'\n", .{
type_name, sdk_file.zig_name, first_type_sdk.zig_name});
} else {
try writer.print("pub const {s} = @import(\"../win32.zig\").{s}.{0s};\n", .{type_name, sdk_file.zig_name});
}
}
try writer.print("// {s} exports {} functions:\n", .{sdk_file.zig_name, sdk_file.func_exports.count()});
var func_it = sdk_file.func_exports.iterator();
while (func_it.next()) |kv| {
const func = kv.key_ptr.*;
if (shared_export_map.get(func)) |other_sdk_file| {
try writer.print("// WARNING: redefinition of function '{s}' in module '{s}' (going with module '{s}')\n", .{
func, sdk_file.zig_name, other_sdk_file.zig_name});
} else {
try writer.print("pub const {s} = @import(\"../win32.zig\").{s}.{0s};\n", .{func, sdk_file.zig_name});
try shared_export_map.put(func, sdk_file);
}
}
}
}
fn allocMapValues(alloc: *std.mem.Allocator, comptime T: type, map: anytype) ![]T {
var values = try alloc.alloc(T, map.count());
errdefer alloc.free(values);
{
var i: usize = 0;
var it = map.iterator(); while (it.next()) |entry| : (i += 1) {
values[i] = entry.value_ptr.*;
}
std.debug.assert(i == map.count());
}
return values;
}
fn moduleLessThan(context: void, lhs: *Module, rhs: *Module) bool {
_ = context;
return std.ascii.lessThanIgnoreCase(lhs.name.slice, rhs.name.slice);
}
fn generateContainerModules(dir: std.fs.Dir, module: *Module) anyerror!void {
if (module.children.count() == 0) {
return;
}
var file = blk: {
if (module.file) |_| {
const file = try dir.openFile(module.zig_basename, .{ .read = false, .write = true });
try file.seekFromEnd(0);
break :blk file;
}
break :blk try dir.createFile(module.zig_basename, .{});
};
defer file.close();
const writer = file.writer();
const children = try allocMapValues(allocator, *Module, module.children);
defer allocator.free(children);
std.sort.sort(*Module, children, {}, moduleLessThan);
if (module.file) |_| {
try writer.print("//--------------------------------------------------------------------------------\n", .{});
try writer.print("// Section: SubModules ({})\n", .{children.len});
try writer.print("//--------------------------------------------------------------------------------\n", .{});
} else {
try writer.writeAll(autogen_header);
}
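// Each child module gets one import line of the form (child/parent names are illustrative):
// pub const foo = @import("parent_module/foo.zig");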
for (children) |child| {
try writer.print("pub const {s} = @import(\"{s}/{0s}.zig\");\n", .{child.name, module.name.slice});
}
if (module.file) |_| { } else {
try writer.writeAll(removeCr(
\\test {
\\ @import("std").testing.refAllDecls(@This());
\\}
\\
));
}
var next_dir = try dir.openDir(module.name.slice, .{});
defer next_dir.close();
for (children) |child| {
try generateContainerModules(next_dir, child);
}
}
fn readAndGenerateApiFile(root_module: *Module, out_dir: std.fs.Dir, json_basename: []const u8, file: std.fs.File) !void {
const read_start_millis = std.time.milliTimestamp();
const content = try file.readToEndAlloc(allocator, std.math.maxInt(usize));
const read_end_millis = std.time.milliTimestamp();
global_times.read_time_millis += read_end_millis - read_start_millis;
defer allocator.free(content);
std.debug.warn(" read {} bytes\n", .{content.len});
// Parsing the JSON is VERY VERY SLOW!!!!!!
var json_tree = blk: {
var parser = json.Parser.init(allocator, false); // false is copy_strings
defer parser.deinit();
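// skip a UTF-8 byte-order mark (0xEF 0xBB 0xBF) if the file starts with one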
const start = if (std.mem.startsWith(u8, content, "\xEF\xBB\xBF")) 3 else @as(usize, 0);
const json_content = content[start..];
break :blk try parser.parse(json_content);
};
defer json_tree.deinit();
global_times.parse_time_millis += std.time.milliTimestamp() - read_end_millis;
const json_basename_copy = try std.mem.dupe(allocator, u8, json_basename);
const json_name = json_basename_copy[0..json_basename_copy.len - ".json".len];
const zig_name = try cameltosnake.camelToSnakeAlloc(allocator, json_name);
errdefer allocator.free(zig_name);
var module_dir = out_dir;
defer if (module_dir.fd != out_dir.fd) module_dir.close();
var module: *Module = root_module;
var depth: u2 = 0;
{
var it = std.mem.tokenize(u8, zig_name, ".");
while (it.next()) |name_part| {
if (module != root_module) {
depth += 1;
if (module.children.count() == 0) {
try module_dir.makeDir(module.name.slice);
}
const next_dir = try module_dir.openDir(module.name.slice, .{});
if (module_dir.fd != out_dir.fd)
module_dir.close();
module_dir = next_dir;
}
const name_pool = try global_symbol_pool.add(name_part);
if (module.children.get(name_pool)) |existing| {
module = existing;
} else {
const new_module = try Module.alloc(module, name_pool);
try module.children.put(name_pool, new_module);
module = new_module;
}
}
}
if (module.file) |_| {
jsonPanicMsg("qualified name '{s}' already has an sdk file?", .{zig_name});
}
var not_null_funcs = empty_json_object_map;
if (global_notnull.get(json_name)) |*api_node| {
const api_obj = api_node.Object;
try jsonObjEnforceKnownFieldsOnly(api_obj, &[_][]const u8 {"Functions"}, notnull_filename);
not_null_funcs = (try jsonObjGetRequired(api_obj, "Functions", notnull_filename)).Object;
}
module.file = SdkFile {
.json_basename = json_basename_copy,
.json_name = json_name,
.zig_name = zig_name,
.depth = depth,
.const_exports = ArrayList(StringPool.Val).init(allocator),
.uses_guid = false,
.top_level_api_imports = StringPool.HashMap(ApiImport).init(allocator),
.type_exports = StringPool.HashMap(Nothing).init(allocator),
.func_exports = StringPool.HashMap(Nothing).init(allocator),
.tmp_func_ptr_workaround_list = ArrayList(StringPool.Val).init(allocator),
.param_names_to_avoid_map_get_fn = getParamNamesToAvoidMapGetFn(json_name),
.not_null_funcs = not_null_funcs,
.not_null_funcs_applied = StringPool.HashMap(Nothing).init(allocator),
};
const generate_start_millis = std.time.milliTimestamp();
try generateFile(module_dir, module, json_tree);
global_times.generate_time_millis += std.time.milliTimestamp() - generate_start_millis;
}
pub fn EmptyComptimeStringMap(comptime V: type) type { return struct {
pub fn get(str: []const u8) ?V { _ = str; return null; }
};}
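// Mirrors the `get` interface of std.ComptimeStringMap but never matches anything; a stand-in
// for places that expect a map-like getter with no entries.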
fn generateFile(module_dir: std.fs.Dir, module: *Module, tree: json.ValueTree) !void {
const sdk_file = &module.file.?;
var out_file = try module_dir.createFile(module.zig_basename, .{});
defer out_file.close();
var code_writer = CodeWriter { .writer = out_file.writer(), .depth = 0, .midline = false };
const writer = &code_writer;
try writer.writeBlock(autogen_header);
// We can't import the everything module because it will re-introduce the same symbols we are exporting
//try writer.print("usingnamespace @import(\"everything.zig\");\n", .{});
const root_obj = tree.root.Object;
const constants_array = (try jsonObjGetRequired(root_obj, "Constants", sdk_file)).Array;
const types_array = (try jsonObjGetRequired(root_obj, "Types", sdk_file)).Array;
const functions_array = (try jsonObjGetRequired(root_obj, "Functions", sdk_file)).Array;
const unicode_aliases = (try jsonObjGetRequired(root_obj, "UnicodeAliases", sdk_file)).Array;
try writer.line("//--------------------------------------------------------------------------------");
try writer.linef("// Section: Constants ({})", .{constants_array.items.len});
try writer.line("//--------------------------------------------------------------------------------");
for (constants_array.items) |*constant_node_ptr| {
try generateConstant(sdk_file, writer, constant_node_ptr.Object);
}
std.debug.assert(constants_array.items.len == sdk_file.const_exports.items.len);
try writer.line("");
try writer.line("//--------------------------------------------------------------------------------");
try writer.linef("// Section: Types ({})", .{types_array.items.len});
try writer.line("//--------------------------------------------------------------------------------");
{
var enum_alias_conflicts = StringPool.HashMap(StringPool.Val).init(allocator);
defer enum_alias_conflicts.deinit();
for (types_array.items) |*type_node_ptr| {
try generateType(sdk_file, writer, type_node_ptr.Object, &enum_alias_conflicts);
try writer.line("");
}
}
try writer.line("");
try writer.line("//--------------------------------------------------------------------------------");
try writer.linef("// Section: Functions ({})", .{functions_array.items.len});
try writer.line("//--------------------------------------------------------------------------------");
for (functions_array.items) |*function_node_ptr| {
try generateFunction(sdk_file, writer, function_node_ptr.Object, .fixed, null, null);
try writer.line("");
}
std.debug.assert(functions_array.items.len >= sdk_file.func_exports.count());
try writer.line("");
try writer.line("//--------------------------------------------------------------------------------");
try writer.linef("// Section: Unicode Aliases ({})", .{unicode_aliases.items.len});
try writer.line("//--------------------------------------------------------------------------------");
try generateUnicodeAliases(sdk_file, writer, unicode_aliases.items);
const import_total = @boolToInt(sdk_file.uses_guid) + sdk_file.top_level_api_imports.count();
try writer.line("//--------------------------------------------------------------------------------");
try writer.linef("// Section: Imports ({})", .{import_total});
try writer.line("//--------------------------------------------------------------------------------");
if (sdk_file.uses_guid) {
try writer.linef("const Guid = @import(\"{s}zig.zig\").Guid;", .{sdk_file.getWin32DirImportPrefix()});
}
{
// TODO: the generated code would probably be cleaner if I combined symbols
// that belong to the same arch set
var it = sdk_file.top_level_api_imports.iterator();
while (it.next()) |import| {
const name = import.key_ptr.*;
const api_upper = import.value_ptr.api;
const arches = import.value_ptr.arches;
// TODO: should I cache this mapping from api ref to api import path?
const api_path = try cameltosnake.camelToSnakeAlloc(allocator, api_upper.slice);
defer allocator.free(api_path);
for (api_path) |c, i| {
if (c == '.')
api_path[i] = '/';
}
var vis: []const u8 = "";
if (arches.flags != ArchFlags.all.flags) {
try generateArchPrefix(writer, sdk_file.depth, arches, "");
writer.depth += 1;
vis = "pub ";
}
try writer.linef("{s}const {s} = @import(\"{s}{s}.zig\").{1s};",
.{vis, name, sdk_file.getWin32DirImportPrefix(), api_path});
if (arches.flags != ArchFlags.all.flags) {
writer.depth -= 1;
generateArchSuffix(writer);
}
}
}
try writer.writeBlock(comptime removeCr(
\\
\\test {
\\
));
if (sdk_file.tmp_func_ptr_workaround_list.items.len > 0) {
try writer.line(" // The following '_ = <FuncPtrType>' lines are a workaround for https://github.com/ziglang/zig/issues/4476");
for (sdk_file.tmp_func_ptr_workaround_list.items) |func_ptr_type| {
try writer.linef(" if (@hasDecl(@This(), \"{s}\")) {{ _ = {0s}; }}", .{func_ptr_type});
}
try writer.line("");
}
try writer.writeBlock(comptime removeCr(
\\ @setEvalBranchQuota(
\\ @import("std").meta.declarations(@This()).len * 3
\\ );
\\
\\ // reference all the pub declarations
\\ if (!@import("std").builtin.is_test) return;
\\ inline for (@import("std").meta.declarations(@This())) |decl| {
\\ if (decl.is_pub) {
\\ _ = decl;
\\ }
\\ }
\\}
\\
));
// check that all notnull stuff was applied
{
var it = sdk_file.not_null_funcs.iterator();
var error_count : u32 = 0;
while (it.next()) |api| {
const pool_name = try global_symbol_pool.add(api.key_ptr.*);
if (sdk_file.not_null_funcs_applied.get(pool_name)) |_| { } else {
std.log.err("notnull.json api '{s}' function '{s}' was not applied", .{sdk_file.json_name, pool_name});
error_count += 1;
}
}
sdk_file.not_null_funcs_applied.deinit();
if (error_count > 0) {
return error.AlreadyReported;
}
}
}
fn typeIsVoid(type_obj: json.ObjectMap, sdk_file: *SdkFile) !bool {
const kind = (try jsonObjGetRequired(type_obj, "Kind", sdk_file)).String;
if (std.mem.eql(u8, kind, "Native")) {
const name = (try jsonObjGetRequired(type_obj, "Name", sdk_file)).String;
return std.mem.eql(u8, name, "void");
}
return false;
}
// Convenience function that both adds type refs and creates a formatter for the type.
// These 2 operations are orthogonal; however, combining them helps ensure that a type
// reference is never generated without also adding that reference to the api being generated.
fn addTypeRefs(sdk_file: *SdkFile, arches: ArchFlags, type_ref: json.ObjectMap, options: TypeRefFormatter.Options, nested_context: ?*const NestedContext) anyerror!TypeRefFormatter {
try addTypeRefsNoFormatter(sdk_file, arches, type_ref);
return fmtTypeRef(type_ref, arches, options, nested_context);
}
fn addTypeRefsNoFormatter(sdk_file: *SdkFile, arches: ArchFlags, type_ref: json.ObjectMap) anyerror!void {
const kind = (try jsonObjGetRequired(type_ref, "Kind", sdk_file)).String;
if (std.mem.eql(u8, kind, "Native")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "Name"}, sdk_file);
const name = (try jsonObjGetRequired(type_ref, "Name", sdk_file)).String;
if (std.mem.eql(u8, name, "Void")) {
// void is special
} else if (std.mem.eql(u8, name, "Guid")) {
sdk_file.uses_guid = true;
} else if (global_native_type_map.get(name) == null) {
std.debug.panic("unknown Native type '{s}'", .{name});
}
} else if (std.mem.eql(u8, kind, "ApiRef")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "Name", "TargetKind", "Api", "Parents"}, sdk_file);
const tmp_name = (try jsonObjGetRequired(type_ref, "Name", sdk_file)).String;
const api = (try jsonObjGetRequired(type_ref, "Api", sdk_file)).String;
const parents = (try jsonObjGetRequired(type_ref, "Parents", sdk_file)).Array;
try sdk_file.addApiImport(arches, tmp_name, api, parents);
} else if (std.mem.eql(u8, kind, "PointerTo")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "Child"}, sdk_file);
try addTypeRefsNoFormatter(sdk_file, arches, (try jsonObjGetRequired(type_ref, "Child", sdk_file)).Object);
} else if (std.mem.eql(u8, kind, "Array")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "Shape", "Child"}, sdk_file);
try addTypeRefsNoFormatter(sdk_file, arches, (try jsonObjGetRequired(type_ref, "Child", sdk_file)).Object);
} else if (std.mem.eql(u8, kind, "LPArray")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "NullNullTerm", "CountConst", "CountParamIndex", "Child"}, sdk_file);
try addTypeRefsNoFormatter(sdk_file, arches, (try jsonObjGetRequired(type_ref, "Child", sdk_file)).Object);
} else if (std.mem.eql(u8, kind, "MissingClrType")) {
try jsonObjEnforceKnownFieldsOnly(type_ref, &[_][]const u8 {"Kind", "Name", "Namespace"}, sdk_file);
} else {
jsonPanicMsg("kind '{s}' is not implemented", .{kind});
}
}
const ContainerKind = enum { Struct, Union };
pub fn getAnonKind(s: []const u8) ?ContainerKind {
//if (std.mem.startsWith(u8, s, "_Anonymous")) {
if (std.mem.endsWith(u8, s, "_e__Struct"))
return .Struct;
if (std.mem.endsWith(u8, s, "_e__Union"))
return .Union;
// jsonPanicMsg("type '{s}' starts with '_Anonymous' but does not have an expected end", .{s});
//}
return null;
}
// Provides access to the nested types that are visible from the current scope
const NestedContext = struct {
nested_types: json.Array,
parent: ?*const NestedContext,
pub fn contains(self: NestedContext, name: []const u8) bool {
std.debug.assert(self.nested_types.items.len > 0);
for (self.nested_types.items) |*nested_type_node_ptr| {
const nested_type_obj = nested_type_node_ptr.Object;
const nested_name = (nested_type_obj.get("Name") orelse jsonPanic()).String;
if (std.mem.eql(u8, nested_name, name))
return true;
}
if (self.parent) |p| return p.contains(name);
return false;
}
};
const NullModifier = u3;
// We need to know whether the type is the top-level type or a child of something like a pointer
// so we can generate the correct `void` type. Top-level void types become void, but pointers
// to void types must become pointers to the `c_void` type.
// We also need to know when it is an array element specifically, because array pointers cannot
// point to opaque types with an unknown size.
const DepthContext = enum {top_level, child, array};
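// For example: a top-level Void renders as `void`, a Void behind a pointer renders as `c_void`,
// and a Void array element renders as `u8` (see the Native branch of generateTypeRefRec below).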
const TypeRefFormatter = struct {
pub const Reason = enum { var_decl, direct_type_access };
pub const Options = struct {
reason: Reason,
is_const: bool = false,
in: bool = false,
out: bool = false,
optional: bool = false,
// TODO: handle this option
not_null_term: bool = false,
// TODO: handle this option
null_null_term: bool = true,
// TODO: what to do with this?
ret_val: bool = false,
// TODO: don't know what to do with this yet
com_out_ptr: bool = false,
// TODO: don't know what to do with this yet
do_not_release: bool = false,
// TODO: don't know what to do with this yet
reserved: bool = false,
optional_bytes_param_index: ?i16 = null,
anon_types: ?*const AnonTypes,
null_modifier: NullModifier,
};
type_ref: json.ObjectMap,
arches: ArchFlags,
options: Options,
nested_context: ?*const NestedContext,
};
pub fn fmtTypeRef(type_ref: json.ObjectMap, arches: ArchFlags, options: TypeRefFormatter.Options, nested_context: ?*const NestedContext) TypeRefFormatter {
return .{ .type_ref = type_ref, .arches = arches, .options = options, .nested_context = nested_context };
}
fn generateTypeRef(sdk_file: *SdkFile, writer: *CodeWriter, self: TypeRefFormatter) !void {
try generateTypeRefRec(sdk_file, writer, self, .top_level);
}
fn generateTypeRefRec(sdk_file: *SdkFile, writer: *CodeWriter, self: TypeRefFormatter, depth_context: DepthContext) anyerror!void {
const kind = (try jsonObjGetRequired(self.type_ref, "Kind", sdk_file)).String;
if (std.mem.eql(u8, kind, "Native")) {
try jsonObjEnforceKnownFieldsOnly(self.type_ref, &[_][]const u8 {"Kind", "Name"}, sdk_file);
const name = (try jsonObjGetRequired(self.type_ref, "Name", sdk_file)).String;
if (std.mem.eql(u8, name, "Void")) {
const type_name: []const u8 = switch (depth_context) {
.top_level => "void",
.child => "c_void",
// if we are rendering the element of an array, then we have to know its size; we default to u8
// because most void pointers in C are measured in terms of u8 bytes
.array => "u8",
};
try writer.writef("{s}", .{type_name}, .{.start=.any,.nl=false});
} else if (std.mem.eql(u8, name, "Guid")) {
try writer.write("Guid", .{.start=.any,.nl=false});
} else {
const native_type = global_native_type_map.get(name) orelse std.debug.panic("unknown Native type '{s}'", .{name});
try writer.writef("{s}", .{nativeTypeToZigType(native_type)}, .{.start=.any,.nl=false});
}
} else if (std.mem.eql(u8, kind, "ApiRef")) {
try jsonObjEnforceKnownFieldsOnly(self.type_ref, &[_][]const u8 {"Kind", "Name", "TargetKind", "Api", "Parents"}, sdk_file);
const name = (try jsonObjGetRequired(self.type_ref, "Name", sdk_file)).String;
const api = (try jsonObjGetRequired(self.type_ref, "Api", sdk_file)).String;
if (getAnonKind(name)) |anon_kind| {
const anon_types = self.options.anon_types orelse
jsonPanicMsg("missing anonymous type '{s}' (this scope does not have any anonymous types)!", .{name});
const name_pool = try global_symbol_pool.add(name);
const type_obj = anon_types.types.get(name_pool) orelse
jsonPanicMsg("missing anonymous type '{s}'!", .{name});
try generateStructOrUnionDef(sdk_file, writer, type_obj, self.arches, anon_kind, self.nested_context);
try writer.write("}", .{.nl=false});
return;
}
const parents = (try jsonObjGetRequired(self.type_ref, "Parents", sdk_file)).Array;
const type_kind_category = blk: {
const pass1_api_map = (global_pass1.get(api) orelse
jsonPanicMsg("type '{s}' is from API '{s}' that is missing from pass1 data", .{name, api})).Object;
const pass1_type_obj = (pass1_api_map.get(name) orelse {
if (parents.items.len == 0) {
const in_nested_context = if (self.nested_context) |c| c.contains(name) else false;
if (!in_nested_context) {
jsonPanicMsg("type '{s}' from API '{s}' is missing from pass1 data, has no parents and is not in the current nested context!", .{name, api});
}
}
// this means it's a nested type, which is always a struct/union
break :blk Pass1TypeKindCategory.default;
}).Object;
try jsonObjEnforceKnownFieldsOnly(pass1_type_obj, &[_][]const u8 {"Kind"}, sdk_file);
const type_kind = (try jsonObjGetRequired(pass1_type_obj, "Kind", sdk_file)).String;
break :blk pass1_type_kind_info.get(type_kind) orelse
jsonPanicMsg("unknown pass1 type kind '{s}'", .{type_kind});
};
if (self.options.reason == .var_decl) {
switch (type_kind_category) {
.default => {},
.ptr => {
if (self.options.null_modifier & 1 == 0) {
try writer.write("?", .{.start=.any,.nl=false});
}
},
.com => {
if (self.options.null_modifier & 1 == 0) {
try writer.write("?", .{.start=.any,.nl=false});
}
try writer.write("*", .{.start=.any,.nl=false});
},
}
}
// special handling for PSTR and PWSTR for now. This is because those types are hardcoded as
// non-const and null-terminated, so we can't reference them if our usage doesn't match.
// If there are more cases that behave like this, I will likely need to implement a 2-pass
// system where the first pass gathers all the type definitions so that on the second pass
// I'll know whether each type is a pointer like this and can handle it accordingly.
const special : enum { pstr, pwstr, other } = blk: {
if (std.mem.eql(u8, name, "PSTR")) break :blk .pstr;
if (std.mem.eql(u8, name, "PWSTR")) break :blk .pwstr;
break :blk .other;
};
if (special == .pstr or special == .pwstr) {
// if we deviated from the options we set for PSTR/PWSTR, then generate the native zig
// type directly instead of referencing the PSTR/PWSTR type
if (self.options.is_const or self.options.not_null_term) {
// can't put these expressions in the print argument tuple because of https://github.com/ziglang/zig/issues/8036
const base_type = if (special == .pstr) "u8" else "u16";
const sentinel_suffix = if (self.options.not_null_term) "" else ":0";
const const_str = if (self.options.is_const) "const " else "";
try writer.writef("[*{s}]{s}{s}", .{sentinel_suffix, const_str, base_type}, .{.start=.any,.nl=false});
return;
}
}
// for now, all nested type references MUST be in the same scope so this
// just causes issues
//for (parents.items) |*parent_ptr| {
// try writer.writef("{s}", .{parent_ptr.String}, .{.start=.any,.nl=false});
// try writer.write(".", .{.start=.any,.nl=false});