diff --git a/.gitmodules b/.gitmodules index d7ebcc45..982918ef 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,8 @@ [submodule "community-wgsl"] path = community-wgsl url = https://github.com/wgsl-tooling-wg/community-wgsl + update = merge [submodule "wesl-testsuite"] path = wesl-testsuite url = https://github.com/wgsl-tooling-wg/wesl-testsuite.git + update = merge diff --git a/linker/packages/bulk-test/vite.config.ts b/linker/packages/bulk-test/vite.config.ts index 36d04d02..46e86ff7 100644 --- a/linker/packages/bulk-test/vite.config.ts +++ b/linker/packages/bulk-test/vite.config.ts @@ -5,6 +5,7 @@ import tsconfigPaths from "vite-tsconfig-paths"; const config: UserConfig = { plugins: [tsconfigPaths()], test: { + testTimeout: 10000, maxWorkers: 20, sequence: { concurrent: true, diff --git a/linker/packages/cli/src/test/wgsl-link.test.ts b/linker/packages/cli/src/test/wgsl-link.test.ts index 8679b343..e378be72 100644 --- a/linker/packages/cli/src/test/wgsl-link.test.ts +++ b/linker/packages/cli/src/test/wgsl-link.test.ts @@ -11,14 +11,15 @@ import("./src/test/wgsl/main.wgsl?raw"); import("./src/test/wgsl/util.wgsl?raw"); const testDir = dirname(fileURLToPath(import.meta.url)); -const wgslDir = path.join(testDir, "wgsl"); -const mainPath = path.join(wgslDir, "main.wgsl"); -const utilPath = path.join(wgslDir, "util.wgsl"); +const wgslDir = testDir + "/wgsl"; +const mainPath = wgslDir + "/main.wgsl"; +const utilPath = wgslDir + "/util.wgsl"; test("simple link", async () => { const logged = await cliLine(`${mainPath} ${utilPath} --baseDir ${wgslDir}`); expect(logged).toMatchInlineSnapshot(` " + fn main() { foo(); } @@ -34,7 +35,7 @@ test("simple link", async () => { `); }); -const packagePath = /package::.*::(.*)$/gm; +const packagePath = /^package::.*::(.*)$/gm; test("link --details", async () => { const line = `${mainPath} ${utilPath} --baseDir ${wgslDir} --baseDir ./src/test/wgsl @@ -42,6 +43,7 @@ test("link --details", async () => { --emit false`; const 
logged = await cliLine(line); + // Remove the directory specific path before the logs const noPackagePaths = logged.replace(packagePath, "package::$1"); expectTrimmedMatch( noPackagePaths, @@ -51,9 +53,8 @@ test("link --details", async () => { ->ast module - import package/util/foo - text 'import ./util/foo; - ' + import package::util::foo; + text ' ' fn main() diff --git a/linker/packages/cli/src/test/wgsl/main.wgsl b/linker/packages/cli/src/test/wgsl/main.wgsl index f99fbb18..c9f06443 100644 --- a/linker/packages/cli/src/test/wgsl/main.wgsl +++ b/linker/packages/cli/src/test/wgsl/main.wgsl @@ -1,4 +1,4 @@ -import ./util/foo; +import package::util::foo; fn main() { foo(); diff --git a/linker/packages/linker/src/AbstractElems.ts b/linker/packages/linker/src/AbstractElems.ts index a33af913..afb1647b 100644 --- a/linker/packages/linker/src/AbstractElems.ts +++ b/linker/packages/linker/src/AbstractElems.ts @@ -1,4 +1,4 @@ -import { ImportTree } from "./ImportTree.ts"; +import { ImportStatement } from "./ImportStatement.ts"; import { DeclIdent, RefIdent, SrcModule } from "./Scope.ts"; /** @@ -161,7 +161,7 @@ export interface GlobalVarElem extends ElemWithContentsBase { /** an import statement */ export interface ImportElem extends ElemWithContentsBase { kind: "import"; - imports: ImportTree; + imports: ImportStatement; } /** an entire file */ diff --git a/linker/packages/linker/src/BindIdents.ts b/linker/packages/linker/src/BindIdents.ts index 6a4ca5ee..52aba258 100644 --- a/linker/packages/linker/src/BindIdents.ts +++ b/linker/packages/linker/src/BindIdents.ts @@ -197,9 +197,8 @@ function matchingImport( ): string[] | undefined { const identParts = ident.originalName.split("::"); for (const flat of flatImports) { - const impTail = overlapTail(flat.importPath, identParts); - if (impTail) { - return [...flat.modulePath, ...impTail]; + if (flat.importPath.at(-1) === identParts.at(0)) { + return [...flat.modulePath, ...identParts.slice(1)]; } } } diff --git 
a/linker/packages/linker/src/FlattenTreeImport.ts b/linker/packages/linker/src/FlattenTreeImport.ts index 3f893324..b96200b4 100644 --- a/linker/packages/linker/src/FlattenTreeImport.ts +++ b/linker/packages/linker/src/FlattenTreeImport.ts @@ -1,10 +1,9 @@ -import { tracing } from "mini-parse"; import { - ImportTree, - PathSegment, - SegmentList, - SimpleSegment, -} from "./ImportTree.js"; + ImportCollection, + ImportItem, + ImportSegment, + ImportStatement, +} from "./ImportStatement.js"; export interface FlatImport { importPath: string[]; @@ -16,44 +15,41 @@ export interface FlatImport { * * @return map from import path (with 'as' renaming) to module Path */ -export function flattenTreeImport(imp: ImportTree): FlatImport[] { - return recursiveResolve([], [], imp.segments); +export function flattenTreeImport(imp: ImportStatement): FlatImport[] { + return recursiveResolve([], [], imp.segments, imp.finalSegment); /** recurse through segments of path, producing */ function recursiveResolve( resolvedImportPath: string[], resolvedExportPath: string[], - remainingPath: PathSegment[], + remainingPath: ImportSegment[], + finalSegment: ImportCollection | ImportItem, ): FlatImport[] { - const [segment, ...rest] = remainingPath; - if (segment === undefined) { - throw new Error(`undefined segment ${imp.segments}`); - } - if (segment instanceof SimpleSegment) { - const importPath = [...resolvedImportPath, segment.as || segment.name]; + if (remainingPath.length > 0) { + const [segment, ...rest] = remainingPath; + const importPath = [...resolvedImportPath, segment.name]; const modulePath = [...resolvedExportPath, segment.name]; - if (rest.length) { - // we're in the middle of the path so keep recursing - return recursiveResolve(importPath, modulePath, rest); - } else { - return [{ importPath, modulePath }]; - } - } - if (segment instanceof SegmentList) { + return recursiveResolve(importPath, modulePath, rest, finalSegment); + } else if (finalSegment instanceof ImportCollection) 
{ // resolve path with each element in the list - return segment.list.flatMap(elem => { - const rPath = [elem, ...rest]; - return recursiveResolve(resolvedImportPath, resolvedExportPath, rPath); + return finalSegment.subTrees.flatMap(elem => { + return recursiveResolve( + resolvedImportPath, + resolvedExportPath, + elem.segments, + elem.finalSegment, + ); }); - } else if (segment instanceof ImportTree) { - return recursiveResolve( - resolvedImportPath, - resolvedExportPath, - segment.segments, - ); + } else if (finalSegment instanceof ImportItem) { + const importPath = [ + ...resolvedImportPath, + finalSegment.as || finalSegment.name, + ]; + const modulePath = [...resolvedExportPath, finalSegment.name]; + return [{ importPath, modulePath }]; + } else { + console.error(finalSegment); + throw new Error("unknown segment type", { cause: finalSegment }); } - - if (tracing) console.log("unknown segment type", segment); // should be impossible - return []; } } diff --git a/linker/packages/linker/src/ImportGrammar.ts b/linker/packages/linker/src/ImportGrammar.ts index 29ee93a2..18d5fa21 100644 --- a/linker/packages/linker/src/ImportGrammar.ts +++ b/linker/packages/linker/src/ImportGrammar.ts @@ -1,156 +1,125 @@ import { - disablePreParse, + delimited, kind, - makeEolf, - matchOneOf, NoTags, opt, or, Parser, + preceded, repeat, repeatPlus, + req, seq, + seqObj, setTraceName, TagRecord, tagScope, - tokenMatcher, + terminated, tokens, - tokenSkipSet, tracing, withSepPlus, } from "mini-parse"; +import { mainTokens } from "./WESLTokens.js"; import { - importElem, - importList, - importSegment, - importTree, -} from "./WESLCollect.js"; -import { digits, eol, ident } from "./WESLTokens.js"; - -// TODO now that ';' is required, special ws and eol handling is probably not needed. 
-const skipWsSet = new Set(["ws"]); -function skipWs(p: Parser): Parser { - return tokenSkipSet(skipWsSet, p); + ImportCollection, + ImportItem, + ImportSegment, + ImportStatement, +} from "./ImportStatement.js"; +import { ImportElem } from "./AbstractElems.js"; +import { importElem } from "./WESLCollect.js"; + +const wordToken = kind(mainTokens.ident) + // TODO: Remove this temporary hack to make the tests pass once we have a better tokenizer + .map(v => (v.value === "super" ? null : v.value)); + +function segment(text: string) { + return new ImportSegment(text); } -function noSkipWs(p: Parser): Parser { - return tokenSkipSet(null, p); +function segments( + ...values: (ImportSegment | ImportSegment[])[] +): ImportSegment[] { + return values.flat(); } -const importSymbolSet = "/ { } , ( ) .. . * ; @ #"; // Had to add @ and # here to get the parsing tests to work. Weird. -const importSymbol = matchOneOf(importSymbolSet); - -// TODO reconsider whether we need a separate token set for import statements vs wgsl/wesl -export const importTokens = tokenMatcher({ - ws: /\s+/, - importSymbol, - ident, // TODO allow '-' in pkg names? 
- digits, -}); - -export const eolTokens = tokenMatcher({ - ws: /[ \t]+/, // don't include \n, for eolf - eol, -}); - -const eolf = disablePreParse(makeEolf(eolTokens, importTokens.ws)); -const wordToken = kind(importTokens.ident); - -// forward references for mutual recursion -let packagePath: Parser = null as any; - -// prettier-ignore -const simpleSegment = tagScope( - wordToken .ptag("segment").collect(importSegment), -); - -// prettier-ignore /** last simple segment is allowed to have an 'as' rename */ -const lastSimpleSegment = tagScope( - seq( - wordToken .ptag("segment"), - skipWs(opt(seq("as", wordToken .ptag("as")))), - ) .collect(importSegment), -); - -/** an item an a collection list {a, b} */ -// prettier-ignore -const collectionItem = or( - tagScope(or(() => packagePath) .collect(importTree)), - lastSimpleSegment, +const item_import = seq(wordToken, opt(preceded("as", wordToken))).mapValue( + v => new ImportItem(v[0], v[1]), ); -// prettier-ignore -const importCollection = tagScope( - seq( - "{", - skipWs( - seq( - withSepPlus(",", () => collectionItem .ctag("list")), - "}", - ), +// forward references for mutual recursion +let import_collection: Parser = null as any; + +const import_path = seqObj({ + segments: repeatPlus(terminated(wordToken.mapValue(segment), "::")), + final: or(() => import_collection, item_import), +}).mapValue(v => new ImportStatement(v.segments, v.final)); + +import_collection = delimited( + "{", + withSepPlus(",", () => + or( + import_path, + item_import.mapValue(v => new ImportStatement([], v)), ), - ).collect(importList), -); - -/** a relative path element like "./" or "../" */ -// prettier-ignore -const relativeSegment = tagScope( - seq( - or(".", "..") .ptag("segment"), - "/" - ) .collect(importSegment), -) .ctag("p"); - -const lastSegment = or(lastSimpleSegment, importCollection); - -// prettier-ignore -const packageTail = seq( - repeat( - seq( - simpleSegment .ctag("p"), - "/" - ) - ), - lastSegment .ctag("p"), + 
).mapValue(v => new ImportCollection(v)), + "}", ); -/** a module path starting with ../ or ./ */ -const relativePath = seq(repeatPlus(relativeSegment), packageTail); - -// prettier-ignore -const packagePrefix = tagScope( - seq( - wordToken .ptag("segment"), - "/" - ) .collect(importSegment), -) .ctag("p"); +const import_relative = seq( + or("package", "super").mapValue(segment), + "::", + repeat(terminated(or("super").mapValue(segment), "::")), +).mapValue(v => segments(v[0], v[2])); -/** a module path, starting with a simple element */ -packagePath = seq(packagePrefix, packageTail); - -const fullPath = noSkipWs( - seq(kind(importTokens.ws), or(relativePath, packagePath)), +const import_package = terminated(wordToken.mapValue(segment), "::").mapValue( + segments, ); /** parse a WESL style wgsl import statement. */ -// prettier-ignore -export const weslImport = tagScope( +export const weslImport: Parser = tagScope( tokens( - importTokens, - seq("import", fullPath, opt(";"), eolf) .collect(importElem), + mainTokens, + delimited( + "import", + req( + seq( + or(import_relative, import_package), + or(import_collection, import_path, item_import), + ), + ).mapValue(v => { + if (v[1] instanceof ImportStatement) { + return new ImportStatement( + segments(v[0], v[1].segments), + v[1].finalSegment, + ); + } else { + return new ImportStatement(v[0], v[1]); + } + }), + req(";"), + ) + .map( + (v): ImportElem => ({ + kind: "import", + contents: [], + imports: v.value, + start: v.start, + end: v.end, + }), + ) + .ptag("owo") + .collect(importElem), ), ); if (tracing) { const names: Record> = { - simpleSegment, - lastSimpleSegment, - importCollection, - relativeSegment, - relativePath, - packagePrefix, - packagePath, - fullPath, + item_import, + import_path, + import_collection, + import_relative, + import_package, weslImport, }; diff --git a/linker/packages/linker/src/ImportStatement.ts b/linker/packages/linker/src/ImportStatement.ts new file mode 100644 index 00000000..966c752d 
--- /dev/null +++ b/linker/packages/linker/src/ImportStatement.ts @@ -0,0 +1,42 @@ +/** + * An import statement, which is tree-shaped. + * `import foo::bar::{baz, cat as neko};` + */ +export class ImportStatement { + constructor( + public segments: ImportSegment[], + public finalSegment: ImportCollection | ImportItem, + ) {} +} + +/** + * A collection of import trees. + * `{baz, cat as neko}` + */ +export class ImportCollection { + constructor(public subTrees: ImportStatement[]) {} +} + +/** + * A primitive segment in an import statement. + * `foo` + */ +export class ImportSegment { + constructor(public name: string) {} +} + +/** Stop TypeScript from accepting ImportItems wherever ImportSegments are required */ +const itemSymbol: unique symbol = Symbol("item"); + +/** + * A renamed item at the end of an import statement. + * `cat as neko` + */ +export class ImportItem { + constructor( + public name: string, + public as?: string, + ) {} + + [itemSymbol]: undefined; +} diff --git a/linker/packages/linker/src/ImportTree.ts b/linker/packages/linker/src/ImportTree.ts deleted file mode 100644 index afc31a88..00000000 --- a/linker/packages/linker/src/ImportTree.ts +++ /dev/null @@ -1,19 +0,0 @@ -export class ImportTree { - /** segments in path order */ - constructor(public segments: PathSegment[]) {} -} - -export type PathSegment = SimpleSegment | ImportTree | SegmentList; - -export class SimpleSegment { - constructor( - public name: string, - public as?: string, - public args?: string[], // generic args (only allowed on final segment).
TODO drop - ) {} -} - -/** or choices for this path segment */ -export class SegmentList { - constructor(public list: PathSegment[]) {} -} diff --git a/linker/packages/linker/src/ParseWESL.ts b/linker/packages/linker/src/ParseWESL.ts index 789358a6..fb1373f2 100644 --- a/linker/packages/linker/src/ParseWESL.ts +++ b/linker/packages/linker/src/ParseWESL.ts @@ -1,7 +1,7 @@ import { AppState, matchingLexer, ParserInit, SrcMap } from "mini-parse"; import { ModuleElem } from "./AbstractElems.ts"; import { FlatImport, flattenTreeImport } from "./FlattenTreeImport.ts"; -import { ImportTree } from "./ImportTree.ts"; +import { ImportStatement } from "./ImportStatement.ts"; import { emptyScope, resetScopeIds, Scope, SrcModule } from "./Scope.ts"; import { OpenElem } from "./WESLCollect.ts"; import { weslRoot } from "./WESLGrammar.ts"; @@ -27,7 +27,7 @@ export interface WeslAST { rootScope: Scope; /** imports found in this module */ - imports: ImportTree[]; + imports: ImportStatement[]; } /** an extended version of the AST */ diff --git a/linker/packages/linker/src/WESLCollect.ts b/linker/packages/linker/src/WESLCollect.ts index 7840d83e..cef25dc2 100644 --- a/linker/packages/linker/src/WESLCollect.ts +++ b/linker/packages/linker/src/WESLCollect.ts @@ -27,12 +27,6 @@ import { TypeRefElem, VarElem, } from "./AbstractElems.ts"; -import { - ImportTree, - PathSegment, - SegmentList, - SimpleSegment, -} from "./ImportTree.ts"; import { StableState, WeslAST, @@ -41,6 +35,15 @@ import { } from "./ParseWESL.ts"; import { DeclIdent, emptyBodyScope, RefIdent, Scope } from "./Scope.ts"; +export const importElem = collectElem( + "import", + (cc: CollectContext, openElem: PartElem) => { + const importElem = cc.tags.owo?.[0] as ImportElem; // LATER ts typing + (cc.app.stable as StableState).imports.push(importElem.imports); + return importElem; + }, +); + /** add an elem to the .contents array of the currently containing element */ function addToOpenElem(cc: CollectContext, elem: 
AbstractElem): void { const weslContext: WeslParseContext = cc.app.context; @@ -334,34 +337,6 @@ export const collectModule = collectElem( }, ); -export function importList(cc: CollectContext): SegmentList { - const list = cc.tags.list as PathSegment[]; - return new SegmentList(list); -} - -export function importSegment(cc: CollectContext): SimpleSegment { - const segOrig = cc.tags.segment?.[0] as string; - const seg = segOrig === "." ? "package" : segOrig; // TODO convert legacy syntax for now - return new SimpleSegment(seg, cc.tags.as?.[0]); -} - -export function importTree(cc: CollectContext): ImportTree { - const path = cc.tags.p?.flat() as PathSegment[]; // LATER fix typing - return new ImportTree(path); -} - -export const importElem = collectElem( - "import", - (cc: CollectContext, openElem: PartElem) => { - const path = cc.tags.p as PathSegment[]; // LATER ts typing - const imports = new ImportTree(path); - const partialElem: ImportElem = { ...openElem, imports }; - const importElem = withTextCover(partialElem, cc); - (cc.app.stable as StableState).imports.push(imports); - return importElem; - }, -); - /** collect a scope start starts before and ends after a parser */ export function scopeCollect(): CollectPair { return { diff --git a/linker/packages/linker/src/WESLGrammar.ts b/linker/packages/linker/src/WESLGrammar.ts index 19463160..4ed6d257 100644 --- a/linker/packages/linker/src/WESLGrammar.ts +++ b/linker/packages/linker/src/WESLGrammar.ts @@ -589,8 +589,8 @@ export const weslRoot = preParse( comment, seq( repeat(weslImport), - repeat(or(global_directive, weslImport)), - repeat(or(global_decl, weslImport)), + repeat(global_directive), + repeat(global_decl), req(end), ) .collect(collectModule, "collectModule"), ); diff --git a/linker/packages/linker/src/WESLTokens.ts b/linker/packages/linker/src/WESLTokens.ts index 962727eb..3a396a84 100644 --- a/linker/packages/linker/src/WESLTokens.ts +++ b/linker/packages/linker/src/WESLTokens.ts @@ -1,4 +1,4 @@ 
-import { matchOneOf, tokenMatcher } from "mini-parse"; +import { matchOneOf, Token, tokenMatcher } from "mini-parse"; import { textureStorageTypes } from "./StandardTypes.ts"; // https://www.w3.org/TR/WGSL/#blankspace-and-line-breaks @@ -54,6 +54,7 @@ export const mainTokens = tokenMatcher( "main", ); +/** For parsing generics */ export const bracketTokens = tokenMatcher( { bracket: /<|>/, @@ -86,11 +87,3 @@ export const argsTokens = tokenMatcher( }, "argsTokens", ); - -export const rootWs = tokenMatcher( - { - blanks: /\s+/, - other: /[^\s]+/, - }, - "rootWs", -); diff --git a/linker/packages/linker/src/debug/ImportToString.ts b/linker/packages/linker/src/debug/ImportToString.ts index b59d5587..c7b6fe2d 100644 --- a/linker/packages/linker/src/debug/ImportToString.ts +++ b/linker/packages/linker/src/debug/ImportToString.ts @@ -1,26 +1,28 @@ import { - ImportTree, - PathSegment, - SegmentList, - SimpleSegment, -} from "../ImportTree.ts"; + ImportCollection, + ImportItem, + ImportStatement, +} from "../ImportStatement.ts"; -export function importToString(tree: ImportTree): string { - return tree.segments.map(s => segmentToString(s)).join("/"); +export function importToString(tree: ImportStatement): string { + return importToStringImpl(tree) + ";"; } -function segmentToString(segment: PathSegment): string { - if (segment instanceof SimpleSegment) { - const { name, as, args } = segment; +function importToStringImpl(tree: ImportStatement): string { + return [ + ...tree.segments.map(s => s.name), + segmentToString(tree.finalSegment), + ].join("::"); +} + +function segmentToString(segment: ImportCollection | ImportItem): string { + if (segment instanceof ImportItem) { + const { name, as } = segment; const asMsg = as ? ` as ${as}` : ""; - const argsMsg = args ? 
`(${args.join(", ")})` : ""; - return `${name}${argsMsg}${asMsg}`; - } - if (segment instanceof SegmentList) { - return `{${segment.list.map(s => segmentToString(s)).join(", ")}}`; - } - if (segment instanceof ImportTree) { - return `(${importToString(segment)})`; + return `${name}${asMsg}`; + } else if (segment instanceof ImportCollection) { + return `{${segment.subTrees.map(s => importToStringImpl(s)).join(", ")}}`; + } else { + return `|unknown segment type ${(segment as any).constructor.name}|`; } - return `|unknown segment type ${(segment as any).constructor.name}|`; } diff --git a/linker/packages/linker/src/test/FlattenTreeImport.test.ts b/linker/packages/linker/src/test/FlattenTreeImport.test.ts index 3b8007d7..284a1eb8 100644 --- a/linker/packages/linker/src/test/FlattenTreeImport.test.ts +++ b/linker/packages/linker/src/test/FlattenTreeImport.test.ts @@ -1,17 +1,26 @@ import { expect, test } from "vitest"; import { flattenTreeImport } from "../FlattenTreeImport.ts"; -import { ImportTree, SegmentList, SimpleSegment } from "../ImportTree.ts"; +import { + ImportCollection, + ImportItem, + ImportSegment, + ImportStatement, +} from "../ImportStatement.ts"; test("complex tree import", () => { - const zap = new SimpleSegment("zap"); - const foo = new SimpleSegment("foo", "bar"); // foo as bar - const doh = new SimpleSegment("doh"); - const bib = new SimpleSegment("bib"); - const bog = new SimpleSegment("bog"); - const subtree = new ImportTree([bib, bog]); - const list = new SegmentList([foo, doh, subtree]); + const zap = new ImportSegment("zap"); + const foo = new ImportItem("foo", "bar"); // foo as bar + const doh = new ImportItem("doh"); + const bib = new ImportSegment("bib"); + const bog = new ImportItem("bog"); + const subtree = new ImportStatement([bib], bog); + const list = new ImportCollection([ + new ImportStatement([], foo), + new ImportStatement([], doh), + subtree, + ]); - const tree = new ImportTree([zap, list]); + const tree = new 
ImportStatement([zap], list); const flattened = flattenTreeImport(tree); expect(flattened).toMatchInlineSnapshot(` [ diff --git a/linker/packages/linker/src/test/ImportCases.test.ts b/linker/packages/linker/src/test/ImportCases.test.ts index 421d74ed..10a41468 100644 --- a/linker/packages/linker/src/test/ImportCases.test.ts +++ b/linker/packages/linker/src/test/ImportCases.test.ts @@ -15,7 +15,7 @@ interface LinkExpectation { // wgsl example src, indexed by name const examplesByName = new Map(importCases.map(t => [t.name, t.src])); -test("import ./bar/foo", ctx => { +test("import package::bar::foo;", ctx => { linkTest2(ctx.task.name, { linked: ` fn main() { diff --git a/linker/packages/linker/src/test/LinkPackage.test.ts b/linker/packages/linker/src/test/LinkPackage.test.ts index 38125d97..a52b6bc9 100644 --- a/linker/packages/linker/src/test/LinkPackage.test.ts +++ b/linker/packages/linker/src/test/LinkPackage.test.ts @@ -5,7 +5,7 @@ import { link } from "../Linker.ts"; test("import rand() from a package", () => { const src = ` - import random_wgsl/pcg_2u_3f; + import random_wgsl::pcg_2u_3f; struct Uniforms { frame: u32 } @binding(0) @group(0) var u: Uniforms; diff --git a/linker/packages/linker/src/test/ParseWESL.test.ts b/linker/packages/linker/src/test/ParseWESL.test.ts index 92ea262f..7e2f7cd0 100644 --- a/linker/packages/linker/src/test/ParseWESL.test.ts +++ b/linker/packages/linker/src/test/ParseWESL.test.ts @@ -721,14 +721,14 @@ test("parse @attribute before fn", () => { `); }); -test("import ./foo/bar;", ctx => { +test("import package::foo::bar;", ctx => { const src = ctx.task.name; const ast = parseTest(src); + console.log(ast.moduleElem); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import package/foo/bar - text 'import ./foo/bar;'" + import package::foo::bar;" `); }); @@ -960,64 +960,59 @@ test("var foo: vec2= vec2( 0.5, -0.5);", ctx => { `); }); -test("import ./a/b/c", ctx => { +test("import 
a::b::c;", ctx => { const ast = parseTest(ctx.task.name); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import package/a/b/c - text 'import ./a/b/c'" + import a::b::c;" `); }); -test("import ./file1/{foo, bar}", ctx => { +test("import package::file1::{foo, bar};", ctx => { const src = ctx.task.name; const ast = parseTest(src); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import package/file1/{foo, bar} - text 'import ./file1/{foo, bar}'" + import package::file1::{foo, bar};" `); }); -test("import ./file1/{foo, bar}", ctx => { +test("import package::file1::{foo, bar};", ctx => { const src = ctx.task.name; const ast = parseTest(src); const imps = ast.imports.map(t => importToString(t)).join("\n"); - expect(imps).toMatchInlineSnapshot(`"package/file1/{foo, bar}"`); + expect(imps).toMatchInlineSnapshot(`"package::file1::{foo, bar};"`); }); -test("import foo_bar/boo;", ctx => { +test("import foo_bar::boo;", ctx => { const ast = parseTest(ctx.task.name); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import foo_bar/boo - text 'import foo_bar/boo;'" + import foo_bar::boo;" `); }); -test(`import a/{ b }`, ctx => { +test(`import a::{ b };`, ctx => { const ast = parseTest(ctx.task.name); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import a/{b} - text 'import a/{ b }'" + import a::{b};" `); }); -test(`import a/{ b, c/{d, e}, f }`, ctx => { +test(`import a::{ b, c::{d, e}, f };`, ctx => { const src = ctx.task.name; const ast = parseTest(src); const astString = astToString(ast.moduleElem); expect(astString).toMatchInlineSnapshot(` "module - import a/{b, (c/{d, e}), f} - text 'import a/{ b, c/{d, e}, f }'" + import a::{b, c::{d, e}, f};" `); }); diff --git a/linker/packages/linker/src/test/wgsl_1/main.wgsl b/linker/packages/linker/src/test/wgsl_1/main.wgsl index 
16c7d317..98a9e2a6 100644 --- a/linker/packages/linker/src/test/wgsl_1/main.wgsl +++ b/linker/packages/linker/src/test/wgsl_1/main.wgsl @@ -1,3 +1,3 @@ -import ./wgsl_1/util/bar; +import package::wgsl_1::util::bar; fn main() { bar(); } diff --git a/linker/packages/linker/src/test/wgsl_2/main2.wgsl b/linker/packages/linker/src/test/wgsl_2/main2.wgsl index 9d0079b2..e820f2fa 100644 --- a/linker/packages/linker/src/test/wgsl_2/main2.wgsl +++ b/linker/packages/linker/src/test/wgsl_2/main2.wgsl @@ -1,3 +1,3 @@ -import ./wgsl_2/util2/bar +import package::wgsl_2::util2::bar; fn main() { bar(); } \ No newline at end of file diff --git a/linker/packages/mini-parse/src/CombinatorTypes.ts b/linker/packages/mini-parse/src/CombinatorTypes.ts index 02e135e9..17152a67 100644 --- a/linker/packages/mini-parse/src/CombinatorTypes.ts +++ b/linker/packages/mini-parse/src/CombinatorTypes.ts @@ -100,6 +100,11 @@ export type SeqValues

= { type SeqTags

= Intersection>; +export type SeqObjParser

= Parser< + { [key in keyof P]: ResultFromArg }, + Intersection> +>; + export type OrParser

= Parser< OrValues

, OrNames

diff --git a/linker/packages/mini-parse/src/MatchingLexer.ts b/linker/packages/mini-parse/src/MatchingLexer.ts index 4ebc5406..f8853d49 100644 --- a/linker/packages/mini-parse/src/MatchingLexer.ts +++ b/linker/packages/mini-parse/src/MatchingLexer.ts @@ -1,5 +1,6 @@ import { srcTrace } from "./ParserLogging.js"; import { tracing } from "./ParserTracing.js"; +import { Span } from "./Span.js"; import { SrcMap } from "./SrcMap.js"; import { Token, TokenMatcher } from "./TokenMatcher.js"; @@ -11,7 +12,7 @@ export interface Lexer { withMatcher(newMatcher: TokenMatcher, fn: () => T): T; /** run a function with a substitute set of token kinds to ignore */ - withIgnore(newIgnore: Set, fn: () => T): T; + withIgnore(newIgnore: IgnoreFn | null, fn: () => T): T; /** get or set the current position in the src */ position(pos?: number): number; @@ -28,13 +29,30 @@ export interface Lexer { interface MatcherStackElem { matcher: TokenMatcher; - ignore: Set; + ignoreFn: IgnoreFn; +} + +/** + * To ignore a token, return the start index of where we should look for the next token. + * + * Note: This could be extended to handle nested languages, by extending the return types + * to `enum { Keep, Skip(newPosition), NestedParse(newPosition, result) }` and parsing + * inside of this function. 
+ */ +export type IgnoreFn = (token: Token, span: Span, src: string) => null | number; + +function defaultIgnorer(token: Token, span: Span, src: string): null | number { + if (token.kind === "ws") { + return span[1]; + } else { + return null; + } } export function matchingLexer( src: string, rootMatcher: TokenMatcher, - ignore = new Set(["ws"]), + ignoreFn: IgnoreFn = defaultIgnorer, srcMap?: SrcMap, ): Lexer { let matcher = rootMatcher; @@ -43,9 +61,9 @@ export function matchingLexer( matcher.start(src); function next(): Token | undefined { - const start = matcher.position(); + const start = matcher.position; const { token } = toNextToken(); - if (token && tracing) { + if (tracing && token) { const text = quotedText(token?.text); srcTrace(srcMap ?? src, start, `: ${text} (${token?.kind})`); } @@ -56,75 +74,79 @@ export function matchingLexer( const { p } = toNextToken(); // back up to the position before the first non-ignored token - matcher.position(p); + matcher.position = p; return p; } /** Advance to the next token * @return the token, and the position at the start of the token (after ignored ws) */ function toNextToken(): { p: number; token?: Token } { - let p = matcher.position(); - if (eof()) return { p }; - - // advance til we find a token we're not ignoring - let token = matcher.next(); - while (token && ignore.has(token.kind)) { - p = matcher.position(); // save position before the token + while (true) { + let p = matcher.position; if (eof()) return { p }; - token = matcher.next(); + // advance til we find a token we're not ignoring + let token = matcher.next(); + if (token === undefined) { + return { p, token: undefined }; + } + let skip = ignoreFn(token, [p, matcher.position], src); + if (skip === null) { + return { p, token }; + } else { + matcher.position = skip; + } } - return { p, token }; } - function pushMatcher(newMatcher: TokenMatcher, newIgnore: Set): void { - const position = matcher.position(); - matcherStack.push({ matcher, ignore }); + 
function pushMatcher(newMatcher: TokenMatcher, newIgnore: IgnoreFn): void { + const position = matcher.position; + matcherStack.push({ matcher, ignoreFn }); newMatcher.start(src, position); matcher = newMatcher; - ignore = newIgnore; + ignoreFn = newIgnore; } function popMatcher(): void { - const position = matcher.position(); + const position = matcher.position; const elem = matcherStack.pop(); if (!elem) { console.error("too many pops"); return; } matcher = elem.matcher; - ignore = elem.ignore; + ignoreFn = elem.ignoreFn; - matcher.position(position); + matcher.position = position; } function position(pos?: number): number { if (pos !== undefined) { matcher.start(src, pos); } - return matcher.position(); + return matcher.position; } function withMatcher(newMatcher: TokenMatcher, fn: () => T): T { - return withMatcherIgnore(newMatcher, ignore, fn); + return withMatcherIgnore(newMatcher, ignoreFn, fn); } - function withIgnore(newIgnore: Set, fn: () => T): T { - return withMatcherIgnore(matcher, newIgnore, fn); + function withIgnore(newIgnore: IgnoreFn | null, fn: () => T): T { + return withMatcherIgnore(matcher, newIgnore ?? 
defaultIgnorer, fn); } function withMatcherIgnore( tokenMatcher: TokenMatcher, - ignore: Set, + ignoreFn: IgnoreFn, fn: () => T, ): T { - pushMatcher(tokenMatcher, ignore); + pushMatcher(tokenMatcher, ignoreFn); const result = fn(); popMatcher(); return result; } function eof(): boolean { - return matcher.position() === src.length; + return matcher.position === src.length; } return { diff --git a/linker/packages/mini-parse/src/Parser.ts b/linker/packages/mini-parse/src/Parser.ts index 52bdc017..c685a53d 100644 --- a/linker/packages/mini-parse/src/Parser.ts +++ b/linker/packages/mini-parse/src/Parser.ts @@ -1,5 +1,5 @@ import { CombinatorArg, ParserFromArg } from "./CombinatorTypes.js"; -import { Lexer } from "./MatchingLexer.js"; +import { IgnoreFn, Lexer } from "./MatchingLexer.js"; import { collect, CollectFn, @@ -229,11 +229,19 @@ export class Parser { /** map results to a new value, or add to app state as a side effect. * Return null to cause the parser to fail. + * SAFETY: Side-effects should not be done if backtracking could occur! */ map(fn: ParserMapFn): Parser { return map(this, fn); } + /** map results to a new value. + * Return null to cause the parser to fail. + */ + mapValue(fn: (value: T) => U | null): Parser { + return map(this, v => fn(v.value)); + } + /** Queue a function that runs later, typically to collect AST elements from the parse. * when a commit() is parsed. * Collection functions are dropped with parser backtracking, so @@ -509,20 +517,20 @@ function toParser( return newParser; } -const emptySet = new Set(); +const neverIgnore: IgnoreFn = () => null; /** set which token kinds to ignore while executing this parser and its descendants. * If no parameters are provided, no tokens are ignored. */ export function tokenSkipSet( - ignore: Set | undefined | null, + ignoreFn: IgnoreFn | undefined | null, p: Parser, ): Parser { - const ignoreSet = ignore ?? 
emptySet; - const ignoreValues = [...ignoreSet.values()].toString() || "(null)"; + const ignoreValues = ignoreFn?.toString() ?? "(null)"; + const ignoreParser = parser( `tokenSkipSet ${ignoreValues}`, (ctx: ParserContext): OptParserResult => - ctx.lexer.withIgnore(ignoreSet, () => p._run(ctx)), + ctx.lexer.withIgnore(ignoreFn ?? neverIgnore, () => p._run(ctx)), ); trackChildren(ignoreParser, p); diff --git a/linker/packages/mini-parse/src/ParserCombinator.ts b/linker/packages/mini-parse/src/ParserCombinator.ts index fbbe7386..bb47ce8d 100644 --- a/linker/packages/mini-parse/src/ParserCombinator.ts +++ b/linker/packages/mini-parse/src/ParserCombinator.ts @@ -4,6 +4,7 @@ import { ParserFromArg, ParserFromRepeatArg, ResultFromArg, + SeqObjParser, SeqParser, SeqValues, TagsFromArg, @@ -56,6 +57,32 @@ export class ParseError extends Error { } } +/** Parse for a particular kind of token, + * @return the matching text */ +export function token(kindStr: string, value: string): Parser { + return simpleParser( + `token '${kindStr}' ${quotedText(value)}`, + (state: ParserContext): string | null => { + const next = state.lexer.next(); + return next?.kind === kindStr && next.text === value ? next.text : null; + }, + ); +} + +/** Parse for a particular kind of token, + * @return the matching text */ +export function tokenOf(kindStr: string, values: string[]): Parser { + return simpleParser( + `tokenOf '${kindStr}'`, + (state: ParserContext): string | null => { + const next = state.lexer.next(); + return next?.kind === kindStr && values.includes(next.text) ? + next.text + : null; + }, + ); +} + /** Parse for a particular kind of token, * @return the matching text */ export function kind(kindStr: string): Parser { @@ -107,6 +134,110 @@ export function seq

(...args: P): SeqParser

{ return seqParser as SeqParser

; } +/** Parse a sequence of parsers. Each named parser will be executed, placing its result in an equally named output. + * @return an object of all parsed results, or null if any parser fails */ +export function seqObj

( + args: P, +): SeqObjParser

{ + const parsers = Object.entries(args).map( + ([name, arg]) => [name as keyof P, parserArg(arg)] as const, + ); + const seqObjParser = parser("seqObj", (ctx: ParserContext) => { + const values: Partial> = {}; + let tagged = {}; + let failed = false; + for (const [name, p] of parsers) { + const result = p._run(ctx); + if (result === null) { + failed = true; + break; + } + + tagged = mergeTags(tagged, result.tags); + values[name] = result.value; + } + if (failed) return null; + return { value: values, tags: tagged }; + }).collect({ before: pushOpenArray, after: closeArray }); + + trackChildren(seqObjParser, ...parsers.map(v => v[1])); + + return seqObjParser as SeqObjParser

; +} + +/** Parse two values, and discard the first value + * @return the second value, or null if any parser fails */ +export function preceded

( + ignoredArg: CombinatorArg, + arg: P, +): ParserFromArg

{ + const ignored = parserArg(ignoredArg); + const p = parserArg(arg); + const precededParser: ParserFromArg

 = parser( + "preceded", + (ctx: ParserContext) => { + const ignoredResult = ignored._run(ctx); + if (ignoredResult === null) return null; + const result = p._run(ctx); + return result; + }, + ).collect({ before: pushOpenArray, after: closeArray }); + + trackChildren(precededParser, ignored, p); + + return precededParser; +} + +/** Parse two values, and discard the second value + * @return the first value, or null if any parser fails */ +export function terminated

( + arg: P, + ignoredArg: CombinatorArg, +): ParserFromArg

{ + const p = parserArg(arg); + const ignored = parserArg(ignoredArg); + const terminatedParser: ParserFromArg

= parser( + "terminated", + (ctx: ParserContext) => { + const result = p._run(ctx); + const ignoredResult = ignored._run(ctx); + if (ignoredResult === null) return null; + return result; + }, + ).collect({ before: pushOpenArray, after: closeArray }); + + trackChildren(terminatedParser, ignored, p); + + return terminatedParser; +} + +/** Parse three values, and only keep the middle value + * @return the second value, or null if any parser fails */ +export function delimited

( + ignoredArg1: CombinatorArg, + arg: P, + ignoredArg2: CombinatorArg, +): ParserFromArg

{ + const ignored1 = parserArg(ignoredArg1); + const p = parserArg(arg); + const ignored2 = parserArg(ignoredArg2); + const delimitedParser: ParserFromArg

= parser( + "delimited", + (ctx: ParserContext) => { + const ignoredResult1 = ignored1._run(ctx); + if (ignoredResult1 === null) return null; + const result = p._run(ctx); + const ignoredResult2 = ignored2._run(ctx); + if (ignoredResult2 === null) return null; + return result; + }, + ).collect({ before: pushOpenArray, after: closeArray }); + + trackChildren(delimitedParser, ignored1, p, ignored2); + + return delimitedParser; +} + /** Try parsing with one or more parsers, * @return the first successful parse */ export function or

(...args: P): OrParser

{ diff --git a/linker/packages/mini-parse/src/Span.ts b/linker/packages/mini-parse/src/Span.ts new file mode 100644 index 00000000..d5998cbd --- /dev/null +++ b/linker/packages/mini-parse/src/Span.ts @@ -0,0 +1,4 @@ +/** + * A range, from start (inclusive) to end (exclusive). + */ +export type Span = readonly [number, number]; diff --git a/linker/packages/mini-parse/src/TokenMatcher.ts b/linker/packages/mini-parse/src/TokenMatcher.ts index dec6a278..557410be 100644 --- a/linker/packages/mini-parse/src/TokenMatcher.ts +++ b/linker/packages/mini-parse/src/TokenMatcher.ts @@ -1,4 +1,5 @@ import { srcLog } from "./ParserLogging.js"; +import { Span } from "./Span.js"; export interface Token { kind: string; @@ -13,7 +14,8 @@ export type FullTokenMatcher = TokenMatcher & { export interface TokenMatcher { start(src: string, position?: number): void; next(): Token | undefined; - position(position?: number): number; + get position(): number; + set position(position: number); _debugName?: string; } @@ -39,7 +41,7 @@ export function tokenMatcher>( const groups: string[] = Object.keys(matchers); let src: string; // cache of tokens by position, so we don't have to reparse after backtracking - const cache = new Cache(5); + const cache = new Cache(5); const expParts = Object.entries(matchers).map(toRegexSource).join("|"); const exp = new RegExp(expParts, "midgu"); @@ -58,63 +60,61 @@ export function tokenMatcher>( const startPos = exp.lastIndex; const found = cache.get(startPos); if (found) { - exp.lastIndex += found.text.length; - return found; + exp.lastIndex = found.span[1]; + return found.token; } const matches = exp.exec(src); const matchedIndex = findGroupDex(matches?.indices); if (matchedIndex) { - const { startEnd, groupDex } = matchedIndex; + const { span, groupDex } = matchedIndex; const kind = groups[groupDex]; - const text = src.slice(startEnd[0], startEnd[1]); + const text = src.slice(span[0], span[1]); const token = { kind, text }; - if (startPos != startEnd[0]) { + 
if (startPos !== span[0]) { // TODO report filename as well // regex didn't recognize some characters and skipped ahead to match srcLog( src, startPos, - `tokens ${debugName} skipped: '${src.slice(startPos, startEnd[0])}' to get to: '${text}'`, + `tokens ${debugName} skipped: '${src.slice(startPos, span[0])}' to get to: '${text}'`, ); throw new Error("token matcher should match all input"); } - cache.set(startPos, token); + cache.set(startPos, { token, span }); return token; } } - function position(pos?: number): number { - if (pos !== undefined) { - exp.lastIndex = pos; - } - return exp.lastIndex; - } - const keyEntries = groups.map(k => [k, k]); const keys = Object.fromEntries(keyEntries); return { ...keys, start, next, - position, + get position(): number { + return exp.lastIndex; + }, + set position(pos: number) { + exp.lastIndex = pos; + }, _debugName: debugName, } as FullTokenMatcher; } interface MatchedIndex { - startEnd: [number, number]; + span: Span; groupDex: number; } function findGroupDex( indices: RegExpIndicesArray | undefined, ): MatchedIndex | undefined { - if (indices) { + if (indices !== undefined) { for (let i = 1; i < indices.length; i++) { - const startEnd = indices[i]; - if (startEnd) { - return { startEnd, groupDex: i - 1 }; + const span = indices[i]; + if (span !== undefined) { + return { span, groupDex: i - 1 }; } } } diff --git a/linker/packages/mini-parse/src/index.ts b/linker/packages/mini-parse/src/index.ts index b4651284..991f25cd 100644 --- a/linker/packages/mini-parse/src/index.ts +++ b/linker/packages/mini-parse/src/index.ts @@ -9,3 +9,4 @@ export * from "./SrcMap.js"; export * from "./SrcMapBuilder.js"; export * from "./TokenMatcher.js"; export * from "./WrappedLog.js"; +export * from "./Span.js"; diff --git a/linker/packages/packager/src/test/packager.test.ts b/linker/packages/packager/src/test/packager.test.ts index a6a8e0f9..59aef1f2 100644 --- a/linker/packages/packager/src/test/packager.test.ts +++ 
b/linker/packages/packager/src/test/packager.test.ts @@ -26,7 +26,7 @@ test("package two wgsl files", async () => { "edition": "wesl_unstable_2024_1", "modules": { "util.wgsl": "fn foo() {}", - "lib.wesl": "import ./util.wgsl\\n" + "lib.wesl": "import package::util;\\n" } } diff --git a/linker/packages/packager/src/test/wesl-package/src/lib.wesl b/linker/packages/packager/src/test/wesl-package/src/lib.wesl index ce8e9d01..47896afe 100644 --- a/linker/packages/packager/src/test/wesl-package/src/lib.wesl +++ b/linker/packages/packager/src/test/wesl-package/src/lib.wesl @@ -1 +1 @@ -import ./util.wgsl +import package::util; diff --git a/linker/packages/plugin-test/shaders/app.wesl b/linker/packages/plugin-test/shaders/app.wesl index e06e2a37..38baef1a 100644 --- a/linker/packages/plugin-test/shaders/app.wesl +++ b/linker/packages/plugin-test/shaders/app.wesl @@ -1,4 +1,4 @@ -import ./uniforms/Uniforms; +import package::uniforms::Uniforms; struct MyBindings { @group(0) @binding(0) particles: ptr, read_write>, diff --git a/linker/packages/plugin-test/shaders/layoutTest.wesl b/linker/packages/plugin-test/shaders/layoutTest.wesl index 9b25f46d..c80ccf59 100644 --- a/linker/packages/plugin-test/shaders/layoutTest.wesl +++ b/linker/packages/plugin-test/shaders/layoutTest.wesl @@ -1,4 +1,4 @@ -import ./uniforms/Uniforms; +import package::uniforms::Uniforms; struct MyBindings { @group(0) @binding(0) particles: ptr, read_write>, diff --git a/linker/packages/plugin-test/src/test/LayoutReflection.test.ts b/linker/packages/plugin-test/src/test/LayoutReflection.test.ts index d7bdded3..8f9c6df8 100644 --- a/linker/packages/plugin-test/src/test/LayoutReflection.test.ts +++ b/linker/packages/plugin-test/src/test/LayoutReflection.test.ts @@ -11,10 +11,14 @@ const testDir = dirname(fileURLToPath(import.meta.url)); test("vite generates binding layout", async () => { // vite is configured to use the wesl plugin // build a test program that imports using the '?reflect' import pattern - 
await exec(`cd ${testDir}; pnpm vite build`); + await exec(`pnpm vite build`, { + cwd: testDir, + }); const outFile = path.join("dist", "testMain.cjs"); // the test program testMain.ts logs the layout entries to the console for verification - const result = await exec(`cd ${testDir}; pnpm node ${outFile}`); + const result = await exec(`pnpm node ${outFile}`, { + cwd: testDir, + }); expect(result.stdout).toMatchInlineSnapshot(` "{ diff --git a/linker/packages/plugin/src/weslPlugin.ts b/linker/packages/plugin/src/weslPlugin.ts index e7c33665..9c4bb83c 100644 --- a/linker/packages/plugin/src/weslPlugin.ts +++ b/linker/packages/plugin/src/weslPlugin.ts @@ -140,6 +140,7 @@ async function loadWesl( const { weslToml } = options; const tomlDir = weslToml ? path.dirname(weslToml) : process.cwd(); + const globs = weslFiles.map(g => tomlDir + "/" + g); const futureFiles = globs.map(g => glob(g)); const files = (await Promise.all(futureFiles)).flat(); diff --git a/linker/packages/random_wgsl/package.json b/linker/packages/random_wgsl/package.json index 849149c1..7303e177 100644 --- a/linker/packages/random_wgsl/package.json +++ b/linker/packages/random_wgsl/package.json @@ -19,4 +19,4 @@ "devDependencies": { "wesl": "workspace:*" } -} +} \ No newline at end of file diff --git a/wesl-testsuite b/wesl-testsuite index e795048a..5dee6f98 160000 --- a/wesl-testsuite +++ b/wesl-testsuite @@ -1 +1 @@ -Subproject commit e795048a6475cfd9f42d21f0ea61c01595a8e5d5 +Subproject commit 5dee6f98e8ca641ab09f92b3f898a15c2359841d