From abb968e704920df3c45fbd39b1aafdb0e78f67e5 Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Thu, 12 Mar 2020 19:47:11 +0100 Subject: [PATCH 01/14] InterpreterApp parent 7f427a8fecd97a58635a3260a1492268d654e1a7 author Javier Cabrera 1584053392 +0100 committer Javier Cabrera 1587397289 +0200 Executing binary files Executing text format Adding tracing support Mappping app args to tracer options Starting parameters parsing Forcing entrypoint function name in the cli app Fixing help messages Renaming to cli Removing PublishModule mixing from cli project Listing available functions if no name is provided Listing available functions if no name is provided Update cli/src/InterpreterApp.scala Co-Authored-By: Lucas Satabin Improving code Wrapping side-effects Changing s name to main Removing test modules Remove pom settings for cil package Use blocker for readig text files Using the standard `Source` class is not safe, as it executes in the calling execution context. Switching to the fs2 interface makes it more stable and better. 
Fix examples with new text file reading API Removing unnecessary method in Engine.scala Removing missed printf method Reformating Fix examples with new text file reading API Reformating Fixing examples Fix code issues Sorting cli options Sorting cli options WIP Sorting cli options Successful external jar loading Starting parameters parsing Starting parameters parsing Fix fieldName repetition Fix fieldName repetition Adding WITX parser to create WASI interfaces Removing stdlib module Reformatting Adding snapshopt of WASI as package resource Loading types on demand Traversers for types and interfaces Generating boilerplate from witx Isolating the witx-parser --- .gitignore | 2 +- build.sc | 26 +- examples/docs/annotations.md | 57 +- examples/docs/cfg.md | 14 +- examples/docs/decompiler.md | 18 +- examples/docs/fibo.md | 18 +- examples/docs/logged.md | 38 +- examples/docs/string.md | 34 +- generator/resources/import_template.mustache | 1 + generator/resources/wasi_witx/get.sh | 2 + generator/resources/wasi_witx/typenames.witx | 747 ++++++++++++++++++ .../wasi_witx/wasi_snapshot_preview1.witx | 532 +++++++++++++ generator/src/swam/generator/Generator.scala | 48 +- .../src/swam/generator/ImportGenerator.scala | 33 +- .../witx/LoadTypeEmitTraverser.scala | 89 +++ .../swam/generator/witx/ModuleTraverse.scala | 212 +++++ .../generator/witx/TypesEmitTraverser.scala | 171 ++++ .../witx/WriteTypeEmitTraverser.scala | 78 ++ generator/test/src/generator/WitxTest.scala | 48 ++ runtime/resources/reference.conf | 6 +- runtime/src/swam/runtime/Engine.scala | 4 +- runtime/src/swam/runtime/Interface.scala | 8 + .../src/swam/runtime/imports/IFunction2.scala | 252 ++++++ .../src/swam/runtime/imports/Imports.scala | 100 ++- .../internals/instance/Instantiator.scala | 4 +- .../internals/instance/MemoryInstance.scala | 13 +- .../runtime/internals/interpreter/asm.scala | 6 +- .../src/swam/runtime/trace/JULTracer.scala | 35 +- .../swam/runtime/trace/TracingMemory.scala | 26 +- 
split_traces.py | 44 ++ text/src/swam/text/Compiler.scala | 9 +- text/src/swam/text/package.scala | 10 +- text/src/swam/text/parser/Lexical.scala | 3 + text/src/swam/witx/WitxParser.scala | 53 ++ text/src/swam/witx/parser/ImportContext.scala | 49 ++ text/src/swam/witx/parser/ModuleParser.scala | 106 +++ text/src/swam/witx/parser/TypesParser.scala | 111 +++ text/src/swam/witx/parser/package.scala | 15 + .../traverser/ModuleInterfaceTraverser.scala | 28 + .../swam/witx/traverser/TypesTraverser.scala | 43 + .../swam/witx/unresolved/Declarations.scala | 16 + text/src/swam/witx/unresolved/Types.scala | 32 + update_wasi_interfaces.sh | 1 + 43 files changed, 3008 insertions(+), 134 deletions(-) create mode 100755 generator/resources/wasi_witx/get.sh create mode 100644 generator/resources/wasi_witx/typenames.witx create mode 100644 generator/resources/wasi_witx/wasi_snapshot_preview1.witx create mode 100644 generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala create mode 100644 generator/src/swam/generator/witx/ModuleTraverse.scala create mode 100644 generator/src/swam/generator/witx/TypesEmitTraverser.scala create mode 100644 generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala create mode 100644 generator/test/src/generator/WitxTest.scala create mode 100644 split_traces.py create mode 100644 text/src/swam/witx/WitxParser.scala create mode 100644 text/src/swam/witx/parser/ImportContext.scala create mode 100644 text/src/swam/witx/parser/ModuleParser.scala create mode 100644 text/src/swam/witx/parser/TypesParser.scala create mode 100644 text/src/swam/witx/parser/package.scala create mode 100644 text/src/swam/witx/traverser/ModuleInterfaceTraverser.scala create mode 100644 text/src/swam/witx/traverser/TypesTraverser.scala create mode 100644 text/src/swam/witx/unresolved/Declarations.scala create mode 100644 text/src/swam/witx/unresolved/Types.scala create mode 100755 update_wasi_interfaces.sh diff --git a/.gitignore b/.gitignore index ad245017..28623204 
100644 --- a/.gitignore +++ b/.gitignore @@ -59,5 +59,5 @@ site/output .bloop/ -log.txt +*log.txt *.lck \ No newline at end of file diff --git a/build.sc b/build.sc index ed14a926..fec693ca 100644 --- a/build.sc +++ b/build.sc @@ -1,25 +1,23 @@ import mill._ +import scalalib._ import mill.eval._ import mill.scalalib._ import mill.scalalib.publish._ import mill.scalalib.scalafmt._ - import ammonite.ops._ import mill.modules.Jvm.runSubprocess - import coursier.maven.MavenRepository - import $file.jmh import jmh.Jmh import $file.headers import headers.Headers - import $file.mdoc import mdoc.MdocModule - import $ivy.`com.lihaoyi::mill-contrib-bloop:$MILL_VERSION` +import generator.ivy +import mill.api.PathRef -val swamVersion = "0.5.0" +val swamVersion = "0.6.0-SNAPSHOT" val swamLicense = License.`Apache-2.0` @@ -107,7 +105,7 @@ object text extends SwamModule with PublishModule { object generator extends SwamModule with PublishModule { - def moduleDeps = Seq(core, runtime) + def moduleDeps = Seq(core, runtime, text) def publishVersion = swamVersion @@ -132,17 +130,17 @@ object generator extends SwamModule with PublishModule { developers = Seq(swamDeveloper) ) - object test extends Tests with ScalafmtModule { - def ivyDeps = - Agg(ivy"com.lihaoyi::utest:0.7.1", ivy"com.github.pathikrit::better-files:3.8.0", ivy"com.lihaoyi::pprint:0.5.5") - - def moduleDeps = Seq(generator, text, util.test) + object test extends Tests with ScalafmtModule { + def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.7.1") def testFrameworks = Seq("swam.util.Framework") + def moduleDeps = Seq(core,text, util.test ) } - } + + + object runtime extends SwamModule with PublishModule { def moduleDeps = Seq(core) @@ -183,7 +181,7 @@ object examples extends SwamModule with MdocModule { def moduleDeps = Seq(runtime, text) - def mdocVersion = "1.3.6" + def mdocVersion = "2.1.3" def mdocSite = Map("VERSION" -> swamVersion) diff --git a/examples/docs/annotations.md b/examples/docs/annotations.md index 
2f7d1d09..86b8ff0e 100644 --- a/examples/docs/annotations.md +++ b/examples/docs/annotations.md @@ -48,15 +48,20 @@ import java.nio.file.Paths val tcompiler = Compiler[IO] -val engine = Engine[IO] - -val f = (for { - engine <- engine - tcompiler <- tcompiler - mod <- engine.compile(tcompiler.stream(Paths.get("annotations.wat"), true)) - inst <- mod.importing("m", m).instantiate - f <- inst.exports.typed.procedure0("mutate") -} yield f).unsafeRunSync() +val engine = Engine[IO]() + +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + +val f = + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + mod <- engine.compile(tcompiler.stream(Paths.get("annotations.wat"), true, blocker)) + inst <- mod.importing("m", m).instantiate + f <- inst.exports.typed.procedure0("mutate") + } yield f + }.unsafeRunSync() ``` ```scala mdoc @@ -90,13 +95,16 @@ val pm = new PureModule WebAssembly modules can now [import it, and call it](/examples/pure-annotations.wat): ```scala mdoc:silent -val add42 = (for { - engine <- engine - tcompiler <- tcompiler - mod <- engine.compile(tcompiler.stream(Paths.get("pure-annotations.wat"), true)) - inst <- mod.importing("m", pm).instantiate - f <- inst.exports.typed.function0[Int]("f") -} yield f).unsafeRunSync() +val add42 = + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + mod <- engine.compile(tcompiler.stream(Paths.get("pure-annotations.wat"), true, blocker)) + inst <- mod.importing("m", pm).instantiate + f <- inst.exports.typed.function0[Int]("f") + } yield f + }.unsafeRunSync() ``` Executing the imported function, returns the desired result: @@ -130,13 +138,16 @@ val mIO = new EffectfulModule[IO] Now, WebAssembly modules can [import that module and use the effectful function](/examples/effectful-annotations.wat): ```scala mdoc:silent -val logged = (for { - engine <- engine - tcompiler <- tcompiler - mod <- 
engine.compile(tcompiler.stream(Paths.get("effectful-annotations.wat"), true)) - inst <- mod.importing("console", mIO).instantiate - f <- inst.exports.typed.function1[Int, Int]("add42") -} yield f).unsafeRunSync() +val logged = + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + mod <- engine.compile(tcompiler.stream(Paths.get("effectful-annotations.wat"), true, blocker)) + inst <- mod.importing("console", mIO).instantiate + f <- inst.exports.typed.function1[Int, Int]("add42") + } yield f + }.unsafeRunSync() ``` We can now run the `logged` function and the parameter is logged to stdout as expected diff --git a/examples/docs/cfg.md b/examples/docs/cfg.md index 5404ac7c..ff79fe37 100644 --- a/examples/docs/cfg.md +++ b/examples/docs/cfg.md @@ -19,12 +19,16 @@ import java.nio.file.Paths val compiler = Compiler[IO] +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + val cfg = - (for { - compiler <- compiler - naive <- compiler.compile(Paths.get("fibo.wat")).map(_.funcs(0)) - cfg <- CFGicator.buildCFG[IO](naive.body) - } yield cfg).unsafeRunSync() + Blocker[IO].use { blocker => + for { + compiler <- compiler + naive <- compiler.compile(Paths.get("fibo.wat"), blocker).map(_.funcs(0)) + cfg <- CFGicator.buildCFG[IO](naive.body) + } yield cfg + }.unsafeRunSync() ``` The CFG can be traversed in postorder (depth first) using the `CFG.postorder` function, that makes it possible to compute a value by accumulation. 
diff --git a/examples/docs/decompiler.md b/examples/docs/decompiler.md index 06b5d605..a2001659 100644 --- a/examples/docs/decompiler.md +++ b/examples/docs/decompiler.md @@ -18,14 +18,18 @@ val rdecompiler = RawDecompiler[IO] val tdecompiler = TextDecompiler[IO] +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + def compdec(p: String): (Doc, Doc) = - (for { - tcompiler <- tcompiler - rdecompiler <- rdecompiler - tdecompiler <- tdecompiler - rd <- rdecompiler.decompile(tcompiler.stream(Paths.get(p), true)) - td <- tdecompiler.decompile(tcompiler.stream(Paths.get(p), true)) - } yield (rd, td)).unsafeRunSync() + Blocker[IO].use { blocker => + for { + tcompiler <- tcompiler + rdecompiler <- rdecompiler + tdecompiler <- tdecompiler + rd <- rdecompiler.decompile(tcompiler.stream(Paths.get(p), true, blocker)) + td <- tdecompiler.decompile(tcompiler.stream(Paths.get(p), true, blocker)) + } yield (rd, td) + }.unsafeRunSync() val (rd, td) = compdec("fibo.wat") ``` diff --git a/examples/docs/fibo.md b/examples/docs/fibo.md index 8db7536a..24d674e2 100644 --- a/examples/docs/fibo.md +++ b/examples/docs/fibo.md @@ -14,15 +14,19 @@ import java.nio.file.Paths val tcompiler = Compiler[IO] -val engine = Engine[IO] +val engine = Engine[IO]() + +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) def instantiate(p: String): Instance[IO] = - (for { - engine <- engine - tcompiler <- tcompiler - m <- engine.compile(tcompiler.stream(Paths.get(p), true)) - i <- m.instantiate - } yield i).unsafeRunSync() + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + m <- engine.compile(tcompiler.stream(Paths.get(p), true, blocker)) + i <- m.instantiate + } yield i + }.unsafeRunSync() def time[T](t: => T): T = { val start = System.currentTimeMillis diff --git a/examples/docs/logged.md b/examples/docs/logged.md index 621c5776..c641a8be 100644 --- a/examples/docs/logged.md +++ b/examples/docs/logged.md @@ -15,17 +15,22 
@@ import java.nio.file.Paths val tcompiler = Compiler[IO] -val engine = Engine[IO] +val engine = Engine[IO]() + +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) def log(i: Int) = IO(println(s"got $i")) -val f = (for { - engine <- engine - tcompiler <- tcompiler - mod <- engine.compile(tcompiler.stream(Paths.get("logged.wat"), true)) - inst <- mod.importing("console", "log" -> log _).instantiate - f <- inst.exports.typed.function1[Int, Int]("add42") -} yield f).unsafeRunSync() +val f = + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + mod <- engine.compile(tcompiler.stream(Paths.get("logged.wat"), true, blocker)) + inst <- mod.importing("console", "log" -> log _).instantiate + f <- inst.exports.typed.function1[Int, Int]("add42") + } yield f + }.unsafeRunSync() ``` running function `f` logs the parameter. @@ -38,14 +43,15 @@ It is also possible to use [`HList`s][hlist] to represent imported modules with ```scala mdoc:silent import shapeless._ -for { - engine <- engine - tcompiler <- tcompiler - mod <- engine.compile(tcompiler.stream(Paths.get("logged.wat"), true)) - inst <- mod.importing("console", "log" -> log _ :: "colors" -> 256 :: HNil).instantiate - f <- inst.exports.typed.function1[Int, Int]("add42") -} yield f - +Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + mod <- engine.compile(tcompiler.stream(Paths.get("logged.wat"), true, blocker)) + inst <- mod.importing("console", "log" -> log _ :: "colors" -> 256 :: HNil).instantiate + f <- inst.exports.typed.function1[Int, Int]("add42") + } yield f +} ``` [hlist]: https://github.com/milessabin/shapeless/wiki/Feature-overview:-shapeless-2.0.0#heterogenous-lists diff --git a/examples/docs/string.md b/examples/docs/string.md index 4ee5c4fc..b314e8db 100644 --- a/examples/docs/string.md +++ b/examples/docs/string.md @@ -13,23 +13,27 @@ import java.nio.file.Paths val tcompiler = Compiler[IO] -val engine = Engine[IO] +val 
engine = Engine[IO]() + +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) val strings = - for { - engine <- engine - tcompiler <- tcompiler - m <- engine.compile(tcompiler.stream(Paths.get("string.wat"), true)) - i <- m.instantiate - s1 <- { - import formats.string.cstring - i.exports.typed.global[String]("c-like") - } - s2 <- { - import formats.string.utf8 - i.exports.typed.global[String]("utf8") - } - } yield (s1, s2) + Blocker[IO].use { blocker => + for { + engine <- engine + tcompiler <- tcompiler + m <- engine.compile(tcompiler.stream(Paths.get("string.wat"), true, blocker)) + i <- m.instantiate + s1 <- { + import formats.string.cstring + i.exports.typed.global[String]("c-like") + } + s2 <- { + import formats.string.utf8 + i.exports.typed.global[String]("utf8") + } + } yield (s1, s2) + } val (s1, s2) = strings.unsafeRunSync() ``` diff --git a/generator/resources/import_template.mustache b/generator/resources/import_template.mustache index dce4c340..cdd90baa 100644 --- a/generator/resources/import_template.mustache +++ b/generator/resources/import_template.mustache @@ -1,3 +1,4 @@ +import cats.effect.IO import swam.runtime.imports.{AsInstance, AsInterface, Imports, TCMap} import swam.runtime.formats._ import swam.runtime.formats.DefaultFormatters._ diff --git a/generator/resources/wasi_witx/get.sh b/generator/resources/wasi_witx/get.sh new file mode 100755 index 00000000..7f5db3e7 --- /dev/null +++ b/generator/resources/wasi_witx/get.sh @@ -0,0 +1,2 @@ +wget https://raw.githubusercontent.com/WebAssembly/WASI/master/phases/snapshot/witx/wasi_snapshot_preview1.witx +wget https://raw.githubusercontent.com/WebAssembly/WASI/master/phases/snapshot/witx/typenames.witx diff --git a/generator/resources/wasi_witx/typenames.witx b/generator/resources/wasi_witx/typenames.witx new file mode 100644 index 00000000..2bccd79a --- /dev/null +++ b/generator/resources/wasi_witx/typenames.witx @@ -0,0 +1,747 @@ +;; Type names used by low-level WASI 
interfaces. +;; +;; Some content here is derived from [CloudABI](https://github.com/NuxiNL/cloudabi). +;; +;; This is a `witx` file. See [here](https://github.com/WebAssembly/WASI/tree/master/docs/witx.md) +;; for an explanation of what that means. + +(typename $size u32) + +;;; Non-negative file size or length of a region within a file. +(typename $filesize u64) + +;;; Timestamp in nanoseconds. +(typename $timestamp u64) + +;;; Identifiers for clocks. +(typename $clockid + (enum u32 + ;;; The clock measuring real time. Time value zero corresponds with + ;;; 1970-01-01T00:00:00Z. + $realtime + ;;; The store-wide monotonic clock, which is defined as a clock measuring + ;;; real time, whose value cannot be adjusted and which cannot have negative + ;;; clock jumps. The epoch of this clock is undefined. The absolute time + ;;; value of this clock therefore has no meaning. + $monotonic + ;;; The CPU-time clock associated with the current process. + $process_cputime_id + ;;; The CPU-time clock associated with the current thread. + $thread_cputime_id + ) +) + +;;; Error codes returned by functions. +;;; Not all of these error codes are returned by the functions provided by this +;;; API; some are used in higher-level library layers, and others are provided +;;; merely for alignment with POSIX. +(typename $errno + (enum u16 + ;;; No error occurred. System call completed successfully. + $success + ;;; Argument list too long. + $2big + ;;; Permission denied. + $acces + ;;; Address in use. + $addrinuse + ;;; Address not available. + $addrnotavail + ;;; Address family not supported. + $afnosupport + ;;; Resource unavailable, or operation would block. + $again + ;;; Connection already in progress. + $already + ;;; Bad file descriptor. + $badf + ;;; Bad message. + $badmsg + ;;; Device or resource busy. + $busy + ;;; Operation canceled. + $canceled + ;;; No child processes. + $child + ;;; Connection aborted. + $connaborted + ;;; Connection refused. 
+ $connrefused + ;;; Connection reset. + $connreset + ;;; Resource deadlock would occur. + $deadlk + ;;; Destination address required. + $destaddrreq + ;;; Mathematics argument out of domain of function. + $dom + ;;; Reserved. + $dquot + ;;; File exists. + $exist + ;;; Bad address. + $fault + ;;; File too large. + $fbig + ;;; Host is unreachable. + $hostunreach + ;;; Identifier removed. + $idrm + ;;; Illegal byte sequence. + $ilseq + ;;; Operation in progress. + $inprogress + ;;; Interrupted function. + $intr + ;;; Invalid argument. + $inval + ;;; I/O error. + $io + ;;; Socket is connected. + $isconn + ;;; Is a directory. + $isdir + ;;; Too many levels of symbolic links. + $loop + ;;; File descriptor value too large. + $mfile + ;;; Too many links. + $mlink + ;;; Message too large. + $msgsize + ;;; Reserved. + $multihop + ;;; Filename too long. + $nametoolong + ;;; Network is down. + $netdown + ;;; Connection aborted by network. + $netreset + ;;; Network unreachable. + $netunreach + ;;; Too many files open in system. + $nfile + ;;; No buffer space available. + $nobufs + ;;; No such device. + $nodev + ;;; No such file or directory. + $noent + ;;; Executable file format error. + $noexec + ;;; No locks available. + $nolck + ;;; Reserved. + $nolink + ;;; Not enough space. + $nomem + ;;; No message of the desired type. + $nomsg + ;;; Protocol not available. + $noprotoopt + ;;; No space left on device. + $nospc + ;;; Function not supported. + $nosys + ;;; The socket is not connected. + $notconn + ;;; Not a directory or a symbolic link to a directory. + $notdir + ;;; Directory not empty. + $notempty + ;;; State not recoverable. + $notrecoverable + ;;; Not a socket. + $notsock + ;;; Not supported, or operation not supported on socket. + $notsup + ;;; Inappropriate I/O control operation. + $notty + ;;; No such device or address. + $nxio + ;;; Value too large to be stored in data type. + $overflow + ;;; Previous owner died. + $ownerdead + ;;; Operation not permitted. 
+ $perm + ;;; Broken pipe. + $pipe + ;;; Protocol error. + $proto + ;;; Protocol not supported. + $protonosupport + ;;; Protocol wrong type for socket. + $prototype + ;;; Result too large. + $range + ;;; Read-only file system. + $rofs + ;;; Invalid seek. + $spipe + ;;; No such process. + $srch + ;;; Reserved. + $stale + ;;; Connection timed out. + $timedout + ;;; Text file busy. + $txtbsy + ;;; Cross-device link. + $xdev + ;;; Extension: Capabilities insufficient. + $notcapable + ) +) + +;;; File descriptor rights, determining which actions may be performed. +(typename $rights + (flags u64 + ;;; The right to invoke `fd_datasync`. + ;; + ;;; If `path_open` is set, includes the right to invoke + ;;; `path_open` with `fdflags::dsync`. + $fd_datasync + ;;; The right to invoke `fd_read` and `sock_recv`. + ;; + ;;; If `rights::fd_seek` is set, includes the right to invoke `fd_pread`. + $fd_read + ;;; The right to invoke `fd_seek`. This flag implies `rights::fd_tell`. + $fd_seek + ;;; The right to invoke `fd_fdstat_set_flags`. + $fd_fdstat_set_flags + ;;; The right to invoke `fd_sync`. + ;; + ;;; If `path_open` is set, includes the right to invoke + ;;; `path_open` with `fdflags::rsync` and `fdflags::dsync`. + $fd_sync + ;;; The right to invoke `fd_seek` in such a way that the file offset + ;;; remains unaltered (i.e., `whence::cur` with offset zero), or to + ;;; invoke `fd_tell`. + $fd_tell + ;;; The right to invoke `fd_write` and `sock_send`. + ;;; If `rights::fd_seek` is set, includes the right to invoke `fd_pwrite`. + $fd_write + ;;; The right to invoke `fd_advise`. + $fd_advise + ;;; The right to invoke `fd_allocate`. + $fd_allocate + ;;; The right to invoke `path_create_directory`. + $path_create_directory + ;;; If `path_open` is set, the right to invoke `path_open` with `oflags::creat`. + $path_create_file + ;;; The right to invoke `path_link` with the file descriptor as the + ;;; source directory. 
+ $path_link_source + ;;; The right to invoke `path_link` with the file descriptor as the + ;;; target directory. + $path_link_target + ;;; The right to invoke `path_open`. + $path_open + ;;; The right to invoke `fd_readdir`. + $fd_readdir + ;;; The right to invoke `path_readlink`. + $path_readlink + ;;; The right to invoke `path_rename` with the file descriptor as the source directory. + $path_rename_source + ;;; The right to invoke `path_rename` with the file descriptor as the target directory. + $path_rename_target + ;;; The right to invoke `path_filestat_get`. + $path_filestat_get + ;;; The right to change a file's size (there is no `path_filestat_set_size`). + ;;; If `path_open` is set, includes the right to invoke `path_open` with `oflags::trunc`. + $path_filestat_set_size + ;;; The right to invoke `path_filestat_set_times`. + $path_filestat_set_times + ;;; The right to invoke `fd_filestat_get`. + $fd_filestat_get + ;;; The right to invoke `fd_filestat_set_size`. + $fd_filestat_set_size + ;;; The right to invoke `fd_filestat_set_times`. + $fd_filestat_set_times + ;;; The right to invoke `path_symlink`. + $path_symlink + ;;; The right to invoke `path_remove_directory`. + $path_remove_directory + ;;; The right to invoke `path_unlink_file`. + $path_unlink_file + ;;; If `rights::fd_read` is set, includes the right to invoke `poll_oneoff` to subscribe to `eventtype::fd_read`. + ;;; If `rights::fd_write` is set, includes the right to invoke `poll_oneoff` to subscribe to `eventtype::fd_write`. + $poll_fd_readwrite + ;;; The right to invoke `sock_shutdown`. + $sock_shutdown + ) +) + +;;; A file descriptor handle. +(typename $fd (handle)) + +;;; A region of memory for scatter/gather reads. +(typename $iovec + (struct + ;;; The address of the buffer to be filled. + (field $buf (@witx pointer u8)) + ;;; The length of the buffer to be filled. + (field $buf_len $size) + ) +) + +;;; A region of memory for scatter/gather writes. 
+(typename $ciovec + (struct + ;;; The address of the buffer to be written. + (field $buf (@witx const_pointer u8)) + ;;; The length of the buffer to be written. + (field $buf_len $size) + ) +) + +(typename $iovec_array (array $iovec)) +(typename $ciovec_array (array $ciovec)) + +;;; Relative offset within a file. +(typename $filedelta s64) + +;;; The position relative to which to set the offset of the file descriptor. +(typename $whence + (enum u8 + ;;; Seek relative to start-of-file. + $set + ;;; Seek relative to current position. + $cur + ;;; Seek relative to end-of-file. + $end + ) +) + +;;; A reference to the offset of a directory entry. +;;; +;;; The value 0 signifies the start of the directory. +(typename $dircookie u64) + +;;; The type for the $d_namlen field of $dirent. +(typename $dirnamlen u32) + +;;; File serial number that is unique within its file system. +(typename $inode u64) + +;;; The type of a file descriptor or file. +(typename $filetype + (enum u8 + ;;; The type of the file descriptor or file is unknown or is different from any of the other types specified. + $unknown + ;;; The file descriptor or file refers to a block device inode. + $block_device + ;;; The file descriptor or file refers to a character device inode. + $character_device + ;;; The file descriptor or file refers to a directory inode. + $directory + ;;; The file descriptor or file refers to a regular file inode. + $regular_file + ;;; The file descriptor or file refers to a datagram socket. + $socket_dgram + ;;; The file descriptor or file refers to a byte-stream socket. + $socket_stream + ;;; The file refers to a symbolic link inode. + $symbolic_link + ) +) + +;;; A directory entry. +(typename $dirent + (struct + ;;; The offset of the next directory entry stored in this directory. + (field $d_next $dircookie) + ;;; The serial number of the file referred to by this directory entry. + (field $d_ino $inode) + ;;; The length of the name of the directory entry. 
+ (field $d_namlen $dirnamlen) + ;;; The type of the file referred to by this directory entry. + (field $d_type $filetype) + ) +) + +;;; File or memory access pattern advisory information. +(typename $advice + (enum u8 + ;;; The application has no advice to give on its behavior with respect to the specified data. + $normal + ;;; The application expects to access the specified data sequentially from lower offsets to higher offsets. + $sequential + ;;; The application expects to access the specified data in a random order. + $random + ;;; The application expects to access the specified data in the near future. + $willneed + ;;; The application expects that it will not access the specified data in the near future. + $dontneed + ;;; The application expects to access the specified data once and then not reuse it thereafter. + $noreuse + ) +) + +;;; File descriptor flags. +(typename $fdflags + (flags u16 + ;;; Append mode: Data written to the file is always appended to the file's end. + $append + ;;; Write according to synchronized I/O data integrity completion. Only the data stored in the file is synchronized. + $dsync + ;;; Non-blocking mode. + $nonblock + ;;; Synchronized read I/O operations. + $rsync + ;;; Write according to synchronized I/O file integrity completion. In + ;;; addition to synchronizing the data stored in the file, the implementation + ;;; may also synchronously update the file's metadata. + $sync + ) +) + +;;; File descriptor attributes. +(typename $fdstat + (struct + ;;; File type. + (field $fs_filetype $filetype) + ;;; File descriptor flags. + (field $fs_flags $fdflags) + ;;; Rights that apply to this file descriptor. + (field $fs_rights_base $rights) + ;;; Maximum set of rights that may be installed on new file descriptors that + ;;; are created through this file descriptor, e.g., through `path_open`. + (field $fs_rights_inheriting $rights) + ) +) + +;;; Identifier for a device containing a file system. 
Can be used in combination +;;; with `inode` to uniquely identify a file or directory in the filesystem. +(typename $device u64) + +;;; Which file time attributes to adjust. +(typename $fstflags + (flags u16 + ;;; Adjust the last data access timestamp to the value stored in `filestat::atim`. + $atim + ;;; Adjust the last data access timestamp to the time of clock `clockid::realtime`. + $atim_now + ;;; Adjust the last data modification timestamp to the value stored in `filestat::mtim`. + $mtim + ;;; Adjust the last data modification timestamp to the time of clock `clockid::realtime`. + $mtim_now + ) +) + +;;; Flags determining the method of how paths are resolved. +(typename $lookupflags + (flags u32 + ;;; As long as the resolved path corresponds to a symbolic link, it is expanded. + $symlink_follow + ) +) + +;;; Open flags used by `path_open`. +(typename $oflags + (flags u16 + ;;; Create file if it does not exist. + $creat + ;;; Fail if not a directory. + $directory + ;;; Fail if file already exists. + $excl + ;;; Truncate file to size 0. + $trunc + ) +) + +;;; Number of hard links to an inode. +(typename $linkcount u64) + +;;; File attributes. +(typename $filestat + (struct + ;;; Device ID of device containing the file. + (field $dev $device) + ;;; File serial number. + (field $ino $inode) + ;;; File type. + (field $filetype $filetype) + ;;; Number of hard links to the file. + (field $nlink $linkcount) + ;;; For regular files, the file size in bytes. For symbolic links, the length in bytes of the pathname contained in the symbolic link. + (field $size $filesize) + ;;; Last data access timestamp. + (field $atim $timestamp) + ;;; Last data modification timestamp. + (field $mtim $timestamp) + ;;; Last file status change timestamp. + (field $ctim $timestamp) + ) +) + +;;; User-provided value that may be attached to objects that is retained when +;;; extracted from the implementation. 
+(typename $userdata u64) + +;;; Type of a subscription to an event or its occurrence. +(typename $eventtype + (enum u8 + ;;; The time value of clock `subscription_clock::id` has + ;;; reached timestamp `subscription_clock::timeout`. + $clock + ;;; File descriptor `subscription_fd_readwrite::file_descriptor` has data + ;;; available for reading. This event always triggers for regular files. + $fd_read + ;;; File descriptor `subscription_fd_readwrite::file_descriptor` has capacity + ;;; available for writing. This event always triggers for regular files. + $fd_write + ) +) + +;;; The state of the file descriptor subscribed to with +;;; `eventtype::fd_read` or `eventtype::fd_write`. +(typename $eventrwflags + (flags u16 + ;;; The peer of this socket has closed or disconnected. + $fd_readwrite_hangup + ) +) + +;;; The contents of an $event when type is `eventtype::fd_read` or +;;; `eventtype::fd_write`. +(typename $event_fd_readwrite + (struct + ;;; The number of bytes available for reading or writing. + (field $nbytes $filesize) + ;;; The state of the file descriptor. + (field $flags $eventrwflags) + ) +) + +;;; An event that occurred. +(typename $event + (struct + ;;; User-provided value that got attached to `subscription::userdata`. + (field $userdata $userdata) + ;;; If non-zero, an error that occurred while processing the subscription request. + (field $error $errno) + ;;; The type of event that occured + (field $type $eventtype) + ;;; The contents of the event, if it is an `eventtype::fd_read` or + ;;; `eventtype::fd_write`. `eventtype::clock` events ignore this field. + (field $fd_readwrite $event_fd_readwrite) + ) +) + +;;; Flags determining how to interpret the timestamp provided in +;;; `subscription_clock::timeout`. +(typename $subclockflags + (flags u16 + ;;; If set, treat the timestamp provided in + ;;; `subscription_clock::timeout` as an absolute timestamp of clock + ;;; `subscription_clock::id`. 
If clear, treat the timestamp + ;;; provided in `subscription_clock::timeout` relative to the + ;;; current time value of clock `subscription_clock::id`. + $subscription_clock_abstime + ) +) + +;;; The contents of a `subscription` when type is `eventtype::clock`. +(typename $subscription_clock + (struct + ;;; The clock against which to compare the timestamp. + (field $id $clockid) + ;;; The absolute or relative timestamp. + (field $timeout $timestamp) + ;;; The amount of time that the implementation may wait additionally + ;;; to coalesce with other events. + (field $precision $timestamp) + ;;; Flags specifying whether the timeout is absolute or relative + (field $flags $subclockflags) + ) +) + +;;; The contents of a `subscription` when type is type is +;;; `eventtype::fd_read` or `eventtype::fd_write`. +(typename $subscription_fd_readwrite + (struct + ;;; The file descriptor on which to wait for it to become ready for reading or writing. + (field $file_descriptor $fd) + ) +) + +;;; The contents of a `subscription`. +(typename $subscription_u + (union $eventtype + (field $clock $subscription_clock) + (field $fd_read $subscription_fd_readwrite) + (field $fd_write $subscription_fd_readwrite) + ) +) + +;;; Subscription to an event. +(typename $subscription + (struct + ;;; User-provided value that is attached to the subscription in the + ;;; implementation and returned through `event::userdata`. + (field $userdata $userdata) + ;;; The type of the event to which to subscribe, and its contents + (field $u $subscription_u) + ) +) + +;;; Exit code generated by a process when exiting. +(typename $exitcode u32) + +;;; Signal condition. +(typename $signal + (enum u8 + ;;; No signal. Note that POSIX has special semantics for `kill(pid, 0)`, + ;;; so this value is reserved. + $none + ;;; Hangup. + ;;; Action: Terminates the process. + $hup + ;;; Terminate interrupt signal. + ;;; Action: Terminates the process. + $int + ;;; Terminal quit signal. 
+ ;;; Action: Terminates the process. + $quit + ;;; Illegal instruction. + ;;; Action: Terminates the process. + $ill + ;;; Trace/breakpoint trap. + ;;; Action: Terminates the process. + $trap + ;;; Process abort signal. + ;;; Action: Terminates the process. + $abrt + ;;; Access to an undefined portion of a memory object. + ;;; Action: Terminates the process. + $bus + ;;; Erroneous arithmetic operation. + ;;; Action: Terminates the process. + $fpe + ;;; Kill. + ;;; Action: Terminates the process. + $kill + ;;; User-defined signal 1. + ;;; Action: Terminates the process. + $usr1 + ;;; Invalid memory reference. + ;;; Action: Terminates the process. + $segv + ;;; User-defined signal 2. + ;;; Action: Terminates the process. + $usr2 + ;;; Write on a pipe with no one to read it. + ;;; Action: Ignored. + $pipe + ;;; Alarm clock. + ;;; Action: Terminates the process. + $alrm + ;;; Termination signal. + ;;; Action: Terminates the process. + $term + ;;; Child process terminated, stopped, or continued. + ;;; Action: Ignored. + $chld + ;;; Continue executing, if stopped. + ;;; Action: Continues executing, if stopped. + $cont + ;;; Stop executing. + ;;; Action: Stops executing. + $stop + ;;; Terminal stop signal. + ;;; Action: Stops executing. + $tstp + ;;; Background process attempting read. + ;;; Action: Stops executing. + $ttin + ;;; Background process attempting write. + ;;; Action: Stops executing. + $ttou + ;;; High bandwidth data is available at a socket. + ;;; Action: Ignored. + $urg + ;;; CPU time limit exceeded. + ;;; Action: Terminates the process. + $xcpu + ;;; File size limit exceeded. + ;;; Action: Terminates the process. + $xfsz + ;;; Virtual timer expired. + ;;; Action: Terminates the process. + $vtalrm + ;;; Profiling timer expired. + ;;; Action: Terminates the process. + $prof + ;;; Window changed. + ;;; Action: Ignored. + $winch + ;;; I/O possible. + ;;; Action: Terminates the process. + $poll + ;;; Power failure. + ;;; Action: Terminates the process. 
+ $pwr + ;;; Bad system call. + ;;; Action: Terminates the process. + $sys + ) +) + +;;; Flags provided to `sock_recv`. +(typename $riflags + (flags u16 + ;;; Returns the message without removing it from the socket's receive queue. + $recv_peek + ;;; On byte-stream sockets, block until the full amount of data can be returned. + $recv_waitall + ) +) + +;;; Flags returned by `sock_recv`. +(typename $roflags + (flags u16 + ;;; Returned by `sock_recv`: Message data has been truncated. + $recv_data_truncated + ) +) + +;;; Flags provided to `sock_send`. As there are currently no flags +;;; defined, it must be set to zero. +(typename $siflags u16) + +;;; Which channels on a socket to shut down. +(typename $sdflags + (flags u8 + ;;; Disables further receive operations. + $rd + ;;; Disables further send operations. + $wr + ) +) + +;;; Identifiers for preopened capabilities. +(typename $preopentype + (enum u8 + ;;; A pre-opened directory. + $dir + ) +) + +;;; The contents of a $prestat when type is `preopentype::dir`. +(typename $prestat_dir + (struct + ;;; The length of the directory name for use with `fd_prestat_dir_name`. + (field $pr_name_len $size) + ) +) + +;;; Information about a pre-opened capability. +(typename $prestat + (union $preopentype + (field $dir $prestat_dir) + ) +) + diff --git a/generator/resources/wasi_witx/wasi_snapshot_preview1.witx b/generator/resources/wasi_witx/wasi_snapshot_preview1.witx new file mode 100644 index 00000000..98cd9478 --- /dev/null +++ b/generator/resources/wasi_witx/wasi_snapshot_preview1.witx @@ -0,0 +1,532 @@ +;; WASI Preview. This is an evolution of the API that WASI initially +;; launched with. +;; +;; Some content here is derived from [CloudABI](https://github.com/NuxiNL/cloudabi). +;; +;; This is a `witx` file. See [here](https://github.com/WebAssembly/WASI/tree/master/docs/witx.md) +;; for an explanation of what that means. 
+ +(use "typenames.witx") + +(module $wasi_snapshot_preview1 + ;;; Linear memory to be accessed by WASI functions that need it. + (import "memory" (memory)) + + ;;; Read command-line argument data. + ;;; The size of the array should match that returned by `args_sizes_get` + (@interface func (export "args_get") + (param $argv (@witx pointer (@witx pointer u8))) + (param $argv_buf (@witx pointer u8)) + (result $error $errno) + ) + ;;; Return command-line argument data sizes. + (@interface func (export "args_sizes_get") + (result $error $errno) + ;;; The number of arguments. + (result $argc $size) + ;;; The size of the argument string data. + (result $argv_buf_size $size) + ) + + ;;; Read environment variable data. + ;;; The sizes of the buffers should match that returned by `environ_sizes_get`. + (@interface func (export "environ_get") + (param $environ (@witx pointer (@witx pointer u8))) + (param $environ_buf (@witx pointer u8)) + (result $error $errno) + ) + ;;; Return environment variable data sizes. + (@interface func (export "environ_sizes_get") + (result $error $errno) + ;;; The number of environment variable arguments. + (result $environc $size) + ;;; The size of the environment variable data. + (result $environ_buf_size $size) + ) + + ;;; Return the resolution of a clock. + ;;; Implementations are required to provide a non-zero value for supported clocks. For unsupported clocks, + ;;; return `errno::inval`. + ;;; Note: This is similar to `clock_getres` in POSIX. + (@interface func (export "clock_res_get") + ;;; The clock for which to return the resolution. + (param $id $clockid) + (result $error $errno) + ;;; The resolution of the clock. + (result $resolution $timestamp) + ) + ;;; Return the time value of a clock. + ;;; Note: This is similar to `clock_gettime` in POSIX. + (@interface func (export "clock_time_get") + ;;; The clock for which to return the time. 
+ (param $id $clockid) + ;;; The maximum lag (exclusive) that the returned time value may have, compared to its actual value. + (param $precision $timestamp) + (result $error $errno) + ;;; The time value of the clock. + (result $time $timestamp) + ) + + ;;; Provide file advisory information on a file descriptor. + ;;; Note: This is similar to `posix_fadvise` in POSIX. + (@interface func (export "fd_advise") + (param $fd $fd) + ;;; The offset within the file to which the advisory applies. + (param $offset $filesize) + ;;; The length of the region to which the advisory applies. + (param $len $filesize) + ;;; The advice. + (param $advice $advice) + (result $error $errno) + ) + + ;;; Force the allocation of space in a file. + ;;; Note: This is similar to `posix_fallocate` in POSIX. + (@interface func (export "fd_allocate") + (param $fd $fd) + ;;; The offset at which to start the allocation. + (param $offset $filesize) + ;;; The length of the area that is allocated. + (param $len $filesize) + (result $error $errno) + ) + + ;;; Close a file descriptor. + ;;; Note: This is similar to `close` in POSIX. + (@interface func (export "fd_close") + (param $fd $fd) + (result $error $errno) + ) + + ;;; Synchronize the data of a file to disk. + ;;; Note: This is similar to `fdatasync` in POSIX. + (@interface func (export "fd_datasync") + (param $fd $fd) + (result $error $errno) + ) + + ;;; Get the attributes of a file descriptor. + ;;; Note: This returns similar flags to `fsync(fd, F_GETFL)` in POSIX, as well as additional fields. + (@interface func (export "fd_fdstat_get") + (param $fd $fd) + (result $error $errno) + ;;; The buffer where the file descriptor's attributes are stored. + (result $stat $fdstat) + ) + + ;;; Adjust the flags associated with a file descriptor. + ;;; Note: This is similar to `fcntl(fd, F_SETFL, flags)` in POSIX. + (@interface func (export "fd_fdstat_set_flags") + (param $fd $fd) + ;;; The desired values of the file descriptor flags. 
+ (param $flags $fdflags) + (result $error $errno) + ) + + ;;; Adjust the rights associated with a file descriptor. + ;;; This can only be used to remove rights, and returns `errno::notcapable` if called in a way that would attempt to add rights + (@interface func (export "fd_fdstat_set_rights") + (param $fd $fd) + ;;; The desired rights of the file descriptor. + (param $fs_rights_base $rights) + (param $fs_rights_inheriting $rights) + (result $error $errno) + ) + + ;;; Return the attributes of an open file. + (@interface func (export "fd_filestat_get") + (param $fd $fd) + (result $error $errno) + ;;; The buffer where the file's attributes are stored. + (result $buf $filestat) + ) + + ;;; Adjust the size of an open file. If this increases the file's size, the extra bytes are filled with zeros. + ;;; Note: This is similar to `ftruncate` in POSIX. + (@interface func (export "fd_filestat_set_size") + (param $fd $fd) + ;;; The desired file size. + (param $size $filesize) + (result $error $errno) + ) + + ;;; Adjust the timestamps of an open file or directory. + ;;; Note: This is similar to `futimens` in POSIX. + (@interface func (export "fd_filestat_set_times") + (param $fd $fd) + ;;; The desired values of the data access timestamp. + (param $atim $timestamp) + ;;; The desired values of the data modification timestamp. + (param $mtim $timestamp) + ;;; A bitmask indicating which timestamps to adjust. + (param $fst_flags $fstflags) + (result $error $errno) + ) + + ;;; Read from a file descriptor, without using and updating the file descriptor's offset. + ;;; Note: This is similar to `preadv` in POSIX. + (@interface func (export "fd_pread") + (param $fd $fd) + ;;; List of scatter/gather vectors in which to store data. + (param $iovs $iovec_array) + ;;; The offset within the file at which to read. + (param $offset $filesize) + (result $error $errno) + ;;; The number of bytes read. 
+ (result $nread $size) + ) + + ;;; Return a description of the given preopened file descriptor. + (@interface func (export "fd_prestat_get") + (param $fd $fd) + (result $error $errno) + ;;; The buffer where the description is stored. + (result $buf $prestat) + ) + + ;;; Return a description of the given preopened file descriptor. + (@interface func (export "fd_prestat_dir_name") + (param $fd $fd) + ;;; A buffer into which to write the preopened directory name. + (param $path (@witx pointer u8)) + (param $path_len $size) + (result $error $errno) + ) + + ;;; Write to a file descriptor, without using and updating the file descriptor's offset. + ;;; Note: This is similar to `pwritev` in POSIX. + (@interface func (export "fd_pwrite") + (param $fd $fd) + ;;; List of scatter/gather vectors from which to retrieve data. + (param $iovs $ciovec_array) + ;;; The offset within the file at which to write. + (param $offset $filesize) + (result $error $errno) + ;;; The number of bytes written. + (result $nwritten $size) + ) + + ;;; Read from a file descriptor. + ;;; Note: This is similar to `readv` in POSIX. + (@interface func (export "fd_read") + (param $fd $fd) + ;;; List of scatter/gather vectors to which to store data. + (param $iovs $iovec_array) + (result $error $errno) + ;;; The number of bytes read. + (result $nread $size) + ) + + ;;; Read directory entries from a directory. + ;;; When successful, the contents of the output buffer consist of a sequence of + ;;; directory entries. Each directory entry consists of a dirent_t object, + ;;; followed by dirent_t::d_namlen bytes holding the name of the directory + ;;; entry. + ;; + ;;; This function fills the output buffer as much as possible, potentially + ;;; truncating the last directory entry. This allows the caller to grow its + ;;; read buffer size in case it's too small to fit a single large directory + ;;; entry, or skip the oversized directory entry. 
+ (@interface func (export "fd_readdir") + (param $fd $fd) + ;;; The buffer where directory entries are stored + (param $buf (@witx pointer u8)) + (param $buf_len $size) + ;;; The location within the directory to start reading + (param $cookie $dircookie) + (result $error $errno) + ;;; The number of bytes stored in the read buffer. If less than the size of the read buffer, the end of the directory has been reached. + (result $bufused $size) + ) + + ;;; Atomically replace a file descriptor by renumbering another file descriptor. + ;; + ;;; Due to the strong focus on thread safety, this environment does not provide + ;;; a mechanism to duplicate or renumber a file descriptor to an arbitrary + ;;; number, like `dup2()`. This would be prone to race conditions, as an actual + ;;; file descriptor with the same number could be allocated by a different + ;;; thread at the same time. + ;; + ;;; This function provides a way to atomically renumber file descriptors, which + ;;; would disappear if `dup2()` were to be removed entirely. + (@interface func (export "fd_renumber") + (param $fd $fd) + ;;; The file descriptor to overwrite. + (param $to $fd) + (result $error $errno) + ) + + ;;; Move the offset of a file descriptor. + ;;; Note: This is similar to `lseek` in POSIX. + (@interface func (export "fd_seek") + (param $fd $fd) + ;;; The number of bytes to move. + (param $offset $filedelta) + ;;; The base from which the offset is relative. + (param $whence $whence) + (result $error $errno) + ;;; The new offset of the file descriptor, relative to the start of the file. + (result $newoffset $filesize) + ) + + ;;; Synchronize the data and metadata of a file to disk. + ;;; Note: This is similar to `fsync` in POSIX. + (@interface func (export "fd_sync") + (param $fd $fd) + (result $error $errno) + ) + + ;;; Return the current offset of a file descriptor. + ;;; Note: This is similar to `lseek(fd, 0, SEEK_CUR)` in POSIX. 
+ (@interface func (export "fd_tell") + (param $fd $fd) + (result $error $errno) + ;;; The current offset of the file descriptor, relative to the start of the file. + (result $offset $filesize) + ) + + ;;; Write to a file descriptor. + ;;; Note: This is similar to `writev` in POSIX. + (@interface func (export "fd_write") + (param $fd $fd) + ;;; List of scatter/gather vectors from which to retrieve data. + (param $iovs $ciovec_array) + (result $error $errno) + ;;; The number of bytes written. + (result $nwritten $size) + ) + + ;;; Create a directory. + ;;; Note: This is similar to `mkdirat` in POSIX. + (@interface func (export "path_create_directory") + (param $fd $fd) + ;;; The path at which to create the directory. + (param $path string) + (result $error $errno) + ) + + ;;; Return the attributes of a file or directory. + ;;; Note: This is similar to `stat` in POSIX. + (@interface func (export "path_filestat_get") + (param $fd $fd) + ;;; Flags determining the method of how the path is resolved. + (param $flags $lookupflags) + ;;; The path of the file or directory to inspect. + (param $path string) + (result $error $errno) + ;;; The buffer where the file's attributes are stored. + (result $buf $filestat) + ) + + ;;; Adjust the timestamps of a file or directory. + ;;; Note: This is similar to `utimensat` in POSIX. + (@interface func (export "path_filestat_set_times") + (param $fd $fd) + ;;; Flags determining the method of how the path is resolved. + (param $flags $lookupflags) + ;;; The path of the file or directory to operate on. + (param $path string) + ;;; The desired values of the data access timestamp. + (param $atim $timestamp) + ;;; The desired values of the data modification timestamp. + (param $mtim $timestamp) + ;;; A bitmask indicating which timestamps to adjust. + (param $fst_flags $fstflags) + (result $error $errno) + ) + + ;;; Create a hard link. + ;;; Note: This is similar to `linkat` in POSIX. 
+ (@interface func (export "path_link") + (param $old_fd $fd) + ;;; Flags determining the method of how the path is resolved. + (param $old_flags $lookupflags) + ;;; The source path from which to link. + (param $old_path string) + ;;; The working directory at which the resolution of the new path starts. + (param $new_fd $fd) + ;;; The destination path at which to create the hard link. + (param $new_path string) + (result $error $errno) + ) + + ;;; Open a file or directory. + ;; + ;;; The returned file descriptor is not guaranteed to be the lowest-numbered + ;;; file descriptor not currently open; it is randomized to prevent + ;;; applications from depending on making assumptions about indexes, since this + ;;; is error-prone in multi-threaded contexts. The returned file descriptor is + ;;; guaranteed to be less than 2**31. + ;; + ;;; Note: This is similar to `openat` in POSIX. + (@interface func (export "path_open") + (param $fd $fd) + ;;; Flags determining the method of how the path is resolved. + (param $dirflags $lookupflags) + ;;; The relative path of the file or directory to open, relative to the + ;;; `path_open::fd` directory. + (param $path string) + ;;; The method by which to open the file. + (param $oflags $oflags) + ;;; The initial rights of the newly created file descriptor. The + ;;; implementation is allowed to return a file descriptor with fewer rights + ;;; than specified, if and only if those rights do not apply to the type of + ;;; file being opened. + ;; + ;;; The *base* rights are rights that will apply to operations using the file + ;;; descriptor itself, while the *inheriting* rights are rights that apply to + ;;; file descriptors derived from it. + (param $fs_rights_base $rights) + (param $fs_rights_inherting $rights) + (param $fdflags $fdflags) + (result $error $errno) + ;;; The file descriptor of the file that has been opened. + (result $opened_fd $fd) + ) + + ;;; Read the contents of a symbolic link. 
+ ;;; Note: This is similar to `readlinkat` in POSIX. + (@interface func (export "path_readlink") + (param $fd $fd) + ;;; The path of the symbolic link from which to read. + (param $path string) + ;;; The buffer to which to write the contents of the symbolic link. + (param $buf (@witx pointer u8)) + (param $buf_len $size) + (result $error $errno) + ;;; The number of bytes placed in the buffer. + (result $bufused $size) + ) + + ;;; Remove a directory. + ;;; Return `errno::notempty` if the directory is not empty. + ;;; Note: This is similar to `unlinkat(fd, path, AT_REMOVEDIR)` in POSIX. + (@interface func (export "path_remove_directory") + (param $fd $fd) + ;;; The path to a directory to remove. + (param $path string) + (result $error $errno) + ) + + ;;; Rename a file or directory. + ;;; Note: This is similar to `renameat` in POSIX. + (@interface func (export "path_rename") + (param $fd $fd) + ;;; The source path of the file or directory to rename. + (param $old_path string) + ;;; The working directory at which the resolution of the new path starts. + (param $new_fd $fd) + ;;; The destination path to which to rename the file or directory. + (param $new_path string) + (result $error $errno) + ) + + ;;; Create a symbolic link. + ;;; Note: This is similar to `symlinkat` in POSIX. + (@interface func (export "path_symlink") + ;;; The contents of the symbolic link. + (param $old_path string) + (param $fd $fd) + ;;; The destination path at which to create the symbolic link. + (param $new_path string) + (result $error $errno) + ) + + + ;;; Unlink a file. + ;;; Return `errno::isdir` if the path refers to a directory. + ;;; Note: This is similar to `unlinkat(fd, path, 0)` in POSIX. + (@interface func (export "path_unlink_file") + (param $fd $fd) + ;;; The path to a file to unlink. + (param $path string) + (result $error $errno) + ) + + ;;; Concurrently poll for the occurrence of a set of events. + (@interface func (export "poll_oneoff") + ;;; The events to which to subscribe. 
+ (param $in (@witx const_pointer $subscription)) + ;;; The events that have occurred. + (param $out (@witx pointer $event)) + ;;; Both the number of subscriptions and events. + (param $nsubscriptions $size) + (result $error $errno) + ;;; The number of events stored. + (result $nevents $size) + ) + + ;;; Terminate the process normally. An exit code of 0 indicates successful + ;;; termination of the program. The meanings of other values is dependent on + ;;; the environment. + (@interface func (export "proc_exit") + ;;; The exit code returned by the process. + (param $rval $exitcode) + ) + + ;;; Send a signal to the process of the calling thread. + ;;; Note: This is similar to `raise` in POSIX. + (@interface func (export "proc_raise") + ;;; The signal condition to trigger. + (param $sig $signal) + (result $error $errno) + ) + + ;;; Temporarily yield execution of the calling thread. + ;;; Note: This is similar to `sched_yield` in POSIX. + (@interface func (export "sched_yield") + (result $error $errno) + ) + + ;;; Write high-quality random data into a buffer. + ;;; This function blocks when the implementation is unable to immediately + ;;; provide sufficient high-quality random data. + ;;; This function may execute slowly, so when large mounts of random data are + ;;; required, it's advisable to use this function to seed a pseudo-random + ;;; number generator, rather than to provide the random data directly. + (@interface func (export "random_get") + ;;; The buffer to fill with random data. + (param $buf (@witx pointer u8)) + (param $buf_len $size) + (result $error $errno) + ) + + ;;; Receive a message from a socket. + ;;; Note: This is similar to `recv` in POSIX, though it also supports reading + ;;; the data into multiple buffers in the manner of `readv`. + (@interface func (export "sock_recv") + (param $fd $fd) + ;;; List of scatter/gather vectors to which to store data. + (param $ri_data $iovec_array) + ;;; Message flags. 
+ (param $ri_flags $riflags) + (result $error $errno) + ;;; Number of bytes stored in ri_data. + (result $ro_datalen $size) + ;;; Message flags. + (result $ro_flags $roflags) + ) + + ;;; Send a message on a socket. + ;;; Note: This is similar to `send` in POSIX, though it also supports writing + ;;; the data from multiple buffers in the manner of `writev`. + (@interface func (export "sock_send") + (param $fd $fd) + ;;; List of scatter/gather vectors to which to retrieve data + (param $si_data $ciovec_array) + ;;; Message flags. + (param $si_flags $siflags) + (result $error $errno) + ;;; Number of bytes transmitted. + (result $so_datalen $size) + ) + + ;;; Shut down socket send and receive channels. + ;;; Note: This is similar to `shutdown` in POSIX. + (@interface func (export "sock_shutdown") + (param $fd $fd) + ;;; Which channels on the socket to shut down. + (param $how $sdflags) + (result $error $errno) + ) +) diff --git a/generator/src/swam/generator/Generator.scala b/generator/src/swam/generator/Generator.scala index a98f8a4d..5378f356 100644 --- a/generator/src/swam/generator/Generator.scala +++ b/generator/src/swam/generator/Generator.scala @@ -8,6 +8,9 @@ import org.json4s.DefaultFormats import swam.runtime.{Engine, Import} import org.json4s.jackson.Serialization.writePretty import cats.implicits._ +import swam.generator.witx.{TypesEmitTraverser, ModuleTraverse} +import swam.witx.WitxParser +import swam.witx.parser.ImportContext import scala.concurrent.ExecutionContext @@ -15,7 +18,9 @@ case class Config(wasms: Seq[File] = Seq(), printTemplateContext: Boolean = false, createBoilerplate: String = "", className: String = "GeneratedImports", - renderTemplate: File = null) + renderTemplate: File = null, + parseAsWitx: Boolean = false, + includeWitxTypesPath: Seq[String] = Seq()) /** @author Javier Cabrera-Arteaga on 2020-03-07 @@ -45,6 +50,17 @@ object Generator extends IOApp { .action((f, c) => c.copy(printTemplateContext = f, createBoilerplate = "")) // Avoid 
he boilerplate generation .text("Prints the template context (JSON) to feed the template engine") + opt[Boolean]('x', "witx") + .optional() + .action((f, c) => c.copy(parseAsWitx = f)) + .text("Parse input as witx definition, generating the module boilerplate in scala") + + opt[String]('i', "includes") + .optional() + .unbounded() + .action((f, c) => c.copy(includeWitxTypesPath = c.includeWitxTypesPath :+ f)) + .text("Include folders to look for types definition in witx parsing") + opt[File]('t', "template") .optional() .action((x, c) => c.copy(renderTemplate = x)) @@ -69,9 +85,25 @@ object Generator extends IOApp { wasms.map(w => getImports(w, blocker)).reduce((r, l) => r.combine(l)) } + def parseWitx(witxFile: File, includes: Seq[String], newPackagePath: String) = { + if (newPackagePath.isEmpty) + throw new Exception("You must provide the path to create the boilerplate (--create-boilerplate)") + Blocker[IO] + .use(blocker => { + for { + parser <- IO(WitxParser[IO]) + ctx <- IO(ImportContext[IO]()) + (types, interface) <- parser.parseModuleInterface(witxFile.toPath, blocker, ctx) + scalaTypesTemplate <- IO(new TypesEmitTraverser(types).traverseAll("", (s1, s2) => s1 + s2)) + scalaTraitTemplate <- IO(new ModuleTraverse(interface, types).traverseAll("", (s1, s2) => s1 + s2)) + _ <- generator.createScalaProjectForImports(scalaTypesTemplate, scalaTraitTemplate, newPackagePath) + } yield () + }) + } + def generate(config: Config, imports: Vector[Import]): IO[Unit] = { + if (config.createBoilerplate.isEmpty) { - println() if (!config.printTemplateContext) { config.renderTemplate match { case null => IO(println(generator.generateImportText(imports, config.className))) @@ -90,10 +122,14 @@ object Generator extends IOApp { def run(args: List[String]): IO[ExitCode] = parser.parse(args, Config()) match { case Some(config) => { - for { - imports <- concatImports(config.wasms) - _ <- generate(config, imports) - } yield () + if (config.parseAsWitx) { + 
parseWitx(config.wasms(0), config.includeWitxTypesPath, config.createBoilerplate) + } else { + for { + imports <- concatImports(config.wasms) + _ <- generate(config, imports) + } yield () + } }.as(ExitCode.Success) case None => IO(parser.reportError("You must provide a WASM file")).as(ExitCode.Error) diff --git a/generator/src/swam/generator/ImportGenerator.scala b/generator/src/swam/generator/ImportGenerator.scala index 7decfa4e..4d42bed4 100644 --- a/generator/src/swam/generator/ImportGenerator.scala +++ b/generator/src/swam/generator/ImportGenerator.scala @@ -12,7 +12,7 @@ import org.fusesource.scalate.TemplateEngine import cats.effect.{Blocker, Effect, IO} import cats.implicits._ import cats.effect._ -import fs2.{text, _} +import fs2._ /** * @author Javier Cabrera-Arteaga on 2020-03-06 @@ -63,7 +63,7 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { .collect { case i: Import.Function => i } // Filter by function type .groupBy(t => t.moduleName) // Group by module .view - .mapValues(_.toSet) // remove duplicated entries + .mapValues(k => k.groupBy(i => i.fieldName).view.mapValues(t => t.last).toSet) // remove duplicated entries // Generating DTO sorted.zipWithIndex.map { @@ -73,7 +73,7 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { "comma" -> (index < sorted.keys.size - 1), "fields" -> functions.toSeq.zipWithIndex .map { - case (field, fieldIndex) => { + case ((name, field), fieldIndex) => { Map( "name" -> field.fieldName, "nameCapital" -> field.fieldName.capitalize, @@ -89,6 +89,12 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { }.toSeq } + def formatText(text: String) = { + val file = Paths.get("Formatted.scala") + + scalafmt.format(config, file, text) + } + /** * Creates the trait to implement the WASM import functions * @param imports @@ -106,9 +112,7 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { "imports" -> getContext(imports) )) - val file = 
Paths.get("GeneratedImport.scala") - - scalafmt.format(config, file, result) + formatText(result) } def writeToFile(t: String, projectName: String, className: String) = { @@ -116,6 +120,8 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { Blocker[F] .use { blocker => for { + // replace + _ <- io.file.delete(blocker, Paths.get(s"$projectName/src/$className.scala")) _ <- io.file.createDirectories[F](blocker, Paths.get(s"$projectName/src")) // Creates the module structure _ <- fs2 .Stream(t) @@ -144,6 +150,21 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { writeToFile(trait_, projectName, className) } + + /** + * Creates scala project from witx + * @param typesTemplate + * @param traitTemplate + * @return + */ + def createScalaProjectForImports(typesTemplate: String, traitTemplate: String, projectName: String) = { + + for { + _ <- writeToFile(typesTemplate, projectName, "Types") + _ <- writeToFile(traitTemplate, projectName, "Module") + } yield () + + } } object ImportGenerator { diff --git a/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala b/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala new file mode 100644 index 00000000..1bffc3dc --- /dev/null +++ b/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala @@ -0,0 +1,89 @@ +package swam.generator.witx + +import swam.witx.traverser.TypesTraverser +import swam.witx.unresolved.{ + AliasType, + ArrayType, + BaseWitxType, + BasicType, + EnumType, + FlagsType, + Handle, + Pointer, + StructType, + UnionType +} + +/** + * @author Javier Cabrera-Arteaga on 2020-03-23 + */ +class LoadTypeEmitTraverser(prev: String, types: Map[String, BaseWitxType], offset: String = "", mem: String = "") + extends TypesTraverser[String](types) { + + def concatOffsets(offset: String, prev: String) = { + if (prev.isEmpty) offset + else s"$offset + $prev" + } + + override val basicTypeTraverser = { + case (_, t: BasicType) => + t.name match { + case "u8" => 
s"$mem.readByte(${concatOffsets(offset, prev)}).unsafeRunSync()\n" + case "u16" => s"$mem.readShort(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case "u32" => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case "u64" => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case "s64" => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case "string" => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" + } + } + + def getVal(tpe: BaseWitxType): String = tpe match { + case x: AliasType => x.tpeName + case x: BasicType => x.tpeName + case x: EnumType => s"${x.tpeName}Enum.Value" + case x: FlagsType => s"${x.tpeName}Flags.Value" + case x: Pointer => s"Pointer[${getVal(x.tpe)}]" + case x: Handle => x.tpeName + case x: StructType => x.tpeName + case x: UnionType => x.tpeName + case x: ArrayType => x.tpeName + } + + override val aliasTypeTraverser = { + case (_, t: AliasType) => traverse("", types(t.tpe.tpeName)) + } + + override val enumTypeTraverser = { + case (_, t: EnumType) => traverse("", t.tpe) + } + + override val flagsTypeTraverser = { + case (_, t: FlagsType) => traverse("", t.tpe) + } + + override val structTypeTraverser = { + + case (_, t: StructType) => s"${t.tpeName}($mem, ${concatOffsets(offset, prev)})" + + } + + override val handleTypeTraverser = { + case (_, t: Handle) => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" + } + + override val unionTypeTraverser = { + case (_, t: UnionType) => + s"${t.tpeName}($mem, ${concatOffsets(offset, prev)})" + } + + override val arrayTypeTraverser = { + case (n, t: ArrayType) => s"new ArrayInstance[${t.tpeName}](, Len, ${t.size} ,(i) => (mem, i)).values)\n" + } + + override val pointerTypeTraverser = { + case (_, p: Pointer) => + s"new Pointer[${getVal(p.tpe)}](mem.readInt($offset).unsafeRunSync, (i) => ${new LoadTypeEmitTraverser("", types, "i", "mem") + .traverse("", p.tpe)}, (i, r) => ${new WriteTypeEmitTraverser("r", "", types, 
"i", mem) + .traverse("", p.tpe)})" + } +} diff --git a/generator/src/swam/generator/witx/ModuleTraverse.scala b/generator/src/swam/generator/witx/ModuleTraverse.scala new file mode 100644 index 00000000..0658682b --- /dev/null +++ b/generator/src/swam/generator/witx/ModuleTraverse.scala @@ -0,0 +1,212 @@ +package swam.generator.witx + +import swam.witx.traverser.{ModuleInterfaceTraverser, TypesTraverser} +import swam.witx.unresolved.{ + AliasType, + ArrayType, + BaseWitxType, + BasicType, + EnumType, + Field, + FlagsType, + FunctionExport, + Handle, + ModuleInterface, + Pointer, + StructType, + UnionType +} + +/** + * @author Javier Cabrera-Arteaga on 2020-03-21 + */ +class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) + extends ModuleInterfaceTraverser[String](module) { + + case class Adapt(from: String, to: String) + + override val functionExportTraverser = { + case (_, f: FunctionExport) => + s"def ${f.id}Impl${f.params + .map(f => s"${f.id}:${getVal(f.tpe)}") + .mkString("(", ",", ")")}:${mapFieldsToTuple(f.results)}\n\n" + + s"@effectful\ndef ${f.id}(${processParameters(f.params) + .mkString(",")}): F[${mapFieldsToScalaTuple(f.results)}] = F.pure({${processAdaptor(f.params)}\n${processResults(f)}})\n" + + } + + def processParameters(fields: Seq[Field]): Seq[String] = { + if (fields.isEmpty) Seq() + else { + val head = fields.head + val adaptor = mapTypeToWasm(head.tpe) + + adaptor.to match { + case "String" => Seq(s"${head.id}:${adaptor.from}", s"${head.id}Len:Int") ++ processParameters(fields.tail) + case _ => Seq(s"${head.id}:${adaptor.from}") ++ processParameters(fields.tail) + } + } + + // ${f.params.map(m => s"${m.id}:${mapTypeToWasm(m.tpe).from}").mkString(",")} + } + + def mapFieldsToTuple(fields: Seq[Field]) = + if (fields.length > 1) fields.map(t => getVal(t.tpe)).mkString("(", ",", ")") + else if (fields.length == 1) getVal(fields.head.tpe) + else "Unit" + + def mapFieldsToScalaTuple(fields: Seq[Field]) = + if 
(fields.length > 1) fields.map(t => mapTypeToWasm(t.tpe).from).mkString("(", ",", ")") + else if (fields.length == 1) mapTypeToWasm(fields.head.tpe).from + else "Unit" + + def processAdaptor(fields: Seq[Field]) = { + fields + .map(p => (p, mapTypeToWasm(p.tpe))) + .filter { case (_, adaptor) => adaptor.from != adaptor.to } + .map { + case (field, adaptor) => + s"\tval ${field.id}Adapted: ${adaptor.to} = ${new InitTypeEmitTraverser(field.id).traverse("", field.tpe)}" + } + .mkString("\n") + } + + def processResults(f: FunctionExport) = { + + val adaptors = f.results + .map(p => mapTypeToWasm(p.tpe)) + + val args = f.params + .map(t => (t.id, mapTypeToWasm(t.tpe))) + .map { + case (name, adaptor) => if (adaptor.from == adaptor.to) s"$name" else s"${name}Adapted" + } + .mkString(",") + + println(s""""${f.id}" -> ${f.id} _,""") + + if (f.results.nonEmpty) + s"tryToExecute(${f.id}Impl($args))" + else + s"${f.id}Impl($args)" + } + + def mapTypeToWasm(t: BaseWitxType): Adapt = t match { + case x: BasicType => mapBasicType(x) + case x: AliasType => mapAliasType(x) + case x: EnumType => Adapt("Int", s"${x.tpeName}Enum.Value") + case x: FlagsType => Adapt("Int", s"${x.tpeName}Flags.Value") + case x: StructType => Adapt("Int", x.tpeName) + case x: ArrayType => Adapt("Int", x.tpeName) + case x: UnionType => Adapt("Int", x.tpeName) + case x: Pointer => Adapt("Int", s"Int") + case x: Handle => Adapt("Int", "Int") + } + + def getVal(tpe: BaseWitxType): String = tpe match { + case x: AliasType => x.tpeName + case x: BasicType => x.tpeName + case x: EnumType => s"${x.tpeName}Enum.Value" + case x: FlagsType => s"${x.tpeName}Flags.Value" + case x: Pointer => s"ptr" + case x: Handle => x.tpeName + case x: StructType => x.tpeName + case x: UnionType => x.tpeName + case x: ArrayType => x.tpeName + } + + def mapBasicType(t: BasicType): Adapt = t.name match { + case "u8" => Adapt("Int", "Int") + case "u16" => Adapt("Int", "Short") + case "u32" => Adapt("Int", "Int") + case "u64" => 
Adapt("Long", "Long") + case "s64" => Adapt("Long", "Long") + case "string" => Adapt("Int", "String") + case "ptr" => Adapt("Int", "Int") + } + + def mapAliasType(t: AliasType): Adapt = mapTypeToWasm(types(t.tpe.tpeName)) + + val header = s"""val name = "${module.id}" + | + | def tryToExecute(a: => errnoEnum.Value) = { + | try a.id + | catch { + | case x: WASIException => x.errno.id + | } + | }""".stripMargin + + val imports = s"""package swam + |package wasi + | + |import Types._ + |import Header._ + |import cats.Applicative + |import cats.effect._ + |import swam.runtime.formats._ + |import swam.runtime.formats.DefaultFormatters._ + |import cats.effect.IO + |import swam.runtime.Memory + |import swam.runtime.imports.annotations.{effect, effectful, module, pure} + |""".stripMargin + + override def traverseAll(zero: String, compose: (String, String) => String) = + s"$imports\n @module\n abstract class Module[@effect F[_]](implicit F: Applicative[F]){\n var mem: Memory[IO] = null \n\n $header \n\n ${super + .traverseAll(zero, compose)}\n }" + + class InitTypeEmitTraverser(name: String) extends TypesTraverser[String](types) { + + override val basicTypeTraverser = { + case (_, t: BasicType) => + t.name match { + case "u8" => s"$name.toByte" + case "u16" => s"$name.toShort" + case "u16" => s"$name.toInt" + case "u64" => s"$name.toLong" + case "s64" => s"$name.toLong" + case "string" => s"getString(mem, $name, ${name}Len)" + case "ptr" => s"$name.toInt" + } + } + + override val aliasTypeTraverser = { + case (_, t: AliasType) => traverse("", types(t.tpe.tpeName)) + } + + override val enumTypeTraverser = { + case (_, t: EnumType) => s"${t.tpeName}Enum($name)" + } + + override val flagsTypeTraverser = { + case (_, t: FlagsType) => s"${t.tpeName}Flags($name)" + } + + override val structTypeTraverser = { + + case (_, t: StructType) => s"${t.tpeName}(mem, $name)" + + } + + override val handleTypeTraverser = { + case (_, t: Handle) => s"Handle($name)" + } + + override val 
unionTypeTraverser = { + case (_, t: UnionType) => s"${t.tpeName}(mem, $name)" + + } + + // new ArrayInstance[iovec](iovs, iovsLen, 8, (i) => `iovec`(mem, i)).values + override val arrayTypeTraverser = { + case (_, t: ArrayType) => + s"new ArrayInstance[${getVal(t.tpe)}]($name, ${name}Len, ${t.size} ,(i) => ${getVal(t.tpe)}(mem, i)).values" + } + + override val pointerTypeTraverser = { + case (_, p: Pointer) => + s"new Pointer[${getVal(p.tpe)}](mem.readInt($name).unsafeRunSync, (i) => ${new LoadTypeEmitTraverser("", types, mem = "mem", offset = "i") + .traverse("", p.tpe)}, (i, r) => ${new WriteTypeEmitTraverser("r", "", types, mem = "mem", offset = "i") + .traverse("", p.tpe)})" + } + } +} diff --git a/generator/src/swam/generator/witx/TypesEmitTraverser.scala b/generator/src/swam/generator/witx/TypesEmitTraverser.scala new file mode 100644 index 00000000..696f6b70 --- /dev/null +++ b/generator/src/swam/generator/witx/TypesEmitTraverser.scala @@ -0,0 +1,171 @@ +package swam.generator.witx + +import swam.witx.traverser.TypesTraverser +import swam.witx.unresolved.{ + AliasType, + ArrayType, + BaseWitxType, + BasicType, + EnumType, + Field, + FlagsType, + Handle, + Pointer, + StructType, + UnionType +} + +import scala.collection.immutable.HashMap +import scala.collection.mutable + +/** + * @author Javier Cabrera-Arteaga on 2020-03-21 + */ +class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverser[String](types) { + + override val basicTypeTraverser = { + case (_, t: BasicType) => + t.name match { + case "u8" => s"type ${t.tpeName} = Byte\n" + case "u16" => s"type ${t.tpeName} = Short\n" + case "u32" => s"type ${t.tpeName} = Int\n" + case "u64" => s"type ${t.tpeName} = Long\n" + case "s64" => s"type ${t.tpeName} = Long\n" + case "string" => s"type ${t.tpeName} = String\n" + case "ptr" => s"type ${t.tpeName} = Int\n" + } + } + + val predefinedEnumVal: Map[String, String] = HashMap[String, String]( + "fd_datasync" -> "0x0000000000000001", + 
"fd_read" -> "0x0000000000000002", + "fd_seek" -> "0x0000000000000004", + "fd_fdstat_set_flags" -> "0x0000000000000008", + "fd_sync" -> "0x0000000000000010", + "fd_tell" -> "0x0000000000000020", + "fd_write" -> "0x0000000000000040", + "fd_advise" -> "0x0000000000000080", + "fd_allocate" -> "0x0000000000000100", + "path_create_directory" -> "0x0000000000000200", + "path_create_file" -> "0x0000000000000400", + "path_link_source" -> "0x0000000000000800", + "path_link_target" -> "0x0000000000001000", + "path_open" -> "0x0000000000002000", + "fd_readdir" -> "0x0000000000004000", + "path_readlink" -> "0x0000000000008000", + "path_rename_source" -> "0x0000000000010000", + "path_rename_target" -> "0x0000000000020000", + "path_filestat_get" -> "0x0000000000040000", + "path_filestat_set_size" -> "0x0000000000080000", + "path_filestat_set_times" -> "0x0000000000100000", + "fd_filestat_get" -> "0x0000000000200000", + "fd_filestat_set_size" -> "0x0000000000400000", + "fd_filestat_set_times" -> "0x0000000000800000", + "path_symlink" -> "0x0000000001000000", + "path_remove_directory" -> "0x0000000002000000", + "path_unlink_file" -> "0x0000000004000000", + "poll_fd_readwrite" -> "0x0000000008000000", + "sock_shutdown" -> "0x0000000010000000" + ) + + override val aliasTypeTraverser = { + case (_, t: AliasType) => s"type ${t.tpeName}= ${t.tpe.tpeName}\n" + } + + override val enumTypeTraverser = { + case (_, t: EnumType) => + s"object ${t.tpeName}Enum extends Enumeration { \n\t ${t.names + .map(t => s"\nval `$t` = Value${if (predefinedEnumVal.contains(t)) s"(${predefinedEnumVal(t)})" else " "}\n") + .mkString("\n")}}\n" + } + + override val flagsTypeTraverser = { + case (_, t: FlagsType) => + s"object ${t.tpeName}Flags extends Enumeration { ${t.names.zipWithIndex + .map { + case (name, i) => + s"\t\nval ${name} = Value${if (predefinedEnumVal.contains(name)) s"(${predefinedEnumVal(name)})" + else s"($i)"}\n" + } + .mkString("\n")}}\n\n" + } + + def getVal(tpe: BaseWitxType): String = 
tpe match { + case x: AliasType => x.tpeName + case x: BasicType => x.tpeName + case x: EnumType => s"${x.tpeName}Enum.Value" + case x: FlagsType => s"${x.tpeName}Flags.Value" + case x: Pointer => s"Pointer[${getVal(x.tpe)}]" + case x: Handle => x.tpeName + case x: StructType => x.tpeName + case x: UnionType => x.tpeName + } + + override val structTypeTraverser = { + + case (_, t: StructType) => { + + s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) extends WASI_STRUCT { + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, offset = "offset", mem = "mem") + .traverse("", f.tpe)}" + } + .mkString("\n")} + + def write(offset: Int, mem: Memory[IO]) = { + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, "offset", "mem") + .traverse("", f.tpe)}" + + } + .mkString("\n")} + |} + |}\n\n""".stripMargin + } + + } + + override val unionTypeTraverser = { + case (_, t: UnionType) => { + + s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) extends WASI_STRUCT { // UNION + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, offset = "offset", mem = "mem") + .traverse("", f.tpe)}" + } + .mkString("\n")} + + def write(offset: Int, mem: Memory[IO]) = { + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, "offset", "mem") + .traverse("", f.tpe)}" + + } + .mkString("\n")} + |} + |}\n\n""".stripMargin + } + + } + + override val handleTypeTraverser = { + case (_, t: Handle) => s"type ${t.tpeName} = Int\n\n" + } + + override val arrayTypeTraverser = { + case (_, t: ArrayType) => s"type ${t.tpeName} = List[${getVal(t.tpe)}]\n" + } 
+ + override def traverseAll(zero: String, compose: (String, String) => String) = + s"package swam\npackage wasi\n import cats.effect.IO \nimport swam.runtime.Memory \n\nobject Types { \n // This is an autogenerated file, do not change it \n\n ${super + .traverseAll(zero, compose)} }" + +} diff --git a/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala b/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala new file mode 100644 index 00000000..38e2b328 --- /dev/null +++ b/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala @@ -0,0 +1,78 @@ +package swam.generator.witx + +import swam.witx.traverser.TypesTraverser +import swam.witx.unresolved.{ + AliasType, + ArrayType, + BaseWitxType, + BasicType, + EnumType, + Field, + FlagsType, + Handle, + Pointer, + StructType, + UnionType +} + +/** + * @author Javier Cabrera-Arteaga on 2020-03-23 + */ +class WriteTypeEmitTraverser(f: String, + prev: String, + types: Map[String, BaseWitxType], + offset: String = "", + mem: String = "") + extends TypesTraverser[String](types) { + + def concatOffsets(offset: String, prev: String) = { + if (prev.isEmpty) offset + else s"$offset + $prev" + } + + override val basicTypeTraverser = { + case (_, t: BasicType) => + t.name match { + case "u8" => s"$mem.writeByte(${concatOffsets(offset, prev)}, `${f}` ).unsafeRunSync\n" + case "u16" => s"$mem.writeShort(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case "u32" => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case "u64" => s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case "s64" => s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case "string" => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + } + } + + override val aliasTypeTraverser = { + case (_, t: AliasType) => traverse("", types(t.tpe.tpeName)) + } + + override val enumTypeTraverser = { + case (_, t: EnumType) => traverse("", t.tpe) + 
} + + override val flagsTypeTraverser = { + case (_, t: FlagsType) => traverse("", t.tpe) + } + + override val structTypeTraverser = { + + case (_, t: StructType) => s"$f.write(${concatOffsets(offset, prev)}, mem)" + + } + + override val handleTypeTraverser = { + case (_, t: Handle) => s"$mem.writeInt(${concatOffsets(offset, prev)}, `${f}`).unsafeRunSync\n" + } + + override val unionTypeTraverser = { + case (_, t: UnionType) => s"$f.write(${concatOffsets(offset, prev)}, mem)" + + } + + override val arrayTypeTraverser = { + case (_, t: ArrayType) => s"$mem.writeInt(${concatOffsets(offset, prev)}, `${f}`).unsafeRunSync\n" + } + override val pointerTypeTraverser = { + case (_, p: Pointer) => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`.offset).unsafeRunSync\n" + } +} diff --git a/generator/test/src/generator/WitxTest.scala b/generator/test/src/generator/WitxTest.scala new file mode 100644 index 00000000..280f1a7a --- /dev/null +++ b/generator/test/src/generator/WitxTest.scala @@ -0,0 +1,48 @@ +package swam +package generator + +import java.io +import java.nio.file.Paths + +import utest.{TestSuite, Tests, test} + +import runtime._ +import swam.test.util._ +import utest._ +import better.files._ +import cats.effect._ +import fs2.io.file +import swam.text.parser +import swam.witx.WitxParser +import swam.witx.parser.{ImportContext, TypesParser} +import swam.witx.unresolved._ +import swam.witx +import swam.witx.traverser.{ModuleInterfaceTraverser, TypesTraverser} + +import scala.concurrent.ExecutionContext + +object WitParser extends TestSuite { + + implicit val cs = IO.contextShift(ExecutionContext.Implicits.global) + + def runParse() = { + val wasi_snaphot = Paths.get("generator/resources/wasi_witx/wasi_snapshot_preview1.witx") + + val parser = WitxParser[IO] + val ctx = ImportContext[IO]() + + val (types, interface) = Blocker[IO] + .use(blocker => { + for { + (types, instruction) <- parser.parseModuleInterface(wasi_snaphot, blocker, ctx) + } yield (types, 
instruction) + }) + .unsafeRunSync() + + } + + val tests = Tests { + "parsing_witx" - runParse() + } + +} diff --git a/runtime/resources/reference.conf b/runtime/resources/reference.conf index 21a88387..a276ecb6 100644 --- a/runtime/resources/reference.conf +++ b/runtime/resources/reference.conf @@ -34,7 +34,7 @@ swam { stack { # the total stack size - size = 64 KiB + size = 4 MiB } @@ -59,7 +59,7 @@ swam { type = console } - filter = "mread|mwrite|msize|mgrow" + filter = "*" # The logging level of the tracer # possible values are: @@ -79,6 +79,8 @@ swam { port = 8080 } + separator = "\n" + file-handler = { #"/" the local pathname separator #"%t" the system temporary directory diff --git a/runtime/src/swam/runtime/Engine.scala b/runtime/src/swam/runtime/Engine.scala index f1fb0ca9..93712fa2 100644 --- a/runtime/src/swam/runtime/Engine.scala +++ b/runtime/src/swam/runtime/Engine.scala @@ -157,11 +157,11 @@ class Engine[F[_]: Effect] private (val conf: EngineConfiguration, object Engine { - def apply[F[_]: Effect](): F[Engine[F]] = + def apply[F[_]: Effect](tracer: Option[Tracer] = None): F[Engine[F]] = for { validator <- Validator[F] conf <- ConfigSource.default.at("swam.runtime").loadF[F, EngineConfiguration] - } yield new Engine[F](conf, validator, None) + } yield new Engine[F](conf, validator, tracer) def apply[F[_]: Effect](conf: EngineConfiguration, validator: Validator[F], tracer: Option[Tracer]): Engine[F] = new Engine[F](conf, validator, tracer) diff --git a/runtime/src/swam/runtime/Interface.scala b/runtime/src/swam/runtime/Interface.scala index d0b74d17..088d60e8 100644 --- a/runtime/src/swam/runtime/Interface.scala +++ b/runtime/src/swam/runtime/Interface.scala @@ -208,6 +208,13 @@ abstract class Memory[F[_]](implicit F: MonadError[F, Throwable]) extends Interf def readDouble(idx: Int): F[Double] = F.catchNonFatal(unsafeReadDouble(idx)) def unsafeReadDouble(idx: Int): Double + /** Reads a byte array at the given index in memory. 
+ * + * $boundaries + */ + def readBytes(idx: Int, dst: Array[Byte]): F[Unit] = F.catchNonFatal(unsafeReadBytes(idx, dst)) + def unsafeReadBytes(idx: Int, dst: Array[Byte]): Unit + /** Writes the bytes in the provided buffer at the given index in memory. * * This method is used by Swam to initialize the memory upon module @@ -217,4 +224,5 @@ abstract class Memory[F[_]](implicit F: MonadError[F, Throwable]) extends Interf */ def writeBytes(idx: Int, bytes: ByteBuffer): F[Unit] = F.catchNonFatal(unsafeWriteBytes(idx, bytes)) def unsafeWriteBytes(idx: Int, bytes: ByteBuffer): Unit + } diff --git a/runtime/src/swam/runtime/imports/IFunction2.scala b/runtime/src/swam/runtime/imports/IFunction2.scala index 63721b31..806ba036 100644 --- a/runtime/src/swam/runtime/imports/IFunction2.scala +++ b/runtime/src/swam/runtime/imports/IFunction2.scala @@ -62,3 +62,255 @@ class IFunction2[F[_], P1, P2, Ret](f: (P1, P2) => F[Ret])(implicit reader1: Val s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) } } + +class IFunction3[F[_], P1, P2, P3, Ret](f: (P1, P2, P3) => F[Ret])(implicit reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = FuncType(Vector(reader1.swamType, reader2.swamType, reader3.swamType), Vector(writer.swamType)) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + v <- f(p1, p2, p3) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction4[F[_], P1, P2, P3, P4, Ret](f: (P1, P2, P3, P4) => F[Ret])(implicit reader1: 
ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = + FuncType(Vector(reader1.swamType, reader2.swamType, reader3.swamType, reader4.swamType), Vector(writer.swamType)) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + v <- f(p1, p2, p3, p4) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction5[F[_], P1, P2, P3, P4, P5, Ret](f: (P1, P2, P3, P4, P5) => F[Ret])(implicit reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = + FuncType(Vector(reader1.swamType, reader2.swamType, reader3.swamType, reader4.swamType, reader5.swamType), + Vector(writer.swamType)) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4, p5) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + p5 <- reader5.read(p5, m) + v <- f(p1, p2, p3, p4, p5) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction6[F[_], P1, P2, P3, P4, P5, P6, Ret](f: (P1, P2, P3, P4, P5, P6) => F[Ret])( + implicit reader1: ValueReader[F, P1], + reader2: 
ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = + FuncType(Vector(reader1.swamType, + reader2.swamType, + reader3.swamType, + reader4.swamType, + reader5.swamType, + reader6.swamType), + Vector(writer.swamType)) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4, p5, p6) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + p5 <- reader5.read(p5, m) + p6 <- reader6.read(p6, m) + v <- f(p1, p2, p3, p4, p5, p6) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction7[F[_], P1, P2, P3, P4, P5, P6, P7, Ret](f: (P1, P2, P3, P4, P5, P6, P7) => F[Ret])( + implicit reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = + FuncType( + Vector(reader1.swamType, + reader2.swamType, + reader3.swamType, + reader4.swamType, + reader5.swamType, + reader6.swamType, + reader7.swamType), + Vector(writer.swamType) + ) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4, p5, p6, p7) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + p5 <- reader5.read(p5, m) + p6 <- reader6.read(p6, m) + p7 <- reader7.read(p7, m) + v <- f(p1, p2, p3, p4, p5, p6, p7) + v <- 
writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction8[F[_], P1, P2, P3, P4, P5, P6, P7, P8, Ret](f: (P1, P2, P3, P4, P5, P6, P7, P8) => F[Ret])( + implicit reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + reader8: ValueReader[F, P8], + writer: ValueWriter[F, Ret], + F: MonadError[F, Throwable]) + extends Function[F] { + val tpe = + FuncType( + Vector(reader1.swamType, + reader2.swamType, + reader3.swamType, + reader4.swamType, + reader5.swamType, + reader6.swamType, + reader7.swamType, + reader8.swamType), + Vector(writer.swamType) + ) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4, p5, p6, p7, p8) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + p5 <- reader5.read(p5, m) + p6 <- reader6.read(p6, m) + p7 <- reader7.read(p7, m) + p8 <- reader8.read(p8, m) + v <- f(p1, p2, p3, p4, p5, p6, p7, p8) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} + +class IFunction9[F[_], P1, P2, P3, P4, P5, P6, P7, P8, P9, Ret](f: (P1, P2, P3, P4, P5, P6, P7, P8, P9) => F[Ret])( + implicit reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + reader8: ValueReader[F, P8], + reader9: ValueReader[F, P9], + writer: ValueWriter[F, Ret], + F: MonadError[F, 
Throwable]) + extends Function[F] { + val tpe = + FuncType( + Vector( + reader1.swamType, + reader2.swamType, + reader3.swamType, + reader4.swamType, + reader5.swamType, + reader6.swamType, + reader7.swamType, + reader8.swamType, + reader9.swamType + ), + Vector(writer.swamType) + ) + def invoke(parameters: Vector[Value], m: Option[Memory[F]]): F[Option[Value]] = + parameters match { + case Seq(p1, p2, p3, p4, p5, p6, p7, p8, p9) => + for { + p1 <- reader1.read(p1, m) + p2 <- reader2.read(p2, m) + p3 <- reader3.read(p3, m) + p4 <- reader4.read(p4, m) + p5 <- reader5.read(p5, m) + p6 <- reader6.read(p6, m) + p7 <- reader7.read(p7, m) + p8 <- reader8.read(p8, m) + p9 <- reader9.read(p9, m) + v <- f(p1, p2, p3, p4, p5, p6, p7, p8, p9) + v <- writer.write(v, m) + } yield Some(v) + case _ => + F.raiseError(new ConversionException( + s"function expects ${tpe.params.mkString("(", ", ", ")")} but got ${parameters.map(_.tpe).mkString("(", ", ", ")")}")) + } +} diff --git a/runtime/src/swam/runtime/imports/Imports.scala b/runtime/src/swam/runtime/imports/Imports.scala index ed9a6da7..70a6bb66 100644 --- a/runtime/src/swam/runtime/imports/Imports.scala +++ b/runtime/src/swam/runtime/imports/Imports.scala @@ -189,6 +189,99 @@ object AsInterface { def view(f: (P1, P2) => F[Ret]) = new IFunction2[F, P1, P2, Ret](f) } + implicit def function3AsInterface[P1, P2, P3, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3) => F[Ret], F] = + new AsInterface[(P1, P2, P3) => F[Ret], F] { + def view(f: (P1, P2, P3) => F[Ret]) = new IFunction3[F, P1, P2, P3, Ret](f) + } + + implicit def function4AsInterface[P1, P2, P3, P4, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + writer: ValueWriter[F, Ret]): AsInterface[(P1, 
P2, P3, P4) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4) => F[Ret], F] { + def view(f: (P1, P2, P3, P4) => F[Ret]) = new IFunction4[F, P1, P2, P3, P4, Ret](f) + } + + implicit def function5AsInterface[P1, P2, P3, P4, P5, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3, P4, P5) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4, P5) => F[Ret], F] { + def view(f: (P1, P2, P3, P4, P5) => F[Ret]) = new IFunction5[F, P1, P2, P3, P4, P5, Ret](f) + } + + implicit def function6AsInterface[P1, P2, P3, P4, P5, P6, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3, P4, P5, P6) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4, P5, P6) => F[Ret], F] { + def view(f: (P1, P2, P3, P4, P5, P6) => F[Ret]) = new IFunction6[F, P1, P2, P3, P4, P5, P6, Ret](f) + } + + implicit def function7AsInterface[P1, P2, P3, P4, P5, P6, P7, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3, P4, P5, P6, P7) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4, P5, P6, P7) => F[Ret], F] { + def view(f: (P1, P2, P3, P4, P5, P6, P7) => F[Ret]) = new IFunction7[F, P1, P2, P3, P4, P5, P6, P7, Ret](f) + } + + implicit def function8AsInterface[P1, P2, P3, P4, P5, P6, P7, P8, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, 
P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + reader8: ValueReader[F, P8], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3, P4, P5, P6, P7, P8) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4, P5, P6, P7, P8) => F[Ret], F] { + def view(f: (P1, P2, P3, P4, P5, P6, P7, P8) => F[Ret]) = + new IFunction8[F, P1, P2, P3, P4, P5, P6, P7, P8, Ret](f) + } + + implicit def function9AsInterface[P1, P2, P3, P4, P5, P6, P7, P8, P9, Ret, F[_]]( + implicit F: MonadError[F, Throwable], + reader1: ValueReader[F, P1], + reader2: ValueReader[F, P2], + reader3: ValueReader[F, P3], + reader4: ValueReader[F, P4], + reader5: ValueReader[F, P5], + reader6: ValueReader[F, P6], + reader7: ValueReader[F, P7], + reader8: ValueReader[F, P8], + reader9: ValueReader[F, P9], + writer: ValueWriter[F, Ret]): AsInterface[(P1, P2, P3, P4, P5, P6, P7, P8, P9) => F[Ret], F] = + new AsInterface[(P1, P2, P3, P4, P5, P6, P7, P8, P9) => F[Ret], F] { + def view(f: (P1, P2, P3, P4, P5, P6, P7, P8, P9) => F[Ret]) = + new IFunction9[F, P1, P2, P3, P4, P5, P6, P7, P8, P9, Ret](f) + } + implicit def arrayAsInterface[F[_]]: AsInterface[Array[Function[F]], F] = new AsInterface[Array[Function[F]], F] { def view(a: Array[Function[F]]) = new Table[F] { @@ -236,7 +329,12 @@ object AsInterface { def unsafeWriteInt(idx: Int, v: Int) = b.putInt(idx, v) def unsafeWriteLong(idx: Int, v: Long) = b.putLong(idx, v) def unsafeWriteShort(idx: Int, v: Short) = b.putShort(idx, v) - + def unsafeReadBytes(idx: Int, dst: Array[Byte]): Unit = { + val old = b.position() + b.position(idx) + b.get(dst) + b.position(old) + } } } diff --git a/runtime/src/swam/runtime/internals/instance/Instantiator.scala b/runtime/src/swam/runtime/internals/instance/Instantiator.scala index 7180cf8b..ff33b549 100644 --- a/runtime/src/swam/runtime/internals/instance/Instantiator.scala +++ 
b/runtime/src/swam/runtime/internals/instance/Instantiator.scala @@ -56,7 +56,9 @@ private[runtime] class Instantiator[F[_]](engine: Engine[F])(implicit F: Async[F if (provided.tpe <:< imp.tpe) F.pure(Left((idx + 1, acc :+ provided))) else - F.raiseError(new LinkException(s"Expected import of type ${imp.tpe} but got ${provided.tpe}")) + F.raiseError( + new LinkException( + s"Expected import ${imp.moduleName}.${imp.fieldName} of type ${imp.tpe} but got ${provided.tpe}")) } } } diff --git a/runtime/src/swam/runtime/internals/instance/MemoryInstance.scala b/runtime/src/swam/runtime/internals/instance/MemoryInstance.scala index cd464fb4..85b5ec2f 100644 --- a/runtime/src/swam/runtime/internals/instance/MemoryInstance.scala +++ b/runtime/src/swam/runtime/internals/instance/MemoryInstance.scala @@ -23,8 +23,7 @@ import cats.effect._ import java.nio.{ByteBuffer, ByteOrder} -private[runtime] class MemoryInstance[F[_]](min: Int, max: Option[Int], onHeap: Boolean, hardMax: Int)( - implicit F: Async[F]) +class MemoryInstance[F[_]](min: Int, max: Option[Int], onHeap: Boolean, hardMax: Int)(implicit F: Async[F]) extends Memory[F] { val tpe = MemType(Limits(min, max)) @@ -43,8 +42,9 @@ private[runtime] class MemoryInstance[F[_]](min: Int, max: Option[Int], onHeap: def size = buffer.capacity - def unsafeWriteByte(idx: Int, v: Byte) = + def unsafeWriteByte(idx: Int, v: Byte) = { buffer.put(idx, v) + } def unsafeReadByte(idx: Int) = buffer.get(idx) @@ -55,8 +55,9 @@ private[runtime] class MemoryInstance[F[_]](min: Int, max: Option[Int], onHeap: def unsafeReadShort(idx: Int) = buffer.getShort(idx) - def unsafeWriteInt(idx: Int, v: Int) = + def unsafeWriteInt(idx: Int, v: Int) = { buffer.putInt(idx, v) + } def unsafeReadInt(idx: Int) = buffer.getInt(idx) @@ -108,4 +109,8 @@ private[runtime] class MemoryInstance[F[_]](min: Int, max: Option[Int], onHeap: bytes.reset() } + def unsafeReadBytes(idx: Int, dst: Array[Byte]): Unit = { + buffer.position(idx) + buffer.get(dst) + } } diff --git 
a/runtime/src/swam/runtime/internals/interpreter/asm.scala b/runtime/src/swam/runtime/internals/interpreter/asm.scala index 58488baa..b5884882 100644 --- a/runtime/src/swam/runtime/internals/interpreter/asm.scala +++ b/runtime/src/swam/runtime/internals/interpreter/asm.scala @@ -21,11 +21,11 @@ package interpreter import swam.{syntax => sy} import instance._ - import cats._ import cats.implicits._ +import java.lang.{Double => JDouble, Float => JFloat, Integer => JInt, Long => JLong} -import java.lang.{Integer => JInt, Long => JLong, Float => JFloat, Double => JDouble} +import cats.effect.IO /** `Asm` is the interpreted language. It closely mirrors the WebAssembly bytecode with * few differences: @@ -45,6 +45,8 @@ import java.lang.{Integer => JInt, Long => JLong, Float => JFloat, Double => JDo */ sealed trait AsmInst[F[_]] { def execute(t: Frame[F]): Continuation[F] + + override def toString(): String = s"${getClass.getSimpleName}" } sealed trait Continuation[+F[_]] diff --git a/runtime/src/swam/runtime/trace/JULTracer.scala b/runtime/src/swam/runtime/trace/JULTracer.scala index ec79893f..10de354a 100644 --- a/runtime/src/swam/runtime/trace/JULTracer.scala +++ b/runtime/src/swam/runtime/trace/JULTracer.scala @@ -19,14 +19,18 @@ package runtime package trace import enumeratum._ - import pureconfig._ import pureconfig.error._ - import java.util.logging._ +import cats.effect.Effect +import swam.runtime.config.EngineConfiguration +import swam.validation.Validator + +import pureconfig.generic.auto._ + /** A tracer based on [[https://docs.oracle.com/en/java/javase/13/docs/api/java.logging/java/util/logging/package-summary.html java.util.logging]]. 
*/ -class JULTracer(conf: TraceConfiguration) extends Tracer { +class JULTracer(conf: TraceConfiguration, formatter: Formatter = PureFormatter) extends Tracer { val logger = Logger.getLogger("swam") logger.setLevel(Level.parse(conf.level)) @@ -46,7 +50,7 @@ class JULTracer(conf: TraceConfiguration) extends Tracer { } // TODO add other formatters support - handler.setFormatter(PureFormatter) + handler.setFormatter(formatter) logger.addHandler(handler) def traceEvent(tpe: EventType, args: List[String]): Unit = @@ -55,6 +59,29 @@ class JULTracer(conf: TraceConfiguration) extends Tracer { } +object JULTracer { + def apply(traceFolder: String, + traceNamePattern: String, + formatter: Formatter = PureFormatter, + filter: String = "*"): JULTracer = { + val default = ConfigSource.default + .at("swam.runtime.tracer") + .loadOrThrow[TraceConfiguration] + + val custom = TraceConfiguration( + HandlerType.File, + default.separator, + filter, + default.level, + TracerFileHandlerCondiguration(traceNamePattern, append = false, traceFolder), + default.socketHandler, + default.custom + ) + + new JULTracer(custom, formatter) + } +} + private object PureFormatter extends Formatter { override def format(x: LogRecord): String = diff --git a/runtime/src/swam/runtime/trace/TracingMemory.scala b/runtime/src/swam/runtime/trace/TracingMemory.scala index 00062dd5..51defa2a 100644 --- a/runtime/src/swam/runtime/trace/TracingMemory.scala +++ b/runtime/src/swam/runtime/trace/TracingMemory.scala @@ -22,7 +22,7 @@ import cats._ import java.nio.ByteBuffer /** A memory instance that traces all calls to the underlying memory instance. 
*/ -private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(implicit F: MonadError[F, Throwable]) +class TracingMemory[F[_]](val inner: Memory[F], tracer: Tracer)(implicit F: MonadError[F, Throwable]) extends Memory[F] { def tpe: MemType = inner.tpe @@ -38,9 +38,6 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp inner.unsafeGrow(by) } - def unsafeWriteBytes(idx: Int, bytes: ByteBuffer): Unit = - inner.unsafeWriteBytes(idx, bytes) - def unsafeWriteByte(idx: Int, v: Byte): Unit = { tracer.traceEvent(EventType.MWrite, List("i8", idx.toString, v.toString)) inner.unsafeWriteByte(idx, v) @@ -48,7 +45,7 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadByte(idx: Int): Byte = { val res = inner.unsafeReadByte(idx) - tracer.traceEvent(EventType.MRead, List("mread", "i8", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("i8", idx.toString, res.toString)) res } @@ -59,7 +56,7 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadShort(idx: Int): Short = { val res = inner.unsafeReadShort(idx) - tracer.traceEvent(EventType.MRead, List("mread", "i16", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("i16", idx.toString, res.toString)) res } @@ -70,7 +67,7 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadInt(idx: Int): Int = { val res = inner.unsafeReadInt(idx) - tracer.traceEvent(EventType.MRead, List("mread", "i32", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("i32", idx.toString, res.toString)) res } @@ -81,7 +78,7 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadLong(idx: Int): Long = { val res = inner.unsafeReadLong(idx) - tracer.traceEvent(EventType.MRead, List("mread", "i64", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("i64", 
idx.toString, res.toString)) res } @@ -92,7 +89,7 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadFloat(idx: Int): Float = { val res = inner.unsafeReadFloat(idx) - tracer.traceEvent(EventType.MRead, List("mread", "f32", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("f32", idx.toString, res.toString)) res } @@ -103,10 +100,19 @@ private[runtime] class TracingMemory[F[_]](inner: Memory[F], tracer: Tracer)(imp def unsafeReadDouble(idx: Int): Double = { val res = inner.unsafeReadDouble(idx) - tracer.traceEvent(EventType.MRead, List("mread", "f64", idx.toString, res.toString)) + tracer.traceEvent(EventType.MRead, List("f64", idx.toString, res.toString)) res } + override def unsafeReadBytes(idx: Int, dst: Array[Byte]): Unit = { + val res = inner.unsafeReadBytes(idx, dst) + tracer.traceEvent(EventType.MRead, List("bytes", idx.toString)) + } + + override def unsafeWriteBytes(idx: Int, bytes: ByteBuffer) = { + inner.unsafeWriteBytes(idx, bytes) + tracer.traceEvent(EventType.MWrite, List("bytes", idx.toString)) + } } private[runtime] object TracingMemory { diff --git a/split_traces.py b/split_traces.py new file mode 100644 index 00000000..64d38e59 --- /dev/null +++ b/split_traces.py @@ -0,0 +1,44 @@ +import sys +import os +import re + + +def process_traces(log_file, regexes, preffix): + print(log_file, regexes) + + remaining = open("%s-remaining-log.txt"%preffix, 'w') + + files = {} + + for r in regexes: + files[r] = open("%s-%s-log.txt"%(preffix, r.replace("|", "-")), 'w') + + for l in open(log_file, 'r').readlines(): + found = False + for r in regexes: + if(re.search(r, l)): + found = True + files[r].write(l) + break + if found: + continue + else: + remaining.write("%s"%l) + remaining.close() + + for k, v in files.items(): + v.close() + +def execute_wasm(file): + pass + + +if __name__ == '__main__': + + + + log_file = sys.argv[1] + preffix = sys.argv[2] + regexes = sys.argv[3:] + + 
process_traces(log_file, regexes, preffix) \ No newline at end of file diff --git a/text/src/swam/text/Compiler.scala b/text/src/swam/text/Compiler.scala index 274ba59f..ca9eb315 100644 --- a/text/src/swam/text/Compiler.scala +++ b/text/src/swam/text/Compiler.scala @@ -37,9 +37,9 @@ class Compiler[F[_]] private (validator: Validator[F])(implicit val F: Effect[F] private val binaryParser = new ModuleParser[F](validator) - def compile(file: Path): F[Module] = + def compile(file: Path, blocker: Blocker, chunkSize: Int = 1024)(implicit cs: ContextShift[F]): F[Module] = for { - input <- F.liftIO(readFile(file)) + input <- readFile(file, blocker, chunkSize) unresolved <- parse(input) mod <- compile(unresolved) } yield mod @@ -53,9 +53,10 @@ class Compiler[F[_]] private (validator: Validator[F])(implicit val F: Effect[F] def stream(module: unresolved.Module, debug: Boolean): Stream[F, Section] = Stream.force(resolver.resolve(module, debug)) - def stream(file: Path, debug: Boolean): Stream[F, Section] = + def stream(file: Path, debug: Boolean, blocker: Blocker, chunkSize: Int = 1024)( + implicit cs: ContextShift[F]): Stream[F, Section] = Stream.force(for { - input <- F.liftIO(readFile(file)) + input <- readFile(file, blocker, chunkSize) unresolved <- parse(input) } yield stream(unresolved, debug)) diff --git a/text/src/swam/text/package.scala b/text/src/swam/text/package.scala index 97768372..24269cc9 100644 --- a/text/src/swam/text/package.scala +++ b/text/src/swam/text/package.scala @@ -17,14 +17,16 @@ package swam import cats.effect._ +import cats.implicits._ -import java.nio.file.Path +import fs2._ -import scala.io.Source +import java.nio.file.Path package object text { - def readFile(f: Path): IO[String] = - IO(Source.fromFile(f.toFile, "UTF-8").mkString) + def readFile[F[_]](path: Path, blocker: Blocker, chunkSize: Int)(implicit F: Sync[F], + cs: ContextShift[F]): F[String] = + io.file.readAll[F](path, blocker, chunkSize).through(text.utf8Decode).compile.foldMonoid } 
diff --git a/text/src/swam/text/parser/Lexical.scala b/text/src/swam/text/parser/Lexical.scala index 00b165b0..4d2dd490 100644 --- a/text/src/swam/text/parser/Lexical.scala +++ b/text/src/swam/text/parser/Lexical.scala @@ -48,6 +48,9 @@ object Lexical { def id[_: P]: P[String] = P("$" ~ idchar.rep(1).!) + def name[_: P]: P[String] = + P(idchar.rep(1).!) + private def sign[_: P] = P(CharIn("+\\-").?.!.map { case "" | "+" => 1 diff --git a/text/src/swam/witx/WitxParser.scala b/text/src/swam/witx/WitxParser.scala new file mode 100644 index 00000000..40e6300c --- /dev/null +++ b/text/src/swam/witx/WitxParser.scala @@ -0,0 +1,53 @@ +package swam +package witx + +import java.nio.file.Path + +import fastparse._ +import swam.text.{ParserException, readFile} +import swam.witx.parser.{ImportContext, ModuleParser, TypesParser} +import swam.witx.unresolved.{BaseWitxType, ModuleInterface} +import cats.effect._ +import cats.implicits._ + +/** + * @author Javier Cabrera-Arteaga on 2020-03-20 + */ +class WitxParser[F[_]](implicit val F: Effect[F]) { + + def parseModuleInterface(file: Path, blocker: Blocker, ctx: ImportContext[F], chunkSize: Int = 1024)( + implicit cs: ContextShift[F]): F[(Map[String, BaseWitxType], ModuleInterface)] = + for { + input <- readFile(file, blocker, chunkSize) + interface <- parseModuleString(input, ctx) + } yield interface + + private[swam] def parseModuleString(input: String, + ctx: ImportContext[F]): F[(Map[String, BaseWitxType], ModuleInterface)] = + F.liftIO { + IO(fastparse.parse(input, ModuleParser[F](ctx).file(_))).flatMap { + case Parsed.Success(m, _) => IO.pure(m) + case f @ Parsed.Failure(_, idx, _) => IO.raiseError(new ParserException(f.msg, idx)) + } + } + + def parseTypes(file: Path, blocker: Blocker, chunkSize: Int = 1024)( + implicit cs: ContextShift[F]): F[Map[String, BaseWitxType]] = + for { + input <- readFile(file, blocker, chunkSize) + interface <- parseTypesString(input) + } yield interface + + private[swam] def 
parseTypesString(input: String): F[Map[String, BaseWitxType]] = + F.liftIO { + IO(fastparse.parse(input, TypesParser.file(_))).flatMap { + case Parsed.Success(m, _) => IO.pure(m) + case f @ Parsed.Failure(_, idx, _) => IO.raiseError(new ParserException(f.msg, idx)) + } + } + +} + +object WitxParser { + def apply[F[_]](implicit F: Effect[F]): WitxParser[F] = new WitxParser[F]() +} diff --git a/text/src/swam/witx/parser/ImportContext.scala b/text/src/swam/witx/parser/ImportContext.scala new file mode 100644 index 00000000..d7366b33 --- /dev/null +++ b/text/src/swam/witx/parser/ImportContext.scala @@ -0,0 +1,49 @@ +package swam.witx.parser + +import java.io.File +import java.nio.file.{Files, Path, Paths} + +import cats.effect._ +import fastparse.Parsed +import swam.text.{ParserException, readFile} +import swam.witx.WitxParser +import swam.witx.unresolved.{AliasType, BaseWitxType, ModuleInterface} + +import scala.concurrent.ExecutionContext + +/** + *@author Javier Cabrera-Arteaga on 2020-03-18 + */ +class ImportContext[F[_]](implicit val F: Effect[F]) { + + // TODO FIX ! 
+ implicit val cs = IO.contextShift(ExecutionContext.Implicits.global) + + def load(path: String) = { + Blocker[IO] + .use { blocker => + WitxParser[IO].parseTypes(Paths.get( + s"generator/resources/wasi_witx/$path" + ), + blocker) + } + .unsafeRunSync() + } + + def getInPath(path: String, includes: String*): String = { + if (Files.exists(Paths.get(path))) + path + else + System + .getenv("PATH") + .split(":") + .concat(includes) + .filter(t => Files.exists(Paths.get(s"$t/$path")))(0) + } + +} + +object ImportContext { + + def apply[F[_]: Effect](): ImportContext[F] = new ImportContext[F]() +} diff --git a/text/src/swam/witx/parser/ModuleParser.scala b/text/src/swam/witx/parser/ModuleParser.scala new file mode 100644 index 00000000..ce38632a --- /dev/null +++ b/text/src/swam/witx/parser/ModuleParser.scala @@ -0,0 +1,106 @@ +package swam +package witx +package parser + +import cats.effect.Effect +import fastparse._ +import swam.witx.parser.parser.WitxWhitespace._ +import swam.witx.unresolved.{BaseWitxType, _} + +/** + @author Javier Cabrera-Arteaga on 2020-03-18 + */ +class ModuleParser[F[_]: Effect](val importContext: ImportContext[F], + multipleResult: Boolean = false, + expandArrayTypes: Boolean = true) { + + import swam.text.parser.Lexical._ + import TypesParser.tpe + + def file[_: P] = + P( + ws ~ deps + .rep() + .map( + t => t.reduce((t1, t2) => t1 ++ t2) + ) + .flatMap(types => module(types)) ~ ws ~ End) + + def module[_: P](types: Map[String, BaseWitxType]) = + P( + "(" ~ word("module") ~ id ~ + (`import`.map { case (name, tp) => ImportDeclaration(name, tp) } + | interface(types)) + .rep(1) ~ ")").map { + case (name, decs) => (types, ModuleInterface(name, decs)) + } + + def `import`[_: P] = { + P("(" ~ word("import") ~ string ~ "(" ~ name ~ ")" ~ ")") + } + + def interface[_: P](types: Map[String, BaseWitxType]): P[FunctionExport] = { + P( + "(" ~ word("@interface") ~ func(types) ~ ")" + ) + } + + def splitResultsAndParamsByWASIType(params: Seq[Field], + 
results: Seq[Field], + types: Map[String, BaseWitxType]): (Seq[Field], Seq[Field]) = { + if (multipleResult) + (params, results) + else { + if (results.length > 1) { + return (params.concat(results.tail.map(t => Field(t.id, types("ptr")))), Seq(results.head)) + } + (params, results) + } + } + + def expandArrayTypes(field: Field, types: Map[String, BaseWitxType]): Seq[Field] = + field.tpe match { + case _: ArrayType => Seq(field, Field(s"${field.id}Len", types("u32"))) + case _ => Seq(field) + } + + def func[_: P](types: Map[String, BaseWitxType]): P[FunctionExport] = { + P(word("func") ~ "(" ~ word("export") ~ string ~ ")" ~ param(types).rep() ~ result(types).rep()).map { + case (name, params, results) => { + val paramsImports = + splitResultsAndParamsByWASIType(params.flatten, + results.flatten.map(r => r.copy(r.id, r.tpe, isResult = true)), + types) + FunctionExport(name, paramsImports._1, paramsImports._2) + } + } + } + + def param[_: P](types: Map[String, BaseWitxType]): P[Seq[Field]] = { + P("(" ~ word("param") ~ field(types) ~ ")") + } + + def result[_: P](types: Map[String, BaseWitxType]): P[Seq[Field]] = { + P("(" ~ word("result") ~ field(types) ~ ")") + } + + def field[_: P](types: Map[String, BaseWitxType]): P[Seq[Field]] = { + P(id ~ tpe(types)).map { + case (name, tpe) => if (expandArrayTypes) expandArrayTypes(Field(name, tpe), types) else Seq(Field(name, tpe)) + } + } + + def ptr[_: P](types: Map[String, BaseWitxType]): P[BaseWitxType] = { + P("(" ~ word("@witx") ~ (word("pointer") | word("const_pointer")) ~ tpe(types) ~ ")") + } + + def deps[_: P] = { + P("(" ~ word("use") ~ string.map(t => importContext.load(t)) ~ ")") + } + +} + +object ModuleParser { + def apply[F[_]: Effect](importContext: ImportContext[F]): ModuleParser[F] = + new ModuleParser[F](importContext) +} diff --git a/text/src/swam/witx/parser/TypesParser.scala b/text/src/swam/witx/parser/TypesParser.scala new file mode 100644 index 00000000..25f417c4 --- /dev/null +++ 
b/text/src/swam/witx/parser/TypesParser.scala @@ -0,0 +1,111 @@ +package swam +package witx +package parser + +import java.nio.file.Path + +import cats.effect._ +import fastparse._ +import swam.text.{ParserException, readFile} +import swam.witx.parser.parser.WitxWhitespace._ +import swam.witx.unresolved._ + +import scala.collection.immutable.HashMap +import scala.collection.mutable + +/** + @author Javier Cabrera-Arteaga on 2020-03-18 + */ +object TypesParser { + + import text.parser.Lexical._ + + private val declaredTypes = Map[String, BaseWitxType]( + "u32" -> BasicType("u32", 4), + "u64" -> BasicType("u64", 8), + "s64" -> BasicType("s64", 8), + "u8" -> BasicType("u8", 1), + "u16" -> BasicType("u16", 2), + "string" -> BasicType("string", 4), + "ptr" -> BasicType("ptr", 4) + ) + + def file[_: P]: P[Map[String, BaseWitxType]] = { + P(ws ~ types(declaredTypes) ~ ws ~ End) + } + + def types[_: P](types: Map[String, BaseWitxType]): P[Map[String, BaseWitxType]] = { + P( + "(" ~ word("typename") + ~ id.flatMap(id => subtype(id, types)) ~ ")") + .map(t => HashMap[String, BaseWitxType](t.tpeName -> t)) + .rep(1) + .map(kv => kv.fold(declaredTypes)((prev, curr) => curr ++ prev)) + } + + def subtype[_: P](typeId: String, types: Map[String, BaseWitxType]): P[BaseWitxType] = { + P( + name + .map( + t => { + if (!types.contains(t)) throw new Exception(s"Type $t not found") + AliasType(typeId, types(t)) + }) | enum(typeId, types) | flags(typeId, types) | struct(typeId, types) | array(typeId, types) | handle( + typeId) | union(typeId, types) + ) + } + + def struct[_: P](typeId: String, types: Map[String, BaseWitxType]): P[StructType] = { + P("(" ~ word("struct") ~ field(types).rep(1) ~ ")").map { fields => + StructType(typeId, fields) + } + } + + def array[_: P](typeId: String, types: Map[String, BaseWitxType]): P[ArrayType] = { + P("(" ~ word("array") ~ id ~ ")").map { i => + ArrayType(typeId, types(i)) + } + } + + def pointer[_: P](types: Map[String, BaseWitxType]): P[Pointer] 
= { + P("(" ~ word("@witx") ~ (word("pointer") | word("const_pointer")) ~ tpe(types) ~ ")").map { + case tpe => Pointer(tpe) + } + } + + def handle[_: P](typeId: String): P[Handle] = { + P("(" ~ word("handle") ~ ")").map(_ => Handle(typeId)) + } + + def tpe[_: P](importCtx: Map[String, BaseWitxType]): P[BaseWitxType] = { + P((id | name).map(importCtx) | pointer(importCtx)) + } + + def field[_: P](importCtx: Map[String, BaseWitxType]): P[Field] = { + P( + "(" ~ word("field") ~ id ~ tpe(importCtx) ~ ")" + ).map { + case (name, tpe) => Field(name, tpe, isResult = false) + } + } + + def union[_: P](typeId: String, importCtx: Map[String, BaseWitxType]): P[UnionType] = { + P( + "(" ~ word("union") ~ id ~ field(importCtx).rep(1) ~ ")" + ).map { case (name, fields) => UnionType(typeId, name, fields) } + } + + def enum[_: P](typeId: String, importCtx: Map[String, BaseWitxType]): P[EnumType] = { + P("(" ~ word("enum") ~ name ~ id.rep(1) ~ ")").map { + case (t, names) => + EnumType(typeId, importCtx(t), names) + } + } + + def flags[_: P](typeId: String, importCtx: Map[String, BaseWitxType]): P[FlagsType] = { + P("(" ~ word("flags") ~ name ~ id.rep(1) ~ ")").map { + case (tpe, names) => FlagsType(typeId, importCtx(tpe), names) + } + } + +} diff --git a/text/src/swam/witx/parser/package.scala b/text/src/swam/witx/parser/package.scala new file mode 100644 index 00000000..316c38be --- /dev/null +++ b/text/src/swam/witx/parser/package.scala @@ -0,0 +1,15 @@ +package swam.witx.parser + +import fastparse._ + +import swam.text.parser.Lexical + +package object parser { + + object WitxWhitespace { + implicit val whitespace = { implicit ctx: ParsingRun[_] => + Lexical.ws + } + } + +} diff --git a/text/src/swam/witx/traverser/ModuleInterfaceTraverser.scala b/text/src/swam/witx/traverser/ModuleInterfaceTraverser.scala new file mode 100644 index 00000000..76f34c3b --- /dev/null +++ b/text/src/swam/witx/traverser/ModuleInterfaceTraverser.scala @@ -0,0 +1,28 @@ +package swam.witx.traverser + 
+import cats.Monad +import swam.witx.unresolved._ + +/** + * @author Javier Cabrera-Arteaga on 2020-03-21 + */ +class ModuleInterfaceTraverser[Res](val interface: ModuleInterface) { + + protected[this] val fst = (res: Res, i: Declaration) => res + + val importDeclarationTraverser: (Res, ImportDeclaration) => Res = fst + val functionExportTraverser: (Res, FunctionExport) => Res = fst + + def traverseAll(zero: Res, compose: (Res, Res) => Res) = + interface.declarations + .map(d => traverse(zero, d)) + .reduce(compose) + + def traverse(zero: Res, x: Declaration): Res = { + x match { + case x: FunctionExport => functionExportTraverser(zero, x) + case x: ImportDeclaration => importDeclarationTraverser(zero, x) + } + } + +} diff --git a/text/src/swam/witx/traverser/TypesTraverser.scala b/text/src/swam/witx/traverser/TypesTraverser.scala new file mode 100644 index 00000000..5d36c1fc --- /dev/null +++ b/text/src/swam/witx/traverser/TypesTraverser.scala @@ -0,0 +1,43 @@ +package swam.witx.traverser + +import cats.Monad +import cats.effect.Effect +import swam.syntax.{Inst, i64} +import swam.witx.unresolved._ + +/** + * @author Javier Cabrera-Arteaga on 2020-03-21 + */ +abstract class TypesTraverser[Res](val types: Map[String, BaseWitxType]) { + + protected[this] val fst = (res: Res, i: BaseWitxType) => res + + val aliasTypeTraverser: (Res, AliasType) => Res = fst + val basicTypeTraverser: (Res, BasicType) => Res = fst + val enumTypeTraverser: (Res, EnumType) => Res = fst + val flagsTypeTraverser: (Res, FlagsType) => Res = fst + val arrayTypeTraverser: (Res, ArrayType) => Res = fst + val pointerTypeTraverser: (Res, Pointer) => Res = fst + val structTypeTraverser: (Res, StructType) => Res = fst + val unionTypeTraverser: (Res, UnionType) => Res = fst + val handleTypeTraverser: (Res, Handle) => Res = fst + + def traverseAll(zero: Res, compose: (Res, Res) => Res) = + types + .map { case (name, t) => traverse(zero, t) } + .reduce(compose) + + def traverse(zero: Res, x: 
BaseWitxType): Res = { + x match { + case x: AliasType => aliasTypeTraverser(zero, x) + case x: BasicType => basicTypeTraverser(zero, x) + case x: EnumType => enumTypeTraverser(zero, x) + case x: FlagsType => flagsTypeTraverser(zero, x) + case x: StructType => structTypeTraverser(zero, x) + case x: UnionType => unionTypeTraverser(zero, x) + case x: Handle => handleTypeTraverser(zero, x) + case x: ArrayType => arrayTypeTraverser(zero, x) + case x: Pointer => pointerTypeTraverser(zero, x) + } + } +} diff --git a/text/src/swam/witx/unresolved/Declarations.scala b/text/src/swam/witx/unresolved/Declarations.scala new file mode 100644 index 00000000..ac92d0b3 --- /dev/null +++ b/text/src/swam/witx/unresolved/Declarations.scala @@ -0,0 +1,16 @@ +package swam +package witx +package unresolved + +/** + @author Javier Cabrera-Arteaga on 2020-03-21 + */ +sealed trait Declaration + +case class Field(id: String, tpe: BaseWitxType, isResult: Boolean = false) extends Declaration + +case class FunctionExport(id: String, params: Seq[Field], results: Seq[Field]) extends Declaration + +case class ImportDeclaration(name: String, tpe: String) extends Declaration + +case class ModuleInterface(id: String, declarations: Seq[Declaration]) extends Declaration diff --git a/text/src/swam/witx/unresolved/Types.scala b/text/src/swam/witx/unresolved/Types.scala new file mode 100644 index 00000000..7d72cb46 --- /dev/null +++ b/text/src/swam/witx/unresolved/Types.scala @@ -0,0 +1,32 @@ +package swam +package witx +package unresolved + +import swam.witx.traverser.TypesTraverser + +/** + *@author Javier Cabrera-Arteaga on 2020-03-18 + */ +abstract class BaseWitxType(val tpeName: String, val size: Int = 4) + +case class BasicType(name: String, override val size: Int) extends BaseWitxType(name, size) + +case class AliasType(override val tpeName: String, tpe: BaseWitxType) extends BaseWitxType(tpeName, size = tpe.size) + +case class EnumType(override val tpeName: String, tpe: BaseWitxType, names: 
Seq[String]) + extends BaseWitxType(tpeName, size = tpe.size) + +case class FlagsType(override val tpeName: String, tpe: BaseWitxType, names: Seq[String]) + extends BaseWitxType(tpeName, size = tpe.size) + +case class ArrayType(override val tpeName: String, tpe: BaseWitxType) extends BaseWitxType(tpeName, size = tpe.size) + +case class Pointer(tpe: BaseWitxType) extends BaseWitxType("Pointer", 4 * tpe.size) // 8 bytes + +case class StructType(override val tpeName: String, fields: Seq[Field]) + extends BaseWitxType(tpeName, size = fields.map(t => t.tpe.size).sum) + +case class UnionType(override val tpeName: String, name: String, fields: Seq[Field]) + extends BaseWitxType(tpeName, size = fields.map(t => t.tpe.size).sum) + +case class Handle(override val tpeName: String) extends BaseWitxType(tpeName) diff --git a/update_wasi_interfaces.sh b/update_wasi_interfaces.sh new file mode 100755 index 00000000..d4cd467c --- /dev/null +++ b/update_wasi_interfaces.sh @@ -0,0 +1 @@ +mill generator.run -x True -p wasi -i generator/resources/wasi_witx generator/resources/wasi_witx/wasi_snapshot_preview1.witx \ No newline at end of file From 0addeff76d9d0bb78bc9682ba52c9dd973556c91 Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Mon, 20 Apr 2020 20:21:27 +0200 Subject: [PATCH 02/14] Removing side-effects in TypesParser --- generator/resources/wasi_witx/typenames.witx | 3 +-- generator/test/src/generator/WitxTest.scala | 1 + text/src/swam/witx/parser/TypesParser.scala | 17 ++++++++++------- 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/generator/resources/wasi_witx/typenames.witx b/generator/resources/wasi_witx/typenames.witx index 2bccd79a..f1ef4e84 100644 --- a/generator/resources/wasi_witx/typenames.witx +++ b/generator/resources/wasi_witx/typenames.witx @@ -743,5 +743,4 @@ (union $preopentype (field $dir $prestat_dir) ) -) - +) \ No newline at end of file diff --git a/generator/test/src/generator/WitxTest.scala b/generator/test/src/generator/WitxTest.scala 
index 280f1a7a..dbb220b6 100644 --- a/generator/test/src/generator/WitxTest.scala +++ b/generator/test/src/generator/WitxTest.scala @@ -39,6 +39,7 @@ object WitParser extends TestSuite { }) .unsafeRunSync() + println(types) } val tests = Tests { diff --git a/text/src/swam/witx/parser/TypesParser.scala b/text/src/swam/witx/parser/TypesParser.scala index 25f417c4..49c9b135 100644 --- a/text/src/swam/witx/parser/TypesParser.scala +++ b/text/src/swam/witx/parser/TypesParser.scala @@ -31,16 +31,19 @@ object TypesParser { ) def file[_: P]: P[Map[String, BaseWitxType]] = { - P(ws ~ types(declaredTypes) ~ ws ~ End) + P(ws ~ _types(declaredTypes) ~ ws ~ End) } - def types[_: P](types: Map[String, BaseWitxType]): P[Map[String, BaseWitxType]] = { + def _types[_: P](types: Map[String, BaseWitxType]): P[Map[String, BaseWitxType]] = { P( - "(" ~ word("typename") - ~ id.flatMap(id => subtype(id, types)) ~ ")") - .map(t => HashMap[String, BaseWitxType](t.tpeName -> t)) - .rep(1) - .map(kv => kv.fold(declaredTypes)((prev, curr) => curr ++ prev)) + _type(types).flatMap(t => _types(t)) | + _type(types) + ) + } + + def _type[_: P](types: Map[String, BaseWitxType]): P[Map[String, BaseWitxType]] = { + P("(" ~ word("typename") ~ id.flatMap(id => subtype(id, types)) ~ ")") + .map(t => HashMap[String, BaseWitxType](t.tpeName -> t) ++ types) } def subtype[_: P](typeId: String, types: Map[String, BaseWitxType]): P[BaseWitxType] = { From c30bf78feef7a428700c73969507c3957e2c82ed Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Mon, 20 Apr 2020 20:23:11 +0200 Subject: [PATCH 03/14] Improving code --- text/src/swam/witx/parser/ModuleParser.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/text/src/swam/witx/parser/ModuleParser.scala b/text/src/swam/witx/parser/ModuleParser.scala index ce38632a..c5c73b3c 100644 --- a/text/src/swam/witx/parser/ModuleParser.scala +++ b/text/src/swam/witx/parser/ModuleParser.scala @@ -22,9 +22,9 @@ class ModuleParser[F[_]: Effect](val 
importContext: ImportContext[F], ws ~ deps .rep() .map( - t => t.reduce((t1, t2) => t1 ++ t2) + _.flatten ) - .flatMap(types => module(types)) ~ ws ~ End) + .flatMap(types => module(types.toMap)) ~ ws ~ End) def module[_: P](types: Map[String, BaseWitxType]) = P( From e3ae5fe7ccd61b9c0108915bf9c3c4457558ac36 Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Mon, 20 Apr 2020 20:32:12 +0200 Subject: [PATCH 04/14] Moving basic types --- text/src/swam/witx/parser/TypesParser.scala | 14 +++++++------- text/src/swam/witx/unresolved/Types.scala | 12 +++++++++++- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/text/src/swam/witx/parser/TypesParser.scala b/text/src/swam/witx/parser/TypesParser.scala index 49c9b135..0c74f7d3 100644 --- a/text/src/swam/witx/parser/TypesParser.scala +++ b/text/src/swam/witx/parser/TypesParser.scala @@ -21,13 +21,13 @@ object TypesParser { import text.parser.Lexical._ private val declaredTypes = Map[String, BaseWitxType]( - "u32" -> BasicType("u32", 4), - "u64" -> BasicType("u64", 8), - "s64" -> BasicType("s64", 8), - "u8" -> BasicType("u8", 1), - "u16" -> BasicType("u16", 2), - "string" -> BasicType("string", 4), - "ptr" -> BasicType("ptr", 4) + "u32" -> BasicType.u32, + "u64" -> BasicType.u64, + "s64" -> BasicType.s64, + "u8" -> BasicType.u8, + "u16" -> BasicType.u16, + "string" -> BasicType.string, + "ptr" -> BasicType.ptr ) def file[_: P]: P[Map[String, BaseWitxType]] = { diff --git a/text/src/swam/witx/unresolved/Types.scala b/text/src/swam/witx/unresolved/Types.scala index 7d72cb46..123bc4a6 100644 --- a/text/src/swam/witx/unresolved/Types.scala +++ b/text/src/swam/witx/unresolved/Types.scala @@ -9,7 +9,17 @@ import swam.witx.traverser.TypesTraverser */ abstract class BaseWitxType(val tpeName: String, val size: Int = 4) -case class BasicType(name: String, override val size: Int) extends BaseWitxType(name, size) +sealed abstract class BasicType(name: String, override val size: Int) extends BaseWitxType(name, size) + 
+object BasicType { + case object u8 extends BasicType("u8", 1) + case object u32 extends BasicType("u32", 4) + case object u64 extends BasicType("u64", 8) + case object u16 extends BasicType("u16", 2) + case object s64 extends BasicType("s64", 8) + case object string extends BasicType("string", 4) + case object ptr extends BasicType("ptr", 4) +} case class AliasType(override val tpeName: String, tpe: BaseWitxType) extends BaseWitxType(tpeName, size = tpe.size) From 31dacaa0f6d467a91e8c7844f96a599776f177eb Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Mon, 20 Apr 2020 20:44:28 +0200 Subject: [PATCH 05/14] Moving basic types as classes instead of str --- build.sc | 2 +- generator/resources/witx_module.mustache | 0 generator/resources/witx_types.mustache | 0 .../witx/LoadTypeEmitTraverser.scala | 14 ++++---- .../swam/generator/witx/ModuleTraverse.scala | 32 +++++++++---------- .../generator/witx/TypesEmitTraverser.scala | 16 +++++----- .../witx/WriteTypeEmitTraverser.scala | 14 ++++---- generator/test/src/generator/WitxTest.scala | 21 ++++++++++++ 8 files changed, 60 insertions(+), 39 deletions(-) create mode 100644 generator/resources/witx_module.mustache create mode 100644 generator/resources/witx_types.mustache diff --git a/build.sc b/build.sc index fec693ca..20aa5c0d 100644 --- a/build.sc +++ b/build.sc @@ -134,7 +134,7 @@ object generator extends SwamModule with PublishModule { object test extends Tests with ScalafmtModule { def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.7.1") def testFrameworks = Seq("swam.util.Framework") - def moduleDeps = Seq(core,text, util.test ) + def moduleDeps = Seq(core,text, generator, util.test ) } } diff --git a/generator/resources/witx_module.mustache b/generator/resources/witx_module.mustache new file mode 100644 index 00000000..e69de29b diff --git a/generator/resources/witx_types.mustache b/generator/resources/witx_types.mustache new file mode 100644 index 00000000..e69de29b diff --git 
a/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala b/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala index 1bffc3dc..dee0073c 100644 --- a/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala +++ b/generator/src/swam/generator/witx/LoadTypeEmitTraverser.scala @@ -27,13 +27,13 @@ class LoadTypeEmitTraverser(prev: String, types: Map[String, BaseWitxType], offs override val basicTypeTraverser = { case (_, t: BasicType) => - t.name match { - case "u8" => s"$mem.readByte(${concatOffsets(offset, prev)}).unsafeRunSync()\n" - case "u16" => s"$mem.readShort(${concatOffsets(offset, prev)}).unsafeRunSync\n" - case "u32" => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" - case "u64" => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" - case "s64" => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" - case "string" => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" + t match { + case BasicType.u8 => s"$mem.readByte(${concatOffsets(offset, prev)}).unsafeRunSync()\n" + case BasicType.u16 => s"$mem.readShort(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case BasicType.u32 => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case BasicType.u64 => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case BasicType.s64 => s"$mem.readLong(${concatOffsets(offset, prev)}).unsafeRunSync\n" + case BasicType.string => s"$mem.readInt(${concatOffsets(offset, prev)}).unsafeRunSync\n" } } diff --git a/generator/src/swam/generator/witx/ModuleTraverse.scala b/generator/src/swam/generator/witx/ModuleTraverse.scala index 0658682b..e24dcc74 100644 --- a/generator/src/swam/generator/witx/ModuleTraverse.scala +++ b/generator/src/swam/generator/witx/ModuleTraverse.scala @@ -115,14 +115,14 @@ class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) case x: ArrayType => x.tpeName } - def mapBasicType(t: BasicType): Adapt = t.name match { - case 
"u8" => Adapt("Int", "Int") - case "u16" => Adapt("Int", "Short") - case "u32" => Adapt("Int", "Int") - case "u64" => Adapt("Long", "Long") - case "s64" => Adapt("Long", "Long") - case "string" => Adapt("Int", "String") - case "ptr" => Adapt("Int", "Int") + def mapBasicType(t: BasicType): Adapt = t match { + case BasicType.u8 => Adapt("Int", "Int") + case BasicType.u16 => Adapt("Int", "Short") + case BasicType.u32 => Adapt("Int", "Int") + case BasicType.u64 => Adapt("Long", "Long") + case BasicType.s64 => Adapt("Long", "Long") + case BasicType.string => Adapt("Int", "String") + case BasicType.ptr => Adapt("Int", "Int") } def mapAliasType(t: AliasType): Adapt = mapTypeToWasm(types(t.tpe.tpeName)) @@ -158,14 +158,14 @@ class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) override val basicTypeTraverser = { case (_, t: BasicType) => - t.name match { - case "u8" => s"$name.toByte" - case "u16" => s"$name.toShort" - case "u16" => s"$name.toInt" - case "u64" => s"$name.toLong" - case "s64" => s"$name.toLong" - case "string" => s"getString(mem, $name, ${name}Len)" - case "ptr" => s"$name.toInt" + t match { + case BasicType.u8 => s"$name.toByte" + case BasicType.u16 => s"$name.toShort" + case BasicType.u32 => s"$name.toInt" + case BasicType.u64 => s"$name.toLong" + case BasicType.s64 => s"$name.toLong" + case BasicType.string => s"getString(mem, $name, ${name}Len)" + case BasicType.ptr => s"$name.toInt" } } diff --git a/generator/src/swam/generator/witx/TypesEmitTraverser.scala b/generator/src/swam/generator/witx/TypesEmitTraverser.scala index 696f6b70..4def989b 100644 --- a/generator/src/swam/generator/witx/TypesEmitTraverser.scala +++ b/generator/src/swam/generator/witx/TypesEmitTraverser.scala @@ -25,14 +25,14 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse override val basicTypeTraverser = { case (_, t: BasicType) => - t.name match { - case "u8" => s"type ${t.tpeName} = Byte\n" - case "u16" => s"type 
${t.tpeName} = Short\n" - case "u32" => s"type ${t.tpeName} = Int\n" - case "u64" => s"type ${t.tpeName} = Long\n" - case "s64" => s"type ${t.tpeName} = Long\n" - case "string" => s"type ${t.tpeName} = String\n" - case "ptr" => s"type ${t.tpeName} = Int\n" + t match { + case BasicType.u8 => s"type ${t.tpeName} = Byte\n" + case BasicType.u16 => s"type ${t.tpeName} = Short\n" + case BasicType.u32 => s"type ${t.tpeName} = Int\n" + case BasicType.u64 => s"type ${t.tpeName} = Long\n" + case BasicType.s64 => s"type ${t.tpeName} = Long\n" + case BasicType.string => s"type ${t.tpeName} = String\n" + case BasicType.ptr => s"type ${t.tpeName} = Int\n" } } diff --git a/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala b/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala index 38e2b328..4235a4ac 100644 --- a/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala +++ b/generator/src/swam/generator/witx/WriteTypeEmitTraverser.scala @@ -32,13 +32,13 @@ class WriteTypeEmitTraverser(f: String, override val basicTypeTraverser = { case (_, t: BasicType) => - t.name match { - case "u8" => s"$mem.writeByte(${concatOffsets(offset, prev)}, `${f}` ).unsafeRunSync\n" - case "u16" => s"$mem.writeShort(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" - case "u32" => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" - case "u64" => s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" - case "s64" => s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" - case "string" => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + t match { + case BasicType.u8 => s"$mem.writeByte(${concatOffsets(offset, prev)}, `${f}` ).unsafeRunSync\n" + case BasicType.u16 => s"$mem.writeShort(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case BasicType.u32 => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case BasicType.u64 => 
s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case BasicType.s64 => s"$mem.writeLong(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" + case BasicType.string => s"$mem.writeInt(${concatOffsets(offset, prev)},`${f}`).unsafeRunSync\n" } } diff --git a/generator/test/src/generator/WitxTest.scala b/generator/test/src/generator/WitxTest.scala index dbb220b6..843711aa 100644 --- a/generator/test/src/generator/WitxTest.scala +++ b/generator/test/src/generator/WitxTest.scala @@ -3,6 +3,7 @@ package generator import java.io import java.nio.file.Paths +import org.fusesource.scalate.TemplateEngine import utest.{TestSuite, Tests, test} @@ -42,8 +43,28 @@ object WitParser extends TestSuite { println(types) } + def runGenerator() = { + val wasi_snaphot = Paths.get("generator/resources/wasi_witx/wasi_snapshot_preview1.witx") + + val parser = WitxParser[IO] + val ctx = ImportContext[IO]() + + val (types, interface) = Blocker[IO] + .use(blocker => { + for { + (types, instruction) <- parser.parseModuleInterface(wasi_snaphot, blocker, ctx) + } yield (types, instruction) + }) + .unsafeRunSync() + + val te = new TemplateEngine() + te.boot() + + } + val tests = Tests { "parsing_witx" - runParse() + "generating_boilerplate_mustache" - runGenerator() } } From cca1c15d224db0e5d7e7c47bb56822cd24846b0c Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 11:49:11 +0200 Subject: [PATCH 06/14] Formatting scala text before dumping --- generator/resources/witx_module.mustache | 32 +++++++++++++++++++ generator/resources/witx_types.mustache | 8 +++++ generator/src/swam/generator/Generator.scala | 3 +- .../src/swam/generator/ImportGenerator.scala | 21 +++++++++--- .../swam/generator/witx/ModuleTraverse.scala | 27 ---------------- .../generator/witx/TypesEmitTraverser.scala | 9 ++---- generator/test/src/generator/WitxTest.scala | 25 ++++----------- 7 files changed, 67 insertions(+), 58 deletions(-) diff --git 
a/generator/resources/witx_module.mustache b/generator/resources/witx_module.mustache index e69de29b..2ce249e3 100644 --- a/generator/resources/witx_module.mustache +++ b/generator/resources/witx_module.mustache @@ -0,0 +1,32 @@ +package swam +package wasi + +import Types._ +import Header._ +import cats.Applicative +import cats.effect._ +import swam.runtime.formats._ +import swam.runtime.formats.DefaultFormatters._ +import cats.effect.IO +import swam.runtime.Memory +import swam.runtime.imports.annotations.{effect, effectful, module, pure} + + + +@module +abstract class Module[@effect F[_]](implicit F: Applicative[F]) +{ + val name = "{{{moduleName}}}" + + def tryToExecute(a: => errnoEnum.Value) = { + try a.id + catch { + case x: WASIException => x.errno.id + } + } + + + var mem: Memory[IO] = null + + {{{content}}} +} diff --git a/generator/resources/witx_types.mustache b/generator/resources/witx_types.mustache index e69de29b..c0a5664b 100644 --- a/generator/resources/witx_types.mustache +++ b/generator/resources/witx_types.mustache @@ -0,0 +1,8 @@ +package swam + +import cats.effect.IO +import swam.runtime.Memory +object Types { + // This is an autogenerated file, do not change it + {{{content}}} +} \ No newline at end of file diff --git a/generator/src/swam/generator/Generator.scala b/generator/src/swam/generator/Generator.scala index 5378f356..0b193799 100644 --- a/generator/src/swam/generator/Generator.scala +++ b/generator/src/swam/generator/Generator.scala @@ -96,7 +96,8 @@ object Generator extends IOApp { (types, interface) <- parser.parseModuleInterface(witxFile.toPath, blocker, ctx) scalaTypesTemplate <- IO(new TypesEmitTraverser(types).traverseAll("", (s1, s2) => s1 + s2)) scalaTraitTemplate <- IO(new ModuleTraverse(interface, types).traverseAll("", (s1, s2) => s1 + s2)) - _ <- generator.createScalaProjectForImports(scalaTypesTemplate, scalaTraitTemplate, newPackagePath) + _ <- generator + .createScalaProjectForImports(interface.id, scalaTypesTemplate, 
scalaTraitTemplate, newPackagePath) } yield () }) } diff --git a/generator/src/swam/generator/ImportGenerator.scala b/generator/src/swam/generator/ImportGenerator.scala index 4d42bed4..07fc8ffe 100644 --- a/generator/src/swam/generator/ImportGenerator.scala +++ b/generator/src/swam/generator/ImportGenerator.scala @@ -25,6 +25,9 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { val config = Paths.get(".scalafmt.conf") val defaultTemplate = getClass.getClassLoader.getResource("import_template.mustache").getFile + val defaultWitxModuleTemplate = getClass.getClassLoader.getResource("witx_module.mustache").getFile + val defaultWitxTypesTemplate = getClass.getClassLoader.getResource("witx_types.mustache").getFile + /** * Map WASM to scala primitive types * @param v @@ -120,8 +123,6 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { Blocker[F] .use { blocker => for { - // replace - _ <- io.file.delete(blocker, Paths.get(s"$projectName/src/$className.scala")) _ <- io.file.createDirectories[F](blocker, Paths.get(s"$projectName/src")) // Creates the module structure _ <- fs2 .Stream(t) @@ -157,11 +158,21 @@ class ImportGenerator[F[_]: Effect](implicit cs: ContextShift[F]) { * @param traitTemplate * @return */ - def createScalaProjectForImports(typesTemplate: String, traitTemplate: String, projectName: String) = { + def createScalaProjectForImports(moduleName: String, + typesTemplate: String, + traitTemplate: String, + projectName: String) = { + + val te = new TemplateEngine() + te.boot() + + val traitText = formatText( + te.layout(defaultWitxModuleTemplate, Map("content" -> traitTemplate, "moduleName" -> moduleName))) + val typesText = formatText(te.layout(defaultWitxTypesTemplate, Map("content" -> typesTemplate))) for { - _ <- writeToFile(typesTemplate, projectName, "Types") - _ <- writeToFile(traitTemplate, projectName, "Module") + _ <- writeToFile(typesText, projectName, "Types") + _ <- writeToFile(traitText, projectName, "Module") 
} yield () } diff --git a/generator/src/swam/generator/witx/ModuleTraverse.scala b/generator/src/swam/generator/witx/ModuleTraverse.scala index e24dcc74..fe602a63 100644 --- a/generator/src/swam/generator/witx/ModuleTraverse.scala +++ b/generator/src/swam/generator/witx/ModuleTraverse.scala @@ -127,33 +127,6 @@ class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) def mapAliasType(t: AliasType): Adapt = mapTypeToWasm(types(t.tpe.tpeName)) - val header = s"""val name = "${module.id}" - | - | def tryToExecute(a: => errnoEnum.Value) = { - | try a.id - | catch { - | case x: WASIException => x.errno.id - | } - | }""".stripMargin - - val imports = s"""package swam - |package wasi - | - |import Types._ - |import Header._ - |import cats.Applicative - |import cats.effect._ - |import swam.runtime.formats._ - |import swam.runtime.formats.DefaultFormatters._ - |import cats.effect.IO - |import swam.runtime.Memory - |import swam.runtime.imports.annotations.{effect, effectful, module, pure} - |""".stripMargin - - override def traverseAll(zero: String, compose: (String, String) => String) = - s"$imports\n @module\n abstract class Module[@effect F[_]](implicit F: Applicative[F]){\n var mem: Memory[IO] = null \n\n $header \n\n ${super - .traverseAll(zero, compose)}\n }" - class InitTypeEmitTraverser(name: String) extends TypesTraverser[String](types) { override val basicTypeTraverser = { diff --git a/generator/src/swam/generator/witx/TypesEmitTraverser.scala b/generator/src/swam/generator/witx/TypesEmitTraverser.scala index 4def989b..b01bc491 100644 --- a/generator/src/swam/generator/witx/TypesEmitTraverser.scala +++ b/generator/src/swam/generator/witx/TypesEmitTraverser.scala @@ -105,7 +105,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse case (_, t: StructType) => { - s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) extends WASI_STRUCT { + s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) { | 
${t.fields.zipWithIndex .map { case (f, idx) => @@ -132,7 +132,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse override val unionTypeTraverser = { case (_, t: UnionType) => { - s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) extends WASI_STRUCT { // UNION + s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) { // UNION | ${t.fields.zipWithIndex .map { case (f, idx) => @@ -163,9 +163,4 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse override val arrayTypeTraverser = { case (_, t: ArrayType) => s"type ${t.tpeName} = List[${getVal(t.tpe)}]\n" } - - override def traverseAll(zero: String, compose: (String, String) => String) = - s"package swam\npackage wasi\n import cats.effect.IO \nimport swam.runtime.Memory \n\nobject Types { \n // This is an autogenerated file, do not change it \n\n ${super - .traverseAll(zero, compose)} }" - } diff --git a/generator/test/src/generator/WitxTest.scala b/generator/test/src/generator/WitxTest.scala index 843711aa..c661c41e 100644 --- a/generator/test/src/generator/WitxTest.scala +++ b/generator/test/src/generator/WitxTest.scala @@ -22,6 +22,12 @@ import swam.witx.traverser.{ModuleInterfaceTraverser, TypesTraverser} import scala.concurrent.ExecutionContext +class TypeMustacheWrapper(val inner: BaseWitxType) { + val isEnum = inner.getClass.equals(classOf[EnumType]) + val isFlag = inner.getClass.equals(classOf[FlagsType]) + val isStruct = inner.getClass.equals(classOf[StructType]) +} + object WitParser extends TestSuite { implicit val cs = IO.contextShift(ExecutionContext.Implicits.global) @@ -43,24 +49,7 @@ object WitParser extends TestSuite { println(types) } - def runGenerator() = { - val wasi_snaphot = Paths.get("generator/resources/wasi_witx/wasi_snapshot_preview1.witx") - - val parser = WitxParser[IO] - val ctx = ImportContext[IO]() - - val (types, interface) = Blocker[IO] - .use(blocker => { - for { - (types, instruction) <- 
parser.parseModuleInterface(wasi_snaphot, blocker, ctx) - } yield (types, instruction) - }) - .unsafeRunSync() - - val te = new TemplateEngine() - te.boot() - - } + def runGenerator() = {} val tests = Tests { "parsing_witx" - runParse() From c55c2633cf9ec42bf27b63a023fa831e971c26fa Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 12:03:25 +0200 Subject: [PATCH 07/14] Fixing ImportContext --- generator/resources/witx_module.mustache | 5 +--- generator/src/swam/generator/Generator.scala | 8 +++--- text/src/swam/witx/parser/ImportContext.scala | 27 +++++++++++-------- 3 files changed, 21 insertions(+), 19 deletions(-) diff --git a/generator/resources/witx_module.mustache b/generator/resources/witx_module.mustache index 2ce249e3..c848cc92 100644 --- a/generator/resources/witx_module.mustache +++ b/generator/resources/witx_module.mustache @@ -14,7 +14,7 @@ import swam.runtime.imports.annotations.{effect, effectful, module, pure} @module -abstract class Module[@effect F[_]](implicit F: Applicative[F]) +abstract class Module[@effect F[_]](val mem: Memory[IO])(implicit F: Applicative[F]) { val name = "{{{moduleName}}}" @@ -25,8 +25,5 @@ abstract class Module[@effect F[_]](implicit F: Applicative[F]) } } - - var mem: Memory[IO] = null - {{{content}}} } diff --git a/generator/src/swam/generator/Generator.scala b/generator/src/swam/generator/Generator.scala index 0b193799..14c2f957 100644 --- a/generator/src/swam/generator/Generator.scala +++ b/generator/src/swam/generator/Generator.scala @@ -14,13 +14,13 @@ import swam.witx.parser.ImportContext import scala.concurrent.ExecutionContext -case class Config(wasms: Seq[File] = Seq(), +case class Config(wasms: List[File] = List(), printTemplateContext: Boolean = false, createBoilerplate: String = "", className: String = "GeneratedImports", renderTemplate: File = null, parseAsWitx: Boolean = false, - includeWitxTypesPath: Seq[String] = Seq()) + includeWitxTypesPath: List[String] = List()) /** @author Javier 
Cabrera-Arteaga on 2020-03-07 @@ -85,14 +85,14 @@ object Generator extends IOApp { wasms.map(w => getImports(w, blocker)).reduce((r, l) => r.combine(l)) } - def parseWitx(witxFile: File, includes: Seq[String], newPackagePath: String) = { + def parseWitx(witxFile: File, includes: List[String], newPackagePath: String) = { if (newPackagePath.isEmpty) throw new Exception("You must provide the path to create the boilerplate (--create-boilerplate)") Blocker[IO] .use(blocker => { for { parser <- IO(WitxParser[IO]) - ctx <- IO(ImportContext[IO]()) + ctx <- IO(ImportContext[IO](includes)) (types, interface) <- parser.parseModuleInterface(witxFile.toPath, blocker, ctx) scalaTypesTemplate <- IO(new TypesEmitTraverser(types).traverseAll("", (s1, s2) => s1 + s2)) scalaTraitTemplate <- IO(new ModuleTraverse(interface, types).traverseAll("", (s1, s2) => s1 + s2)) diff --git a/text/src/swam/witx/parser/ImportContext.scala b/text/src/swam/witx/parser/ImportContext.scala index d7366b33..8be23fd5 100644 --- a/text/src/swam/witx/parser/ImportContext.scala +++ b/text/src/swam/witx/parser/ImportContext.scala @@ -1,7 +1,7 @@ package swam.witx.parser import java.io.File -import java.nio.file.{Files, Path, Paths} +import java.nio.file.{Files, NoSuchFileException, Path, Paths} import cats.effect._ import fastparse.Parsed @@ -14,36 +14,41 @@ import scala.concurrent.ExecutionContext /** *@author Javier Cabrera-Arteaga on 2020-03-18 */ -class ImportContext[F[_]](implicit val F: Effect[F]) { +class ImportContext[F[_]](val includes: List[String])(implicit val F: Effect[F]) { // TODO FIX ! 
implicit val cs = IO.contextShift(ExecutionContext.Implicits.global) def load(path: String) = { + + val paths = getInPath(path) + + if (paths.isEmpty) + throw new NoSuchFileException(path) + Blocker[IO] .use { blocker => - WitxParser[IO].parseTypes(Paths.get( - s"generator/resources/wasi_witx/$path" - ), - blocker) + WitxParser[IO].parseTypes(paths.head, blocker) } .unsafeRunSync() } - def getInPath(path: String, includes: String*): String = { + def getInPath(path: String): List[Path] = { if (Files.exists(Paths.get(path))) - path - else + List(Paths.get(path)) // Current working directory + else // else search in path System .getenv("PATH") .split(":") .concat(includes) - .filter(t => Files.exists(Paths.get(s"$t/$path")))(0) + .map(t => Paths.get(s"$t/$path")) + .filter(t => Files.exists(t)) + .toList } } object ImportContext { - def apply[F[_]: Effect](): ImportContext[F] = new ImportContext[F]() + def apply[F[_]: Effect](includes: List[String]): ImportContext[F] = new ImportContext[F](includes) } From 17aa628a3782f95dd0ad4433964923a4ae3f9aee Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 12:36:58 +0200 Subject: [PATCH 08/14] Adding bytes padding --- .../generator/witx/TypesEmitTraverser.scala | 41 ++----------------- text/src/swam/witx/unresolved/Types.scala | 7 ++-- 2 files changed, 8 insertions(+), 40 deletions(-) diff --git a/generator/src/swam/generator/witx/TypesEmitTraverser.scala b/generator/src/swam/generator/witx/TypesEmitTraverser.scala index b01bc491..3a923bf8 100644 --- a/generator/src/swam/generator/witx/TypesEmitTraverser.scala +++ b/generator/src/swam/generator/witx/TypesEmitTraverser.scala @@ -36,38 +36,6 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse } } - val predefinedEnumVal: Map[String, String] = HashMap[String, String]( - "fd_datasync" -> "0x0000000000000001", - "fd_read" -> "0x0000000000000002", - "fd_seek" -> "0x0000000000000004", - "fd_fdstat_set_flags" -> "0x0000000000000008", - 
"fd_sync" -> "0x0000000000000010", - "fd_tell" -> "0x0000000000000020", - "fd_write" -> "0x0000000000000040", - "fd_advise" -> "0x0000000000000080", - "fd_allocate" -> "0x0000000000000100", - "path_create_directory" -> "0x0000000000000200", - "path_create_file" -> "0x0000000000000400", - "path_link_source" -> "0x0000000000000800", - "path_link_target" -> "0x0000000000001000", - "path_open" -> "0x0000000000002000", - "fd_readdir" -> "0x0000000000004000", - "path_readlink" -> "0x0000000000008000", - "path_rename_source" -> "0x0000000000010000", - "path_rename_target" -> "0x0000000000020000", - "path_filestat_get" -> "0x0000000000040000", - "path_filestat_set_size" -> "0x0000000000080000", - "path_filestat_set_times" -> "0x0000000000100000", - "fd_filestat_get" -> "0x0000000000200000", - "fd_filestat_set_size" -> "0x0000000000400000", - "fd_filestat_set_times" -> "0x0000000000800000", - "path_symlink" -> "0x0000000001000000", - "path_remove_directory" -> "0x0000000002000000", - "path_unlink_file" -> "0x0000000004000000", - "poll_fd_readwrite" -> "0x0000000008000000", - "sock_shutdown" -> "0x0000000010000000" - ) - override val aliasTypeTraverser = { case (_, t: AliasType) => s"type ${t.tpeName}= ${t.tpe.tpeName}\n" } @@ -75,7 +43,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse override val enumTypeTraverser = { case (_, t: EnumType) => s"object ${t.tpeName}Enum extends Enumeration { \n\t ${t.names - .map(t => s"\nval `$t` = Value${if (predefinedEnumVal.contains(t)) s"(${predefinedEnumVal(t)})" else " "}\n") + .map(t => s"\nval `$t` = Value\n") .mkString("\n")}}\n" } @@ -84,8 +52,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse s"object ${t.tpeName}Flags extends Enumeration { ${t.names.zipWithIndex .map { case (name, i) => - s"\t\nval ${name} = Value${if (predefinedEnumVal.contains(name)) s"(${predefinedEnumVal(name)})" - else s"($i)"}\n" + s"\t\nval ${name} = Value($i)\n" } 
.mkString("\n")}}\n\n" } @@ -109,7 +76,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse | ${t.fields.zipWithIndex .map { case (f, idx) => - s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, offset = "offset", mem = "mem") + s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).sum.toString, types, offset = "offset", mem = "mem") .traverse("", f.tpe)}" } .mkString("\n")} @@ -118,7 +85,7 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse | ${t.fields.zipWithIndex .map { case (f, idx) => - s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, "offset", "mem") + s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).sum.toString, types, "offset", "mem") .traverse("", f.tpe)}" } diff --git a/text/src/swam/witx/unresolved/Types.scala b/text/src/swam/witx/unresolved/Types.scala index 123bc4a6..8d7c5900 100644 --- a/text/src/swam/witx/unresolved/Types.scala +++ b/text/src/swam/witx/unresolved/Types.scala @@ -7,12 +7,13 @@ import swam.witx.traverser.TypesTraverser /** *@author Javier Cabrera-Arteaga on 2020-03-18 */ -abstract class BaseWitxType(val tpeName: String, val size: Int = 4) +abstract class BaseWitxType(val tpeName: String, val size: Int = 4, val pad: Int = 0) -sealed abstract class BasicType(name: String, override val size: Int) extends BaseWitxType(name, size) +sealed abstract class BasicType(name: String, override val size: Int, override val pad: Int = 0) + extends BaseWitxType(name, size, pad) object BasicType { - case object u8 extends BasicType("u8", 1) + case object u8 extends BasicType("u8", 1, 1) case object u32 extends BasicType("u32", 4) case object u64 extends BasicType("u64", 8) case object u16 extends BasicType("u16", 2) From d7dbd0ae5fc13e28eea7b9e074449eee82bc1208 Mon Sep 17 00:00:00 2001 
From: Javier Cabrera Date: Tue, 21 Apr 2020 13:21:10 +0200 Subject: [PATCH 09/14] Refactoring and cleaning --- .../swam/generator/witx/ModuleTraverse.scala | 6 -- .../generator/witx/TypesEmitTraverser.scala | 70 ++++++------------- text/src/swam/witx/parser/TypesParser.scala | 2 +- text/src/swam/witx/unresolved/Types.scala | 10 +-- 4 files changed, 29 insertions(+), 59 deletions(-) diff --git a/generator/src/swam/generator/witx/ModuleTraverse.scala b/generator/src/swam/generator/witx/ModuleTraverse.scala index fe602a63..47576eaa 100644 --- a/generator/src/swam/generator/witx/ModuleTraverse.scala +++ b/generator/src/swam/generator/witx/ModuleTraverse.scala @@ -46,8 +46,6 @@ class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) case _ => Seq(s"${head.id}:${adaptor.from}") ++ processParameters(fields.tail) } } - - // ${f.params.map(m => s"${m.id}:${mapTypeToWasm(m.tpe).from}").mkString(",")} } def mapFieldsToTuple(fields: Seq[Field]) = @@ -72,10 +70,6 @@ class ModuleTraverse(module: ModuleInterface, types: Map[String, BaseWitxType]) } def processResults(f: FunctionExport) = { - - val adaptors = f.results - .map(p => mapTypeToWasm(p.tpe)) - val args = f.params .map(t => (t.id, mapTypeToWasm(t.tpe))) .map { diff --git a/generator/src/swam/generator/witx/TypesEmitTraverser.scala b/generator/src/swam/generator/witx/TypesEmitTraverser.scala index 3a923bf8..bc34c4aa 100644 --- a/generator/src/swam/generator/witx/TypesEmitTraverser.scala +++ b/generator/src/swam/generator/witx/TypesEmitTraverser.scala @@ -68,59 +68,35 @@ class TypesEmitTraverser(types: Map[String, BaseWitxType]) extends TypesTraverse case x: UnionType => x.tpeName } - override val structTypeTraverser = { - - case (_, t: StructType) => { - - s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) { - | ${t.fields.zipWithIndex - .map { - case (f, idx) => - s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).sum.toString, types, 
offset = "offset", mem = "mem") - .traverse("", f.tpe)}" - } - .mkString("\n")} + def traverseUnionOrStruct(t: StructType) = { + s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) { + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).iterator.sum.toString, types, offset = "offset", mem = "mem") + .traverse("", f.tpe)}" + } + .mkString("\n")} def write(offset: Int, mem: Memory[IO]) = { - | ${t.fields.zipWithIndex - .map { - case (f, idx) => - s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).sum.toString, types, "offset", "mem") - .traverse("", f.tpe)}" - - } - .mkString("\n")} - |} - |}\n\n""".stripMargin - } + | ${t.fields.zipWithIndex + .map { + case (f, idx) => + s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size + t.tpe.pad).slice(0, idx).iterator.sum.toString, types, "offset", "mem") + .traverse("", f.tpe)}" + + } + .mkString("\n")} + |} + |}\n\n""".stripMargin + } + override val structTypeTraverser = { + case (_, t: StructType) => traverseUnionOrStruct(t) } override val unionTypeTraverser = { - case (_, t: UnionType) => { - - s"""case class `${t.tpeName}`(mem: Memory[IO], offset: Int) { // UNION - | ${t.fields.zipWithIndex - .map { - case (f, idx) => - s"val `${f.id}` = ${new LoadTypeEmitTraverser(t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, offset = "offset", mem = "mem") - .traverse("", f.tpe)}" - } - .mkString("\n")} - - def write(offset: Int, mem: Memory[IO]) = { - | ${t.fields.zipWithIndex - .map { - case (f, idx) => - s"${new WriteTypeEmitTraverser(f.id, t.fields.map(t => t.tpe.size).slice(0, idx).sum.toString, types, "offset", "mem") - .traverse("", f.tpe)}" - - } - .mkString("\n")} - |} - |}\n\n""".stripMargin - } - + case (_, t: UnionType) => traverseUnionOrStruct(t) } override val handleTypeTraverser = { diff --git 
a/text/src/swam/witx/parser/TypesParser.scala b/text/src/swam/witx/parser/TypesParser.scala index 0c74f7d3..14b7f4bc 100644 --- a/text/src/swam/witx/parser/TypesParser.scala +++ b/text/src/swam/witx/parser/TypesParser.scala @@ -60,7 +60,7 @@ object TypesParser { def struct[_: P](typeId: String, types: Map[String, BaseWitxType]): P[StructType] = { P("(" ~ word("struct") ~ field(types).rep(1) ~ ")").map { fields => - StructType(typeId, fields) + new StructType(typeId, fields) } } diff --git a/text/src/swam/witx/unresolved/Types.scala b/text/src/swam/witx/unresolved/Types.scala index 8d7c5900..80ffe7bc 100644 --- a/text/src/swam/witx/unresolved/Types.scala +++ b/text/src/swam/witx/unresolved/Types.scala @@ -9,11 +9,11 @@ import swam.witx.traverser.TypesTraverser */ abstract class BaseWitxType(val tpeName: String, val size: Int = 4, val pad: Int = 0) -sealed abstract class BasicType(name: String, override val size: Int, override val pad: Int = 0) +abstract class BasicType(name: String, override val size: Int, override val pad: Int = 0) extends BaseWitxType(name, size, pad) object BasicType { - case object u8 extends BasicType("u8", 1, 1) + case object u8 extends BasicType("u8", 2) case object u32 extends BasicType("u32", 4) case object u64 extends BasicType("u64", 8) case object u16 extends BasicType("u16", 2) @@ -34,10 +34,10 @@ case class ArrayType(override val tpeName: String, tpe: BaseWitxType) extends Ba case class Pointer(tpe: BaseWitxType) extends BaseWitxType("Pointer", 4 * tpe.size) // 8 bytes -case class StructType(override val tpeName: String, fields: Seq[Field]) +class StructType(override val tpeName: String, val fields: Seq[Field]) extends BaseWitxType(tpeName, size = fields.map(t => t.tpe.size).sum) -case class UnionType(override val tpeName: String, name: String, fields: Seq[Field]) - extends BaseWitxType(tpeName, size = fields.map(t => t.tpe.size).sum) +case class UnionType(override val tpeName: String, name: String, override val fields: Seq[Field]) + 
extends StructType(tpeName, fields) case class Handle(override val tpeName: String) extends BaseWitxType(tpeName) From 7dad963ff54c7b9a47a0255009c1a1b168015961 Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 13:56:58 +0200 Subject: [PATCH 10/14] Adding document for generator cli --- build.sc | 3 +- examples/docs/generator.md | 75 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 examples/docs/generator.md diff --git a/build.sc b/build.sc index 20aa5c0d..e393d3f6 100644 --- a/build.sc +++ b/build.sc @@ -24,6 +24,7 @@ val swamLicense = License.`Apache-2.0` val swamUrl = "https://github.com/satabin/swam" val swamDeveloper = Developer("satabin", "Lucas Satabin", "https://github.com/satabin") +val swamContributor = Developer("Jacarte", "Javier Cabrera-Arteaga", "https://github.com/Jacarte") val fs2Version = "2.0.1" @@ -127,7 +128,7 @@ object generator extends SwamModule with PublishModule { url = swamUrl, licenses = Seq(swamLicense), versionControl = VersionControl.github("satabin", "swam"), - developers = Seq(swamDeveloper) + developers = Seq(swamDeveloper, swamContributor) ) diff --git a/examples/docs/generator.md b/examples/docs/generator.md new file mode 100644 index 00000000..ae765985 --- /dev/null +++ b/examples/docs/generator.md @@ -0,0 +1,75 @@ +--- +title: Boilerplates Generator +--- + +It is possible in swam to generate code boilerplates to implement imports for WebAssembly binaries execution. The module `generator` provides a cli tool to do so. To check available options in the `generator` cli, run `mill generator.run --help`. + + +## Generating scala code boilerplate for WASM binaries + +For instance, let’s have a WASM binary called `posix.wasm`. Running `run generator.run posix.wasm` will generate the following output. 
+ +``` +import cats.effect.IO +import swam.runtime.imports.{AsInstance, AsInterface, Imports, TCMap} +import swam.runtime.formats._ +import swam.runtime.formats.DefaultFormatters._ + +trait GeneratedImports { + type AsIIO[T] = AsInterface[T, IO] + type AsIsIO[T] = AsInstance[T, IO] + + def wasi_unstableFd_write(p0: Int, p1: Int, p2: Int, p3: Int): IO[Int] + def wasi_unstableFd_close(p0: Int): IO[Int] + def wasi_unstableFd_fdstat_get(p0: Int, p1: Int): IO[Int] + def wasi_unstableFd_seek(p0: Int, p1: Long, p2: Int, p3: Int): IO[Int] + def imports() = { + Imports[IO]( + TCMap[String, AsIsIO]( + "wasi_unstable" -> TCMap[String, AsIIO]("fd_write" -> wasi_unstableFd_write _, + "fd_close" -> wasi_unstableFd_close _, + "fd_fdstat_get" -> wasi_unstableFd_fdstat_get _, + "fd_seek" -> wasi_unstableFd_seek _)) + ) + } +} + +``` + +By default, the generator cli will find imported functions, then it generates a Scala language boilerplate. The generated template represents a Scala trait by default as you can see in the snippet. + +The cli supports several WASM binaries as arguments, generating, then, the composition of all function imports. + + +## Replacing the trait template + +We generate the function imports based trait using the [mustache template engine](https://mustache.github.io/). Mustache context is usually a dictionary. We provide the context in the following format. + +```ts +{ + "module" : str, + "comma" : boolean, + "fields" : + { + "name": str, + "return": str, + "params": str, + "nameCapital": str, + "comma": boolean + }[] +}[] + +``` + +To print the context provided in json format, run the command `mill generator.run -c true `. The used mustache template can be replaced, using the `--template ` option.
+ +## Parsing WITX + + + +> The [WITX](https://github.com/WebAssembly/WASI/blob/master/docs/witx.md) + file format is an experimental format which is based on the module types text format (wit), (which is in turn based on the wat format, which is based on S-expressions). It adds some features using the same syntax as interface types, some features with syntax similar to gc types, as well as a few special features of its own. witx is actively evolving. Expect backwards-incompatible changes, particularly in the areas where witx differs from wit. The initial goal for witx is just to have a language suitable for expressing WASI APIs in, to serve as the vocabulary for proposing changes to existing APIs and proposing new APIs. Initially, while it uses some of the syntax and concepts from interface types, it doesn't currently imply the full interface types specification, or the use of the interface types custom sections. We expect that eventually we will transition to using the full interface types specification. Until then, the goals here are to remain aligned with interface types and other relevant WebAssembly standards and proposals wherever practical, and to be an input into the design process of interface types. + +The generator cli also provides the way to parse witx files and generate the boilerplate project. To generate the boilerplate, run `mill generator.run -x true -p `. This command will generate two scala files, `Types.cala` and `Module.scala`, containing th types definitions and abstract function declarations, respectively. + +The `text` module provides the core implementation to parse this kind of files, specifically, the `swam.witx.WitxParser` class. This core component provides a small AST structure from the parsed witx file. The components of this AST can be seen in `swam.witx.unresolved.Declarations` class. 
Therefore, this AST can be translated to any target ,for instance, the generator cli implements the AST traversers to generate the scala files for the boilerplate. From 5502ab96971e41dac9f69a783fc1b0a2472d782a Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 14:04:13 +0200 Subject: [PATCH 11/14] Typos --- examples/docs/generator.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/docs/generator.md b/examples/docs/generator.md index ae765985..d4cc3464 100644 --- a/examples/docs/generator.md +++ b/examples/docs/generator.md @@ -72,4 +72,4 @@ To print the context provided in json format, run the command `mill generator.ru The generator cli also provides the way to parse witx files and generate the boilerplate project. To generate the boilerplate, run `mill generator.run -x true -p `. This command will generate two scala files, `Types.cala` and `Module.scala`, containing th types definitions and abstract function declarations, respectively. -The `text` module provides the core implementation to parse this kind of files, specifically, the `swam.witx.WitxParser` class. This core component provides a small AST structure from the parsed witx file. The components of this AST can be seen in `swam.witx.unresolved.Declarations` class. Therefore, this AST can be translated to any target ,for instance, the generator cli implements the AST traversers to generate the scala files for the boilerplate. +The `text` module provides the core implementation to parse this kind of files, specifically, the `swam.witx.WitxParser` class. This core component provides a small AST structure from the witx file. The components of this AST can be seen in `swam.witx.unresolved.Declarations` class. Therefore, this AST can be translated to any target, for instance, the generator cli implements the AST traversers to generate the scala files for the boilerplate given a witx file. 
From dc7c53e5e03cb2c63b0a9f1c189124641f2cadff Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 14:06:01 +0200 Subject: [PATCH 12/14] Removing unused test module in generator --- build.sc | 6 --- generator/test/src/generator/WitxTest.scala | 59 --------------------- 2 files changed, 65 deletions(-) delete mode 100644 generator/test/src/generator/WitxTest.scala diff --git a/build.sc b/build.sc index e393d3f6..039e8997 100644 --- a/build.sc +++ b/build.sc @@ -131,12 +131,6 @@ object generator extends SwamModule with PublishModule { developers = Seq(swamDeveloper, swamContributor) ) - - object test extends Tests with ScalafmtModule { - def ivyDeps = Agg(ivy"com.lihaoyi::utest:0.7.1") - def testFrameworks = Seq("swam.util.Framework") - def moduleDeps = Seq(core,text, generator, util.test ) - } } diff --git a/generator/test/src/generator/WitxTest.scala b/generator/test/src/generator/WitxTest.scala deleted file mode 100644 index c661c41e..00000000 --- a/generator/test/src/generator/WitxTest.scala +++ /dev/null @@ -1,59 +0,0 @@ -package swam -package generator - -import java.io -import java.nio.file.Paths -import org.fusesource.scalate.TemplateEngine - -import utest.{TestSuite, Tests, test} - -import runtime._ -import swam.test.util._ -import utest._ -import better.files._ -import cats.effect._ -import fs2.io.file -import swam.text.parser -import swam.witx.WitxParser -import swam.witx.parser.{ImportContext, TypesParser} -import swam.witx.unresolved._ -import swam.witx -import swam.witx.traverser.{ModuleInterfaceTraverser, TypesTraverser} - -import scala.concurrent.ExecutionContext - -class TypeMustacheWrapper(val inner: BaseWitxType) { - val isEnum = inner.getClass.equals(classOf[EnumType]) - val isFlag = inner.getClass.equals(classOf[FlagsType]) - val isStruct = inner.getClass.equals(classOf[StructType]) -} - -object WitParser extends TestSuite { - - implicit val cs = IO.contextShift(ExecutionContext.Implicits.global) - - def runParse() = { - val 
wasi_snaphot = Paths.get("generator/resources/wasi_witx/wasi_snapshot_preview1.witx") - - val parser = WitxParser[IO] - val ctx = ImportContext[IO]() - - val (types, interface) = Blocker[IO] - .use(blocker => { - for { - (types, instruction) <- parser.parseModuleInterface(wasi_snaphot, blocker, ctx) - } yield (types, instruction) - }) - .unsafeRunSync() - - println(types) - } - - def runGenerator() = {} - - val tests = Tests { - "parsing_witx" - runParse() - "generating_boilerplate_mustache" - runGenerator() - } - -} From f2e702957efe53f57a839df1d66c3f1e108ffcfb Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 14:15:12 +0200 Subject: [PATCH 13/14] Removing unused test module in generator --- examples/docs/string.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/examples/docs/string.md b/examples/docs/string.md index b314e8db..00378bd1 100644 --- a/examples/docs/string.md +++ b/examples/docs/string.md @@ -17,6 +17,8 @@ val engine = Engine[IO]() implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + val strings = Blocker[IO].use { blocker => for { From 7cc89f8ef842376acc1c29ac9b3f31ec35a971fa Mon Sep 17 00:00:00 2001 From: Javier Cabrera Date: Tue, 21 Apr 2020 14:15:25 +0200 Subject: [PATCH 14/14] Removing unused test module in generator --- examples/docs/annotations.md | 2 +- examples/docs/fibo.md | 2 ++ examples/docs/logged.md | 2 ++ text/src/swam/text/package.scala | 2 +- 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/examples/docs/annotations.md b/examples/docs/annotations.md index 86b8ff0e..a4e51694 100644 --- a/examples/docs/annotations.md +++ b/examples/docs/annotations.md @@ -48,7 +48,7 @@ import java.nio.file.Paths val tcompiler = Compiler[IO] -val engine = Engine[IO]() +val engine = Engine[IO] implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) diff --git a/examples/docs/fibo.md 
b/examples/docs/fibo.md index 24d674e2..5c6c6fe4 100644 --- a/examples/docs/fibo.md +++ b/examples/docs/fibo.md @@ -18,6 +18,8 @@ val engine = Engine[IO]() implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + def instantiate(p: String): Instance[IO] = Blocker[IO].use { blocker => for { diff --git a/examples/docs/logged.md b/examples/docs/logged.md index c641a8be..a2cb1306 100644 --- a/examples/docs/logged.md +++ b/examples/docs/logged.md @@ -19,6 +19,8 @@ val engine = Engine[IO]() implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) +implicit val cs = IO.contextShift(scala.concurrent.ExecutionContext.global) + def log(i: Int) = IO(println(s"got $i")) val f = diff --git a/text/src/swam/text/package.scala b/text/src/swam/text/package.scala index 24269cc9..d4fda117 100644 --- a/text/src/swam/text/package.scala +++ b/text/src/swam/text/package.scala @@ -26,7 +26,7 @@ import java.nio.file.Path package object text { def readFile[F[_]](path: Path, blocker: Blocker, chunkSize: Int)(implicit F: Sync[F], - cs: ContextShift[F]): F[String] = + cs: ContextShift[F]): F[String] = io.file.readAll[F](path, blocker, chunkSize).through(text.utf8Decode).compile.foldMonoid }