diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..3395099c8 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,125 @@ +name: ci + +on: + push: + branches: + - master + - next + + pull_request: + branches: + - master + - next + +jobs: + check: + runs-on: ubuntu-20.04 + strategy: + matrix: + rust: ["1.44.0", "stable", "beta", "nightly"] + name: Check (${{ matrix.rust }}) + steps: + - uses: actions/checkout@v2 + - name: Install minimal ${{ matrix.rust }} + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: ${{ matrix.rust }} + override: true + - name: Run cargo check + uses: actions-rs/cargo@v1 + with: + command: check + + test: + runs-on: ubuntu-20.04 + strategy: + matrix: + rust: ["1.44.0", "stable", "beta", "nightly"] + name: Test Suite (${{ matrix.rust }}) + steps: + - uses: actions/checkout@v2 + - name: Install minimal ${{ matrix.rust }} + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: ${{ matrix.rust }} + override: true + - name: Run cargo test + uses: actions-rs/cargo@v1 + with: + command: test + + fmt: + runs-on: ubuntu-20.04 + strategy: + matrix: + rust: ["1.44.0", "stable", "beta", "nightly"] + name: Rustfmt (${{ matrix.rust }}) + steps: + - uses: actions/checkout@v2 + - name: Install minimal ${{ matrix.rust }} with rustfmt + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: ${{ matrix.rust }} + override: true + components: rustfmt + - name: Run cargo fmt + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all -- --check + + clippy: + runs-on: ubuntu-20.04 + strategy: + matrix: + rust: ["stable"] + # rust: ["1.44.0", "stable", "beta", "nightly"] + name: Clippy (${{ matrix.rust }}) + steps: + - uses: actions/checkout@v2 + - name: Install minimal ${{ matrix.rust }} with clippy + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: ${{ matrix.rust }} + override: true + components: clippy + - name: Run cargo clippy + 
uses: actions-rs/cargo@v1 + with: + command: clippy + args: -- -D warnings + + book: + runs-on: ubuntu-20.04 + name: Build and Test Book + env: + MDBOOK_VERSION: '0.4.4' + MDBOOK_LINKCHECK_VERSION: '0.7.0' + steps: + - uses: actions/checkout@v2 + - name: Install mdBook + # Install prebuilt binaries where possible to improve CI performance + run: | + mkdir -p "$HOME/mdbook" + curl -L "https://github.com/rust-lang/mdBook/releases/download/v$MDBOOK_VERSION/mdbook-v$MDBOOK_VERSION-x86_64-unknown-linux-gnu.tar.gz" | tar xz -C "$HOME/mdbook" + echo "${HOME}/mdbook/" >> $GITHUB_PATH + mkdir -p "$HOME/mdbook-linkcheck" + curl -L "https://github.com/Michael-F-Bryan/mdbook-linkcheck/releases/download/v$MDBOOK_LINKCHECK_VERSION/mdbook-linkcheck-v$MDBOOK_LINKCHECK_VERSION-x86_64-unknown-linux-gnu.tar.gz" | tar xz -C "$HOME/mdbook-linkcheck" + echo "${HOME}/mdbook-linkcheck/" >> $GITHUB_PATH + - name: Install Javascript dependencies + run: yarn install + working-directory: book + - name: Build additional Javascript + run: yarn build + working-directory: book + - name: Build book + run: mdbook build + working-directory: book + - name: Test book + run: mdbook test + working-directory: book + # TODO: Deploy to Github Pages on crate release diff --git a/.gitignore b/.gitignore index 09aa61fbc..57e3ca687 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ +# Rust /target/ **/*.rs.bk Cargo.lock -repl-history + +# Yarn +/node_modules diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 34c076091..000000000 --- a/.gitmodules +++ /dev/null @@ -1,4 +0,0 @@ -[submodule "book/highlight.js"] - path = book/highlight.js - url = https://github.com/pikelet-lang/highlight.js.git - branch = add-pikelet diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index fb4f0da34..000000000 --- a/.travis.yml +++ /dev/null @@ -1,47 +0,0 @@ -language: rust -cache: - cargo: true - directories: - - book/highlight.js/node_modules - - editors/code/node_modules -rust: - - 1.31.0 - 
- stable - - beta - - nightly - -matrix: - allow_failures: - - rust: nightly - -install: - - tools/install-cargo-updates - - tools/build-highlight-js - - cargo build --verbose # https://docs.travis-ci.com/user/languages/rust/#Dependency-Management - - (cd editors/code && npm install) - -script: - - cargo build --verbose --all # https://docs.travis-ci.com/user/languages/rust/#Default-Build-Script - - cargo test --verbose --all # https://docs.travis-ci.com/user/languages/rust/#Default-Build-Script - - (cd editors/code && npm run travis) - - tools/build-book - -deploy: - # Deploy the book to Github Pages - # https://docs.travis-ci.com/user/deployment/pages/ - - provider: pages - skip-cleanup: true - github-token: $GITHUB_TOKEN # Set in travis-ci.org dashboard, marked secure - keep-history: true - local-dir: book/build - on: - branch: master - rust: stable - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/54d1cf84e3e6d80f3faa - on_success: change # options: [always|never|change] default: always - on_failure: always # options: [always|never|change] default: always - on_start: never # options: [always|never|change] default: always diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..f47a55ac2 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,5 @@ +{ + "recommendations": [ + "editorconfig.editorconfig" + ] +} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..670d90b25 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,61 @@ +# Contributing + +## Code of Conduct + +Please note that this project is released with a [Code of Conduct](./CODE_OF_CONDUCT.md). +By participating in this project you agree to abide by its terms. + +[code_of_conduct]: https://github.com/pikelet-lang/pikelet/blob/master/CODE_OF_CONDUCT.md + +## Matrix room + +Joining the matrix room at [#pikelet:matrix.org][pikelet-matrix] is a good way to get in touch with the developers and community. 
+ +[pikelet-matrix]: https://app.element.io/#/room/#pikelet:matrix.org + +## Prerequisites + +We use [Rust][rust] as our implementation language, which can be installed using the [rustup] tool. + +For the best experience in working with Rust we also recommend installing IDE support for your editor of choice: + +- [Rust Analyzer][rust-analyzer] (for VS Code, Vim, Emacs, etc.) +- [IntelliJ Rust][intellij-rust] (for IntelliJ-based IDEs) + +You can learn more about programming in Rust by reading [The Rust Programming Language][rust-book]. + +[rust]: https://www.rust-lang.org/ +[rustup]: https://rustup.rs/ +[rust-analyzer]: https://rust-analyzer.github.io/ +[intellij-rust]: https://intellij-rust.github.io/ +[rust-book]: https://doc.rust-lang.org/book/ + +## Workflow + +Follow these steps to contribute to the project: + +1. Make a fork of the [Pikelet repository][pikelet-repo]. +1. Within your fork, create a branch for your contribution. Use a meaningful name. +1. Create your contribution, meeting all [contribution quality standards](#quality-standards). +1. Ensure all the tests pass (`cargo test`). +1. [Create a pull request][create-a-pr] against the `master` branch of the repository. +1. Once the pull request is reviewed and CI passes, it will be merged. + +[pikelet-repo]: https://github.com/pikelet-lang/pikelet/ +[create-a-pr]: https://help.github.com/articles/creating-a-pull-request-from-a-fork/ + +## Quality Standards + +Most quality and style standards are checked automatically by the CI build. +Contributions should: + +- Separate each **logical change** into its own commit. +- Include tests for any new functionality in your pull request. +- Document public functions. +- Format code with `cargo fmt`. +- Avoid adding `unsafe` code. + If it is necessary, provide an explanatory comment on any `unsafe` block explaining its rationale and why it's safe. +- Add a descriptive message for each commit. Follow [these commit message guidelines][commit-messages]. 
+- Document your pull requests. Include the reasoning behind each change, and the testing done. + +[commit-messages]: https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html diff --git a/Cargo.toml b/Cargo.toml index 50105b832..731d91d00 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,10 +1,7 @@ [workspace] members = [ - "./crates/pikelet", - "./crates/pikelet-concrete", - "./crates/pikelet-core", - "./crates/pikelet-driver", - "./crates/pikelet-language-server", - "./crates/pikelet-library", - "./crates/pikelet-repl", + "./pikelet", + "./pikelet-cli", + "./pikelet-editor", + "./pikelet-language-server", ] diff --git a/README.md b/README.md index 0778eadef..53807d41b 100644 --- a/README.md +++ b/README.md @@ -2,14 +2,18 @@ ![Pikelet Mascot][pikelet-mascot] -[![Build Status][travis-badge]][travis-url] -[![Gitter][gitter-badge]][gitter-lobby] +[pikelet-mascot]: ./book/assets/pikelet.png -[pikelet-mascot]: assets/pikelet.png -[travis-badge]: https://travis-ci.org/pikelet-lang/pikelet.svg?branch=master -[travis-url]: https://travis-ci.org/pikelet-lang/pikelet -[gitter-badge]: https://badges.gitter.im/pikelet-lang/pikelet.svg -[gitter-lobby]: https://gitter.im/pikelet-lang/Lobby +[![Actions Status][actions-badge]][actions-url] +[![Matrix][matrix-badge]][matrix-lobby] +[![License][license-badge]][license-url] + +[actions-badge]: https://github.com/pikelet-lang/pikelet/workflows/ci/badge.svg +[actions-url]: https://github.com/pikelet-lang/pikelet/actions +[matrix-badge]: https://img.shields.io/matrix/pikelet:matrix.org?label=%23pikelet%3Amatrix.org +[matrix-lobby]: https://app.element.io/#/room/#pikelet:matrix.org +[license-badge]: https://img.shields.io/github/license/pikelet-lang/pikelet +[license-url]: ./LICENSE Pikelet is a small, functional, dependently typed programming language. @@ -24,22 +28,11 @@ other languages. At the moment however we've only implemented a type checker and very slow interpreter, so don't get your hopes up too much yet! 
There's still a whole lot to do before it is even remotely useful to anyone! 😅 -For more information, see [The Pikelet Book][pikelet-book]. - -[pikelet-book]: https://pikelet-lang.github.io/pikelet/ - -## Contributing - -We really want to encourage new contributors to help out! Please come chat with -us [on our Gitter channel][gitter-lobby] - if you have any questions about the -project, or just want to say hi! We sometimes get side-tracked on technical -discussions, but we're always more than happy to spend time explaining things. - -## Acknowledgments +## Roadmap -[![YesLogic Logo][yeslogic-logo]][yeslogic] +Check out our plans in [the roadmap](./book/src/development/roadmap.md). -This work was done in part with the generous support of [YesLogic][yeslogic]. +## Code of Conduct -[yeslogic]: http://yeslogic.com/ -[yeslogic-logo]: assets/yeslogic-logo.png +Please note that this project is released with a [Code of Conduct](./CODE_OF_CONDUCT.md). +By participating in this project you agree to abide by its terms. 
diff --git a/assets/yeslogic-logo.png b/assets/yeslogic-logo.png deleted file mode 100644 index 6b7135106..000000000 Binary files a/assets/yeslogic-logo.png and /dev/null differ diff --git a/book/.gitignore b/book/.gitignore index 075630732..f3e890b63 100644 --- a/book/.gitignore +++ b/book/.gitignore @@ -1,2 +1,9 @@ -build -theme/highlight.js +# mdBook +/build + +# Yarn +/node_modules + +# Parcel +/.cache +/dist diff --git a/book/README.md b/book/README.md index bfbda06b6..e37a1e5c2 100644 --- a/book/README.md +++ b/book/README.md @@ -1,37 +1,45 @@ -# Pikelet Language Book +# Pikelet Book -To build the book, you will first need to [install mdBook via cargo][install-mdbook]: +To build the book, you will first need to [install mdBook][install-mdbook] and [mdbook-linkcheck]: ```sh -cargo install mdbook +cargo install mdbook mdbook-linkcheck ``` -You can then serve the documentation locally by calling the [`serve` command][mdbook-serve] -from the `book` directory: +Note that for consistency we use specific versions of these tools on CI, +so the one you install might be newer than the one used to build and deploy the book. +To check the versions we currently assume, look at the [workflows directory](../.github/workflows). + +## Building additional JavaScript + +In order to highlight the Pikelet code examples in the book we override mdBook's built-in [highlight.js] with our own. +To build the highlighting code, run the following commands using [Yarn]: ```sh -mdbook serve +yarn workspace book install +yarn workspace book build ``` -[install-mdbook]: https://rust-lang-nursery.github.io/mdBook/cli/cli-tool.html#install-cratesio-version -[mdbook-serve]: https://rust-lang-nursery.github.io/mdBook/cli/serve.html +You will need to rebuild the book or restart the mdBook server for changes to take effect. 
+ +[highlight.js]: https://highlightjs.org/ +[Yarn]: https://yarnpkg.com/ -## Building custom syntax highlighting +## Running the mdBook server -Highlight.js can be [hard to extend][mdbook-custom-highlighting-issue], so we've -had to make a [custom fork][highlightjs-fork] that supports Pikelet syntax. For -better or worse, we've included this as a submodule as a temporary solution. -To build this, we've included a [handy script][build-highlight-js-script]: +You can then serve the documentation locally by calling the [`serve` command][mdbook-serve] +from the `book` directory: ```sh -tools/build-highlight-js +mdbook serve ``` -This should update/initialize the submodule, update the npm dependencies, -and copy the minified highlighting source to the proper directory. Note that -this is an optional step for developing locally - the CI pipeline will take care -of building and deploying this automatically. +Alternatively it can be called from the root of the repository: + +```sh +mdbook serve book +``` -[mdbook-custom-highlighting-issue]: https://github.com/rust-lang-nursery/mdBook/issues/657 -[highlightjs-fork]: https://github.com/pikelet-lang/highlight.js/tree/add-pikelet -[build-highlight-js-script]: /tools/build-highlight-js +[install-mdbook]: https://rust-lang.github.io/mdBook/cli/index.html#install-cratesio-version +[mdbook-serve]: https://rust-lang.github.io/mdBook/cli/serve.html +[mdbook-linkcheck]: https://github.com/Michael-F-Bryan/mdbook-linkcheck#getting-started diff --git a/assets/pikelet.png b/book/assets/pikelet.png similarity index 100% rename from assets/pikelet.png rename to book/assets/pikelet.png diff --git a/book/book.toml b/book/book.toml index 580ff52b0..f9ff5d968 100644 --- a/book/book.toml +++ b/book/book.toml @@ -1,7 +1,8 @@ [book] -title = "Pikelet Language Book" -authors = ["Brendan Zabarauskas"] -description = "Documentation for the Pikelet Language" +title = "Pikelet Book" +authors = ["YesLogic Pty. Ltd. 
"] +description = "Documentation for the Pikelet programming language" +language = "en" multilingual = false src = "src" @@ -9,4 +10,27 @@ src = "src" build-dir = "build" [output.html] -mathjax-support = true +additional-js = [ + "dist/index.js", +] + +[output.html.redirect] +"/installation/index.html" = "./guide/installation.html" +"/language/index.html" = "./reference.html" +# "/language/conditionals.html" = TODO +"/language/functions.html" = "./reference/functions.html" +"/language/records.html" = "./reference/records.html" +# "/language/bindings.html" = TODO +# "/language/type-inference.html" = TODO +"/language/universes.html" = "./reference/universes.html" +"/appendix/index.html" = "./specification.html" +"/appendix/design.html" = "./development/design-goals.html" +"/appendix/theory.html" = "./specification.html" +# "/appendix/influences.html" = TODO +# "/appendix/references.html" = TODO + +# [output.linkcheck] +# exclude = [ +# '\./contributing\.md', # Bypass `traverse-parent-directories` for this symlink +# '\./code-of-conduct\.md', # Bypass `traverse-parent-directories` for this symlink +# ] diff --git a/book/highlight.js b/book/highlight.js deleted file mode 160000 index d3d29619a..000000000 --- a/book/highlight.js +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d3d29619a425cd2c60724f6980c911543042ebb6 diff --git a/book/index.js b/book/index.js new file mode 100644 index 000000000..df6d0cb1f --- /dev/null +++ b/book/index.js @@ -0,0 +1,49 @@ +import hljs from "highlight.js/lib/core"; + +hljs.registerLanguage("pikelet", (hljs) => { + const KEYWORDS = { + keyword: "as fun Fun record Record", + built_in: "Type Bool true false U8 U16 U32 U64 S8 S16 S32 S64 F32 F64 String Char Array List", + }; + + const CHARACTER = { + className: "string", + begin: /'([^'\\]|\\.)*'/, + }; + const STRING = { + className: "string", + begin: /"([^"\\]|\\.)*"/, + }; + const NUMBER = { + className: "number", + begin: /\b[-+]?[0-9][a-zA-Z0-9_\.]*\b/, + relevance: 0, + }; + + const 
COMMENT = { + variants: [ + hljs.COMMENT("--", "$"), + hljs.COMMENT("|||", "$"), + ], + }; + + return { + name: "Pikelet", + keywords: KEYWORDS, + contains: [ + STRING, + CHARACTER, + NUMBER, + + COMMENT, + + { begin: "->|<-" }, // No markup, relevance booster + ], + }; +}); + +window.addEventListener("load", (event) => { + document + .querySelectorAll("code.language-pikelet") + .forEach((block) => hljs.highlightBlock(block)); +}); diff --git a/book/package.json b/book/package.json new file mode 100644 index 000000000..cb89081de --- /dev/null +++ b/book/package.json @@ -0,0 +1,15 @@ +{ + "name": "book", + "version": "0.0.0", + "private": true, + "scripts": { + "dev": "parcel index.js --no-source-maps", + "build": "parcel build index.js --no-source-maps" + }, + "devDependencies": { + "parcel-bundler": "^1.12.4" + }, + "dependencies": { + "highlight.js": "^10.2.0" + } +} diff --git a/book/src/SUMMARY.md b/book/src/SUMMARY.md index 125f08b63..0015a8142 100644 --- a/book/src/SUMMARY.md +++ b/book/src/SUMMARY.md @@ -2,16 +2,38 @@ [Pikelet](index.md) -- [Installation](./installation/index.md) -- [Language](./language/index.md) - - [Conditionals](./language/conditionals.md) - - [Functions](./language/functions.md) - - [Records](./language/records.md) - - [Bindings](./language/bindings.md) - - [Type inference](./language/type-inference.md) - - [Universes](./language/universes.md) -- [Appendix](./appendix/index.md) - - [Design](./appendix/design.md) - - [Theory](./appendix/theory.md) - - [Influences](./appendix/influences.md) - - [References](./appendix/references.md) +- [Guide](./guide.md) + - [Installation](./guide/installation.md) + - [Using the REPL](./guide/using-the-repl.md) + - [Compiling Standalone Programs]() + - [Pikelet as a Configuration Language]() + - [Pikelet as a Scripting Language]() + +- [Reference](./reference.md) + - [Comments](./reference/comments.md) + - [Keywords](./reference/keywords.md) + - [Names](./reference/names.md) + - 
[Builtins](./reference/builtins.md) + - [Literals](./reference/literals.md) + - [Universes](./reference/universes.md) + - [Functions](./reference/functions.md) + - [Records](./reference/records.md) + +- [Specification](./specification.md) + - [Core Language]() + - [Semantics]() + - [Typing]() + - [Surface Language]() + - [Elaboration]() + - [Textual Representation](./specification/textual-representation.md) + - [Lexical Syntax](./specification/textual-representation/lexical-syntax.md) + - [Concrete Syntax](./specification/textual-representation/concrete-syntax.md) + - [Inspiration](./specification/inspiration.md) + +- [Development](./development.md) + - [Contributing](./development/contributing.md) + - [Code of Conduct](./development/code-of-conduct.md) + - [Roadmap](./development/roadmap.md) + - [Design](./development/design.md) + - [Influences](./development/influences.md) + - [Bibliography](./development/bibliography.md) diff --git a/book/src/appendix/design.md b/book/src/appendix/design.md deleted file mode 100644 index 3776a9360..000000000 --- a/book/src/appendix/design.md +++ /dev/null @@ -1,121 +0,0 @@ -# Design - -## Design goals - -Pikelet should feel: - -- Friendly -- Liberating -- Sympathetic - -This is a rough list of things that might be interesting to explore with Pikelet: - -- Sympathetic to humans - - Programs should look pretty, and read clearly - - Friendly community - - Helpful interactive tooling - - Tight feedback loops -- Sympathetic to machines - - Predictable performance - - Predictable memory layout - - Predictable optimizations - - Zero (or close to zero) cost abstractions - - Low level control - - Minimal runtime -- Sympathetic to real-world realities - - Gradual correctness (eg. examples -> generative tests -> solvers -> proofs) - - Provide clearly marked escape hatches (learn from the lessons of [RustBelt](rust-belt)?) 
- - Automatic upgrades of code - between language and library versions - - Work for large scale systems -- Sympathetic to mathematical foundations - - Simple core language that can be formalized and proven sound - - Should allow for mathematically inspired patterns of code reuse - - Allow newcomers to learn mathematical thinking gradually - - Adaptable to future developments in type theory - -It's unclear how many of these will be able to be met, and what priorities these -should have, so this list might change over time. Come and [chat with us][gitter-channel] -if you'd like to give your input and get involved! - -[rust-belt]: https://plv.mpi-sws.org/rustbelt/ -[gitter-channel]: https://gitter.im/pikelet-lang/Lobby - -## Some key features of interest - -- [Dependent types](https://en.wikipedia.org/wiki/Dependent_type) -- [Purely functional](https://en.wikipedia.org/wiki/Pure_function) -- [Strict evaluation](https://en.wikipedia.org/wiki/Eager_evaluation) -- Implicit arguments -- Dependent records+instance arguments as first class modules/type classes -- Non-uniform memory layout - custom data layouts for records and variants? -- [Quantitative type theory](https://bentnib.org/quantitative-type-theory.pdf) - for [erasure](https://en.wikipedia.org/wiki/Type_erasure) and - [linear types](https://en.wikipedia.org/wiki/Substructural_type_system#Linear_type_systems) -- Totality checking -- Explicit tail-call elimination -- Excellent editor support with fast, incremental program analysis -- Interactive program development using holes -- Refinement types for lightweight verification - -Some more hand-wavey ideas: - -- Monomorphization by partial-evaluation of instance arguments -- Optional/configurable garbage collection -- Alternatives to currying for function application? 
-- First-class declarations (Levitation or Elaborator Reflection could be useful here) - -Some other features that may be trickier to integrate given the previous -features and design goals: - -- [Effect systems/Algebraic Effects](https://en.wikipedia.org/wiki/Effect_system) - - could make it easier to integrate async-io without needing to build it in - - how do we use effects for generative modules? - - how do we makes this *fast* for systems programming? - - should compile down in a similar way to the equivalent procedural code in Rust or C - - most effect handling does not need to be dynamic - - most systems cause lots of intermediate allocations or stack switching -- [Combined Effects/Coeffects](https://www.cs.kent.ac.uk/people/staff/dao7/publ/combining-effects-and-coeffects-icfp16.pdf) - - allow for statically checked compilation configurations - - explicit variable capture could be modelled as a coeffect - - could subsume quantitative type theory, implicit arguments, etc - - not yet integrated into dependent types in the research literature (Granule is working on this) -- Row polymorphism - - no research on integrating these with dependent records and inductive data types -- Program composition via category theory - - Challenging to integrate in the presence of strict evaluation - - Similar problems to effect systems: we don't want to allocate intermediate - data structures, instead we want to build up stack allocated state machines - (like in Rust's future and iterator traits) to be executed later - -## A possible implementation plan - -1. ~~Start with a simple dependent type system, like [LambdaPi](https://www.andres-loeh.de/LambdaPi/)~~ -2. Implement additional language extensions needed for actual usefulness - - ~~dependent records~~ - - ~~let/where bindings~~ - - quantitative type theory - - implicit arguments - - instance arguments - - basic (non-dependent) effect system - - ~~cumulative universes~~ -3. 
Implement back end(s) - - JIT and embeddable runtime (for bootstrapping usage) - possibly with - [HolyJIT](https://github.com/nbp/holyjit) or or [CraneLift](https://github.com/CraneStation/cranelift) - - CraneLift would unlock WebASM, which would be a huge boost - - Optimizing compiler - Possibly with LLVM or a verified compiler (like - CompCert) in the future - - Figure out how to integrate with libraries written in other languages, - like C or Rust - -By starting with a JIT we could get initial usage from embedding the language -within existing Rust programs, like games. Looking into the future it would also -be nice to then move forward towards implementing a native compiler, however. - -At the moment we are building the language in Rust, but perhaps it would be -better to build a verified implementation in Coq/Agda/Lean/Idris/something else. -That way we can actually start proving some of the claims we desire to make -about our system. A concern could be that we go too far down the route of -implementation and it would be extremely challenging to then form a solid -specification for what we are building. On the other hand, as always, the -downside of a verified implementation is that it could take a prohibitive -amount of time to complete. diff --git a/book/src/appendix/index.md b/book/src/appendix/index.md deleted file mode 100644 index 94870e89b..000000000 --- a/book/src/appendix/index.md +++ /dev/null @@ -1,4 +0,0 @@ -# Appendix - -Here you will find additional reference information for understanding the -theoretical foundations behind Pikelet. diff --git a/book/src/appendix/references.md b/book/src/appendix/references.md deleted file mode 100644 index b7475ea47..000000000 --- a/book/src/appendix/references.md +++ /dev/null @@ -1,19 +0,0 @@ -# References - -What follows is a non-exhaustive list of some of the references that were useful -when building Pikelet: - -- Christiansen, David Raymond (2013). “Bidirectional Typing Rules: A Tutorial”. 
- [[PAPER][bidirectional-typing-paper]] -- Löh, Andres, McBride, Conor and Swierstra, Wouter (2009). “A tutorial - implementation of a dependently typed lambda calculus”. - [[SITE][lambdapi-site]] - [[PAPER][lambdapi-paper]] -- Norell, Ulf (2007). “Towards a practical programming language based on - dependent type theory”. - [[PAPER][agda-paper]] - -[bidirectional-typing-paper]: http://www.davidchristiansen.dk/tutorials/bidirectional.pdf -[lambdapi-site]: https://www.andres-loeh.de/LambdaPi/ -[lambdapi-paper]: https://www.andres-loeh.de/LambdaPi/LambdaPi.pdf -[agda-paper]: http://www.cse.chalmers.se/~ulfn/papers/thesis.pdf diff --git a/book/src/appendix/theory.md b/book/src/appendix/theory.md deleted file mode 100644 index e759832b0..000000000 --- a/book/src/appendix/theory.md +++ /dev/null @@ -1,758 +0,0 @@ -# Theory - -A formalization of the semantics for type checking and normalizing Pikelet. - -## Contents - -- [Introduction](#introduction) - - [Notation](#notation) - - [Where is the soundness proof?](#where-is-the-soundness-proof) -- [Syntax](#syntax) - - [Raw terms](#raw-terms) - - [Terms](#terms) - - [Values](#values) - - [Contexts](#contexts) -- [Semantics](#semantics) - - [Elaboration](#elaboration) - - [Normalization](#normalization) - - [Type checking](#type-checking) - - [Type inference](#type-inference) - - [Subtyping](#subtyping) - - [Universe shifting](#universe-shifting) - - [Pattern matching](#pattern-matching) - - [Type checking of patterns](#type-checking-of-patterns) - - [Type inference of patterns](#type-inference-of-patterns) - -## Introduction - -At its core, Pikelet is a dependently typed lambda calculus with a cumulative -universe hierarchy with explicit level shifts. - -> **Note:** -> This document is intended for those who are interested in looking deeper into the formal foundations of Pikelet. -> You _don't_ need to understand this for general use of Pikelet, so feel free to skip this document if that is easier. 
-> We will however make an effort to explain some of the notation we use here, and point to resources that might help if this piques your curiosity! - -### Notation - -We use a combination of some [BNF][bnf]-style syntax definitions with -[natural deduction](natural-deduction) rules to define our language. This -combination of notation is sometimes referred to as _computer science -metanotation_ and is, alas, a little hard to pin down [as conventions vary][guy-steele-presentation] -between papers and authors. The general rules stay the same however, and once -you learn to read them they are much more succinct than an actual implementation -could be, and are an invaluable tool for quickly getting a high-level overview -of a programming language's semantics. - -> **TODO:** -> Describe BNF, natural deduction rules, overbars, variable binding, etc. - -Some handy links: - -- [Crash Course on Notation in Programming Language Theory](http://siek.blogspot.com.au/2012/07/crash-course-on-notation-in-programming.html) -- [A practitioner’s guide to reading programming languages papers](https://blog.acolyer.org/2018/01/26/a-practitioners-guide-to-reading-programming-languages-papers/) -- [A path to enlightenment in Programming Language Theory](http://steshaw.org/plt/) - -[bnf]: https://en.wikipedia.org/wiki/Backus%E2%80%93Naur_form -[natural-deduction]: https://en.wikipedia.org/wiki/Natural_deduction -[guy-steele-presentation]: https://www.youtube.com/watch?v=7HKbjYqqPPQ - -### Where is the soundness proof? - -Here we are only defining the rules of our language's type checking and -evaluation. Further work needs to be done to verify that our system actually -satisfies certain interesting [type soundness properties][type-soundness], -like progress, preservation, [strong normalization][normalization-property], -etc. If you would like to discuss this with us, please check out -[the relevant github issue][formalization-issue]! 
- -[type-soundness]: https://en.wikipedia.org/wiki/Type_safety -[normalization-property]: https://en.wikipedia.org/wiki/Normalization_property_(abstract_rewriting) -[formalization-issue]: https://github.com/pikelet-lang/pikelet/issues/39 - -## Syntax - -### Raw terms - -\\[ -% Haskell-style append https://tex.stackexchange.com/questions/4194/how-to-typeset-haskell-operator-and-friends -\\newcommand\doubleplus{+\kern-1.3ex+\kern0.8ex} -% Small caps https://github.com/mathjax/MathJax-docs/wiki/Small-caps-%5Ctextsc-in-MathJaxx -\\def\sc#1{\dosc#1\csod} -\\def\dosc#1#2\csod{{\rm #1{\small #2}}} -\\ -\\newcommand{\rule}[3]{ \dfrac{ ~~#2~~ }{ ~~#3~~ } & \Tiny{\text{(#1)}} } -\\ -\\DeclareMathOperator{\max}{max} -\\DeclareMathOperator{\field}{field} -\\DeclareMathOperator{\fieldty}{fieldty} -\\DeclareMathOperator{\fieldsubst}{fieldsubst} -\\DeclareMathOperator{\Match}{\sc{MATCH}} -\\DeclareMathOperator{\shift}{shift} -\\ -% Judgments -\\newcommand{\eval}[3]{ #1 \vdash #2 \hookrightarrow #3 } -\\newcommand{\check}[4]{ #1 \vdash #2 \uparrow #3 \leadsto #4 } -\\newcommand{\infer}[4]{ #1 \vdash #2 \downarrow #3 \leadsto #4 } -\\newcommand{\subty}[3]{ #1 \vdash #2 \preccurlyeq #3 } -\\newcommand{\match}[3]{ \Match(#1,#2) \Longrightarrow #3 } -\\newcommand{\checkpat}[5]{ #1 \vdash #2 \uparrow #3 \leadsto #4 \Longrightarrow #5 } -\\newcommand{\inferpat}[5]{ #1 \vdash #2 \downarrow #3 \leadsto #4 \Longrightarrow #5 } -\\ -% Metavariables -\\newcommand{\rexpr}{r} % raw expressions -\\newcommand{\rtype}{R} % raw types -\\newcommand{\rpat}{s} % raw patterns -\\ -\\newcommand{\texpr}{t} % expressions -\\newcommand{\ttype}{T} % types -\\newcommand{\tpat}{p} % patterns -\\ -\\newcommand{\vexpr}{v} % value expressions -\\newcommand{\vtype}{V} % value types -\\newcommand{\wexpr}{w} % whnf expressions -\\newcommand{\wtype}{W} % whnf types -\\newcommand{\nexpr}{n} % neutral expressions -\\newcommand{\ntype}{N} % neutral types -\\ -\\newcommand{\ctx}{\Gamma} % contexts -\\ -% Keywords 
-\\newcommand{\kw}[1]{ \mathsf{#1} } -\\ -% Term and Type constructors -\\newcommand{\label}{l} -\\newcommand{\binder}{x} -\\newcommand{\var}[1]{x^\wedge{#1}} -\\newcommand{\Type}[1]{\kw{Type}^\wedge{#1}} -\\newcommand{\Arrow}[2]{ #1 \rightarrow #2 } -\\newcommand{\Pi}[2]{ \Arrow{(#1)}{#2} } -\\newcommand{\lam}[2]{ \kw{\lambda} #1 . #2 } -\\newcommand{\app}[2]{ #1 ~ #2 } -\\newcommand{\case}[2]{ \kw{case} ~ #1 \left\\{ #2 \right\\} } -\\newcommand{\RecordCons}[2]{ \kw{Record} \left\\{ #1; #2 \right\\} } -\\newcommand{\RecordEmpty}{ \kw{Record} \left\\{\right\\} } -\\newcommand{\as}{ ~ \kw{as} ~ } -\\newcommand{\record}[1]{ \kw{record} \left\\{ #1 \right\\} } -\\newcommand{\proj}[3]{ #1.#2^\wedge{#3} } -\\newcommand{\subst}[3]{ #1 ~ [#2 \rightarrow #3] } -\\ -% Items -\\newcommand{\declItem}[2]{ #1 : #2 } -\\newcommand{\defnItem}[2]{ #1 = #2 } -\\ -% Contexts -\\newcommand{\emptyCtx}{ \varnothing } -\\newcommand{\composeCtx}[2]{ #1 \sim #2 } -\\newcommand{\extendCtx}[2]{ #1, #2 } -\\ -\begin{array}{rrll} - \rexpr,\rtype & ::= & \var{i} & \text{variables ($i \in \mathbb{N}$)} \\\\ - & | & \Type{i} & \text{universe of types ($i \in \mathbb{N}$)} \\\\ - & | & ? 
& \text{holes} \\\\ - & | & \rexpr : \rtype & \text{term annotated with a type} \\\\ - & | & \Pi{\binder:\rtype_1}{\rtype_2} & \text{dependent function type} \\\\ - & | & \lam{\binder:\rtype}{\rexpr} & \text{functions} \\\\ - & | & \app{\rexpr_1}{\rexpr_2} & \text{function application} \\\\ - & | & \case{\rexpr}{\overline{\rpat_i \rightarrow \rexpr_i}^{;}} & \text{case expressions} \\\\ - & | & \RecordCons{\label \as \binder:\rtype_1}{\rtype_2} & \text{record type extension} \\\\ - & | & \RecordEmpty & \text{empty record type} \\\\ - & | & \record{\label=\rexpr_1, \rexpr_2} & \text{record extension} \\\\ - & | & \record{} & \text{empty record} \\\\ - & | & \proj{\rexpr}{\label}{i} & \text{record projection ($i \in \mathbb{N}$)} \\\\ - \\\\ - \rpat & ::= & \binder & \text{binder pattern} \\\\ - & | & \rpat : \rtype & \text{pattern annotated with a type} \\\\ - % & | & \record{\label=\rpat_1, \rpat_2} & \text{record extension pattern} \\\\ - % & | & \record{} & \text{empty record pattern} \\\\ - \\\\ -\end{array} -\\] - -\\[ -\begin{array}{lrll} - \Arrow{\rtype_1}{\rtype_2} & := & \Pi{\binder:\rtype_1}{\rtype_2} & \text{non-dependent function types} \\\\ - \lam{\binder}{\rexpr} & := & \lam{\binder:?}{\rexpr} & \text{functions (without an annotation)} \\\\ -\end{array} -\\] - -### Terms - -The core term syntax skips holes, ensuring that everything is fully elaborated: - -\\[ -\begin{array}{rrll} - \texpr,\ttype & ::= & \var{i} & \text{variables ($i \in \mathbb{N}$)} \\\\ - & | & \Type{i} & \text{universe of types ($i \in \mathbb{N}$)} \\\\ - & | & \texpr : \ttype & \text{term annotated with a type} \\\\ - & | & \Pi{\binder:\ttype_1}{\ttype_2} & \text{dependent function type} \\\\ - & | & \lam{\binder:\ttype}{\texpr} & \text{functions} \\\\ - & | & \app{\texpr_1}{\texpr_2} & \text{function application} \\\\ - & | & \case{\texpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} & \text{case expressions} \\\\ - & | & \RecordCons{\label \as \binder:\ttype_1}{\ttype_2} & 
\text{record type extension} \\\\ - & | & \RecordEmpty & \text{empty record type} \\\\ - & | & \record{\label=\texpr_1, \texpr_2} & \text{record extension} \\\\ - & | & \record{} & \text{empty record} \\\\ - & | & \proj{\texpr}{\label}{i} & \text{record projection ($i \in \mathbb{N}$)} \\\\ - \\\\ - \tpat & ::= & \binder & \text{binder pattern} \\\\ - & | & \tpat : \ttype & \text{pattern annotated with a type} \\\\ - & | & \record{\label=\tpat_1, \tpat_2} & \text{record extension pattern} \\\\ - & | & \record{} & \text{empty record pattern} \\\\ - \\\\ -\end{array} -\\] - -### Values - -In order to make it clear what is 'stuck' and what still needs to be evaluated, -we separate our syntax into [weak head normal forms][whnf-wikipedia] (\\(\wexpr\\)), -and neutral terms (\\(\nexpr\\)): - -\\[ -\begin{array}{rrll} - \vexpr,\vtype & ::= & \wexpr & \text{weak head normal forms} \\\\ - & | & \nexpr & \text{neutral terms} \\\\ - \\\\ - \nexpr,\ntype & ::= & \var{i} & \text{variables ($i \in \mathbb{N}$)} \\\\ - & | & \app{\nexpr}{\texpr} & \text{function application} \\\\ - & | & \case{\nexpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} & \text{case expressions} \\\\ - & | & \proj{\nexpr}{\label}{i} & \text{record projection ($i \in \mathbb{N}$)} \\\\ - \\\\ - \wexpr,\wtype & ::= & \Type{i} & \text{universe of types ($i \in \mathbb{N}$)} \\\\ - & | & \Pi{\binder:\vtype_1}{\vtype_2} & \text{dependent function type} \\\\ - & | & \lam{\binder:\vtype}{\vexpr} & \text{functions} \\\\ - & | & \RecordCons{\label \as \binder:\vtype_1}{\vtype_2} & \text{record type extension} \\\\ - & | & \RecordEmpty & \text{empty record type} \\\\ - & | & \record{\label=\vexpr_1, \vexpr_2} & \text{record extension} \\\\ - & | & \record{} & \text{empty record} \\\\ - \\\\ -\end{array} -\\] - -[whnf-wikipedia]: https://en.wikipedia.org/wiki/Lambda_calculus_definition#Weak_head_normal_form - -### Contexts - -As we type check terms, we'll be passing over bindings like lambdas and pi types. 
-Contexts allow us to keep track of the bound parameters, -even though we don't know the exact values these will eventually take during normalization. - -\\[ -\begin{array}{rrll} - \ctx & ::= & \emptyCtx & \text{the empty context} \\\\ - & | & \extendCtx{\ctx}{\declItem{\binder}{\vtype}} & \text{context extended with a declaration} \\\\ - & | & \extendCtx{\ctx}{\defnItem{\binder}{\texpr}} & \text{context extended with a definition} \\\\ -\end{array} -\\] - -## Semantics - -We take a _bidirectional_ approach to type checking, splitting it into two -phases: type checking and type inference. This makes the flow of information -through the type checker clear and relatively easy to reason about. -Normalization happens after inference, and before types are fed back in to be -used during type checking. - -With that in mind, the next sections will describe the following judgments: - -| Name | Notation | Inputs | Outputs | -|-----------------------------------------------------------|---------------------------------------------------------------|-------------------------------------------|-------------------------------------------| -| [normalization](#normalization) | \\(\eval{ \ctx }{ \texpr }{ \vexpr }\\) | \\(\ctx\\), \\(\rexpr\\) | \\(\vexpr\\) | -| [type checking](#type-checking) | \\(\check{ \ctx }{ \rexpr }{ \vtype }{ \texpr }\\) | \\(\ctx\\), \\(\rexpr\\), \\(\vtype\\) | \\(\texpr\\) | -| [type inference](#type-inference) | \\(\infer{ \ctx }{ \rexpr }{ \vtype }{ \texpr }\\) | \\(\ctx\\), \\(\rexpr\\) | \\(\vtype\\), \\(\texpr\\) | -| [subtyping](#subtyping) | \\(\subty{ \ctx }{ \vtype_1 }{ \vtype_2 }\\) | \\(\ctx\\), \\(\vtype_1\\), \\(\vtype_2\\)| | -| [pattern matching](#pattern-matching) | \\(\match{ \wexpr }{ \tpat }{ \theta }\\) | \\(\wexpr\\), \\(\tpat\\) | \\(\theta\\) | -| [type checking of patterns](#type-checking-of-patterns) | \\(\checkpat{ \ctx }{ \rpat }{ \vtype }{ \tpat }{ \ctx' }\\) | \\(\ctx\\), \\(\rpat\\), \\(\vtype\\) | \\(\tpat\\), \\(\ctx'\\) 
| -| [type inference of patterns](#type-inference-of-patterns) | \\(\inferpat{ \ctx }{ \rpat }{ \vtype }{ \tpat }{ \ctx' }\\) | \\(\ctx\\), \\(\rpat\\), | \\(\vtype\\), \\(\tpat\\), \\(\ctx'\\) | - -Normalization stands on its own, but both checking and inference are mutually -dependent on each other. Care has been taken to design the judgments so that -they are _syntax-directed_, meaning that an algorithm can be clearly derived -from them. - -### Elaboration - -Elaboration is the process of filling in missing information that the -programmer omitted in the original code, generally based on the results -of type inference. - -In Pikelet's judgements the elaborated terms are denoted after the -diamond: \\(\rhd\\). At the moment not much is added - only the missing -type annotations on function parameters. In the future this could be extended -filling in type class instances and implicit arguments. - -### Normalization - -Here we describe how we normalize elaborated terms under the assumptions -in the context. 
- -\\[ -\boxed{ - \eval{ \ctx }{ \texpr }{ \vexpr } -} -\\\\[2em] -\begin{array}{cl} - \rule{E-ANN}{ - \eval{ \ctx }{ \texpr }{ \vexpr } - }{ - \eval{ \ctx }{ \texpr:\ttype }{ \vexpr } - } - \\\\[2em] - \rule{E-TYPE}{}{ - \eval{ \ctx }{ \Type{i} }{ \Type{i} } - } - \\\\[2em] - \rule{E-VAR}{ - \defnItem{\binder}{\texpr} \notin \ctx - }{ - \eval{ \ctx }{ \var{i} }{ \var{i} } - } - \\\\[2em] - \rule{E-VAR-DEF}{ - \defnItem{\binder}{\texpr} \in \ctx - \qquad - \eval{ \ctx }{ \texpr }{ \vexpr } - }{ - \eval{ \ctx }{ \var{i} }{ \shift(\vexpr,i) } - } - \\\\[2em] - \rule{E-PI}{ - \eval{ \ctx }{ \ttype_1 }{ \vtype_1 } - \qquad - \eval{ \ctx }{ \ttype_2 }{ \vtype_2 } - }{ - \eval{ \ctx }{ \Pi{\binder:\ttype_1}{\ttype_2} }{ \Pi{\binder:\vtype_1}{\vtype_2} } - } - \\\\[2em] - \rule{E-LAM}{ - \eval{ \ctx }{ \ttype }{ \vtype } - \qquad - \eval{ \ctx }{ \texpr }{ \vexpr } - }{ - \eval{ \ctx }{ \lam{\binder:\ttype}{\texpr} }{ \lam{\binder:\vtype}{\vexpr} } - } - \\\\[2em] - \rule{E-APP}{ - \eval{ \ctx }{ \texpr_1 }{ \lam{\binder:\vtype_1}{\vexpr_1} } - \qquad - \eval{ \ctx }{ \subst{\vexpr_1}{\binder}{\texpr_2} }{ \vexpr_3 } - }{ - \eval{ \ctx }{ \app{\texpr_1}{\texpr_2} }{ \vexpr_3 } - } - \\\\[2em] - \rule{E-CASE}{ - \eval{ \ctx }{ \nexpr }{ \nexpr' } - }{ - \eval{ \ctx }{ \case{\nexpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} } - { \case{\nexpr'}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} } - } - \\\\[2em] - \rule{E-CASE-MATCH}{ - \eval{ \ctx }{ \nexpr }{ \wexpr } - \qquad - \match{ \wexpr }{ \tpat_i }{ \theta } - \qquad - \eval{ \ctx }{ \texpr_i ~ \theta }{ \vexpr_i } - }{ - \eval{ \ctx }{ \case{\nexpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} }{ \vexpr_i } - } - \\\\[2em] - \rule{E-RECORD-TYPE}{ - \eval{ \ctx }{ \ttype_1 }{ \vtype_1 } - \qquad - \eval{ \ctx }{ \ttype_2 }{ \vtype_2 } - }{ - \eval{ \ctx }{ \RecordCons{\label \as \binder:\ttype_1}{\ttype_2} }{ \RecordCons{\label \as \binder:\vtype_1}{\vtype_2} } - } - \\\\[2em] - \rule{E-RECORD}{ - \eval{ \ctx }{ 
\texpr_1 }{ \vexpr_1 } - \qquad - \eval{ \ctx }{ \texpr_2 }{ \vexpr_2 } - }{ - \eval{ \ctx }{ \record{\label=\texpr_1, \texpr_2} }{ \record{\label=\vexpr_1, \vexpr_2} } - } - \\\\[2em] - \rule{E-EMPTY-RECORD-TYPE}{}{ - \eval{ \ctx }{ \RecordEmpty }{ \RecordEmpty } - } - \\\\[2em] - \rule{E-EMPTY-RECORD}{}{ - \eval{ \ctx }{ \record{} }{ \record{} } - } - \\\\[2em] - \rule{E-PROJ}{ - \eval{ \ctx }{ \texpr_1 }{ \vexpr_1 } - \qquad - \vexpr_2 = \field(\label, \vexpr_1) - }{ - \eval{ \ctx }{ \proj{\texpr_1}{\label}{i} }{ \vexpr_2 } - } - \\\\[2em] -\end{array} -\\] - -We define \\(\field(-,-)\\) like so: - -\\[ -\begin{array}{lrll} - \field(\label_1, \record{\label_2 = \vexpr_1, \vexpr_2}) & = & \vexpr_1 & \text{if} ~ \label_1 \equiv \label_2 \\\\ - \field(\label_1, \record{\label_2 = \vexpr_1, \vexpr_2}) & = & \field(\label_1, \vexpr_2) \\\\ -\end{array} -\\] - -### Type checking - -This judgement checks that the given term has the expected type and returns its -elaborated form. - -\\[ -\boxed{ - \check{ \ctx }{ \rexpr }{ \vtype }{ \texpr } -} -\\\\[2em] -\begin{array}{cl} - \rule{C-LAM}{ - \infer{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_1}} }{ \rexpr }{ \ttype_2 }{ \texpr } - }{ - \check{ \ctx }{ \lam{\binder}{\rexpr} }{ \Pi{\binder:\vtype_1}{\vtype_2} }{ \lam{\binder:\vtype_1}{\texpr} } - } - \\\\[2em] - \rule{C-CASE}{ - \infer{ \ctx }{ \rexpr }{ \vtype_1 }{ \texpr } - \qquad - \overline{ - % TODO: impl pattern checks - ~ - \check{ \ctx }{ \rpat_i }{ \vtype_1 }{ \tpat_i } \Rightarrow \ctx' - \qquad - \check{ \composeCtx{\ctx}{\ctx'} }{ \rexpr_i }{ \vtype_2 }{ \texpr_i } - ~ - } - }{ - \check{ \ctx }{ \case{\rexpr}{\overline{\rpat_i \rightarrow \rexpr_i}^{;}} }{ \vtype_2 } - { \case{\texpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}} } - } - \\\\[2em] - \rule{C-RECORD}{ - \label_1 \equiv \label_2 - \qquad - \check{ \ctx }{ \rexpr_1 }{ \vtype_1 }{ \texpr_1 } - \qquad - \eval{ \ctx }{ \subst{\vtype_2}{\binder}{\texpr_1} }{ \vtype_3 } - \qquad - \check{ \ctx }{ 
\rexpr_2 }{ \vtype_3 }{ \texpr_2 } - }{ - \check{ \ctx }{ \record{\label_1=\rexpr_1, \rexpr_2} } - { \RecordCons{\label_2 \as \binder:\vtype_1}{\vtype_2} } - { \record{\label_1=\texpr_1, \texpr_2} } - } - \\\\[2em] - \rule{C-CONV}{ - \infer{ \ctx }{ \rexpr }{ \vtype_2 }{ \texpr } - \qquad - \subty{ \ctx }{ \vtype_1 }{ \vtype_2 } - }{ - \check{ \ctx }{ \rexpr }{ \vtype_1 }{ \texpr } - } - \\\\[2em] -\end{array} -\\] - -### Type inference - -Here we define a judgement that synthesizes a type from the given term and -returns its elaborated form. - -\\[ -\boxed{ - \infer{ \ctx }{ \rexpr }{ \vtype }{ \texpr } -} -\\\\[2em] -\begin{array}{cl} - \rule{I-ANN}{ - \infer{ \ctx }{ \rtype }{ \Type{i} }{ \ttype } - \qquad - \eval{ \ctx }{ \ttype }{ \vtype } - \qquad - \check{ \ctx }{ \rexpr }{ \vtype }{ \texpr } - }{ - \infer{ \ctx }{ \rexpr:\rtype }{ \Type{i+1} }{ \texpr:\ttype } - } - \\\\[2em] - \rule{I-TYPE}{}{ - \infer{ \ctx }{ \Type{i} }{ \Type{(i+1)} }{ \Type{i} } - } - \\\\[2em] - \rule{I-VAR}{ - \declItem{\binder}{\vtype} \in \ctx - }{ - \infer{ \ctx }{ \var{i} }{ \shift(\vtype,i) }{ \var{i} } - } - \\\\[2em] - \rule{I-PI}{ - \infer{ \ctx }{ \rtype_1 }{ \Type{i} }{ \ttype_1 } - \qquad - \eval{ \ctx }{ \ttype_1 }{ \vtype_1 } - \qquad - \check{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_1}} }{ \rtype_2 }{ \Type{j} }{ \ttype_2 } - }{ - \infer{ \ctx }{ \Pi{\binder:\rtype_1}{\rtype_2} }{ \Type{\max(i,j)} } - { \Pi{\binder:\ttype_1}{\ttype_2} } - } - \\\\[2em] - \rule{I-LAM}{ - \infer{ \ctx }{ \rtype }{ \Type{i} }{ \ttype } - \qquad - \eval{ \ctx }{ \ttype }{ \vtype_1 } - \qquad - \check{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_1}} }{ \rexpr}{ \vtype_2 }{ \texpr } - }{ - \infer{ \ctx }{ \lam{\binder:\rtype}{\rexpr} } - { \Pi{\binder:\vtype_1}{\vtype_2} }{ \lam{\binder:\ttype}{\texpr} } - } - \\\\[2em] - \rule{I-APP}{ - \infer{ \ctx }{ \rexpr_1 }{ \Pi{\binder:\vtype_1}{\vtype_2} }{ \texpr_1 } - \qquad - \check{ \ctx }{ \rexpr_2 }{ \vtype_1 }{ \texpr_2 } - \qquad - \eval{ 
\ctx }{ \subst{\vtype_2}{\binder}{\texpr_2} }{ \vtype_3 } - }{ - \infer{ \ctx }{ \app{\rexpr_1}{\rexpr_2} }{ \vtype_3 }{ \app{\texpr_1}{\texpr_2} } - } - \\\\[2em] - \rule{I-RECORD-TYPE}{ - \infer{ \ctx }{ \rtype_1 }{ \Type{i} }{ \ttype_1 } - \qquad - \eval{ \ctx }{ \ttype_1 }{ \vtype_1 } - \qquad - \infer{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_1}} }{ \rtype_2 }{ \Type{j} }{ \ttype_2 } - }{ - \infer{ \ctx } - { \RecordCons{\label \as \binder:\rtype_1}{\rtype_2} } - { \Type{\max(i,j)} } - { \RecordCons{\label \as \binder:\ttype_1}{\ttype_2} } - } - \\\\[2em] - \rule{I-EMPTY-RECORD-TYPE}{}{ - \infer{ \ctx }{ \RecordEmpty }{ \Type{0} }{ \RecordEmpty } - } - \\\\[2em] - \rule{I-RECORD}{ - \infer{ \ctx }{ \rexpr_1 }{ \vtype_1 }{ \texpr_1 } - \qquad - \infer{ \ctx }{ \rexpr_2 }{ \vtype_2 }{ \texpr_2 } - \qquad - \eval{ \ctx }{ \subst{\vtype_2}{\binder}{\texpr_1} }{ \vtype_3 } - }{ - \infer{ \ctx }{ \record{\label=\rexpr_1, \rexpr_2} } - { \RecordCons{\label \as \binder:\vtype_1}{\vtype_3} } - { \record{\label=\texpr_1, \texpr_2} } - } - \\\\[2em] - \rule{I-EMPTY-RECORD}{}{ - \infer{ \ctx }{ \record{} }{ \RecordEmpty }{ \record{} } - } - \\\\[2em] - \rule{I-PROJ}{ - \infer{ \ctx }{ \rexpr }{ \vtype_1 }{ \texpr } - \qquad - \vtype_2 = \fieldty(\label, \vtype_1) - \qquad - \theta = \fieldsubst(\texpr, \label, i, \vtype_1) - }{ - \infer{ \ctx }{ \proj{\rexpr}{\label}{i} }{ \vtype_2 ~ \theta }{ \proj{\texpr}{\label}{i} } - } - \\\\[2em] -\end{array} -\\] - -We define \\(\fieldty(-,-)\\) like so: - -\\[ -\begin{array}{lrll} - \fieldty(\label_1, \RecordCons{\label_2 : \vtype_1}{\vtype_2}) & = & \vtype_1 & \text{if} ~ \label_1 \equiv \label_2 \\\\ - \fieldty(\label_1, \RecordCons{\label_2 : \vtype_1}{\vtype_2}) & = & \fieldty(\label_1, \vtype_2) \\\\ - \\\\[2em] -\end{array} -\\] - -In order to ensure that we maintain maintain the proper paths to variables when -we project on them, we define \\(\fieldsubst(-,-,-,-)\\) as: - -\\[ -\begin{array}{lrll} - \fieldsubst(\texpr, 
\label_1, i, \RecordCons{\label_2 : \vtype_1}{\vtype_2}) & = - & [] & \text{if} ~ \label_1 \equiv \label_2 \\\\ - \fieldsubst(\texpr, \label_1, i, \RecordCons{\label_2 : \vtype_1}{\vtype_2}) & = - & \fieldsubst(\texpr, \label_1, \vtype_2) \doubleplus [ \label_2 \rightarrow \proj{\texpr}{\label_2}{i} ] \\\\ - \\\\[2em] -\end{array} -\\] - -### Subtyping - -\\[ -\boxed{ - \subty{ \ctx }{ \vtype_1 }{ \vtype_2 } -} -\\\\[2em] -\begin{array}{cl} - \rule{ST-TYPE}{ - i \leqslant j - }{ - \subty{ \ctx }{ \Type{i} }{ \Type{j} } - } - \\\\[2em] - \rule{ST-PI}{ - \subty{ \ctx }{ \vtype_2 }{ \vtype_1 } - \qquad - \subty{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_2}} }{ \vtype_3 }{ \vtype_4 } - }{ - \subty{ \ctx }{ \Pi{\binder:\vtype_1}{\vtype_2} } - { \Pi{\binder:\vtype_3}{\vtype_4} } - } - \\\\[2em] - \rule{ST-RECORD-TYPE}{ - \subty{ \ctx }{ \vtype_1 }{ \vtype_3 } - \qquad - \subty{ \extendCtx{\ctx}{\declItem{\binder}{\vtype_1}} }{ \vtype_2 }{ \vtype_4 } - }{ - \subty{ \ctx }{ \RecordCons{\label \as \binder:\vtype_1}{\vtype_2} } - { \RecordCons{\label \as \binder:\vtype_3}{\vtype_4} } - } - \\\\[2em] - \rule{ST-EMPTY-RECORD-TYPE}{}{ - \subty{ \ctx }{ \RecordEmpty }{ \RecordEmpty } - } - \\\\[2em] - \rule{ST-ALPHA-EQ}{ - \vtype_1 \equiv_{\alpha} \vtype_2 - }{ - \subty{ \ctx }{ \vtype_1 }{ \vtype_2 } - } - \\\\[2em] -\end{array} -\\] - -### Universe shifting - -We implement explicit level shifts, giving us something like what Conor McBride -describes in his blog post, [universe hierarchies][universe-hierarchies]. 
- -We define \\(\shift(-,-)\\) for values: - -\\[ -\begin{array}{llrl} - \shift(\var{i}, & j) & = & \var{i} \\\\ - \shift(\app{\nexpr}{\texpr}, & j) & = & \app{\shift(\nexpr, j)}{\shift(\texpr, j)} \\\\ - \shift(\case{\nexpr}{\overline{\tpat_i \rightarrow \texpr_i}^{;}}, & j) & = & - % FIXME: define pattern shifting - \case{\shift(\nexpr, j)}{\overline{\shift(\tpat_i, j) \rightarrow \shift(\texpr_i, j)}^{;}} \\\\ - \shift(\proj{\nexpr}{\label}{i}, & j) & = & \proj{\shift(\nexpr, j)}{\label}{i} \\\\ - \shift(\Type{i}, & j) & = & \Type{(i + j)} \\\\ - \shift(\Pi{\binder:\vtype_1}{\vtype_2}, & j) & = & \Pi{\binder:\shift(\vtype_1, j)}{\shift(\vtype_2, j)} \\\\ - \shift(\lam{\binder:\vtype}{\vexpr}, & j) & = & \lam{\binder:\shift(\vtype, j)}{\shift(\vexpr, j)} \\\\ - \shift(\RecordCons{\label \as \binder:\vtype_1}{\vtype_2}, & j) & = & \RecordCons{\label \as \binder:\shift(\vtype_1, j)}{\shift(\vtype_2, j)} \\\\ - \shift(\RecordEmpty, & j) & = & \RecordEmpty \\\\ - \shift(\record{\label=\vexpr_1, \vexpr_2}, & j) & = & \record{\label=\shift(\vexpr_1, j), \shift(\vexpr_2, j)} \\\\ - \shift(\record{}, & j) & = & \record{} \\\\ - \\\\[2em] -\end{array} -\\] - -> **NOTE**: -> We might want to investigate making this shifting operator more expressive and -> 'first class', perhaps as was described in [Dependently typed lambda calculus -> with a lifting operator][dtlc-with-lifts]. For now this seems to be expressive -> enough for most use cases that our users might run into. - -[universe-hierarchies]: https://pigworker.wordpress.com/2015/01/09/universe-hierarchies/ -[dtlc-with-lifts]: http://www-sop.inria.fr/members/Damien.Rouhling/data/internships/M1Report.pdf - -### Pattern matching - -This judement takes an expression \\(\wexpr\\) in weak head normal form, and a -pattern \\(\tpat\\) and returns a substitution \\(\theta\\) with the matched bindings. 
- -\\[ -\boxed{ - \match{ \wexpr }{ \tpat }{ \theta } -} -\\\\[2em] -\begin{array}{cl} - \rule{M-VAR}{}{ - \match{ \wexpr }{ \binder }{ [\binder \rightarrow \wexpr] } - } - \\\\[2em] -% TODO: -% \rule{M-RECORD}{ -% \match{ \wexpr_1 }{ \tpat_1 }{ \theta_1 } -% \qquad -% \match{ \wexpr_2 }{ \tpat_2 }{ \theta_2 } -% }{ -% \match{ \record{\label=\wexpr_1, \wexpr_2} }{ \record{\label=\tpat_1, \tpat_2} }{ \theta_1 \doubleplus \theta_2 } -% } -% \\\\[2em] -% \rule{M-EMPTY-RECORD}{}{ -% \match{ \record{} }{ \record{} }{ [] } -% } -% \\\\[2em] -\end{array} -\\] - -### Type checking of patterns - -\\[ -\boxed{ - \checkpat{ \ctx }{ \rpat }{ \vtype }{ \tpat }{ \ctx' } -} -\\\\[2em] -\begin{array}{cl} - \rule{CP-BINDER}{}{ - \checkpat{ \ctx }{ \binder }{ \vtype }{ \binder }{ \binder : \vtype } - } - \\\\[2em] - \rule{CP-CONV}{ - \inferpat{ \ctx }{ \rpat }{ \vtype_2 }{ \tpat }{ \ctx' } - \qquad - \subty{ \ctx }{ \vtype_1 }{ \vtype_2 } - }{ - \checkpat{ \ctx }{ \rpat }{ \vtype_1 }{ \tpat }{ \ctx' } - } - \\\\[2em] -\end{array} -\\] - -### Type inference of patterns - -\\[ -\boxed{ - \inferpat{ \ctx }{ \rpat }{ \vtype }{ \tpat }{ \ctx' } -} -\\\\[2em] -\begin{array}{cl} - \rule{IP-ANN}{ - \infer{ \ctx }{ \rtype }{ \Type{i} }{ \ttype } - \qquad - \eval{ \ctx }{ \ttype }{ \vtype } - \qquad - \checkpat{ \ctx }{ \rpat }{ \vtype }{ \rpat }{ \ctx' } - }{ - \inferpat{ \ctx }{ \rpat : \rtype }{ \rtype }{ \rpat : \rtype }{ \ctx' } - } - \\\\[2em] -\end{array} -\\] - -> **TODO:** -> -> - Pattern matching coverage checking -> - Ensure that parametericity is maintained. Should we forbid [pattern matching -> directly on types][type-patterns]? McBride seems to [think we can have our -> cake and eat it][type-patterns-mcbride]! 
- -[type-patterns]: https://stackoverflow.com/questions/45439486/pattern-matching-on-type-in-idris -[type-patterns-mcbride]: https://stackoverflow.com/questions/23220884/why-is-typecase-a-bad-thing/26012264#26012264 diff --git a/book/src/development.md b/book/src/development.md new file mode 100644 index 000000000..1e316fde3 --- /dev/null +++ b/book/src/development.md @@ -0,0 +1,12 @@ +# Development + +Development documentation for contributors to the Pikelet programming language. + +## Summary + +- [Contributing](./development/contributing.md) +- [Code of Conduct](./development/code-of-conduct.md) +- [Roadmap](./development/roadmap.md) +- [Design](./development/design.md) +- [Influences](./development/influences.md) +- [Bibliography](./development/bibliography.md) diff --git a/book/src/development/bibliography.md b/book/src/development/bibliography.md new file mode 100644 index 000000000..cfe6f1d8b --- /dev/null +++ b/book/src/development/bibliography.md @@ -0,0 +1,70 @@ +# Bibliography + +The following resources where helpful when designing and building Pikelet. + +### How to implement dependent type theory I + +Andrej Bauer
+Blog post, 2012.
+[blog post](http://math.andrej.com/2012/11/08/how-to-implement-dependent-type-theory-i/) + +### How to implement dependent type theory II + +Andrej Bauer
+Blog post, 2012.
+[blog post](http://math.andrej.com/2012/11/11/how-to-implement-dependent-type-theory-ii/) + +### How to implement dependent type theory III + +Andrej Bauer
+Blog post, 2012.
+[blog post](http://math.andrej.com/2012/11/29/how-to-implement-dependent-type-theory-iii/) + +### A simple type-theoretic language: Mini-TT + +Thierry Coquand, Yoshiki Kinoshita, Bengt Nordström, and Makoto Takeyama
+Essays in Honour of Gilles Kahn, 2009.
+[paper](http://www.cse.chalmers.se/~bengt/papers/GKminiTT.pdf) + +### Bidirectional Typing Rules: A Tutorial + +David Raymond Christiansen
+Tutorial, 2013.
+[paper](http://www.davidchristiansen.dk/tutorials/bidirectional.pdf) + +### Checking Dependent Types with Normalization by Evaluation: A Tutorial + +David Thrane Christiansen
+Web page, 2018 (Last accessed 2020).
+[web page](http://www.davidchristiansen.dk/tutorials/nbe/) + +### A tutorial implementation of a dependently typed lambda calculus + +Andres Löh, Conor McBride, and Wouter Swierstra
+Fundamenta Informaticae XXI, 2001 (Revised 2009).
+[paper](https://www.andres-loeh.de/LambdaPi/LambdaPi.pdf) - +[abstract](https://www.andres-loeh.de/LambdaPi/) + +### Crude but Effective Stratification + +Conor McBride
+Blog post, 2011.
+[blog post](https://mazzo.li/epilogue/index.html%3Fp=857&cpage=1.html) + +### Universe Hierarchies + +Conor McBride
+Blog post, 2015.
+[blog post](https://pigworker.wordpress.com/2015/01/09/universe-hierarchies/) + +### Lecture Notes on Bidirectional Type Checking + +Frank Pfenning
+Lecture Notes, 2004.
+[paper](https://www.cs.cmu.edu/~fp/courses/15312-f04/handouts/15-bidirectional.pdf) + +### Dependently typed lambda calculus with a lifting operator + +Damien Rouhling
+Internship Report, 2014.
+[paper](http://www-sop.inria.fr/members/Damien.Rouhling/data/internships/M1Report.pdf) diff --git a/book/src/development/code-of-conduct.md b/book/src/development/code-of-conduct.md new file mode 120000 index 000000000..5d525cd1f --- /dev/null +++ b/book/src/development/code-of-conduct.md @@ -0,0 +1 @@ +../../../CODE_OF_CONDUCT.md \ No newline at end of file diff --git a/book/src/development/contributing.md b/book/src/development/contributing.md new file mode 120000 index 000000000..c97564d93 --- /dev/null +++ b/book/src/development/contributing.md @@ -0,0 +1 @@ +../../../CONTRIBUTING.md \ No newline at end of file diff --git a/book/src/development/design.md b/book/src/development/design.md new file mode 100644 index 000000000..8ec04f6a1 --- /dev/null +++ b/book/src/development/design.md @@ -0,0 +1,38 @@ +# Design + +## Design Principles + +- Empower our users through careful design, rather than being driven by familiarity. +- Surface level features should decompose into a simple, typed core. +- The top-level should not be [hopeless](https://gist.github.com/samth/3083053). +- Programs should not have to pay for things that they do not use. +- Pikelet should work well for high level and low level, resource constrained applications. +- It should be easy to bootstrap the language for a new platform and cross-compile programs. +- Diagnostics should be clear and easy to understand. +- Features should behave predictably, without relying on complicated, hard to understand heuristics. +- Features should have a high power-to-weight ratio. + +## Research to Watch + +There are a number of exciting areas of research that are worth keeping an eye on: + +- Dependent types +- High performance elaboration +- Effects and Coeffects + - Algebraic Effects and Handlers + - Effect Runners + - Graded Modal Type Theory + - Quantitative Type Theory + - Multistage Programming +- Call by Push Value +- Codata vs. 
Data +- Modular Programming with Dependent Records +- Fancy Dependencies + - Self Types + - Dependent Intersection + - Very Dependent Types +- Datatype Generic Programming + - Levitated data descriptions + - Data Layout Interpretations + - Ornamented Data Types +- Projectional Editors diff --git a/book/src/appendix/influences.md b/book/src/development/influences.md similarity index 79% rename from book/src/appendix/influences.md rename to book/src/development/influences.md index 14e84018a..76ae2f6cf 100644 --- a/book/src/appendix/influences.md +++ b/book/src/development/influences.md @@ -1,29 +1,8 @@ # Influences -Some languages have been inspiring when building Pikelet. We list some of them -here, and the contributions they have made in our thinking. These ideas may or -may not be included in the final Pikelet language, but they are worth mentioning! - -## Contents - -- [1ML](#1ml) -- [Agda](#agda) -- [ATS](#ats) -- [D](#d) -- [Dhall](#dhall) -- [Discus (formerly DDC)](#discus-formerly-ddc) -- [Elm](#elm) -- [F*](#f) -- [Gluon](#gluon) -- [Granule](#granule) -- [Idris](#idris) -- [Ivory](#ivory) -- [Koka](#koka) -- [Lean](#lean) -- [OCaml](#ocaml) -- [Rust](#rust) -- [Sixten](#sixten) -- [Ur](#ur) +Some languages have been inspiring when building Pikelet. +We list some of them here, and the contributions they have made in our thinking. +These ideas may or may not be included in the final Pikelet language, but they are worth mentioning! 
## 1ML @@ -31,7 +10,7 @@ Links: - [Website](https://people.mpi-sws.org/~rossberg/1ml/) -Key things we love: +Things we love: - focus on simplicity - combines module language of ML with dependent records @@ -43,7 +22,7 @@ Links: - [Wiki](http://wiki.portal.chalmers.se/agda/pmwiki.php) -Key things we love: +Things we love: - interactive editing - dependent types @@ -60,7 +39,7 @@ Links: - [Website](http://www.ats-lang.org/) -Key things we love: +Things we love: - dependent types - proofs can be generated by SMT solvers @@ -73,7 +52,7 @@ Links: - [Website](http://dlang.org) -Key things we love: +Things we love: - strong support for static metaprogramming - [design by introspection](https://dconf.org/2017/talks/alexandrescu.pdf) @@ -87,7 +66,7 @@ Links: - [Website](https://github.com/dhall-lang/) -Key things we love: +Things we love: - simple core language - dependent types @@ -106,7 +85,7 @@ Links: - [Website](http://elm-lang.org/) -Key things we love: +Things we love: - focus on usability, and adoption - friendly marketing @@ -120,7 +99,7 @@ Links: - [Website](https://www.fstar-lang.org/) -Key things we love: +Things we love: - combining SMT solvers with explicit proofs - combining effects with dependent types @@ -131,7 +110,7 @@ Links: - [Repository](https://github.com/gluon-lang/gluon) -Key things we love: +Things we love: - strict evaluation - focus on simplicity @@ -144,7 +123,7 @@ Links: - [Repository](https://github.com/dorchard/granule/) -Key things we love: +Things we love: - combining coeffects with effects in one language @@ -155,7 +134,7 @@ Links: - [Website](https://www.idris-lang.org/) - [Documentation](http://docs.idris-lang.org) -Key things we love: +Things we love: - focus on making dependently typed programming practical - interactive editing @@ -171,19 +150,13 @@ Key things we love: - effects system as a library - state machine library -## Ivory - -Links: - -- [Website](https://ivorylang.org/ivory-introduction.html) - ## Koka Links: - 
[Website](https://www.microsoft.com/en-us/research/project/koka/) -Key things we love: +Things we love: - algebraic effects and handlers - nice library documentation, with clickable links, etc. @@ -194,7 +167,7 @@ Links: - [Website](http://leanprover.github.io) -Key things we love: +Things we love: - focus on responsive interactive development - metaprogramming support using Lean @@ -208,7 +181,7 @@ Links: - [Repository (Multicore)](https://github.com/ocamllabs/ocaml-multicore) - [Repository (Modular implicits)](https://github.com/ocamllabs/ocaml-modular-implicits) -Key things we love: +Things we love: - module system - algebraic effects @@ -223,7 +196,7 @@ Links: - [Website](http://rust-lang.org/) -Key things we love: +Things we love: - friendly community - non-uniform, unboxed data layout @@ -242,7 +215,7 @@ Links: - [Repository](https://github.com/ollef/sixten) -Key things we love: +Things we love: - non-uniform, unboxed data layout - dependent types @@ -253,6 +226,6 @@ Links: - [Website](http://www.impredicative.com/ur/) -Key things we love: +Things we love: - Statically typed metaprogramming with type-level records diff --git a/book/src/development/roadmap.md b/book/src/development/roadmap.md new file mode 100644 index 000000000..492dbf09a --- /dev/null +++ b/book/src/development/roadmap.md @@ -0,0 +1,89 @@ +# Roadmap + +Our main aim is to start off with a simple configuration language, like [Dhall][dhall]. +From there we will progressively add features to gain more flexibility. +We want to provide a textual syntax up-front, +but we should aim to keep the core language reasonably decoupled from this, +allowing us to provide support for [projectional editing][structure-editor-wikipedia] in the future. + +[dhall]: https://dhall-lang.org/ +[structure-editor-wikipedia]: https://en.wikipedia.org/wiki/Structure_editor + +You can read more about what we hope to achieve in [_Pondering the next version of Pikelet_][next-pikelet]. 
+[next-pikelet]: https://gist.github.com/brendanzab/eba7015e6345abe79a57a704091820bb/. + +### Language + +- Basic config language + - [x] Comments + - [x] Boolean literals/constants + - [x] Integer literals/constants + - [x] Float literals/constants + - [x] Character literals/constants + - [x] String literals/constants + - [x] Record terms + - [x] Non-dependent record types + - [x] Dynamically sized arrays + - [x] Fixed sized arrays +- Basic programming language + - [x] Improved literal parsing + - [x] Annotated terms + - [ ] Let expressions + - [x] Record field lookups + - [ ] Import expressions + - [x] Function terms + - [x] Non-dependent function types + - [ ] Enumeration sets + - [ ] Pattern matching + - [ ] Recursive terms +- Dependently typed language + - [x] Dependent record types + - [x] Dependent function types + - [ ] Equality (identity) types + - [x] Universe levels + - [x] Stratified + - [x] Cumulative + - [x] Lifting operator + - [ ] Large types (`Typeω`) + - [ ] Multi-stage programming + - [ ] Quantitative type theory + +### Projections + +- [x] Surface → Pretty +- [x] Surface → Core +- [x] Core → Pretty +- [x] Core → Value +- [ ] Core → Binary +- [ ] Core → Documentation +- [ ] Core → Cranelift +- [ ] Core → LLVM +- [ ] Value → JSON/YAML/TOML + +### Tooling + +- [x] REPL +- [ ] Package manager +- [ ] Auto-formatter for surface language +- [ ] Structured editor + +### Testing + +- [x] Language samples +- [ ] Feature tests +- [ ] Property based tests + +### Diagnostics + +- [x] Basic error enum +- [x] Error recovery +- [x] Pretty diagnostic reporting + +### Rust marshalling + +- [x] Marshalling traits +- [ ] Improved error messages +- [ ] Nicer marshalling API +- [ ] Derive macro for generating marshalling trait implementations +- [ ] More efficient, visitor based marshalling diff --git a/book/src/guide.md b/book/src/guide.md new file mode 100644 index 000000000..5c63867ee --- /dev/null +++ b/book/src/guide.md @@ -0,0 +1,18 @@ +# Language Guide + 
+Welcome to the Pikelet Language Guide!
+This part of the documentation will guide you through the installation of Pikelet,
+and show you how to start writing your own programs.
+
+The aim for this guide is to be as accessible as possible!
+Once you feel comfortable with the basics, more detailed descriptions of Pikelet's features can be found in the [language reference].
+
+[language reference]: ./reference.md
+
+## Summary
+
+- [Installation](./guide/installation.md)
+- [Using the REPL](./guide/using-the-repl.md)
+- [Compiling Standalone Programs]()
+- [Pikelet as a Configuration Language]()
+- [Pikelet as a Scripting Language]()
diff --git a/book/src/installation/index.md b/book/src/guide/installation.md
similarity index 100%
rename from book/src/installation/index.md
rename to book/src/guide/installation.md
diff --git a/book/src/guide/using-the-repl.md b/book/src/guide/using-the-repl.md
new file mode 100644
index 000000000..59a8cd422
--- /dev/null
+++ b/book/src/guide/using-the-repl.md
@@ -0,0 +1,36 @@
+# Using the REPL
+
+If you have [installed Pikelet][installation], you can run the REPL by running this command in the terminal:
+
+```sh
+pikelet repl
+```
+
+[installation]: ./installation
+
+The REPL should appear in the terminal like so:
+
+```text
+$ pikelet repl
+    ____  _ __        __     __
+   / __ \(_) /_____  / /__  / /_
+  / /_/ / / //_/ _ \/ / _ \/ __/    Version 0.1.0
+ / ____/ / ,< /  __/ /  __/ /_      https://github.com/pikelet-lang/pikelet
+/_/   /_/_/|_|\___/_/\___/\__/      :? for help
+
+>
+```
+
+"REPL" stands for "Read-eval-print-loop" and is a nice way to experiment with Pikelet in an interactive way.
+You can enter Pikelet terms into the REPL after the `>`. For example:
+
+```pikelet
+> "Hello world!"
+```
+
+By pressing Enter, you can 'normalize' the term, and see its type:
+
+```pikelet
+> "Hello world!"
+"Hello world!"
: String +``` diff --git a/book/src/index.md b/book/src/index.md index f84203884..a28104dc5 100644 --- a/book/src/index.md +++ b/book/src/index.md @@ -1,53 +1,65 @@ -# Pikelet đŸ„ž +# Pikelet! + +[![Actions Status][actions-badge]][actions-url] +[![Matrix][matrix-badge]][matrix-lobby] +[![License][license-badge]][license-url] +[![GitHub stars][stars-badge]][github-url] + +[actions-badge]: https://github.com/pikelet-lang/pikelet/workflows/ci/badge.svg +[actions-url]: https://github.com/pikelet-lang/pikelet/actions +[matrix-badge]: https://img.shields.io/matrix/pikelet:matrix.org?label=%23pikelet%3Amatrix.org +[matrix-lobby]: https://app.element.io/#/room/#pikelet:matrix.org +[license-badge]: https://img.shields.io/github/license/pikelet-lang/pikelet +[license-url]: https://github.com/pikelet-lang/pikelet/blob/master/LICENSE +[stars-badge]: https://img.shields.io/github/stars/pikelet-lang/pikelet?style=social +[github-url]: https://github.com/pikelet-lang/pikelet + +![Pikelet Mascot][pikelet-mascot] + +[pikelet-mascot]: ../assets/pikelet.png Pikelet is a small [dependently typed][dependent-type-wikipedia] language. It doesn't do many interesting things yet, but hopefully that will change in the future! -- [Source code](https://github.com/pikelet-lang/pikelet) -- [Issues](https://github.com/pikelet-lang/pikelet/issues) -- [Gitter Chat](https://gitter.im/pikelet-lang/Lobby) - [dependent-type-wikipedia]: https://en.wikipedia.org/wiki/Dependent_type +> **Note:** +> +> Pikelet is still a work in progress! Many features are not implemented yet! +> +> If you'd like to see what we hope to work on next, have a look at [the roadmap](./development/roadmap). 
+ ## A small taste Definitions: ```pikelet -let - id : (a : Type) -> a -> a; - id a x = x; - - const : (a b : Type) -> a -> b -> a; - const a b x y = x; -in - record { - id = id; - const = const; - } +record { + id : Fun (A : Type) -> A -> A, + id A a = a, + + always : Fun (A B : Type) -> A -> B -> A, + always A B a b = a, +} ``` Interactive REPL: -```pikelet-repl -$ cargo run repl +```text +$ pikelet repl ____ _ __ __ __ / __ \(_) /_____ / /__ / /_ / /_/ / / //_/ _ \/ / _ \/ __/ Version 0.1.0 / ____/ / ,< / __/ / __/ /_ https://github.com/pikelet-lang/pikelet /_/ /_/_/|_|\___/_/\___/\__/ :? for help -Pikelet> (\(a : Type) (x : a) => x) String "hello" +> (fun A a => a : Fun (A : Type) -> A -> A) String "hello" "hello" : String -Pikelet> :t Type -Type^1 -Pikelet> 1 : S16 -1 : S16 -Pikelet> ``` -## What is a Pikelet? +## Summary -A pikelet is an odd sort of small (often pre-made) pancake found in Australia -and New Zealand. Commonly sent in school lunches spread with jam and butter. -Handily it also has a name that includes 'pi' and 'let' as substrings! 😅 +- [Guide](./guide.md): For people new to Pikelet +- [Reference](./reference.md): For people who need a detailed descriptions of individual language features +- [Development](./development.md): For people wanting to contribute to the language +- [Specification](./specification.md): For developers and researchers diff --git a/book/src/language/bindings.md b/book/src/language/bindings.md deleted file mode 100644 index d0f59dca0..000000000 --- a/book/src/language/bindings.md +++ /dev/null @@ -1,108 +0,0 @@ -# Bindings - -## Contents - -- [Items](#items) -- [Function definitions](#function-definitions) -- [Type aliases](#type-aliases) -- [Doc comments](#doc-comments) - -## Items - -Let bindings are made up of items. At the moment these can either be _declarations_ -or _definitions_. - -A _declaration_ states the type for of an identifier that we should -expect to see in a subsequent _definition_ for that identifier. 
For example: - -```pikelet -let - greeting : String; -- declaration - greeting = "hello there!"; -- definition -in - ... -``` - -We can make supply a number of forward declarations before providing their -associated definitions: - -```pikelet -let - one : S32; - two : S32; - - one = 1; - two = 2; -in - ... -``` - -Values that can be inferred do not require a declaration, although sometimes a -declaration may be useful for documentations purposes! - -```pikelet -let - string = "hello" -- ok! - one = 1 : U16 -- ok! - two = 2 -- error: is this an U8, U16, S64, etc? -in - ... -``` - -## Function definitions - -We have some nice syntactic sugar for defining functions. For example: - -```pikelet -let - const : (a b : Type) -> a -> b -> a; - const = \a b x y => x; -in - const String I32 "hello" 1 -``` - -Could be expressed more cleanly as: - -```pikelet -let - const : (a b : Type) -> a -> b -> a; - const a b x y = x; -in - const String I32 "hello" 1 -``` - -## Type aliases - -Because Pikelet is dependently typed, we need no other mechanism for making -type aliases. Instead we just use definitions! - -```pikelet -let - Name : Type; - Name = String; - - bobs-name : Name - bobs-name = "bob" -in - ... -``` - -## Doc comments - -Documentation can be provided for above declarations, by using doc comments: - -```pikelet -let - ||| This is a documented definition - ||| - ||| # Example - ||| - ||| ```pikelet-repl - ||| Pikelet> self-aware-string - ||| "I am a string!" : String - ||| ``` - self-aware-string : String; - self-aware-string = "I am a string!"; -in - ... 
-``` diff --git a/book/src/language/conditionals.md b/book/src/language/conditionals.md deleted file mode 100644 index aab6758f8..000000000 --- a/book/src/language/conditionals.md +++ /dev/null @@ -1,39 +0,0 @@ -# Conditionals - -## If-then-else expressions - -`if` expressions take an expression that evaluates to a `Bool` (the _condition_), -and two other expressions (the _consequent_ and the _alternative_) that evaluate -to the same type. If the condition evaluates to `true`, then the consequent will -be evaluated and returned, otherwise the alternative will be evaluated and -returned. - -```pikelet-repl -Pikelet> if true then "hello!" else "goodbye!" -"hello!" : String -Pikelet> if false then "hello!" else "goodbye!" -"goodbye!" : String -``` - -## Case expressions - -Pikelet supports case expressions on strings, and numbers: - -```pikelet -case value { - "hello" => "goodbye"; - "goodbye" => "hello"; - value => value; -- matches all strings -} -``` - -Note that we don't (yet) check that the series of patterns provided cover all -possible cases, leading to the following embarrassing error: - -```pikelet-repl -Pikelet> case "hello" { "hi" => "oh dear" } -error: internal compiler error: no patterns matched the given expression -``` - -In the future we' plan to fix this, add support for matching on booleans, and -also support more complex patterns (eg. for records). 
diff --git a/book/src/language/functions.md b/book/src/language/functions.md deleted file mode 100644 index 92a9717a5..000000000 --- a/book/src/language/functions.md +++ /dev/null @@ -1,83 +0,0 @@ -# Functions - -## Contents - -- [Simply typed functions](#simply-typed-functions) -- [Polymorphic functions](#polymorphic-functions) -- [Syntactic sugar for functions](#syntactic-sugar-for-functions) - -## Simply typed functions - -Here are some simple functions and their types: - -```pikelet-repl -Pikelet> :t \x : S32 => x -S32 -> S32 -Pikelet> :t \x : String => x -String -> String -Pikelet> :t \x : Char => x -Char -> Char -``` - -Note that all of these types follow the same pattern - they are the identity -function! This means that if you pass a value to them, they'll return the same -thing without alteration! - -```pikelet-repl -Pikelet> (\x : S32 => x) 42 -42 : S32 -Pikelet> (\x : String => x) "hi" -"hi" : String -Pikelet> (\x : Char => x) 'b' -'b' : Char -``` - -## Polymorphic functions - -Alas, we can't reuse one of these identity functions with other, incompatible -types: - -```pikelet-repl -Pikelet> (\x : S32 => x) 4.0 -error: found a floating point literal, but expected a type `S32` -- :1:17 -1 | (\x : S32 => x) 4.0 - | ^^^ the literal -``` - -Let's make this identity function polymorphic by adding a parameter for the type -of the argument: - -```pikelet-repl -Pikelet> :t \(a : Type) (x : a) => x -(a : Type) -> a -> a -``` - -We now have a polymorphic identity function! We can specialize this function by -applying a type to it: - -```pikelet-repl -Pikelet> (\(x : Type) (x : a) => x) String "hello" -"hello" : String -Pikelet> (\(x : Type) (x : a) => x) S32 1 -1 : S32 -``` - -## Syntactic sugar for functions - -In Pikelet, all functions take a single argument - in order to pass multiple -arguments we use currying. 
The following functions are equivalent: - -```pikelet -\(x : Type) (x : a) => x -\(x : Type) => \(x : a) => x -``` - -Non-dependent functions can be expressed without explicit parameter names. For -example the following function types are equivalent: - -```pikelet -(a : Type) (x : a) -> a -(a : Type) -> (x : a) -> a -(a : Type) -> a -> a -``` diff --git a/book/src/language/index.md b/book/src/language/index.md deleted file mode 100644 index 7e40effbe..000000000 --- a/book/src/language/index.md +++ /dev/null @@ -1,92 +0,0 @@ -# Language - -## Contents - -- [Comments](#comments) -- [Primitive types and their literals](#primitive-types-and-their-literals) -- [Type annotations](#type-annotations) -- [Identifiers](#identifiers) -- [Keywords](#keywords) - -## Comments - -Line comments are preceded by a double dash: - -```pikelet --- this is a comment! -``` - -## Primitive types and their literals - -Pikelet has a number of primitive types: - -| Type | Literal | -|----------|----------------------------------------| -| `Bool` | `true`, `false` | -| `String` | `"hello there!"` | -| `Char` | `'a'`, `'b'`, ..., `'\n'`, `'\t'`, ... | -| `U8` | `1`, `2`, `3`, ... | -| `U16` | `1`, `2`, `3`, ... | -| `U32` | `1`, `2`, `3`, ... | -| `U64` | `1`, `2`, `3`, ... | -| `S8` | `1`, `2`, `3`, ... | -| `S16` | `1`, `2`, `3`, ... | -| `S32` | `1`, `2`, `3`, ... | -| `S64` | `1`, `2`, `3`, ... | -| `F32` | `1`, `2`, `3`, ..., `0.0`, `1.0`, ... | -| `F64` | `1`, `2`, `3`, ..., `0.0`, `1.0`, ... | - -> **Note:** You can't do much with these primitive types yet. In the future we -> will add some primitive functions to allow you to manipulate them. - -## Type annotations - -If you note [above](#primitive-types-and-their-literals), a number of the -primitive types share a literal representation. Pikelet will try to predictably -infer the types, but if it fails to do so you will get an error. 
In that case -you can use the type annotation operator, `(:)`, to specify the intended type: - -```pikelet-repl -Pikelet> 1 -error: ambiguous integer literal -Pikelet> 1 : S32 -1 : S32 -Pikelet> 1 : F32 -1 : F32 -Pikelet> 1.0 : F32 -1.0 : F32 -Pikelet> 1.1 : U64 -error: found a floating point literal, but expected a type `U64` -- :1:1 -1 | 1.1 : U64 - | ^^^ the literal -``` - -## Identifiers - -> TODO - -## Keywords -| Keyword | Documentation | -|----------|-------------------------------------------| -| `as` | [internal field names] | -| `case` | [case expressions] | -| `else` | [if-then-else-expressions] | -| `extern` | | -| `if` | [if-then-else-expressions] | -| `import` | | -| `in` | [bindings] | -| `let` | [bindings] | -| `record` | [record] values | -| `Record` | [Record] types | -| `then` | [if-then-else-expressions] | -| `Type` | [polymorphic functions], [types of types] | -| `where` | | - -[if-then-else-expressions]: conditionals.html#if-then-else-expressions -[case expressions]: conditionals.html#case-expressions -[bindings]: bindings.html -[record]: records.html -[polymorphic functions]: functions.html -[types of types]: universes.html#types-of-types -[internal field names]: records.html#external-vs-internal-field-names diff --git a/book/src/language/records.md b/book/src/language/records.md deleted file mode 100644 index 9d6a926ad..000000000 --- a/book/src/language/records.md +++ /dev/null @@ -1,116 +0,0 @@ -# Records - -## Contents - -- [Record values and record types](#record-values-and-record-types) -- [Field lookups](#field-lookups) -- [Dependent record types](#dependent-record-types) -- [External vs. 
internal field names](#external-vs-internal-field-names) - -## Record values and record types - -You can group together multiple values by using records: - -```pikelet-repl -Pikelet> record { x = 3.0 : F32; y = 3.0 : F32 } -record { x = 3; y = 3 } : Record { x : F32; y : F32 } -``` - -Take note of the following: - -- record values use the lower case `record` keyword -- record types use the upper case `Record` keyword -- we have to [annotate](#type-annotations) ambiguous field values - -We can make a new definition for point types: - -```pikelet -Point2d = Record { - x : F32; - y : F32; -}; -``` - -You can then use this type to make it easier to define a point record: - -```pikelet-repl -Pikelet> record { x = 3.0; y = 3.0 } : Point2d -record { x = 3; y = 3 } : Record { x : F32; y : F32 } -``` - -Note that we no longer need to annotate each field! Pikelet was able to pick up -the type of each field from the type definition during type checking. You can -read more about Pikelet's type inference on [the type inference page](./type-inference). - -## Field lookups - -You can access the value associated with a field name by using the dot operator: - -```pikelet-repl -Pikelet> record { name = "Jane" }.name -"Jane" : String -``` - -## Dependent record types - -Field types can depend on data from previous fields. Here we turn a -fixed-length array into a dynamically sized array, by using the `len` field -later on to define the `data` field's annotation: - -```pikelet -DArray (a : Type) = Record { - len : S32; - data : Box (Array len a); -}; -``` - -## External vs. internal field names - -Sometimes we'll run into rare cases where a field name might shadow a binding -from a higher scope. 
In this case we can give the field a new, internal name -using the `as` notation: - -```pikelet -Foo = Record { - -- external name - -- | - -- | internal name - -- | | - -- v v - String as String1 : Type; - - -- refers to the built in `String` type - -- | - -- v - x : String; - - -- refers to the local `String` field - -- | - -- v - y : String1; -}; -``` - -We define the following terms: - -- _external field name_: the name that we use when projecting on the record -- _internal field name_: the name that we use internally, in dependent fields - -Note that most of the time the internal and external field names are the same. -For example: - -```pikelet -Point2d = Record { - x : F32; - y : F32; -}; -``` - -Is actually desugared to: - -```pikelet -Point2d = Record { - x as x : F32; - y as y : F32; -}; -``` diff --git a/book/src/language/type-inference.md b/book/src/language/type-inference.md deleted file mode 100644 index 949ba708f..000000000 --- a/book/src/language/type-inference.md +++ /dev/null @@ -1,79 +0,0 @@ -# Type inference - -Many statically typed languages perform type inference to varying degrees, and -Pikelet is no different! The goal is to reduce the burden of writing type -annotations everywhere. Some languages like [OCaml](https://ocaml.org/) and -[Elm](http://elm-lang.org/) can even infer the types of a whole program without -any annotations at all! - -Pikelet's type inference follows some very simple rules that you can probably -pick up on your own, but we thought it might help to give a deeper explanation -of how it works, without getting too bogged down in the theoretical details. 
- -## Contents - -- [Bidirectional type checking](#bidirectional-typechecking) - - [Inferable terms](#inferable-terms) - - [Checkable terms](#checkable-terms) -- [Further reading](#further-reading) - -## Bidirectional type checking - -Pikelet has a rather flexible type system that can have expressions embedded in -them, so we've opted to use an algorithm known as 'bidirectional type checking' -as a way to get a decent amount of inference while still remaining relatively -predictable to you, the programmer. This means that you may sometimes have to -write annotations on top-level definitions, but the types should propagate -downwards and inner definitions should not require much annotation at all. - -To do this we break the terms of the language into two groups. We call these -[_inferable terms_](#inferable-terms) and [_checkable terms_](#checkable-terms). - -### Inferable terms - -Inferable terms can be checked on their own or based on previous definitions. - -> TODO: Explain examples - -```pikelet-repl -Pikelet> true -Pikelet> "1" -Pikelet> 'a' -Pikelet> Bool -Pikelet> Type -Pikelet> Type^2 -Pikelet> record { name = "Jane" } -Pikelet> Record { name : String } -Pikelet> record { x = 3.0 : F32; y = 3.0 : F32 } -Pikelet> \x : Int => x -Pikelet> (a : Type) -> a -``` - -### Checkable terms - -Checkable terms need extra annotations, or be used in a position where extra -information can be supplied. - -> TODO: Explain examples - -```pikelet-repl -Pikelet> 1 -Pikelet> 2.0 -Pikelet> record { x = 3.0; y = 3.0 } -Pikelet> \x => x -``` - -```pikelet-repl -Pikelet> 1 : S32 -Pikelet> 2.0 : F32 -Pikelet> record { x = 3.0; y = 3.0 } : Record { x : F32; y : F32 } -Pikelet> \x => x : S32 -> S32 -``` - -## Further reading - -We describe Pikelet's type checking algorithm more formally -[in the appendix](./appendix/theory). If you have a background in programming -languages and type theory this might be of interest to you. 
If not, that's -ok - understanding the formal notation is not necessary for developing a high -level intuition of type inference in Pikelet. diff --git a/book/src/language/universes.md b/book/src/language/universes.md deleted file mode 100644 index 04a0ecce3..000000000 --- a/book/src/language/universes.md +++ /dev/null @@ -1,151 +0,0 @@ -# Universes - -## Contents - -- [Types of types](#types-of-types) -- [Cumulativity](#cumulativity) -- [Syntactic sugar](#syntactic-sugar) -- [Shifting universes](#shifting-universes) - -## Types of types - -Types also have types! - -```pikelet-repl -Pikelet> :t S32 -Type -``` - -We call this special 'type of types' a _universe_. - -You might then wonder, “what is the type of `Type`?” That's a good question! -Clever people have figured out that if `Type` was its own type, ie. `Type : Type`, -it would lead situations like [Girard's paradox][girards-paradox], (the type -theory equivalent of the more well-known [Russel's paradox][russels-paradox] in -set theory). There is nothing worse than paradoxes in a type system, so instead -we create ourselves a new universe called `Type^1`: - -```pikelet-repl -Pikelet> :t Type -Type^1 -``` - -We keep on going like this, giving us a hierarchy of _universes_, as many -as we need for a given program: - -```pikelet -Type : Type^1 : Type^2 : Type^3 : ... -``` - -[girards-paradox]: https://en.wikipedia.org/wiki/Girard%27s_paradox -[russels-paradox]: https://en.wikipedia.org/wiki/Russell%27s_paradox - -We call the number given to each universe the _level_ of that universe. You can -think of these universes as larger and larger collections of things, with the -smaller universes being contained within the larger universes: - -``` -.- Type^2 -----------------------------------------------------------------------. -| Array n Type^1 | -| | -| .- Type^1 -----------------------------------------------------------------. | -| | | | -| | .- Type -------------------------------. 
| | -| | | Array n String | | | -| | | | | | -| | | Record { x : F32 } | Array n Type | | -| | | | | | -| | | .- S32 --------------. | | | -| | | | ..., -1, 0, 1, ... | | Nat -> Type -> Type | | -| | | '--------------------' | | | -| | | | | | -| | | .- Record {} -. .- Bool -------. | | | -| | | | record {} | | true, false | | Type -> Type | | -| | | '-------------' '--------------' | | | -| | | | | | -| | | .- String ----------------------. | .- Record { t : Type } ----. | | -| | | | "hello", "byee!", "hoho", ... | | | record { t = String }, | | | -| | | '-------------------------------' | | record { t = U32 }, ... | | | -| | '--------------------------------------' '--------------------------' | | -| '--------------------------------------------------------------------------' | -'--------------------------------------------------------------------------------' -``` - -Note that in most regular programming you will rarely see anything above `Type`, -and even more rarely still will you see things above `Type^1`, so all of this might -seem a little excessive. That being said, we believe it is important enough to -plug this gap in our type system while we have the chance. - -## Cumulativity - -Because the level of a universe corresponds to some notion of it's 'size', this -suggests that larger universes should be able to contain all the other things -smaller than themselves. This is reflected in Pikelet too: - -```pikelet-repl -Pikelet> Bool : Type -- ok -Pikelet> Bool : Type^2 -- ok -Pikelet> Type^1 : Type^3 -- ok -Pikelet> Type^3 : Type^1 -- error! -``` - -## Syntactic sugar - -Note that `Type` is actually just sugar for `Type^0`: - -```pikelet-repl -Pikelet> :t Type^0 -Type^1 -``` - -## Shifting universes - -Often we'll write definitions in terms of `Type`, without worrying about the -universe levels. 
For example the identity function can be defined with a type -parameter in the universe of `Type`: - -```pikelet-repl -Pikelet> :let id = \(a : Type) (x : a) => x -id : (a : Type) (x : a) -> a -``` - -This then allows us to use it with values: - -```pikelet-repl -Pikelet> id String "hello" -- ok -Pikelet> id S32 1 -- ok -``` - -Sadly because of our universe hierarchy, we can't use our identity function at -the type level! - -```pikelet-repl -Pikelet> id Type String -- error! -Pikelet> id ((a : Type) -> a -> a) id -- error! -Pikelet> id Type^1 Type -- error! -``` - -This would seem like it would be terrible for code reuse - you would need to -create a new `id` function for every universe level! Thankfully we have a simple -solution: We allow identifiers to be _shifted_ to the correct universe level -using the `^` notation. This shifts the given definition to the desired universe -level: - -```pikelet-repl -Pikelet> :t id^1 -(a : Type^1) -> a -> a -``` - -We can then use the shifted identity functions like so: - -```pikelet-repl -Pikelet> id^1 Type String -- ok -Pikelet> id^1 ((a : Type) -> a -> a) id -- ok -Pikelet> id^2 Type^1 Type -- ok -``` - -Field projections can also have shifts applied to them: - -```pikelet-repl -Pikelet> prelude.id^1 Type String -``` diff --git a/book/src/reference.md b/book/src/reference.md new file mode 100644 index 000000000..7a186e80b --- /dev/null +++ b/book/src/reference.md @@ -0,0 +1,21 @@ +# Language Reference + +This part of the documentation is a reference-level description of Pikelet, +intended for Pikelet users who want a comprehensive description of Pikelet's surface-level features. + +This is _not_ a precise description of the Pikelet language. +A more precise description of the concrete syntax, elaboration, +and core language of Fathom can be found in the [language specification]. 
+
+[language specification]: ./specification.md
+
+## Summary
+
+- [Comments](./reference/comments.md)
+- [Keywords](./reference/keywords.md)
+- [Names](./reference/names.md)
+- [Builtins](./reference/builtins.md)
+- [Literals]()
+- [Universes](./reference/universes.md)
+- [Functions](./reference/functions.md)
+- [Records](./reference/records.md)
diff --git a/book/src/reference/builtins.md b/book/src/reference/builtins.md
new file mode 100644
index 000000000..2bf9abc26
--- /dev/null
+++ b/book/src/reference/builtins.md
@@ -0,0 +1,118 @@
+# Builtins
+
+Pikelet has a number of builtin types, which we now describe here:
+
+## Booleans
+
+```pikelet
+Bool : Type
+```
+
+Booleans have two constructors, `true` and `false`:
+
+```pikelet
+true : Bool
+false : Bool
+```
+
+## Unsigned integers
+
+Unsigned integers are defined via the following built-ins:
+
+```pikelet
+U8 : Type
+U16 : Type
+U32 : Type
+U64 : Type
+```
+
+Unsigned integers can be constructed using numeric literals:
+
+```pikelet
+0 : U8
++42 : U32
+0x2F : U16
+```
+
+## Signed integers
+
+Two's complement, signed integers are defined via the following built-ins:
+
+```pikelet
+S8 : Type
+S16 : Type
+S32 : Type
+S64 : Type
+```
+
+Signed integers can be constructed using numeric literals:
+
+```pikelet
+0 : S8
++42 : S32
+-42 : S32
+0x2F : S16
+```
+
+## Floating point numbers
+
+```pikelet
+F32 : Type
+F64 : Type
+```
+
+## Strings
+
+```pikelet
+String : Type
+```
+
+Strings can be constructed using string literals. For example:
+
+```pikelet
+"hello" : String
+```
+
+## Characters
+
+```pikelet
+Char : Type
+```
+
+Characters can be constructed using character literals. For example:
+
+```pikelet
+'A' : Char
+'가' : Char
+'đŸ„ž' : Char
+```
+
+## Lists
+
+Lists are ordered sequences of terms.
+
+```pikelet
+List : Type -> Type
+```
+
+Lists can be constructed using sequences. For example:
+
+```pikelet
+[] : List F32
+[1, 2, 3] : List F32
+```
+
+## Arrays
+
+Arrays are ordered sequences of terms, with a length specified in the type.
+
+```pikelet
+Array : U32 -> Type -> Type
+```
+
+Arrays can be constructed using sequences. For example:
+
+```pikelet
+[] : Array 0 F32
+[1, 2, 3] : Array 3 F32
+```
diff --git a/book/src/reference/comments.md b/book/src/reference/comments.md
new file mode 100644
index 000000000..e7267a0b1
--- /dev/null
+++ b/book/src/reference/comments.md
@@ -0,0 +1,26 @@
+# Comments
+
+## Line comments
+
+Line comments are preceded by a double dash (`--`):
+
+```pikelet
+-- This is a comment!
+```
+
+## Doc comments
+
+Documentation comments are preceded by three pipes (`|||`):
+
+```pikelet
+||| A doc comment!
+```
+
+Multi-line doc comments can be created by 'stacking'. For example:
+
+```pikelet
+||| The unit type
+|||
+||| This is a synonym for the empty record,
+||| and can be constructed using the `unit` function.
+```
diff --git a/book/src/reference/functions.md b/book/src/reference/functions.md
new file mode 100644
index 000000000..2e8b75147
--- /dev/null
+++ b/book/src/reference/functions.md
@@ -0,0 +1,103 @@
+# Functions
+
+A function relates some unknown input to an output term.
+
+## Types
+
+Function types are written as `A -> B`.
+Functions are [_curried_][currying-wikipedia], meaning that they take a single input, and return a single output.
+Multi-input functions can be created by creating functions that output other functions.
+
+For example, the function type for adding two 32-bit signed integers together is:
+
+```pikelet
+S32 -> S32 -> S32
+```
+
+### Dependency
+
+Function output types can also depend on their inputs.
+For example this is the type of the identity function:
+
+```pikelet
+Fun (A : Type) -> A -> A
+```
+
+> **Note:**
+>
+> These are sometimes called _pi types_ or [_dependent product types_][dependent-product-types-nlab] in type theory.
+
+### Universes
+
+Function types are also types:
+
+```pikelet
+U32 -> U32 : Type
+```
+
+In order to find the universe level of a function type,
+we use the universe level of the largest input or output:
+
+```pikelet
+U32 -> Type^2 : Type^3
+```
+
+## Terms
+
+Functions are constructed by specifying a list of one-or-more input names after a `fun` token,
+and then an output term after a `=>` token.
+The inputs can then be referred to in the output term of the function.
+
+```pikelet
+fun input-1 input-2 => output
+```
+
+Functions must always be constructed in a position where they can find a type annotation.
+For example, the following function is ambiguous:
+
+```pikelet
+fun x y => x
+```
+
+The following function passes the type checker,
+because the function type is pulled from the record annotation:
+
+```pikelet
+record {
+  const = fun x y => x,
+} : Record {
+  const : S32 -> String -> S32,
+}
+```
+
+> **Note:**
+>
+> These are sometimes called [_lambda abstractions_][lambda-abstraction-nlab] in type theory,
+> or _anonymous functions_ in programming languages.
+
+## Eliminations
+
+Functions can be applied to arguments via [_juxtaposition_][juxtaposition-wikipedia].
+
+For example, this is how the identity function might be applied:
+
+```pikelet
+id String "hello!"
+```
+
+```pikelet
+Array 3 String
+```
+
+### Computation
+
+> **Note:**
+>
+> This section is a work in progress.
+>
+> We should describe beta-reduction here.
+ +[currying-wikipedia]: https://en.wikipedia.org/wiki/Currying +[dependent-product-types-nlab]: https://ncatlab.org/nlab/show/dependent+product+type +[lambda-abstraction-nlab]: https://ncatlab.org/nlab/show/lambda-abstraction +[juxtaposition-wikipedia]: https://en.wikipedia.org/wiki/Juxtaposition#Mathematics diff --git a/book/src/reference/keywords.md b/book/src/reference/keywords.md new file mode 100644 index 000000000..a1ee0cf5a --- /dev/null +++ b/book/src/reference/keywords.md @@ -0,0 +1,13 @@ +# Keywords + +Keywords use the same lexical syntax as [names](./names.md), but are reserved by Pikelet. + +The following keywords are reserved by Pikelet: + +| Keyword | Purpose | +| ------- | ------- | +| `as` | [Explicit binding names](./records#Explicit-binding-names) | +| `Fun` | [Function formation](./functions#Formation) | +| `fun` | [Function terms](./functions#Terms) | +| `Record` | [Record types](./records#Types) | +| `record` | [Record terms](./records#Terms) | diff --git a/book/src/reference/literals.md b/book/src/reference/literals.md new file mode 100644 index 000000000..d110d8d2f --- /dev/null +++ b/book/src/reference/literals.md @@ -0,0 +1,59 @@ +# Literals + +## Numbers + +```pikelet +0.0 ++1 +-25 +0xAB342 +1_000_000 +``` + +### Supported types + +- Unsigned integers: [`U8`][unsigned-integers], [`U16`][unsigned-integers], [`U32`][unsigned-integers], [`U64`][unsigned-integers] +- Signed integers: [`S8`][signed-integers], [`S16`][signed-integers], [`S32`][signed-integers], [`S64`][signed-integers] +- Floating point numbers: [`F32`][floating-point-numbers], [`F64`][floating-point-numbers] + +[unsigned-integers]: ./builtins#unsigned-integers +[signed-integers]: ./builtins#signed-integers +[floating-point-numbers]: ./builtins#floating-point-numbers + +### Overloading + +Overloaded number literals are not yet supported, but _are_ planned. 
+ +## Characters + +```pikelet +'A' +'가' +'đŸ„ž' +``` + +### Supported types + +- [`Char`][characters] + +[characters]: ./builtins#characters + +### Overloading + +Overloaded character literals are not yet supported, but _are_ planned. + +## Strings + +```pikelet +"hello" +``` + +### Supported types + +- [`String`][strings] + +[strings]: ./builtins#strings + +### Overloading + +Overloaded string literals are not yet supported, but _are_ planned. diff --git a/book/src/reference/names.md b/book/src/reference/names.md new file mode 100644 index 000000000..01a651b7f --- /dev/null +++ b/book/src/reference/names.md @@ -0,0 +1,26 @@ +# Names + +Names refer to bindings that are currently in scope. + +These could either be _global_, or _local_. + +```pikelet +make-string +Foo-23 +Unicode-String +``` + +## Conventions + +'Small' bindings should use `lower-kebab-case`, for example: + +```pikelet +my-string +``` + +'Large' bindings should use `Title-Kebab-Case`, for example: + +```pikelet +My-String +My-Universe +``` diff --git a/book/src/reference/records.md b/book/src/reference/records.md new file mode 100644 index 000000000..bf3d81dfd --- /dev/null +++ b/book/src/reference/records.md @@ -0,0 +1,194 @@ +# Records + +Records provide a way of grouping together data into [composite data types][composite-data-types-wikipedia]. + +[composite-data-types-wikipedia]: https://en.wikipedia.org/wiki/Composite_data_type + +## Types + +A record type is a list of entries, consisting of an entry label, and an entry type. +For example, this is a record that defines `width` and `height` extents: + +```pikelet +Record { + width : U32, + height : U32, +} +``` + +### Entry dependencies + +Entries can be used to constrain the types of later entries. +For example: + +```pikelet +Record { + A : Type, + a : A, +} +``` + +Here the type of the entry with the label `a` _depends_ on the type given to +the entry with label `A`. 
+ +### Explicit binding names + +By default, the binding name of an entry is the same as the label. +In rare cases, however, the label name might shadow a binding from a higher scope. +In this case we can give the field a new, internal name using the `as` keyword: + +```pikelet +Record { + -- label + -- │ + -- │ explicit name binding + -- │ │ + -- v v + String as String-1 : Type, + + -- refers to the built-in `String` type + -- │ + -- v + x : String, + + -- refers to the local `String` entry + -- │ + -- v + y : String-1, +} +``` + +### Universes + +Record types are also types: + +```pikelet +Record { + first : U32, +} : Type +``` + +In order to find the universe level of a record type, +we use the universe level of the largest entry type: + +```pikelet +Record { + first : U32, + second : Type^2, + third : Type, +} : Type^3 +``` + +### Entry order + +The order of entries in a record type is significant, +so the following record type is not the same as the one shown above: + +```pikelet +Record { + height : U32, + width : U32, +} +``` + +Dependencies must be supplied from the roots to the leaves. +For example, the following record would not type check because `A : Type` is not yet defined when `a : A` is declared: + +```pikelet +Record { + a : A, + A : Type, +} +``` + +> **Note:** +> +> The entry order seems annoying! +> It would be nice not to require this in the future, but dependencies make this a challenge! + +## Terms + +> **Note:** +> +> This section is a work in progress. + +```pikelet +record {} +``` + +```pikelet +record { + width = 24, + height = 33, +} : Record { + width : U32, + height : U32, +} +``` + +### Entry dependencies + +The entries of record terms can depend on one another: + +```pikelet +record { x = 1, y = x } + : Record { x : S32, y : S32 } +``` + +### Entry order + +The entries of record terms must be supplied in the order that was specified in the type. 
+For example, this is a type error: + +```pikelet +record { y = 2, x = 1 } + : Record { x : S32, y : S32 } +``` + +> **Note:** +> +> The entry order seems annoying! +> It would be nice not to require this in the future. + +### Explicit binding names + +By default, the binding name of an entry is the same as the label. +In rare cases, however, the label name might shadow a binding from a higher scope. +In this case we can give the field a new, internal name using the `as` keyword: + +```pikelet +record { + -- label + -- │ + -- │ explicit name binding + -- │ │ + -- v v + String as String-1 = MyString, + + -- refers to the built-in `String` type + -- │ + -- │ refers to the local `String` entry + -- │ │ + -- v v + Types = [ String, String-1 ], +} : Record { + String : Type, + Types : Array 2 Type, +} +``` + +## Eliminations + +> **Note:** +> +> This section is a work in progress. + +```pikelet +extents.width +``` + +### Computation + +> **Note:** +> +> This section is a work in progress. diff --git a/book/src/reference/universes.md b/book/src/reference/universes.md new file mode 100644 index 000000000..f1776a114 --- /dev/null +++ b/book/src/reference/universes.md @@ -0,0 +1,76 @@ +# Universes + +Having first class types naturally poses the question: what is the type of `Type`? + +```pikelet +Type : ??? +``` + +One idea might be to have: + +```pikelet +Type : Type +``` + +This is not a bad design, and many systems choose it pragmatically for its simplicity, +but it is, however, [_inconsistent_][consistency-wikipedia] for [subtle reasons][type-in-type-liamoc], +as seen in [Girard's Paradox][girards-paradox-wikipedia]. +This means that allowing it would permit the construction of a program that returned `Void`! +Pikelet avoids such paradoxical constructions by introducing a hierarchy of universes, +indexed by _universe levels_, as seen in the next section. 
+ +[consistency-wikipedia]: https://en.wikipedia.org/wiki/Consistency +[type-in-type-liamoc]: http://liamoc.net/posts/2015-09-10-girards-paradox/index.html +[girards-paradox-wikipedia]: https://en.wikipedia.org/wiki/System_U#Girard's_paradox + +## Universe levels + +> **Note:** +> +> This section is a work in progress. + +```text +╭─ Type^2 ───────────────────────────────────────────────────────────────────────╼ +│ Array n Type^1 │ +│ │ +│ ╭─ Type^1 ─────────────────────────────────────────────────────────────────╼ │ +│ │ │ │ +│ │ ╭─ Type ───────────────────────────────╼ │ │ +│ │ │ Array n String │ │ │ +│ │ │ │ │ │ +│ │ │ Record { x : F32 } │ Array n Type │ │ +│ │ │ │ │ │ +│ │ │ ╭─ S32 ──────────────╼ │ │ │ +│ │ │ │ ..., -1, 0, 1, ... │ │ Nat -> Type -> Type │ │ +│ │ │ ╰────────────────────╯ │ │ │ +│ │ │ │ │ │ +│ │ │ ╭─ Record {} ─╼ ╭─ Bool ───────╼ │ │ │ +│ │ │ │ record {} │ │ true, false │ │ Type -> Type │ │ +│ │ │ ╰────────────-╯ ╰──────────────╯ │ │ │ +│ │ │ │ │ │ +│ │ │ ╭─ String ──────────────────────╼ │ ╭─ Record { t : Type } ────╼ │ │ +│ │ │ │ "hello", "byee!", "hoho", ... │ │ │ record { t = String }, │ │ │ +│ │ │ ╰──────────────────────────────-╯ │ │ record { t = U32 }, ... │ │ │ +│ │ ╰──────────────────────────────────────╯ ╰──────────────────────────╯ │ │ +│ ╰──────────────────────────────────────────────────────────────────────────╯ │ +╰────────────────────────────────────────────────────────────────────────────────╯ +``` + +## Cumulativity + +Because the level of a universe corresponds to some notion of it's 'size', +this suggests that larger universes should be able to contain all the other things smaller than themselves. +This is reflected in Pikelet too: + +```pikelet +Bool : Type -- ok +Bool : Type^2 -- ok +Type^1 : Type^3 -- ok +Type^3 : Type^1 -- error! +``` + +## Lifting terms + +> **Note:** +> +> This section is a work in progress. 
diff --git a/book/src/specification.md b/book/src/specification.md new file mode 100644 index 000000000..af305e640 --- /dev/null +++ b/book/src/specification.md @@ -0,0 +1,18 @@ +# Language Specification + +This part of the documentation is where we describe the syntax and semantics of Pikelet in a more precise way than the [language reference]. +It is intended for language developers and programming languages researchers who want to understand the underlying type system of the language. + +[language reference]: ./reference.md + +## Limitations + +It is important to note that we do not claim that these semantics are sound, +but at the very least this specification could form the building blocks of a formally verified specification in the future. + +## Summary + +- [Core Language]() +- [Surface Language]() +- [Textual Representation](./specification/textual-representation.md) +- [Inspiration](./specification/inspiration.md) diff --git a/book/src/specification/inspiration.md b/book/src/specification/inspiration.md new file mode 100644 index 000000000..ce185b15c --- /dev/null +++ b/book/src/specification/inspiration.md @@ -0,0 +1,24 @@ +--- +id: inspiration +title: Inspiration +sidebar_label: Inspiration +keywords: + - docs + - specification + - pikelet +--- + +Some inspiring language specifications/references: + +- [WebAssembly Specification](https://webassembly.github.io/spec/core/) +- [The Definition of Standard ML](http://sml-family.org/sml97-defn.pdf) +- [Dhall Specification](https://github.com/dhall-lang/dhall-lang/blob/master/standard/README.md) +- [The Ur/Web Manual](http://www.impredicative.com/ur/manual.pdf) +- [Coq Reference Manual: The Gallina specification language](https://coq.inria.fr/refman/language/gallina-specification-language.html) +- [Coq Reference Manual: Calculus of Inductive Constructions](https://coq.inria.fr/refman/language/cic.html) +- [The Isabelle/Isar Reference Manual](http://isabelle.in.tum.de/dist/Isabelle2019/doc/isar-ref.pdf) +- [The 
Isabelle/Isar Implementation](http://isabelle.in.tum.de/dist/Isabelle2019/doc/implementation.pdf) +- [Specification of Core Agda](https://agda.github.io/agda-spec/core-agda.pdf) +- [Swift Reference](https://docs.swift.org/swift-book/ReferenceManual/AboutTheLanguageReference.html) +- [D Specification](https://dlang.org/spec/spec.html) +- [Rust Reference](https://doc.rust-lang.org/reference/) diff --git a/book/src/specification/textual-representation.md b/book/src/specification/textual-representation.md new file mode 100644 index 000000000..fd28ca8a9 --- /dev/null +++ b/book/src/specification/textual-representation.md @@ -0,0 +1,8 @@ +# Textual Representation + +This section describes the textual representation of Pikelet. + +## Summary + +- [Lexical Syntax](./textual-representation/lexical-syntax.md) +- [Concrete Syntax](./textual-representation/concrete-syntax.md) diff --git a/book/src/specification/textual-representation/concrete-syntax.md b/book/src/specification/textual-representation/concrete-syntax.md new file mode 100644 index 000000000..4fb477260 --- /dev/null +++ b/book/src/specification/textual-representation/concrete-syntax.md @@ -0,0 +1,46 @@ +# Concrete Syntax + +This section defines the concrete syntax of the surface language. + +## Terms + +```text +term ::= + | expr-term + | expr-term ":" term + +expr-term ::= + | arrow-term + | "fun" name+ "=>" expr-term + +arrow-term ::= + | app-term + | "Fun" ("(" name+ ":" arrow-term ")")+ "->" arrow-term + | app-term "->" arrow-term + +app-term ::= + | atomic-term + | atomic-term atomic-term+ + +atomic-term ::= + | "(" term ")" + | name + | atomic-term "^" number-literal + | "Record" "{" (type-entry ",")* type-entry? "}" + | "record" "{" (term-entry ",")* term-entry? "}" + | atomic-term "." name + | "[" (term ",")* term? "]" + | number-literal + | character-literal + | string-literal +``` + +## Entries + +```text +type-entry ::= + | doc-comment* name ("as" name)? 
":" term + +term-entry ::= + | doc-comment* name ("as" name)? "=" term +``` diff --git a/book/src/specification/textual-representation/lexical-syntax.md b/book/src/specification/textual-representation/lexical-syntax.md new file mode 100644 index 000000000..af0ff3121 --- /dev/null +++ b/book/src/specification/textual-representation/lexical-syntax.md @@ -0,0 +1,132 @@ +# Lexical Syntax + +The _lexical structure_ of the Pikelet programming language is a description of what constitutes a valid sequence of tokens in the programming language. + +## Characters + +The textual surface language assigns meaning to a source string, +which consists of a sequence of _Unicode scalar values_ (as defined in Section 3.4 of [the Unicode Standard](https://www.unicode.org/versions/latest/)), +terminated with a virtual end-of-file symbol, `"\0"`: + +```text +unicode-scalar-value ::= + | "\u{00}" ... "\u{D7FF}" + | "\u{E000}" ... "\u{10FFFF}" + +source ::= + | unicode-scalar-value* "\0" +``` + +For convenience, we define a number of special values within the above `unicode-scalar-value` definition: + +```text +horizontal-tab ::= "\u{0009}" +line-feed ::= "\u{000A}" +vertical-tab ::= "\u{000B}" +form-feed ::= "\u{000C}" +carriage-return ::= "\u{000D}" +next-line ::= "\u{0085}" +left-to-right-mark ::= "\u{200E}" +right-to-left-mark ::= "\u{200F}" +line-separator ::= "\u{2028}" +paragraph-separator ::= "\u{2029}" +``` + +## Whitespace and comments + +```text +line-break ::= + | line-feed + | carriage-return + | carriage-return line-feed + | "\0" + +comment-text ::= + | (~(line-feed | carriage-return) unicode-scalar-value)* + +comment ::= + | "--" comment-text line-break + +doc-comment ::= + | "|||" comment-text line-break + +white-space ::= + | horizontal-tab + | comment + | vertical-tab + | form-feed + | line-break + | next-line + | left-to-right-mark + | right-to-left-mark + | line-separator + | paragraph-separator +``` + +## Keywords and names + +```text +keyword ::= + | "as" + | "fun" + | "Fun" + 
| "Record" + | "record" + +name-or-keyword ::= + | ("a" ... "z" | "A" ... "Z") ("a" ... "z" | "A" ... "Z" | "0" ... "9" | "-")* + +name ::= + | ~keyword name-or-keyword +``` + +### Punctuation + +```text +delimiter ::= + | "{" + | "}" + | "[" + | "]" + | "(" + | ")" + +symbol ::= + | "." + | ":" + | "," + | "=" + | "=>" + | "->" + +punctuation ::= + | delimiter + | symbol +``` + +### Literals + +```text +number-literal ::= + | ("+" | "-")? ("0" ... "9") ("a" ... "z" | "A" ... "Z" | "0" ... "9" | "." | "_")* + +character-literal ::= + | "'" ("'" | ~"'" unicode-scalar-value)* "'" + +string-literal ::= + | "\"" ("\"" | ~"\"" unicode-scalar-value)* "\"" +``` + +### Tokens + +```text +token ::= + | white-space + | doc-comment + | keyword + | name + | punctuation + | number-literal + | character-literal + | string-literal +``` diff --git a/crates/README.md b/crates/README.md deleted file mode 100644 index ddb812b18..000000000 --- a/crates/README.md +++ /dev/null @@ -1,32 +0,0 @@ -# The Pikelet Compiler - -Welcome to the core implementation of the Pikelet compiler. We hope you enjoy -your stay! 
- -## Crates - -### Tools - -| Name | Description | -|-----------------------------|-------------------------------------------------| -| [`pikelet`] | Top-level command line interface | -| [`pikelet-repl`] | Interactive mode | -| [`pikelet-language-server`] | LSP conforming language server | - -[`pikelet`]: /crates/pikelet -[`pikelet-repl`]: /crates/pikelet-repl -[`pikelet-language-server`]: /crates/pikelet-language-server - -### Compiler - -| Name | Description | -|-----------------------------|-------------------------------------------------------------------| -| [`pikelet-driver`] | Main entry-point for the compiler pipeline | -| [`pikelet-library`] | Builtin libraries | -| [`pikelet-concrete`] | Parsing, pretty printing, and elaboration of the concrete syntax | -| [`pikelet-core`] | Normalization-by-evaluation and checking of the core language | - -[`pikelet-driver`]: /crates/pikelet-driver -[`pikelet-library`]: /crates/pikelet-library -[`pikelet-concrete`]: /crates/pikelet-concrete -[`pikelet-core`]: /crates/pikelet-core diff --git a/crates/pikelet-concrete/Cargo.toml b/crates/pikelet-concrete/Cargo.toml deleted file mode 100644 index 024981407..000000000 --- a/crates/pikelet-concrete/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "pikelet-concrete" -version = "0.1.0" -license = "Apache-2.0" -readme = "README.md" -authors = ["Brendan Zabarauskas "] -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] -codespan = "0.2.0" -codespan-reporting = "0.2.0" -failure = "0.1.3" -im = "12.2.0" -lalrpop-util = "0.16.0" -moniker = { version = "0.5.0", features = ["codespan", "im"] } -pikelet-core = { version = "0.1.0", path = "../pikelet-core" } -pretty = { version = "0.5.2", features = ["termcolor"] } -unicode-xid = "0.1.0" - -[build-dependencies] -lalrpop = "0.16.0" - -[dev-dependencies] -goldenfile = "0.7.1" -pretty_assertions = "0.5.1" diff 
--git a/crates/pikelet-concrete/README.md b/crates/pikelet-concrete/README.md deleted file mode 100644 index 91ee093b8..000000000 --- a/crates/pikelet-concrete/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# Pikelet Concrete Syntax - -This crate is responsible for: - -- defining data structures for: - - concrete terms - - raw terms -- parsing the concrete syntax -- pretty printing -- desugaring from the concrete syntax -- resugaring to the concrete syntax -- bidirectional elaboration, involving: - - bidirectional type checking - - passing implicit arguments explicitly (TODO) - - passing instance arguments explicitly (TODO) - - returning the fully explicit core syntax - diff --git a/crates/pikelet-concrete/src/desugar.rs b/crates/pikelet-concrete/src/desugar.rs deleted file mode 100644 index 6bef25946..000000000 --- a/crates/pikelet-concrete/src/desugar.rs +++ /dev/null @@ -1,580 +0,0 @@ -use codespan::{ByteIndex, ByteOffset, ByteSpan}; -use codespan_reporting::{Diagnostic, Label as DiagnosticLabel}; -use im; -use failure::Fail; -use moniker::{Binder, Embed, FreeVar, Nest, Scope, Var}; - -use pikelet_core::syntax::{Label, Level, LevelShift}; - -use crate::syntax::concrete; -use crate::syntax::raw; - -/// The environment used when desugaring from the concrete to raw syntax -#[derive(Debug, Clone)] -pub struct DesugarEnv { - /// An environment that maps strings to unique free variables - /// - /// This is a persistent map so that we can create new environments as we enter - /// new scopes, allowing us to properly model variable shadowing. - /// - /// If we arrive at a variable that has not already been assigned a free name, - /// we assume that it is a global name. 
- locals: im::HashMap>, -} - -impl DesugarEnv { - pub fn new(mappings: im::HashMap>) -> DesugarEnv { - DesugarEnv { locals: mappings } - } - - pub fn on_item(&mut self, name: &str) -> Binder { - if let Some(free_var) = self.locals.get(name) { - return Binder(free_var.clone()); - } - Binder(self.on_binding(name)) - } - - pub fn on_binding(&mut self, name: &str) -> FreeVar { - let name = name.to_owned(); - let free_var = FreeVar::fresh_named(name.clone()); - self.locals.insert(name, free_var.clone()); - free_var - } - - pub fn on_name(&self, span: ByteSpan, name: &str, shift: u32) -> raw::RcTerm { - let free_var = match self.locals.get(name) { - None => FreeVar::fresh_named(name), - Some(free_var) => free_var.clone(), - }; - - raw::RcTerm::from(raw::Term::Var(span, Var::Free(free_var), LevelShift(shift))) - } -} - -/// An error produced during resugaring -#[derive(Debug, Fail, Clone, PartialEq)] -pub enum DesugarError { - #[fail( - display = "Name had more than one declaration associated with it: `{}`", - name - )] - DuplicateDeclarations { - original_span: ByteSpan, - duplicate_span: ByteSpan, - name: String, - }, - #[fail(display = "Declaration followed definition: `{}`", name)] - DeclarationFollowedDefinition { - definition_span: ByteSpan, - declaration_span: ByteSpan, - name: String, - }, - #[fail( - display = "Name had more than one definition associated with it: `{}`", - name - )] - DuplicateDefinitions { - original_span: ByteSpan, - duplicate_span: ByteSpan, - name: String, - }, -} - -impl DesugarError { - /// Convert the error into a diagnostic message - pub fn to_diagnostic(&self) -> Diagnostic { - match *self { - DesugarError::DuplicateDeclarations { - original_span, - duplicate_span, - ref name, - } => Diagnostic::new_error(format!( - "name had more than one declaration associated with it `{}`", - name, - )) - .with_label( - DiagnosticLabel::new_primary(duplicate_span) - .with_message("the duplicated declaration"), - ) - .with_label( - 
DiagnosticLabel::new_secondary(original_span) - .with_message("the original declaration"), - ), - DesugarError::DeclarationFollowedDefinition { - definition_span, - declaration_span, - name: _, - } => Diagnostic::new_error(format!("declarations cannot follow definitions")) - .with_label( - DiagnosticLabel::new_primary(declaration_span).with_message("the declaration"), - ) - .with_label( - DiagnosticLabel::new_secondary(definition_span) - .with_message("the original definition"), - ), - DesugarError::DuplicateDefinitions { - original_span, - duplicate_span, - ref name, - } => Diagnostic::new_error(format!( - "name had more than one definition associated with it `{}`", - name, - )) - .with_label( - DiagnosticLabel::new_primary(duplicate_span) - .with_message("the duplicated definition"), - ) - .with_label( - DiagnosticLabel::new_secondary(original_span) - .with_message("the original definition"), - ), - } - } -} - -/// Translate something to the corresponding core representation -pub trait Desugar { - fn desugar(&self, env: &DesugarEnv) -> Result; -} - -/// Convert a sugary pi type from something like: -/// -/// ```text -/// (a b : t1) (c : t2) -> t3 -/// ``` -/// -/// To a bunch of nested pi types like: -/// -/// ```text -/// (a : t1) -> (b : t1) -> (c : t2) -> t3 -/// ``` -fn desugar_fun_ty( - env: &DesugarEnv, - param_groups: &[concrete::FunTypeParamGroup], - body: &concrete::Term, -) -> Result { - let mut env = env.clone(); - - let mut params = Vec::new(); - for &(ref names, ref ann) in param_groups { - let ann = raw::RcTerm::from(ann.desugar(&env)?); - params.extend(names.iter().map(|&(start, ref name)| { - let free_var = env.on_binding(name); - (start, Binder(free_var), ann.clone()) - })); - } - - Ok(params - .into_iter() - .rev() - .fold(body.desugar(&env)?, |acc, (start, binder, ann)| { - raw::RcTerm::from(raw::Term::FunType( - ByteSpan::new(start, acc.span().end()), - Scope::new((binder, Embed(ann.clone())), acc), - )) - })) -} - -/// Convert a sugary lambda 
from something like: -/// -/// ```text -/// \(a b : t1) c (d : t2) => t3 -/// ``` -/// -/// To a bunch of nested lambdas like: -/// -/// ```text -/// \(a : t1) => \(b : t1) => \c => \(d : t2) => t3 -/// ``` -fn desugar_fun_intro( - env: &DesugarEnv, - param_groups: &[concrete::FunIntroParamGroup], - return_ann: Option<&concrete::Term>, - body: &concrete::Term, -) -> Result { - let mut env = env.clone(); - - let mut params = Vec::new(); - for &(ref names, ref ann) in param_groups { - let ann = match *ann { - None => raw::RcTerm::from(raw::Term::Hole(ByteSpan::default())), - Some(ref ann) => ann.desugar(&env)?, - }; - - params.extend(names.iter().map(|&(start, ref name)| { - let free_var = env.on_binding(name); - (start, Binder(free_var), ann.clone()) - })); - } - - let body = match return_ann { - None => body.desugar(&env)?, - Some(ann) => raw::RcTerm::from(raw::Term::Ann(body.desugar(&env)?, ann.desugar(&env)?)), - }; - - Ok(params - .into_iter() - .rev() - .fold(body, |acc, (start, binder, ann)| { - raw::RcTerm::from(raw::Term::FunIntro( - ByteSpan::new(start, acc.span().end()), - Scope::new((binder, Embed(ann.clone())), acc), - )) - })) -} - -fn desugar_items( - env: &mut DesugarEnv, - concrete_items: &[concrete::Item], -) -> Result, Embed)>, DesugarError> { - use im::HashMap; - - #[derive(Clone)] - pub enum ForwardDecl { - Pending(ByteSpan, raw::RcTerm), - Defined(ByteSpan), - } - - // Declarations that may be waiting to be defined - let mut forward_declarations = HashMap::new(); - // The elaborated items, pre-allocated to improve performance - let mut items = Vec::with_capacity(concrete_items.len()); - let hole = raw::RcTerm::from(raw::Term::Hole(ByteSpan::default())); - - // Iterate through the items in the module, checking each in turn - for concrete_item in concrete_items { - match *concrete_item { - concrete::Item::Declaration { - name: (start, ref name), - ref ann, - } => { - let binder = env.on_item(name); - let name_span = ByteSpan::from_offset(start, 
ByteOffset::from_str(name)); - - // Ensure that this declaration has not already been seen - match forward_declarations.get(&binder) { - // There's already a definition associated with this name - - // we can't add a new declaration for it! - Some(&ForwardDecl::Defined(definition_span)) => { - return Err(DesugarError::DeclarationFollowedDefinition { - definition_span, - declaration_span: name_span, - name: name.clone(), - }); - }, - // There's a declaration for this name already pending - we - // can't add a new one! - Some(&ForwardDecl::Pending(original_span, _)) => { - return Err(DesugarError::DuplicateDeclarations { - original_span, - duplicate_span: name_span, - name: name.clone(), - }); - }, - // No previous declaration for this name was seen, so we can - // go-ahead and type check, elaborate, and then add it to - // the context - None => {}, - } - - // Remember the declaration for when we get to a subsequent definition - let declaration = ForwardDecl::Pending(name_span, ann.desugar(&env)?); - forward_declarations.insert(binder.clone(), declaration); - }, - - concrete::Item::Definition { - name: (start, ref name), - ref params, - ref return_ann, - ref body, - } => { - let binder = env.on_item(name); - let name_span = ByteSpan::from_offset(start, ByteOffset::from_str(name)); - let term = - desugar_fun_intro(env, params, return_ann.as_ref().map(<_>::as_ref), body)?; - let ann = match forward_declarations.get(&binder).cloned() { - // This declaration was already given a definition, so this - // is an error! - // - // NOTE: Some languages (eg. Haskell, Agda, Idris, and - // Erlang) turn duplicate definitions into case matches. - // Languages like Elm don't. What should we do here? 
- Some(ForwardDecl::Defined(original_span)) => { - return Err(DesugarError::DuplicateDefinitions { - original_span, - duplicate_span: name_span, - name: name.clone(), - }); - }, - // We found a prior declaration, so we'll use it as a basis - // for checking the definition - Some(ForwardDecl::Pending(_, ann)) => ann.clone(), - // No prior declaration was found, so use a hole instead - None => hole.clone(), - }; - - // We must not remove this from the list of pending - // declarations, lest we encounter another declaration or - // definition of the same name later on! - forward_declarations.insert(binder.clone(), ForwardDecl::Defined(name_span)); - // Add the definition to the elaborated items - items.push((binder, Embed(raw::RcTerm::from(raw::Term::Ann(term, ann))))); - }, - concrete::Item::Error(_) => unimplemented!("error recovery"), - } - } - - Ok(Nest::new(items)) -} - -fn desugar_let( - env: &DesugarEnv, - start: ByteIndex, - concrete_items: &[concrete::Item], - body: &concrete::Term, -) -> Result { - let mut env = env.clone(); - let items = desugar_items(&mut env, concrete_items)?; - - Ok(raw::RcTerm::from(raw::Term::Let( - ByteSpan::new(start, body.span().end()), - Scope::new(items, body.desugar(&env)?), - ))) -} - -fn desugar_where( - env: &DesugarEnv, - body: &concrete::Term, - concrete_items: &[concrete::Item], - end: ByteIndex, -) -> Result { - let mut env = env.clone(); - let items = desugar_items(&mut env, concrete_items)?; - - // TODO: Remember formatting - Ok(raw::RcTerm::from(raw::Term::Let( - ByteSpan::new(body.span().start(), end), - Scope::new(items, body.desugar(&env)?), - ))) -} - -fn desugar_record_ty( - env: &DesugarEnv, - span: ByteSpan, - fields: &[concrete::RecordTypeField], -) -> Result { - let mut env = env.clone(); - - let fields = fields - .iter() - .map(|field| { - let (_, ref label) = field.label; - let ann = field.ann.desugar(&env)?; - let free_var = match field.binder { - Some((_, ref binder)) => env.on_binding(binder), - None => 
env.on_binding(label), - }; - - Ok((Label(label.clone()), Binder(free_var), Embed(ann))) - }) - .collect::, _>>()?; - - Ok(raw::RcTerm::from(raw::Term::RecordType( - span, - Scope::new(Nest::new(fields), ()), - ))) -} - -fn desugar_record_intro( - env: &DesugarEnv, - span: ByteSpan, - fields: &[concrete::RecordIntroField], -) -> Result { - use crate::syntax::concrete::RecordIntroField; - - let fields = fields - .iter() - .map(|field| match field { - RecordIntroField::Punned { - label: (_, ref name), - shift, - } => { - let var = env.on_name(span, name, shift.unwrap_or(0)); - Ok((Label(name.clone()), var)) - }, - RecordIntroField::Explicit { - label: (_, ref name), - ref params, - ref return_ann, - ref term, - } => Ok(( - Label(name.clone()), - desugar_fun_intro(env, params, return_ann.as_ref().map(<_>::as_ref), term)?, - )), - }) - .collect::, _>>()?; - - Ok(raw::RcTerm::from(raw::Term::RecordIntro(span, fields))) -} - -impl Desugar for concrete::Literal { - fn desugar(&self, _: &DesugarEnv) -> Result { - Ok(match *self { - concrete::Literal::String(span, ref val) => raw::Literal::String(span, val.clone()), - concrete::Literal::Char(span, val) => raw::Literal::Char(span, val), - concrete::Literal::Int(span, format, val) => raw::Literal::Int(span, format, val), - concrete::Literal::Float(span, format, val) => raw::Literal::Float(span, format, val), - }) - } -} - -impl Desugar<(raw::RcPattern, DesugarEnv)> for concrete::Pattern { - fn desugar(&self, env: &DesugarEnv) -> Result<(raw::RcPattern, DesugarEnv), DesugarError> { - match *self { - concrete::Pattern::Parens(_, ref pattern) => pattern.desugar(env), - concrete::Pattern::Ann(ref pattern, ref ty) => { - let ty = ty.desugar(env)?; - let (pattern, env) = pattern.desugar(env)?; - let ann_pattern = raw::RcPattern::from(raw::Pattern::Ann(pattern, Embed(ty))); - - Ok((ann_pattern, env)) - }, - concrete::Pattern::Name(span, ref name, shift) => match (env.locals.get(name), shift) { - (Some(free_var), shift) => { - let 
var = Var::Free(free_var.clone()); - let shift = LevelShift(shift.unwrap_or(0)); - let pattern = raw::RcPattern::from(raw::Pattern::Var(span, Embed(var), shift)); - - Ok((pattern, env.clone())) - }, - (None, Some(shift)) => { - let var = Var::Free(FreeVar::fresh_named(name.clone())); - let shift = LevelShift(shift); - let pattern = raw::RcPattern::from(raw::Pattern::Var(span, Embed(var), shift)); - - Ok((pattern, env.clone())) - }, - (None, None) => { - let mut env = env.clone(); - let free_var = env.on_binding(name); - let binder = Binder(free_var); - let pattern = raw::RcPattern::from(raw::Pattern::Binder(span, binder)); - - Ok((pattern, env)) - }, - }, - concrete::Pattern::Literal(ref literal) => { - let literal = raw::RcPattern::from(raw::Pattern::Literal(literal.desugar(env)?)); - - Ok((literal, env.clone())) - }, - concrete::Pattern::Error(_) => unimplemented!("error recovery"), - } - } -} - -impl Desugar for concrete::Term { - fn desugar(&self, env: &DesugarEnv) -> Result { - let span = self.span(); - match *self { - concrete::Term::Parens(_, ref term) => term.desugar(env), - concrete::Term::Ann(ref expr, ref ty) => Ok(raw::RcTerm::from(raw::Term::Ann( - expr.desugar(env)?, - ty.desugar(env)?, - ))), - concrete::Term::Universe(_, level) => Ok(raw::RcTerm::from(raw::Term::Universe( - span, - Level(level.unwrap_or(0)), - ))), - concrete::Term::Literal(ref literal) => { - Ok(raw::RcTerm::from(raw::Term::Literal(literal.desugar(env)?))) - }, - concrete::Term::ArrayIntro(_, ref elems) => { - let elems = elems - .iter() - .map(|elem| elem.desugar(env)) - .collect::>()?; - - Ok(raw::RcTerm::from(raw::Term::ArrayIntro(span, elems))) - }, - concrete::Term::Hole(_) => Ok(raw::RcTerm::from(raw::Term::Hole(span))), - concrete::Term::Name(_, ref name, shift) => { - Ok(env.on_name(span, name, shift.unwrap_or(0))) - }, - concrete::Term::Import(_, name_span, ref name) => Ok(raw::RcTerm::from( - raw::Term::Import(span, name_span, name.clone()), - )), - 
concrete::Term::FunType(_, ref params, ref body) => desugar_fun_ty(env, params, body), - concrete::Term::FunIntro(_, ref params, ref body) => { - desugar_fun_intro(env, params, None, body) - }, - concrete::Term::FunArrow(ref ann, ref body) => { - Ok(raw::RcTerm::from(raw::Term::FunType( - span, - Scope::new( - (Binder(FreeVar::fresh_unnamed()), Embed(ann.desugar(env)?)), - body.desugar(env)?, - ), - ))) - }, - concrete::Term::FunApp(ref head, ref args) => { - args.iter().fold(head.desugar(env), |acc, arg| { - Ok(raw::RcTerm::from(raw::Term::FunApp( - acc?, - arg.desugar(env)?, - ))) - }) - }, - concrete::Term::Let(start, ref items, ref body) => desugar_let(env, start, items, body), - concrete::Term::Where(ref expr, ref items, end) => desugar_where(env, expr, items, end), - concrete::Term::If(_, ref cond, ref if_true, ref if_false) => { - let bool_pattern = |name: &str| { - raw::RcPattern::from(raw::Pattern::Var( - ByteSpan::default(), - Embed(Var::Free(match env.locals.get(name) { - Some(free_var) => free_var.clone(), - None => FreeVar::fresh_named("oops"), - })), - LevelShift(0), - )) - }; - - Ok(raw::RcTerm::from(raw::Term::Case( - span, - cond.desugar(env)?, - vec![ - Scope::new(bool_pattern("true"), if_true.desugar(&env)?), - Scope::new(bool_pattern("false"), if_false.desugar(&env)?), - ], - ))) - }, - concrete::Term::Case(span, ref head, ref clauses) => { - Ok(raw::RcTerm::from(raw::Term::Case( - span, - head.desugar(env)?, - clauses - .iter() - .map(|(pattern, term)| { - let (pattern, env) = pattern.desugar(env)?; - Ok(Scope::new(pattern, term.desugar(&env)?)) - }) - .collect::>()?, - ))) - }, - concrete::Term::RecordType(span, ref fields) => desugar_record_ty(env, span, fields), - concrete::Term::RecordIntro(span, ref fields) => { - desugar_record_intro(env, span, fields) - }, - concrete::Term::RecordProj(_, ref tm, label_start, ref label, shift) => { - Ok(raw::RcTerm::from(raw::Term::RecordProj( - span, - tm.desugar(env)?, - 
ByteSpan::from_offset(label_start, ByteOffset::from_str(label)), - Label(label.clone()), - LevelShift(shift.unwrap_or(0)), - ))) - }, - concrete::Term::Error(_) => unimplemented!("error recovery"), - } - } -} diff --git a/crates/pikelet-concrete/src/elaborate/context.rs b/crates/pikelet-concrete/src/elaborate/context.rs deleted file mode 100644 index 85e6d191c..000000000 --- a/crates/pikelet-concrete/src/elaborate/context.rs +++ /dev/null @@ -1,581 +0,0 @@ -use im; -use moniker::{Binder, FreeVar, Var}; -use std::rc::Rc; - -use pikelet_core::nbe; -use pikelet_core::syntax::core::RcTerm; -use pikelet_core::syntax::domain::{RcType, RcValue, Value}; -use pikelet_core::syntax::{Import, Literal}; - -use crate::resugar::{Resugar, ResugarEnv}; - -// Some helper traits for marshalling between Rust and Pikelet values -// -// I'm not super happy with the API at the moment, so these are currently private - -trait IntoValue { - fn ty(context: &Context) -> RcType; - fn into_value(self) -> RcValue; -} - -macro_rules! 
impl_into_value { - ($T:ty, $ty:ident, $Variant:ident) => { - impl IntoValue for $T { - fn ty(context: &Context) -> RcType { - context.$ty().clone() - } - - fn into_value(self) -> RcValue { - RcValue::from(Value::Literal(Literal::$Variant(self))) - } - } - }; -} - -impl_into_value!(String, string, String); -impl_into_value!(char, char, Char); -impl_into_value!(bool, bool, Bool); -impl_into_value!(u8, u8, U8); -impl_into_value!(u16, u16, U16); -impl_into_value!(u32, u32, U32); -impl_into_value!(u64, u64, U64); -impl_into_value!(i8, s8, S8); -impl_into_value!(i16, s16, S16); -impl_into_value!(i32, s32, S32); -impl_into_value!(i64, s64, S64); -impl_into_value!(f32, f32, F32); -impl_into_value!(f64, f64, F64); - -trait TryFromValueRef { - fn try_from_value_ref(src: &Value) -> Option<&Self>; -} - -impl TryFromValueRef for String { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::String(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for char { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::Char(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for bool { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::Bool(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for u8 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::U8(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for u16 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::U16(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for u32 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::U32(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for u64 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - 
match *src { - Value::Literal(Literal::U64(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for i8 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::S8(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for i16 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::S16(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for i32 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::S32(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for i64 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::S64(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for f32 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::F32(ref val)) => Some(val), - _ => None, - } - } -} - -impl TryFromValueRef for f64 { - fn try_from_value_ref(src: &Value) -> Option<&Self> { - match *src { - Value::Literal(Literal::F64(ref val)) => Some(val), - _ => None, - } - } -} - -#[derive(Clone, Debug)] -pub struct Globals { - ty_bool: RcType, - ty_string: RcType, - ty_char: RcType, - ty_u8: RcType, - ty_u16: RcType, - ty_u32: RcType, - ty_u64: RcType, - ty_s8: RcType, - ty_s16: RcType, - ty_s32: RcType, - ty_s64: RcType, - ty_f32: RcType, - ty_f64: RcType, - var_array: FreeVar, -} - -/// The type checking context -/// -/// A default context with entries for built-in types is provided via the -/// implementation of the `Default` trait. -/// -/// We use persistent data structures internally so that we can copy the -/// context as we enter into scopes, without having to deal with the -/// error-prone tedium of working with mutable context. 
-#[derive(Clone, Debug)] -pub struct Context { - /// The resugar context - /// - /// We'll keep this up to date as we type check to make it easier to do - /// resugaring on any errors that we encounter - resugar_env: ResugarEnv, - /// The globals - globals: Rc, - /// Imports - imports: im::HashMap, - /// The type annotations of the binders we have passed over - declarations: im::HashMap, RcType>, - /// Any definitions we have passed over - definitions: im::HashMap, RcTerm>, -} - -impl Default for Context { - fn default() -> Context { - use moniker::{Embed, Scope}; - - use pikelet_core::syntax::core::Term; - - let var_bool = FreeVar::fresh_named("Bool"); - let var_true = FreeVar::fresh_named("true"); - let var_false = FreeVar::fresh_named("false"); - let var_string = FreeVar::fresh_named("String"); - let var_char = FreeVar::fresh_named("Char"); - let var_u8 = FreeVar::fresh_named("U8"); - let var_u16 = FreeVar::fresh_named("U16"); - let var_u32 = FreeVar::fresh_named("U32"); - let var_u64 = FreeVar::fresh_named("U64"); - let var_s8 = FreeVar::fresh_named("S8"); - let var_s16 = FreeVar::fresh_named("S16"); - let var_s32 = FreeVar::fresh_named("S32"); - let var_s64 = FreeVar::fresh_named("S64"); - let var_f32 = FreeVar::fresh_named("F32"); - let var_f64 = FreeVar::fresh_named("F64"); - let var_array = FreeVar::fresh_named("Array"); - - let mut context = Context { - resugar_env: ResugarEnv::new(), - globals: Rc::new(Globals { - ty_bool: RcValue::from(Value::var(Var::Free(var_bool.clone()), 0)), - ty_string: RcValue::from(Value::var(Var::Free(var_string.clone()), 0)), - ty_char: RcValue::from(Value::var(Var::Free(var_char.clone()), 0)), - ty_u8: RcValue::from(Value::var(Var::Free(var_u8.clone()), 0)), - ty_u16: RcValue::from(Value::var(Var::Free(var_u16.clone()), 0)), - ty_u32: RcValue::from(Value::var(Var::Free(var_u32.clone()), 0)), - ty_u64: RcValue::from(Value::var(Var::Free(var_u64.clone()), 0)), - ty_s8: RcValue::from(Value::var(Var::Free(var_s8.clone()), 0)), - 
ty_s16: RcValue::from(Value::var(Var::Free(var_s16.clone()), 0)), - ty_s32: RcValue::from(Value::var(Var::Free(var_s32.clone()), 0)), - ty_s64: RcValue::from(Value::var(Var::Free(var_s64.clone()), 0)), - ty_f32: RcValue::from(Value::var(Var::Free(var_f32.clone()), 0)), - ty_f64: RcValue::from(Value::var(Var::Free(var_f64.clone()), 0)), - var_array: var_array.clone(), - }), - imports: im::HashMap::new(), - declarations: im::HashMap::new(), - definitions: im::HashMap::new(), - }; - - let universe0 = RcValue::from(Value::universe(0)); - let bool_ty = context.globals.ty_bool.clone(); - let bool_lit = |value| RcTerm::from(Term::Literal(Literal::Bool(value))); - let array_ty = RcValue::from(Value::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(context.globals.ty_u64.clone()), - ), - RcValue::from(Value::FunType(Scope::new( - (Binder(FreeVar::fresh_unnamed()), Embed(universe0.clone())), - universe0.clone(), - ))), - ))); - - context.insert_declaration(var_bool, universe0.clone()); - context.insert_declaration(var_string, universe0.clone()); - context.insert_declaration(var_char, universe0.clone()); - context.insert_declaration(var_u8, universe0.clone()); - context.insert_declaration(var_u16, universe0.clone()); - context.insert_declaration(var_u32, universe0.clone()); - context.insert_declaration(var_u64, universe0.clone()); - context.insert_declaration(var_s8, universe0.clone()); - context.insert_declaration(var_s16, universe0.clone()); - context.insert_declaration(var_s32, universe0.clone()); - context.insert_declaration(var_s64, universe0.clone()); - context.insert_declaration(var_f32, universe0.clone()); - context.insert_declaration(var_f64, universe0.clone()); - context.insert_declaration(var_array, array_ty); - - context.insert_declaration(var_true.clone(), bool_ty.clone()); - context.insert_declaration(var_false.clone(), bool_ty.clone()); - context.insert_definition(var_true, bool_lit(true)); - context.insert_definition(var_false, 
bool_lit(false)); - - /// Define a primitive import - macro_rules! prim_import { - ($name:expr, fn($($param_name:ident : $PType:ty),*) -> $RType:ty $body:block) => {{ - fn interpretation<'a>(params: &'a [RcValue]) -> Option { - match params { - [$(ref $param_name),*] if $($param_name.is_nf())&&* => { - $(let $param_name = <$PType>::try_from_value_ref($param_name)?;)* - Some(<$RType>::into_value($body)) - } - _ => None, - } - } - - let ty = <$RType>::ty(&context); - $(let ty = { - let param_var = FreeVar::fresh_unnamed(); - let param_ty = <$PType>::ty(&context); - RcValue::from(Value::FunType(Scope::new((Binder(param_var), Embed(param_ty)), ty))) - };)* - - context.insert_import($name.to_owned(), Import::Prim(interpretation), ty); - }}; - } - - prim_import!("prim/string/eq", fn(x: String, y: String) -> bool { x == y }); - prim_import!("prim/bool/eq", fn(x: bool, y: bool) -> bool { x == y }); - prim_import!("prim/char/eq", fn(x: char, y: char) -> bool { x == y }); - prim_import!("prim/u8/eq", fn(x: u8, y: u8) -> bool { x == y }); - prim_import!("prim/u16/eq", fn(x: u16, y: u16) -> bool { x == y }); - prim_import!("prim/u32/eq", fn(x: u32, y: u32) -> bool { x == y }); - prim_import!("prim/u64/eq", fn(x: u64, y: u64) -> bool { x == y }); - prim_import!("prim/i8/eq", fn(x: i8, y: i8) -> bool { x == y }); - prim_import!("prim/i16/eq", fn(x: i16, y: i16) -> bool { x == y }); - prim_import!("prim/i32/eq", fn(x: i32, y: i32) -> bool { x == y }); - prim_import!("prim/i64/eq", fn(x: i64, y: i64) -> bool { x == y }); - prim_import!("prim/f32/eq", fn(x: f32, y: f32) -> bool { f32::eq(x, y) }); - prim_import!("prim/f64/eq", fn(x: f64, y: f64) -> bool { f64::eq(x, y) }); - - prim_import!("prim/string/ne", fn(x: String, y: String) -> bool { x != y }); - prim_import!("prim/bool/ne", fn(x: bool, y: bool) -> bool { x != y }); - prim_import!("prim/char/ne", fn(x: char, y: char) -> bool { x != y }); - prim_import!("prim/u8/ne", fn(x: u8, y: u8) -> bool { x != y }); - 
prim_import!("prim/u16/ne", fn(x: u16, y: u16) -> bool { x != y }); - prim_import!("prim/u32/ne", fn(x: u32, y: u32) -> bool { x != y }); - prim_import!("prim/u64/ne", fn(x: u64, y: u64) -> bool { x != y }); - prim_import!("prim/i8/ne", fn(x: i8, y: i8) -> bool { x != y }); - prim_import!("prim/i16/ne", fn(x: i16, y: i16) -> bool { x != y }); - prim_import!("prim/i32/ne", fn(x: i32, y: i32) -> bool { x != y }); - prim_import!("prim/i64/ne", fn(x: i64, y: i64) -> bool { x != y }); - prim_import!("prim/f32/ne", fn(x: f32, y: f32) -> bool { f32::ne(x, y) }); - prim_import!("prim/f64/ne", fn(x: f64, y: f64) -> bool { f64::ne(x, y) }); - - prim_import!("prim/string/le", fn(x: String, y: String) -> bool { x <= y }); - prim_import!("prim/bool/le", fn(x: bool, y: bool) -> bool { x <= y }); - prim_import!("prim/char/le", fn(x: char, y: char) -> bool { x <= y }); - prim_import!("prim/u8/le", fn(x: u8, y: u8) -> bool { x <= y }); - prim_import!("prim/u16/le", fn(x: u16, y: u16) -> bool { x <= y }); - prim_import!("prim/u32/le", fn(x: u32, y: u32) -> bool { x <= y }); - prim_import!("prim/u64/le", fn(x: u64, y: u64) -> bool { x <= y }); - prim_import!("prim/i8/le", fn(x: i8, y: i8) -> bool { x <= y }); - prim_import!("prim/i16/le", fn(x: i16, y: i16) -> bool { x <= y }); - prim_import!("prim/i32/le", fn(x: i32, y: i32) -> bool { x <= y }); - prim_import!("prim/i64/le", fn(x: i64, y: i64) -> bool { x <= y }); - prim_import!("prim/f32/le", fn(x: f32, y: f32) -> bool { x <= y }); - prim_import!("prim/f64/le", fn(x: f64, y: f64) -> bool { x <= y }); - - prim_import!("prim/string/lt", fn(x: String, y: String) -> bool { x < y }); - prim_import!("prim/bool/lt", fn(x: bool, y: bool) -> bool { x < y }); - prim_import!("prim/char/lt", fn(x: char, y: char) -> bool { x < y }); - prim_import!("prim/u8/lt", fn(x: u8, y: u8) -> bool { x < y }); - prim_import!("prim/u16/lt", fn(x: u16, y: u16) -> bool { x < y }); - prim_import!("prim/u32/lt", fn(x: u32, y: u32) -> bool { x < y }); - 
prim_import!("prim/u64/lt", fn(x: u64, y: u64) -> bool { x < y }); - prim_import!("prim/i8/lt", fn(x: i8, y: i8) -> bool { x < y }); - prim_import!("prim/i16/lt", fn(x: i16, y: i16) -> bool { x < y }); - prim_import!("prim/i32/lt", fn(x: i32, y: i32) -> bool { x < y }); - prim_import!("prim/i64/lt", fn(x: i64, y: i64) -> bool { x < y }); - prim_import!("prim/f32/lt", fn(x: f32, y: f32) -> bool { x < y }); - prim_import!("prim/f64/lt", fn(x: f64, y: f64) -> bool { x < y }); - - prim_import!("prim/string/gt", fn(x: String, y: String) -> bool { x > y }); - prim_import!("prim/bool/gt", fn(x: bool, y: bool) -> bool { x > y }); - prim_import!("prim/char/gt", fn(x: char, y: char) -> bool { x > y }); - prim_import!("prim/u8/gt", fn(x: u8, y: u8) -> bool { x > y }); - prim_import!("prim/u16/gt", fn(x: u16, y: u16) -> bool { x > y }); - prim_import!("prim/u32/gt", fn(x: u32, y: u32) -> bool { x > y }); - prim_import!("prim/u64/gt", fn(x: u64, y: u64) -> bool { x > y }); - prim_import!("prim/i8/gt", fn(x: i8, y: i8) -> bool { x > y }); - prim_import!("prim/i16/gt", fn(x: i16, y: i16) -> bool { x > y }); - prim_import!("prim/i32/gt", fn(x: i32, y: i32) -> bool { x > y }); - prim_import!("prim/i64/gt", fn(x: i64, y: i64) -> bool { x > y }); - prim_import!("prim/f32/gt", fn(x: f32, y: f32) -> bool { x > y }); - prim_import!("prim/f64/gt", fn(x: f64, y: f64) -> bool { x > y }); - - prim_import!("prim/string/ge", fn(x: String, y: String) -> bool { x >= y }); - prim_import!("prim/bool/ge", fn(x: bool, y: bool) -> bool { x >= y }); - prim_import!("prim/char/ge", fn(x: char, y: char) -> bool { x >= y }); - prim_import!("prim/u8/ge", fn(x: u8, y: u8) -> bool { x >= y }); - prim_import!("prim/u16/ge", fn(x: u16, y: u16) -> bool { x >= y }); - prim_import!("prim/u32/ge", fn(x: u32, y: u32) -> bool { x >= y }); - prim_import!("prim/u64/ge", fn(x: u64, y: u64) -> bool { x >= y }); - prim_import!("prim/i8/ge", fn(x: i8, y: i8) -> bool { x >= y }); - prim_import!("prim/i16/ge", fn(x: i16, 
y: i16) -> bool { x >= y }); - prim_import!("prim/i32/ge", fn(x: i32, y: i32) -> bool { x >= y }); - prim_import!("prim/i64/ge", fn(x: i64, y: i64) -> bool { x >= y }); - prim_import!("prim/f32/ge", fn(x: f32, y: f32) -> bool { x >= y }); - prim_import!("prim/f64/ge", fn(x: f64, y: f64) -> bool { x >= y }); - - prim_import!("prim/u8/add", fn(x: u8, y: u8) -> u8 { x + y }); - prim_import!("prim/u16/add", fn(x: u16, y: u16) -> u16 { x + y }); - prim_import!("prim/u32/add", fn(x: u32, y: u32) -> u32 { x + y }); - prim_import!("prim/u64/add", fn(x: u64, y: u64) -> u64 { x + y }); - prim_import!("prim/i8/add", fn(x: i8, y: i8) -> i8 { x + y }); - prim_import!("prim/i16/add", fn(x: i16, y: i16) -> i16 { x + y }); - prim_import!("prim/i32/add", fn(x: i32, y: i32) -> i32 { x + y }); - prim_import!("prim/i64/add", fn(x: i64, y: i64) -> i64 { x + y }); - prim_import!("prim/f32/add", fn(x: f32, y: f32) -> f32 { x + y }); - prim_import!("prim/f64/add", fn(x: f64, y: f64) -> f64 { x + y }); - - prim_import!("prim/u8/sub", fn(x: u8, y: u8) -> u8 { x - y }); - prim_import!("prim/u16/sub", fn(x: u16, y: u16) -> u16 { x - y }); - prim_import!("prim/u32/sub", fn(x: u32, y: u32) -> u32 { x - y }); - prim_import!("prim/u64/sub", fn(x: u64, y: u64) -> u64 { x - y }); - prim_import!("prim/i8/sub", fn(x: i8, y: i8) -> i8 { x - y }); - prim_import!("prim/i16/sub", fn(x: i16, y: i16) -> i16 { x - y }); - prim_import!("prim/i32/sub", fn(x: i32, y: i32) -> i32 { x - y }); - prim_import!("prim/i64/sub", fn(x: i64, y: i64) -> i64 { x - y }); - prim_import!("prim/f32/sub", fn(x: f32, y: f32) -> f32 { x - y }); - prim_import!("prim/f64/sub", fn(x: f64, y: f64) -> f64 { x - y }); - - prim_import!("prim/u8/mul", fn(x: u8, y: u8) -> u8 { x * y }); - prim_import!("prim/u16/mul", fn(x: u16, y: u16) -> u16 { x * y }); - prim_import!("prim/u32/mul", fn(x: u32, y: u32) -> u32 { x * y }); - prim_import!("prim/u64/mul", fn(x: u64, y: u64) -> u64 { x * y }); - prim_import!("prim/i8/mul", fn(x: i8, y: i8) 
-> i8 { x * y }); - prim_import!("prim/i16/mul", fn(x: i16, y: i16) -> i16 { x * y }); - prim_import!("prim/i32/mul", fn(x: i32, y: i32) -> i32 { x * y }); - prim_import!("prim/i64/mul", fn(x: i64, y: i64) -> i64 { x * y }); - prim_import!("prim/f32/mul", fn(x: f32, y: f32) -> f32 { x * y }); - prim_import!("prim/f64/mul", fn(x: f64, y: f64) -> f64 { x * y }); - - prim_import!("prim/u8/div", fn(x: u8, y: u8) -> u8 { x / y }); - prim_import!("prim/u16/div", fn(x: u16, y: u16) -> u16 { x / y }); - prim_import!("prim/u32/div", fn(x: u32, y: u32) -> u32 { x / y }); - prim_import!("prim/u64/div", fn(x: u64, y: u64) -> u64 { x / y }); - prim_import!("prim/i8/div", fn(x: i8, y: i8) -> i8 { x / y }); - prim_import!("prim/i16/div", fn(x: i16, y: i16) -> i16 { x / y }); - prim_import!("prim/i32/div", fn(x: i32, y: i32) -> i32 { x / y }); - prim_import!("prim/i64/div", fn(x: i64, y: i64) -> i64 { x / y }); - prim_import!("prim/f32/div", fn(x: f32, y: f32) -> f32 { x / y }); - prim_import!("prim/f64/div", fn(x: f64, y: f64) -> f64 { x / y }); - - prim_import!("prim/char/to-string", fn(val: char) -> String { val.to_string() }); - prim_import!("prim/u8/to-string", fn(val: u8) -> String { val.to_string() }); - prim_import!("prim/u16/to-string", fn(val: u16) -> String { val.to_string() }); - prim_import!("prim/u32/to-string", fn(val: u32) -> String { val.to_string() }); - prim_import!("prim/u64/to-string", fn(val: u64) -> String { val.to_string() }); - prim_import!("prim/i8/to-string", fn(val: i8) -> String { val.to_string() }); - prim_import!("prim/i16/to-string", fn(val: i16) -> String { val.to_string() }); - prim_import!("prim/i32/to-string", fn(val: i32) -> String { val.to_string() }); - prim_import!("prim/i64/to-string", fn(val: i64) -> String { val.to_string() }); - prim_import!("prim/f32/to-string", fn(val: f32) -> String { val.to_string() }); - prim_import!("prim/f64/to-string", fn(val: f64) -> String { val.to_string() }); - - prim_import!("prim/string/append", fn(x: 
String, y: String) -> String { x.clone() + y }); // FIXME: Clone - - context - } -} - -impl Context { - pub fn resugar(&self, src: &impl Resugar) -> T { - src.resugar(&self.resugar_env) - } - - pub fn mappings(&self) -> im::HashMap> { - self.declarations - .iter() - .filter_map(|(free_var, _)| { - let pretty_name = free_var.pretty_name.as_ref()?; - Some((pretty_name.clone(), free_var.clone())) - }) - .collect() - } - - pub fn bool(&self) -> &RcType { - &self.globals.ty_bool - } - - pub fn string(&self) -> &RcType { - &self.globals.ty_string - } - - pub fn char(&self) -> &RcType { - &self.globals.ty_char - } - - pub fn u8(&self) -> &RcType { - &self.globals.ty_u8 - } - - pub fn u16(&self) -> &RcType { - &self.globals.ty_u16 - } - - pub fn u32(&self) -> &RcType { - &self.globals.ty_u32 - } - - pub fn u64(&self) -> &RcType { - &self.globals.ty_u64 - } - - pub fn s8(&self) -> &RcType { - &self.globals.ty_s8 - } - - pub fn s16(&self) -> &RcType { - &self.globals.ty_s16 - } - - pub fn s32(&self) -> &RcType { - &self.globals.ty_s32 - } - - pub fn s64(&self) -> &RcType { - &self.globals.ty_s64 - } - - pub fn f32(&self) -> &RcType { - &self.globals.ty_f32 - } - - pub fn f64(&self) -> &RcType { - &self.globals.ty_f64 - } - - pub fn array<'a>(&self, ty: &'a RcType) -> Option<(u64, &'a RcType)> { - use pikelet_core::syntax::LevelShift; - - match ty.free_var_app() { - // Conservatively forcing the shift to be zero for now. Perhaps this - // could be relaxed in the future if it becomes a problem? 
- Some((fv, LevelShift(0), &[ref len, ref elem_ty])) if *fv == self.globals.var_array => { - match **len { - Value::Literal(Literal::U64(len)) => Some((len, elem_ty)), - _ => None, - } - }, - Some(_) | None => None, - } - } - - pub fn get_import(&self, name: &str) -> Option<&(Import, RcType)> { - self.imports.get(name) - } - - pub fn get_declaration(&self, free_var: &FreeVar) -> Option<&RcType> { - self.declarations.get(free_var) - } - - pub fn get_definition(&self, free_var: &FreeVar) -> Option<&RcTerm> { - self.definitions.get(free_var) - } - - pub fn insert_import(&mut self, name: String, import: Import, ty: RcType) { - self.imports.insert(name, (import, ty)); - } - - pub fn insert_declaration(&mut self, free_var: FreeVar, ty: RcType) { - self.resugar_env.on_binder(&Binder(free_var.clone())); - self.declarations.insert(free_var, ty); - } - - pub fn insert_definition(&mut self, free_var: FreeVar, term: RcTerm) { - self.resugar_env.on_binder(&Binder(free_var.clone())); - self.definitions.insert(free_var, term); - } -} - -impl nbe::Env for Context { - fn get_import(&self, name: &str) -> Option<&Import> { - self.imports.get(name).map(|&(ref import, _)| import) - } - - fn get_definition(&self, free_var: &FreeVar) -> Option<&RcTerm> { - self.definitions.get(free_var) - } -} diff --git a/crates/pikelet-concrete/src/elaborate/errors.rs b/crates/pikelet-concrete/src/elaborate/errors.rs deleted file mode 100644 index 869ad52b5..000000000 --- a/crates/pikelet-concrete/src/elaborate/errors.rs +++ /dev/null @@ -1,391 +0,0 @@ -//! Errors that might be produced during semantic analysis - -use codespan::ByteSpan; -use codespan_reporting::{Diagnostic, Label}; -use moniker::{Binder, FreeVar, Var}; -use failure::Fail; - -use pikelet_core::nbe::NbeError; -use pikelet_core::syntax; - -use crate::syntax::{concrete, raw}; - -/// An internal error. These are bugs! 
-#[derive(Debug, Fail, Clone, PartialEq)] -pub enum InternalError { - #[fail(display = "Unexpected bound variable: `{}`.", var)] - UnexpectedBoundVar { span: ByteSpan, var: Var }, - #[fail(display = "not yet implemented: {}", message)] - Unimplemented { - span: Option, - message: String, - }, - #[fail(display = "nbe: {}", _0)] - Nbe(#[cause] NbeError), -} - -impl From for InternalError { - fn from(src: NbeError) -> InternalError { - InternalError::Nbe(src) - } -} - -impl InternalError { - pub fn to_diagnostic(&self) -> Diagnostic { - match *self { - InternalError::UnexpectedBoundVar { span, ref var } => { - Diagnostic::new_bug(format!("unexpected bound variable: `{}`", var)).with_label( - Label::new_primary(span).with_message("bound variable encountered here"), - ) - }, - InternalError::Unimplemented { span, ref message } => { - let base = Diagnostic::new_bug(format!("not yet implemented: {}", message)); - match span { - None => base, - Some(span) => base.with_label( - Label::new_primary(span) - .with_message("unimplemented feature encountered here"), - ), - } - }, - InternalError::Nbe(ref nbe_error) => { - Diagnostic::new_bug(format!("failed to normalize: {}", nbe_error)) - }, - } - } -} - -/// An error produced during type checking -#[derive(Debug, Fail, Clone, PartialEq)] -pub enum TypeError { - #[fail( - display = "Name had more than one declaration associated with it: `{}`", - binder - )] - DuplicateDeclarations { - original_span: ByteSpan, - duplicate_span: ByteSpan, - binder: Binder, - }, - #[fail(display = "Declaration followed definition: `{}`", binder)] - DeclarationFollowedDefinition { - definition_span: ByteSpan, - declaration_span: ByteSpan, - binder: Binder, - }, - #[fail( - display = "Name had more than one definition associated with it: `{}`", - binder - )] - DuplicateDefinitions { - original_span: ByteSpan, - duplicate_span: ByteSpan, - binder: Binder, - }, - #[fail(display = "Applied an argument to a non-function type `{}`", found)] - 
ArgAppliedToNonFunction { - fn_span: ByteSpan, - arg_span: ByteSpan, - found: Box, - }, - #[fail( - display = "Type annotation needed for the function parameter `{}`", - name - )] - FunctionParamNeedsAnnotation { - param_span: ByteSpan, - var_span: Option, - name: FreeVar, - }, - #[fail(display = "Type annotation needed for the binder `{}`", binder)] - BinderNeedsAnnotation { - span: ByteSpan, - binder: Binder, - }, - #[fail(display = "found a `{}`, but expected a type `{}`", found, expected)] - LiteralMismatch { - literal_span: ByteSpan, - found: raw::Literal, - expected: Box, - }, - #[fail(display = "Ambiguous integer literal")] - AmbiguousIntLiteral { span: ByteSpan }, - #[fail(display = "Ambiguous floating point literal")] - AmbiguousFloatLiteral { span: ByteSpan }, - #[fail(display = "Empty case expressions need type annotations.")] - AmbiguousEmptyCase { span: ByteSpan }, - #[fail(display = "Unable to elaborate hole, expected: `{:?}`", expected)] - UnableToElaborateHole { - span: ByteSpan, - expected: Option>, - }, - #[fail( - display = "Type mismatch: found `{}` but `{}` was expected", - found, expected - )] - Mismatch { - span: ByteSpan, - found: Box, - expected: Box, - }, - #[fail(display = "Found a function but expected `{}`", expected)] - UnexpectedFunction { - span: ByteSpan, - expected: Box, - }, - #[fail(display = "Found `{}` but a universe was expected", found)] - ExpectedUniverse { - span: ByteSpan, - found: Box, - }, - #[fail(display = "Not yet defined: `{}`", free_var)] - UndefinedName { - span: ByteSpan, - free_var: FreeVar, - }, - #[fail(display = "Undefined import `{:?}`", name)] - UndefinedImport { span: ByteSpan, name: String }, - #[fail( - display = "Label mismatch: found label `{}` but `{}` was expected", - found, expected - )] - LabelMismatch { - span: ByteSpan, - found: syntax::Label, - expected: syntax::Label, - }, - #[fail( - display = "Mismatched array length: expected {} elements but found {}", - expected_len, found_len - )] - 
ArrayLengthMismatch { - span: ByteSpan, - found_len: u64, - expected_len: u64, - }, - #[fail(display = "Ambiguous record")] - AmbiguousArrayLiteral { span: ByteSpan }, - #[fail( - display = "The type `{}` does not contain a field named `{}`.", - found, expected_label - )] - NoFieldInType { - label_span: ByteSpan, - expected_label: syntax::Label, - found: Box, - }, - #[fail( - display = "Mismatched record size: expected {} fields but found {}", - expected_size, found_size - )] - RecordSizeMismatch { - span: ByteSpan, - found_size: u64, - expected_size: u64, - }, - #[fail(display = "Internal error - this is a bug! {}", _0)] - Internal(#[cause] InternalError), -} - -impl TypeError { - /// Convert the error into a diagnostic message - pub fn to_diagnostic(&self) -> Diagnostic { - match *self { - TypeError::Internal(ref err) => err.to_diagnostic(), - TypeError::DuplicateDeclarations { - original_span, - duplicate_span, - ref binder, - } => Diagnostic::new_error(format!( - "name had more than one declaration associated with it `{}`", - binder, - )) - .with_label( - Label::new_primary(duplicate_span).with_message("the duplicated declaration"), - ) - .with_label( - Label::new_secondary(original_span).with_message("the original declaration"), - ), - TypeError::DeclarationFollowedDefinition { - definition_span, - declaration_span, - binder: _, - } => Diagnostic::new_error(format!("declarations cannot follow definitions")) - .with_label(Label::new_primary(declaration_span).with_message("the declaration")) - .with_label( - Label::new_secondary(definition_span).with_message("the original definition"), - ), - TypeError::DuplicateDefinitions { - original_span, - duplicate_span, - ref binder, - } => Diagnostic::new_error(format!( - "name had more than one definition associated with it `{}`", - binder, - )) - .with_label( - Label::new_primary(duplicate_span).with_message("the duplicated definition"), - ) - .with_label( - Label::new_secondary(original_span).with_message("the 
original definition"), - ), - TypeError::ArgAppliedToNonFunction { - fn_span, - arg_span, - ref found, - } => Diagnostic::new_error(format!( - "applied an argument to a term that was not a function - found type `{}`", - found, - )) - .with_label(Label::new_primary(fn_span).with_message("the term")) - .with_label(Label::new_secondary(arg_span).with_message("the applied argument")), - TypeError::FunctionParamNeedsAnnotation { - param_span, - var_span: _, // TODO - ref name, - } => Diagnostic::new_error(format!( - "type annotation needed for the function parameter `{}`", - name - )) - .with_label( - Label::new_primary(param_span) - .with_message("the parameter that requires an annotation"), - ), - TypeError::BinderNeedsAnnotation { span, ref binder } => Diagnostic::new_error( - format!("type annotation needed for the binder `{}`", binder), - ) - .with_label( - Label::new_primary(span).with_message("the binder that requires an annotation"), - ), - TypeError::LiteralMismatch { - literal_span, - ref found, - ref expected, - } => { - let found_text = match *found { - raw::Literal::String(_, _) => "string", - raw::Literal::Char(_, _) => "character", - raw::Literal::Int(_, _, _) => "numeric", - raw::Literal::Float(_, _, _) => "floating point", - }; - - Diagnostic::new_error(format!( - "found a {} literal, but expected a type `{}`", - found_text, expected, - )) - .with_label(Label::new_primary(literal_span).with_message("the literal")) - }, - TypeError::AmbiguousIntLiteral { span } => Diagnostic::new_error( - "ambiguous integer literal", - ) - .with_label(Label::new_primary(span).with_message("type annotation needed here")), - TypeError::AmbiguousFloatLiteral { span } => Diagnostic::new_error( - "ambiguous floating point literal", - ) - .with_label(Label::new_primary(span).with_message("type annotation needed here")), - TypeError::AmbiguousEmptyCase { span } => Diagnostic::new_error( - "empty case expressions need type annotations", - ) - 
.with_label(Label::new_primary(span).with_message("type annotation needed here")), - TypeError::UnableToElaborateHole { - span, - expected: None, - .. - } => Diagnostic::new_error("unable to elaborate hole") - .with_label(Label::new_primary(span).with_message("the hole")), - TypeError::UnableToElaborateHole { - span, - expected: Some(ref expected), - .. - } => Diagnostic::new_error(format!( - "unable to elaborate hole - expected: `{}`", - expected, - )) - .with_label(Label::new_primary(span).with_message("the hole")), - TypeError::UnexpectedFunction { - span, ref expected, .. - } => Diagnostic::new_error(format!( - "found a function but expected a term of type `{}`", - expected, - )) - .with_label(Label::new_primary(span).with_message("the function")), - TypeError::Mismatch { - span, - ref found, - ref expected, - } => Diagnostic::new_error(format!( - "found a term of type `{}`, but expected a term of type `{}`", - found, expected, - )) - .with_label(Label::new_primary(span).with_message("the term")), - TypeError::ExpectedUniverse { ref found, span } => { - Diagnostic::new_error(format!("expected type, found a value of type `{}`", found)) - .with_label(Label::new_primary(span).with_message("the value")) - }, - TypeError::UndefinedName { ref free_var, span } => { - Diagnostic::new_bug(format!("cannot find `{}` in scope", free_var)) - .with_label(Label::new_primary(span).with_message("not found in this scope")) - }, - TypeError::UndefinedImport { span, ref name } => { - Diagnostic::new_error(format!("cannot find import for `{:?}`", name)) - .with_label(Label::new_primary(span).with_message("import not found")) - }, - TypeError::LabelMismatch { - span, - ref expected, - ref found, - } => Diagnostic::new_error(format!( - "expected field called `{}`, but found a field called `{}`", - expected, found, - )) - .with_label(Label::new_primary(span)), - TypeError::ArrayLengthMismatch { - span, - found_len, - expected_len, - } => Diagnostic::new_error(format!( - "mismatched 
array length: expected {} elements but found {}", - expected_len, found_len - )) - .with_label( - Label::new_primary(span).with_message(format!("array with {} elements", found_len)), - ), - TypeError::AmbiguousArrayLiteral { span } => Diagnostic::new_error( - "ambiguous array literal", - ) - .with_label(Label::new_primary(span).with_message("type annotations needed here")), - TypeError::NoFieldInType { - label_span, - ref expected_label, - ref found, - } => Diagnostic::new_error(format!( - "the type `{}` does not contain a field called `{}`", - found, expected_label - )) - .with_label(Label::new_primary(label_span).with_message("the field lookup")), - TypeError::RecordSizeMismatch { - span, - found_size, - expected_size, - } => Diagnostic::new_error(format!( - "mismatched record size: expected {} fields but found {}", - expected_size, found_size - )) - .with_label( - Label::new_primary(span).with_message(format!("record with {} fields", found_size)), - ), - } - } -} - -impl From for TypeError { - fn from(src: InternalError) -> TypeError { - TypeError::Internal(src) - } -} - -impl From for TypeError { - fn from(src: NbeError) -> TypeError { - TypeError::from(InternalError::from(src)) - } -} diff --git a/crates/pikelet-concrete/src/elaborate/mod.rs b/crates/pikelet-concrete/src/elaborate/mod.rs deleted file mode 100644 index ad5ec4dfd..000000000 --- a/crates/pikelet-concrete/src/elaborate/mod.rs +++ /dev/null @@ -1,684 +0,0 @@ -//! The semantics of the language -//! -//! Here we define the rules of normalization, type checking, and type inference. -//! -//! For more information, check out the theory appendix of the Pikelet book. 
- -use codespan::ByteSpan; -use moniker::{Binder, BoundPattern, BoundTerm, Embed, FreeVar, Nest, Scope, Var}; - -use pikelet_core::nbe; -use pikelet_core::syntax::core::{Pattern, RcPattern, RcTerm, Term}; -use pikelet_core::syntax::domain::{RcType, RcValue, Value}; -use pikelet_core::syntax::{Level, Literal}; - -use crate::syntax::raw; - -mod context; -mod errors; - -pub use self::context::{Context, Globals}; -pub use self::errors::{InternalError, TypeError}; - -/// Returns true if `ty1` is a subtype of `ty2` -fn is_subtype(context: &Context, ty1: &RcType, ty2: &RcType) -> bool { - match (&*ty1.inner, &*ty2.inner) { - // ST-TYPE - (&Value::Universe(level1), &Value::Universe(level2)) => level1 <= level2, - - // ST-PI - (&Value::FunType(ref scope1), &Value::FunType(ref scope2)) => { - let ((_, Embed(ann1)), body1, (Binder(free_var2), Embed(ann2)), body2) = - Scope::unbind2(scope1.clone(), scope2.clone()); - - is_subtype(context, &ann2, &ann1) && { - let mut context = context.clone(); - context.insert_declaration(free_var2, ann2); - is_subtype(&context, &body1, &body2) - } - }, - - // ST-RECORD-TYPE, ST-EMPTY-RECORD-TYPE - (&Value::RecordType(ref scope1), &Value::RecordType(ref scope2)) => { - if scope1.unsafe_pattern.unsafe_patterns.len() - != scope2.unsafe_pattern.unsafe_patterns.len() - { - return false; - } - - let (fields1, (), fields2, ()) = Scope::unbind2(scope1.clone(), scope2.clone()); - - let mut context = context.clone(); - for (field1, field2) in - Iterator::zip(fields1.unnest().into_iter(), fields2.unnest().into_iter()) - { - let (label1, Binder(free_var1), Embed(ty1)) = field1; - let (label2, _, Embed(ty2)) = field2; - - if label1 == label2 && is_subtype(&context, &ty1, &ty2) { - context.insert_declaration(free_var1, ty1); - } else { - return false; - } - } - - true - }, - - // ST-ALPHA-EQ - (_, _) => RcType::term_eq(ty1, ty2), - } -} - -/// Ensures that the given term is a universe, returning the level of that -/// universe and its elaborated form. 
-fn infer_universe(context: &Context, raw_term: &raw::RcTerm) -> Result<(RcTerm, Level), TypeError> { - let (term, ty) = infer_term(context, raw_term)?; - match *ty { - Value::Universe(level) => Ok((term, level)), - _ => Err(TypeError::ExpectedUniverse { - span: raw_term.span(), - found: Box::new(context.resugar(&ty)), - }), - } -} - -/// Checks that a literal is compatible with the given type, returning the -/// elaborated literal if successful -fn check_literal( - context: &Context, - raw_literal: &raw::Literal, - expected_ty: &RcType, -) -> Result { - match *raw_literal { - raw::Literal::String(_, ref val) if context.string() == expected_ty => { - Ok(Literal::String(val.clone())) - }, - raw::Literal::Char(_, val) if context.char() == expected_ty => Ok(Literal::Char(val)), - - // FIXME: overflow? - raw::Literal::Int(_, v, _) if context.u8() == expected_ty => Ok(Literal::U8(v as u8)), - raw::Literal::Int(_, v, _) if context.u16() == expected_ty => Ok(Literal::U16(v as u16)), - raw::Literal::Int(_, v, _) if context.u32() == expected_ty => Ok(Literal::U32(v as u32)), - raw::Literal::Int(_, v, _) if context.u64() == expected_ty => Ok(Literal::U64(v)), - raw::Literal::Int(_, v, _) if context.s8() == expected_ty => Ok(Literal::S8(v as i8)), - raw::Literal::Int(_, v, _) if context.s16() == expected_ty => Ok(Literal::S16(v as i16)), - raw::Literal::Int(_, v, _) if context.s32() == expected_ty => Ok(Literal::S32(v as i32)), - raw::Literal::Int(_, v, _) if context.s64() == expected_ty => Ok(Literal::S64(v as i64)), - raw::Literal::Int(_, v, _) if context.f32() == expected_ty => Ok(Literal::F32(v as f32)), - raw::Literal::Int(_, v, _) if context.f64() == expected_ty => Ok(Literal::F64(v as f64)), - raw::Literal::Float(_, v, _) if context.f32() == expected_ty => Ok(Literal::F32(v as f32)), - raw::Literal::Float(_, v, _) if context.f64() == expected_ty => Ok(Literal::F64(v)), - - _ => Err(TypeError::LiteralMismatch { - literal_span: raw_literal.span(), - found: 
raw_literal.clone(), - expected: Box::new(context.resugar(expected_ty)), - }), - } -} - -/// Synthesize the type of a literal, returning the elaborated literal and the -/// inferred type if successful -fn infer_literal( - context: &Context, - raw_literal: &raw::Literal, -) -> Result<(Literal, RcType), TypeError> { - use pikelet_core::syntax::Literal::{Char, String}; - - match *raw_literal { - raw::Literal::String(_, ref val) => Ok((String(val.clone()), context.string().clone())), - raw::Literal::Char(_, val) => Ok((Char(val), context.char().clone())), - raw::Literal::Int(span, _, _) => Err(TypeError::AmbiguousIntLiteral { span }), - raw::Literal::Float(span, _, _) => Err(TypeError::AmbiguousFloatLiteral { span }), - } -} - -/// Checks that a pattern is compatible with the given type, returning the -/// elaborated pattern and a vector of the declarations it introduced if successful -pub fn check_pattern( - context: &Context, - raw_pattern: &raw::RcPattern, - expected_ty: &RcType, -) -> Result<(RcPattern, Vec<(FreeVar, RcType)>), TypeError> { - match (&*raw_pattern.inner, &*expected_ty.inner) { - (&raw::Pattern::Binder(_, Binder(ref free_var)), _) => { - return Ok(( - RcPattern::from(Pattern::Binder(Binder(free_var.clone()))), - vec![(free_var.clone(), expected_ty.clone())], - )); - }, - (&raw::Pattern::Literal(ref raw_literal), _) => { - let literal = check_literal(context, raw_literal, expected_ty)?; - return Ok((RcPattern::from(Pattern::Literal(literal)), vec![])); - }, - _ => {}, - } - - let (pattern, inferred_ty, declarations) = infer_pattern(context, raw_pattern)?; - if is_subtype(context, &inferred_ty, expected_ty) { - Ok((pattern, declarations)) - } else { - Err(TypeError::Mismatch { - span: raw_pattern.span(), - found: Box::new(context.resugar(&inferred_ty)), - expected: Box::new(context.resugar(expected_ty)), - }) - } -} - -/// Synthesize the type of a pattern, returning the elaborated pattern, the -/// inferred type, and a vector of the declarations it 
introduced if successful -pub fn infer_pattern( - context: &Context, - raw_pattern: &raw::RcPattern, -) -> Result<(RcPattern, RcType, Vec<(FreeVar, RcType)>), TypeError> { - match *raw_pattern.inner { - raw::Pattern::Ann(ref raw_pattern, Embed(ref raw_ty)) => { - let (ty, _) = infer_universe(context, raw_ty)?; - let value_ty = nbe::nf_term(context, &ty)?; - let (pattern, declarations) = check_pattern(context, raw_pattern, &value_ty)?; - - Ok(( - RcPattern::from(Pattern::Ann(pattern, Embed(ty))), - value_ty, - declarations, - )) - }, - raw::Pattern::Binder(span, ref binder) => Err(TypeError::BinderNeedsAnnotation { - span, - binder: binder.clone(), - }), - raw::Pattern::Var(span, Embed(ref var), shift) => match *var { - Var::Free(ref free_var) => match context.get_declaration(free_var) { - Some(ty) => { - let mut ty = ty.clone(); - ty.shift_universes(shift); - let pattern = RcPattern::from(Pattern::Var(Embed(var.clone()), shift)); - - Ok((pattern, ty, vec![])) - }, - None => Err(TypeError::UndefinedName { - span, - free_var: free_var.clone(), - }), - }, - - // We should always be substituting bound variables with fresh - // variables when entering scopes using `unbind`, so if we've - // encountered one here this is definitely a bug! 
- Var::Bound(_) => Err(InternalError::UnexpectedBoundVar { - span: raw_pattern.span(), - var: var.clone(), - } - .into()), - }, - raw::Pattern::Literal(ref literal) => { - let (literal, ty) = infer_literal(context, literal)?; - Ok((RcPattern::from(Pattern::Literal(literal)), ty, vec![])) - }, - } -} - -/// Checks that a term is compatible with the given type, returning the -/// elaborated term if successful -pub fn check_term( - context: &Context, - raw_term: &raw::RcTerm, - expected_ty: &RcType, -) -> Result { - match (&*raw_term.inner, &*expected_ty.inner) { - (&raw::Term::Literal(ref raw_literal), _) => { - let literal = check_literal(context, raw_literal, expected_ty)?; - return Ok(RcTerm::from(Term::Literal(literal))); - }, - - // C-LAM - (&raw::Term::FunIntro(_, ref fun_scope), &Value::FunType(ref fun_ty_scope)) => { - let ( - (fun_name, Embed(fun_ann)), - fun_body, - (Binder(fun_ty_name), Embed(fun_ty_ann)), - fun_ty_body, - ) = Scope::unbind2(fun_scope.clone(), fun_ty_scope.clone()); - - // Elaborate the hole, if it exists - if let raw::Term::Hole(_) = *fun_ann.inner { - let fun_ann = RcTerm::from(Term::from(&*fun_ty_ann)); - let fun_body = { - let mut body_context = context.clone(); - body_context.insert_declaration(fun_ty_name, fun_ty_ann); - check_term(&body_context, &fun_body, &fun_ty_body)? 
- }; - let fun_scope = Scope::new((fun_name, Embed(fun_ann)), fun_body); - - return Ok(RcTerm::from(Term::FunIntro(fun_scope))); - } - - // TODO: We might want to optimise for this case, rather than - // falling through to `infer` and unbinding again at I-LAM - }, - (&raw::Term::FunIntro(_, _), _) => { - return Err(TypeError::UnexpectedFunction { - span: raw_term.span(), - expected: Box::new(context.resugar(expected_ty)), - }); - }, - - // C-RECORD - (&raw::Term::RecordIntro(span, ref raw_fields), &Value::RecordType(ref raw_ty_scope)) => { - let (raw_ty_fields, ()) = { - let expected_size = raw_ty_scope.unsafe_pattern.binders().len(); - if raw_fields.len() == raw_ty_scope.unsafe_pattern.binders().len() { - raw_ty_scope.clone().unbind() - } else { - return Err(TypeError::RecordSizeMismatch { - span, - found_size: raw_fields.len() as u64, - expected_size: expected_size as u64, - }); - } - }; - - let raw_ty_fields = raw_ty_fields.unnest(); - - // FIXME: Check that record is well-formed? - let fields = { - let mut mappings = Vec::with_capacity(raw_fields.len()); - <_>::zip(raw_fields.iter(), raw_ty_fields.into_iter()) - .map(|(field, ty_field)| { - let &(ref label, ref raw_expr) = field; - let (ty_label, Binder(ty_free_var), Embed(ann)) = ty_field; - - if *label == ty_label { - let ann = nbe::nf_term(context, &ann.substs(&mappings))?; - let expr = check_term(context, &raw_expr, &ann)?; - mappings.push((ty_free_var, expr.clone())); - Ok((label.clone(), expr)) - } else { - Err(TypeError::LabelMismatch { - span, - found: label.clone(), - expected: ty_label, - }) - } - }) - .collect::>()? 
- }; - - return Ok(RcTerm::from(Term::RecordIntro(fields))); - }, - - (&raw::Term::Case(_, ref raw_head, ref raw_clauses), _) => { - let (head, head_ty) = infer_term(context, raw_head)?; - - // TODO: ensure that patterns are exhaustive - let clauses = raw_clauses - .iter() - .map(|raw_clause| { - let (raw_pattern, raw_body) = raw_clause.clone().unbind(); - let (pattern, declarations) = check_pattern(context, &raw_pattern, &head_ty)?; - - let body = { - let mut body_context = context.clone(); - for (free_var, ty) in declarations { - body_context.insert_declaration(free_var, ty); - } - check_term(&body_context, &raw_body, expected_ty)? - }; - - Ok(Scope::new(pattern, body)) - }) - .collect::>()?; - - return Ok(RcTerm::from(Term::Case(head, clauses))); - }, - - (&raw::Term::ArrayIntro(span, ref elems), _) => { - return match context.array(expected_ty) { - Some((len, elem_ty)) if len == elems.len() as u64 => { - let elems = elems - .iter() - .map(|elem| check_term(context, elem, elem_ty)) - .collect::>()?; - - Ok(RcTerm::from(Term::ArrayIntro(elems))) - }, - Some((len, _)) => Err(TypeError::ArrayLengthMismatch { - span, - found_len: elems.len() as u64, - expected_len: len, - }), - None => Err(TypeError::Internal(InternalError::Unimplemented { - span: Some(span), - message: "unexpected arguments to `Array`".to_owned(), - })), - }; - }, - - (&raw::Term::Hole(span), _) => { - let expected = Some(Box::new(context.resugar(expected_ty))); - return Err(TypeError::UnableToElaborateHole { span, expected }); - }, - - _ => {}, - } - - // C-CONV - let (term, inferred_ty) = infer_term(context, raw_term)?; - if is_subtype(context, &inferred_ty, expected_ty) { - Ok(term) - } else { - Err(TypeError::Mismatch { - span: raw_term.span(), - found: Box::new(context.resugar(&inferred_ty)), - expected: Box::new(context.resugar(expected_ty)), - }) - } -} - -/// Synthesize the type of a term, returning the elaborated term and the -/// inferred type if successful -pub fn infer_term( - context: 
&Context, - raw_term: &raw::RcTerm, -) -> Result<(RcTerm, RcType), TypeError> { - use std::cmp; - - match *raw_term.inner { - // I-ANN - raw::Term::Ann(ref raw_term, ref raw_ty) => { - if let raw::Term::Hole(_) = *raw_ty.inner { - let (term, value_ty) = infer_term(context, &raw_term)?; - let ty = RcTerm::from(&*value_ty); - - Ok((RcTerm::from(Term::Ann(term, ty)), value_ty)) - } else { - let (ty, _) = infer_universe(context, &raw_ty)?; - let value_ty = nbe::nf_term(context, &ty)?; - let term = check_term(context, raw_term, &value_ty)?; - - Ok((RcTerm::from(Term::Ann(term, ty)), value_ty)) - } - }, - - // I-TYPE - raw::Term::Universe(_, level) => Ok(( - RcTerm::from(Term::Universe(level)), - RcValue::from(Value::Universe(level.succ())), - )), - - raw::Term::Hole(span) => { - let expected = None; - Err(TypeError::UnableToElaborateHole { span, expected }) - }, - - raw::Term::Literal(ref raw_literal) => { - let (literal, ty) = infer_literal(context, raw_literal)?; - Ok((RcTerm::from(Term::Literal(literal)), ty)) - }, - - // I-VAR - raw::Term::Var(span, ref var, shift) => match *var { - Var::Free(ref free_var) => match context.get_declaration(free_var) { - Some(ty) => { - let mut ty = ty.clone(); - ty.shift_universes(shift); - - Ok((RcTerm::from(Term::Var(var.clone(), shift)), ty)) - }, - None => Err(TypeError::UndefinedName { - span, - free_var: free_var.clone(), - }), - }, - - // We should always be substituting bound variables with fresh - // variables when entering scopes using `unbind`, so if we've - // encountered one here this is definitely a bug! 
- Var::Bound(_) => Err(InternalError::UnexpectedBoundVar { - span: raw_term.span(), - var: var.clone(), - } - .into()), - }, - - raw::Term::Import(_, name_span, ref name) => match context.get_import(name) { - Some((_, ty)) => Ok((RcTerm::from(Term::Import(name.clone())), ty.clone())), - None => Err(TypeError::UndefinedImport { - span: name_span, - name: name.clone(), - }), - }, - - // I-PI - raw::Term::FunType(_, ref raw_scope) => { - let ((Binder(free_var), Embed(raw_ann)), raw_body) = raw_scope.clone().unbind(); - - let (ann, ann_level) = infer_universe(context, &raw_ann)?; - let (body, body_level) = { - let ann = nbe::nf_term(context, &ann)?; - let mut body_context = context.clone(); - body_context.insert_declaration(free_var.clone(), ann); - infer_universe(&body_context, &raw_body)? - }; - - let param = (Binder(free_var), Embed(ann)); - - Ok(( - RcTerm::from(Term::FunType(Scope::new(param, body))), - RcValue::from(Value::Universe(cmp::max(ann_level, body_level))), - )) - }, - - // I-LAM - raw::Term::FunIntro(_, ref raw_scope) => { - let ((Binder(free_var), Embed(raw_ann)), raw_body) = raw_scope.clone().unbind(); - - // Check for holes before entering to ensure we get a nice error - if let raw::Term::Hole(_) = *raw_ann { - return Err(TypeError::FunctionParamNeedsAnnotation { - param_span: ByteSpan::default(), // TODO: param.span(), - var_span: None, - name: free_var.clone(), - }); - } - - let (fun_ann, _) = infer_universe(context, &raw_ann)?; - let fun_ty_ann = nbe::nf_term(context, &fun_ann)?; - let (fun_body, fun_ty_body) = { - let mut body_context = context.clone(); - body_context.insert_declaration(free_var.clone(), fun_ty_ann.clone()); - infer_term(&body_context, &raw_body)? 
- }; - - let fun_param = (Binder(free_var.clone()), Embed(fun_ann)); - let fun_ty_param = (Binder(free_var.clone()), Embed(fun_ty_ann)); - - Ok(( - RcTerm::from(Term::FunIntro(Scope::new(fun_param, fun_body))), - RcValue::from(Value::FunType(Scope::new(fun_ty_param, fun_ty_body))), - )) - }, - - // I-LET - raw::Term::Let(_, ref raw_scope) => { - let (raw_fields, raw_body) = raw_scope.clone().unbind(); - - let (term, ty) = { - let mut context = context.clone(); - let bindings = raw_fields - .unnest() - .into_iter() - .map(|(Binder(free_var), Embed(raw_term))| { - let (term, term_ty) = infer_term(&context, &raw_term)?; - - context.insert_definition(free_var.clone(), term.clone()); - context.insert_declaration(free_var.clone(), term_ty); - - Ok((Binder(free_var), Embed(term))) - }) - .collect::>()?; - - let (body, ty) = infer_term(&context, &raw_body)?; - let term = RcTerm::from(Term::Let(Scope::new(Nest::new(bindings), body))); - - (term, ty) - }; - - Ok((term, ty)) - }, - - // I-APP - raw::Term::FunApp(ref raw_head, ref raw_arg) => { - let (head, head_ty) = infer_term(context, raw_head)?; - - match *head_ty { - Value::FunType(ref scope) => { - let ((Binder(free_var), Embed(ann)), body) = scope.clone().unbind(); - - let arg = check_term(context, raw_arg, &ann)?; - let body = nbe::nf_term(context, &body.substs(&[(free_var, arg.clone())]))?; - - Ok((RcTerm::from(Term::FunApp(head, arg)), body)) - }, - _ => Err(TypeError::ArgAppliedToNonFunction { - fn_span: raw_head.span(), - arg_span: raw_arg.span(), - found: Box::new(context.resugar(&head_ty)), - }), - } - }, - - // I-RECORD-TYPE, I-EMPTY-RECORD-TYPE - raw::Term::RecordType(_, ref raw_scope) => { - let (raw_fields, ()) = raw_scope.clone().unbind(); - let mut max_level = Level(0); - - // FIXME: Check that record is well-formed? 
- let fields = { - let mut context = context.clone(); - raw_fields - .unnest() - .into_iter() - .map(|(label, Binder(free_var), Embed(raw_ann))| { - let (ann, ann_level) = infer_universe(&context, &raw_ann)?; - let nf_ann = nbe::nf_term(&context, &ann)?; - - max_level = cmp::max(max_level, ann_level); - context.insert_declaration(free_var.clone(), nf_ann); - - Ok((label, Binder(free_var), Embed(ann))) - }) - .collect::>()? - }; - - Ok(( - RcTerm::from(Term::RecordType(Scope::new(Nest::new(fields), ()))), - RcValue::from(Value::Universe(max_level)), - )) - }, - - // I-RECORD, I-EMPTY-RECORD - raw::Term::RecordIntro(_, ref raw_fields) => { - let mut fields = Vec::with_capacity(raw_fields.len()); - let mut ty_fields = Vec::with_capacity(raw_fields.len()); - - // FIXME: error on duplicate field names - { - let mut ty_mappings = Vec::with_capacity(raw_fields.len()); - for &(ref label, ref raw_term) in raw_fields { - let free_var = FreeVar::fresh_named(label.0.clone()); - let (term, term_ty) = infer_term(context, &raw_term)?; - let term_ty = nbe::nf_term(context, &term_ty.substs(&ty_mappings))?; - - fields.push((label.clone(), term.clone())); - ty_fields.push((label.clone(), Binder(free_var.clone()), Embed(term_ty))); - ty_mappings.push((free_var, term)); - } - } - - Ok(( - RcTerm::from(Term::RecordIntro(fields)), - RcValue::from(Value::RecordType(Scope::new(Nest::new(ty_fields), ()))), - )) - }, - - // I-PROJ - raw::Term::RecordProj(_, ref expr, label_span, ref label, shift) => { - let (expr, ty) = infer_term(context, expr)?; - - if let Value::RecordType(ref scope) = *ty.inner { - let (fields, ()) = scope.clone().unbind(); - let mut mappings = vec![]; - - for (current_label, Binder(free_var), Embed(current_ann)) in fields.unnest() { - if current_label == *label { - let expr = RcTerm::from(Term::RecordProj(expr, current_label, shift)); - let mut ty = nbe::nf_term(context, ¤t_ann.substs(&mappings))?; - ty.shift_universes(shift); - - return Ok((expr, ty)); - } else { - 
mappings.push(( - free_var, - // NOTE: Not sure if we should be shifting here... - RcTerm::from(Term::RecordProj(expr.clone(), current_label, shift)), - )); - } - } - } - - Err(TypeError::NoFieldInType { - label_span, - expected_label: label.clone(), - found: Box::new(context.resugar(&ty)), - }) - }, - - // I-CASE - raw::Term::Case(span, ref raw_head, ref raw_clauses) => { - let (head, head_ty) = infer_term(context, raw_head)?; - let mut ty = None; - - // TODO: ensure that patterns are exhaustive - let clauses = raw_clauses - .iter() - .map(|raw_clause| { - let (raw_pattern, raw_body) = raw_clause.clone().unbind(); - let (pattern, declarations) = check_pattern(context, &raw_pattern, &head_ty)?; - - let (body, body_ty) = { - let mut body_context = context.clone(); - for (free_var, ty) in declarations { - body_context.insert_declaration(free_var, ty); - } - infer_term(&body_context, &raw_body)? - }; - - match ty { - None => ty = Some(body_ty), - // FIXME: use common subtype? - Some(ref ty) if RcValue::term_eq(&body_ty, ty) => {}, - Some(ref ty) => { - return Err(TypeError::Mismatch { - span: raw_body.span(), - found: Box::new(context.resugar(&body_ty)), - expected: Box::new(context.resugar(ty)), - }); - }, - } - - Ok(Scope::new(pattern, body)) - }) - .collect::>()?; - - match ty { - Some(ty) => Ok((RcTerm::from(Term::Case(head, clauses)), ty)), - None => Err(TypeError::AmbiguousEmptyCase { span }), - } - }, - - raw::Term::ArrayIntro(span, _) => Err(TypeError::AmbiguousArrayLiteral { span }), - } -} diff --git a/crates/pikelet-concrete/src/lib.rs b/crates/pikelet-concrete/src/lib.rs deleted file mode 100644 index 4bc24425f..000000000 --- a/crates/pikelet-concrete/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -//! 
The syntax of the language - -pub mod desugar; -pub mod elaborate; -pub mod parse; -pub mod resugar; -pub mod syntax; diff --git a/crates/pikelet-concrete/src/parse/errors.rs b/crates/pikelet-concrete/src/parse/errors.rs deleted file mode 100644 index 657c83f53..000000000 --- a/crates/pikelet-concrete/src/parse/errors.rs +++ /dev/null @@ -1,138 +0,0 @@ -use codespan::FileMap; -use codespan::{ByteIndex, ByteSpan}; -use codespan_reporting::{Diagnostic, Label}; -use failure::Fail; -use lalrpop_util::ParseError as LalrpopError; -use std::fmt; - -use crate::parse::{LexerError, Token}; - -#[derive(Debug, Fail, Clone, PartialEq)] -pub enum ParseError { - #[fail(display = "{}", _0)] - Lexer(#[cause] LexerError), - #[fail(display = "An identifier was expected when parsing a pi type.")] - IdentifierExpectedInPiType { span: ByteSpan }, - #[fail(display = "Unknown repl command `:{}` found.", command)] - UnknownReplCommand { span: ByteSpan, command: String }, - #[fail(display = "Unexpected EOF, expected one of: {}.", expected)] - UnexpectedEof { - end: ByteIndex, - expected: ExpectedTokens, - }, - #[fail( - display = "Unexpected token {}, found, expected one of: {}.", - token, expected - )] - UnexpectedToken { - span: ByteSpan, - token: Token, - expected: ExpectedTokens, - }, - #[fail(display = "Extra token {} found", token)] - ExtraToken { - span: ByteSpan, - token: Token, - }, -} - -/// Flatten away an LALRPOP error, leaving the inner `ParseError` behind -pub fn from_lalrpop(filemap: &FileMap, err: LalrpopError) -> ParseError -where - T: Into>, -{ - match err { - LalrpopError::User { error } => error, - LalrpopError::InvalidToken { .. 
} => unreachable!(), - LalrpopError::UnrecognizedToken { - token: None, - expected, - } => ParseError::UnexpectedEof { - end: filemap.span().end(), - expected: ExpectedTokens(expected), - }, - LalrpopError::UnrecognizedToken { - token: Some((start, token, end)), - expected, - } => ParseError::UnexpectedToken { - span: ByteSpan::new(start, end), - token: token.into(), - expected: ExpectedTokens(expected), - }, - LalrpopError::ExtraToken { - token: (start, token, end), - } => ParseError::ExtraToken { - span: ByteSpan::new(start, end), - token: token.into(), - }, - } -} - -impl ParseError { - /// Return the span of source code that this error originated from - pub fn span(&self) -> ByteSpan { - match *self { - ParseError::Lexer(ref err) => err.span(), - ParseError::IdentifierExpectedInPiType { span } - | ParseError::UnknownReplCommand { span, .. } - | ParseError::UnexpectedToken { span, .. } - | ParseError::ExtraToken { span, .. } => span, - ParseError::UnexpectedEof { end, .. } => ByteSpan::new(end, end), - } - } - - /// Convert the error into a diagnostic message - pub fn to_diagnostic(&self) -> Diagnostic { - match *self { - ParseError::Lexer(ref err) => err.to_diagnostic(), - ParseError::IdentifierExpectedInPiType { span } => { - Diagnostic::new_error("identifier expected when parsing dependent function type") - .with_label( - Label::new_primary(span).with_message("ill-formed dependent function type"), - ) - }, - ParseError::UnknownReplCommand { span, ref command } => { - Diagnostic::new_error(format!("unknown repl command `:{}`", command)) - .with_label(Label::new_primary(span).with_message("unexpected command")) - }, - ParseError::UnexpectedToken { - span, - ref token, - ref expected, - } => Diagnostic::new_error(format!("expected one of {}, found `{}`", expected, token)) - .with_label(Label::new_primary(span).with_message("unexpected token")), - ParseError::UnexpectedEof { end, ref expected } => { - Diagnostic::new_error(format!("expected one of {}, found 
`EOF`", expected)) - .with_label( - Label::new_primary(ByteSpan::new(end, end)).with_message("unexpected EOF"), - ) - }, - ParseError::ExtraToken { span, ref token } => { - Diagnostic::new_error(format!("extra token `{}`", token)) - .with_label(Label::new_primary(span).with_message("extra token")) - }, - } - } -} - -impl From for ParseError { - fn from(src: LexerError) -> ParseError { - ParseError::Lexer(src) - } -} - -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct ExpectedTokens(pub Vec); - -impl fmt::Display for ExpectedTokens { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for (i, token) in self.0.iter().enumerate() { - match i { - 0 => write!(f, "{}", token)?, - i if i < self.0.len() - 1 => write!(f, ", {}", token)?, - _ => write!(f, ", or {}", token)?, - } - } - Ok(()) - } -} diff --git a/crates/pikelet-concrete/src/parse/grammar.lalrpop b/crates/pikelet-concrete/src/parse/grammar.lalrpop deleted file mode 100644 index 5e981edc1..000000000 --- a/crates/pikelet-concrete/src/parse/grammar.lalrpop +++ /dev/null @@ -1,239 +0,0 @@ -use codespan::FileMap; -use codespan::{ByteIndex, ByteSpan}; - -use crate::parse::{ParseError, Token}; -use crate::syntax::{FloatFormat, IntFormat}; -use crate::syntax::concrete::{Item, Literal, Pattern, Term, RecordTypeField, RecordIntroField}; - -#[LALR] -grammar<'err, 'input>( - import_paths: &mut Vec, - errors: &'err mut Vec, - filemap: &'input FileMap, -); - -extern { - type Location = ByteIndex; - type Error = ParseError; - - enum Token<&'input str> { - // Data - "identifier" => Token::Ident(<&'input str>), - "doc comment" => Token::DocComment(<&'input str>), - "string literal" => Token::StringLiteral(), - "character literal" => Token::CharLiteral(), - "binary literal" => Token::BinIntLiteral(), - "octal literal" => Token::OctIntLiteral(), - "decimal literal" => Token::DecIntLiteral(), - "hex literal" => Token::HexIntLiteral(), - "float literal" => Token::DecFloatLiteral(), - - // Keywords - "as" => Token::As, - 
"case" => Token::Case, - "else" => Token::Else, - "if" => Token::If, - "import" => Token::Import, - "in" => Token::In, - "let" => Token::Let, - "record" => Token::Record, - "Record" => Token::RecordType, - "then" => Token::Then, - "Type" => Token::Type, - "where" => Token::Where, - - // Symbols - "\\" => Token::BSlash, - "^" => Token::Caret, - ":" => Token::Colon, - "," => Token::Comma, - "." => Token::Dot, - ".." => Token::DotDot, - "=" => Token::Equal, - "->" => Token::LArrow, - "=>" => Token::LFatArrow, - "?" => Token::Question, - ";" => Token::Semi, - - // Delimiters - "(" => Token::LParen, - ")" => Token::RParen, - "{" => Token::LBrace, - "}" => Token::RBrace, - "[" => Token::LBracket, - "]" => Token::RBracket, - } -} - -Item: Item = { - <_comment: "doc comment"*> ":" ";" => { - Item::Declaration { name, ann } - }, - <_comment: "doc comment"*> )?> "=" - ";" => - { - Item::Definition { name, params, return_ann: return_ann.map(Box::new), body } - }, - ";" => { - errors.push(super::errors::from_lalrpop(filemap, recovered.error)); - Item::Error(ByteSpan::new(start, end)) - }, -}; - -Literal: Literal = { - => Literal::String(ByteSpan::new(start, end), value), - => Literal::Char(ByteSpan::new(start, end), value), - => Literal::Int(ByteSpan::new(start, end), value, IntFormat::Bin), - => Literal::Int(ByteSpan::new(start, end), value, IntFormat::Oct), - => Literal::Int(ByteSpan::new(start, end), value, IntFormat::Dec), - => Literal::Int(ByteSpan::new(start, end), value, IntFormat::Hex), - => Literal::Float(ByteSpan::new(start, end), value, FloatFormat::Dec), -}; - -pub Pattern: Pattern = { - AtomicPattern, - ":" => { - Pattern::Ann(Box::new(pattern), Box::new(ty)) - } -}; - -AtomicPattern : Pattern = { - "(" ")" => { - Pattern::Parens(ByteSpan::new(start, end), Box::new(pattern)) - }, - => Pattern::Literal(literal), - )?> => { - Pattern::Name(ByteSpan::new(start, end), ident, shift.map(|x| x as u32)) // FIXME: underflow? 
- }, - => { - errors.push(super::errors::from_lalrpop(filemap, recovered.error)); - Pattern::Error(ByteSpan::new(start, end)) - }, -} - -pub Term: Term = { - ExprTerm, - ":" => { - Term::Ann(Box::new(expr), Box::new(ty)) - }, - "where" "{" "}" => { - Term::Where(Box::new(expr), items, end) - } -}; - -ExprTerm: Term = { - ArrowTerm, - "import" => { - import_paths.push(path.clone()); - Term::Import(ByteSpan::new(start, end), ByteSpan::new(path_start, end), path) - }, - "\\" ":" "=>" => { - Term::FunIntro(start, vec![(vec![name], Some(Box::new(ann)))], Box::new(body)) - }, - "\\" "=>" => { - Term::FunIntro(start, params, Box::new(body)) - }, - "if" "then" "else" => { - Term::If(start, Box::new(cond), Box::new(if_true), Box::new(if_false)) - }, - "case" "{" ";")*> "}" => { - let mut arms = arms; - arms.extend(last); - Term::Case(ByteSpan::new(start, end), Box::new(head), arms) - }, - "let" "in" => { - Term::Let(start, items, Box::new(body)) - }, -}; - -ArrowTerm: Term = { - AppTerm, - // Naively we would want to write the following rules: - // - // ```lalrpop - // ":" ")")+> "->" => { - // Term::FunType(params, Box::new(body)) - // }, - // "->" => { - // Term::Arrow(Box::new(ann), Box::new(body)) - // }, - // ``` - // - // Alas this causes an ambiguity with the `AtomicTerm` rule. Therefore we - // have to hack this in by reparsing the binder: - "->" =>? { - super::reparse_fun_ty_hack(ByteSpan::new(start, end), binder, body) - }, -}; - -AppTerm: Term = { - AtomicTerm, - => Term::FunApp(Box::new(head), args), -}; - -AtomicTerm: Term = { - "(" ")" => Term::Parens(ByteSpan::new(start, end), Box::new(term)), - "Type" )?> => { - Term::Universe(ByteSpan::new(start, end), level.map(|x| x as u32)) // FIXME: underflow? - }, - => Term::Literal(literal), - "[" ";")*> "]" => { - let mut elems = elems; - elems.extend(last); - Term::ArrayIntro(ByteSpan::new(start, end), elems) - }, - "?" 
=> Term::Hole(ByteSpan::new(start, end)), - )?> => { - Term::Name(ByteSpan::new(start, end), ident, shift.map(|x| x as u32)) // FIXME: underflow? - }, - "Record" "{" ";")*> "}" => { - let mut fields = fields; - fields.extend(last); - Term::RecordType(ByteSpan::new(start, end), fields) - }, - "record" "{" ";")*> "}" => { - let mut fields = fields; - fields.extend(last); - Term::RecordIntro(ByteSpan::new(start, end), fields) - }, - "." )?> => { - Term::RecordProj(ByteSpan::new(start, end), Box::new(term), label_start, label, shift.map(|x| x as u32)) - }, - => { - errors.push(super::errors::from_lalrpop(filemap, recovered.error)); - Term::Error(ByteSpan::new(start, end)) - }, -}; - -AtomicLamParam: (Vec<(ByteIndex, String)>, Option>) = { - => (vec![name], None), - "(" )?> ")" => (names, ann.map(Box::new)), -}; - -RecordTypeField: RecordTypeField = { - <_comment: "doc comment"*> )?> ":" => { - RecordTypeField { label, binder, ann } - }, -}; - -PatternArm: (Pattern, Term) = { - "=>" , -}; - -RecordIntroField: RecordIntroField = { - )?> => { - RecordIntroField::Punned { label, shift: shift.map(|x| x as u32) } - }, - )?> "=" => { - let return_ann = return_ann.map(Box::new); - RecordIntroField::Explicit { label, params, return_ann, term } - }, -}; - -IndexedIdent: (ByteIndex, String) = { - => (start, ident), -}; - -Ident: String = { - "identifier" => (<>).to_owned() -}; diff --git a/crates/pikelet-concrete/src/parse/lexer.rs b/crates/pikelet-concrete/src/parse/lexer.rs deleted file mode 100644 index 40cf813cd..000000000 --- a/crates/pikelet-concrete/src/parse/lexer.rs +++ /dev/null @@ -1,743 +0,0 @@ -use codespan::{ByteSpan, FileMap}; -use codespan_reporting::{Diagnostic, Label}; -use failure::Fail; - -use std::fmt; -use std::str::{CharIndices, FromStr}; - -use codespan::{ByteIndex, ByteOffset, RawOffset}; -use unicode_xid::UnicodeXID; - -fn is_symbol(ch: char) -> bool { - match ch { - '&' | '!' | ':' | ',' | '.' 
| '=' | '/' | '>' | '<' | '-' | '|' | '+' | ';' | '*' | '^' - | '?' => true, - _ => false, - } -} - -fn is_ident_start(ch: char) -> bool { - UnicodeXID::is_xid_start(ch) || ch == '_' || ch == '-' -} - -fn is_ident_continue(ch: char) -> bool { - UnicodeXID::is_xid_continue(ch) || ch == '_' || ch == '-' -} - -fn is_bin_digit(ch: char) -> bool { - ch.is_digit(2) -} - -fn is_oct_digit(ch: char) -> bool { - ch.is_digit(8) -} - -fn is_dec_digit(ch: char) -> bool { - ch.is_digit(10) -} - -fn is_hex_digit(ch: char) -> bool { - ch.is_digit(16) -} - -/// An error that occurred while lexing the source file -#[derive(Fail, Debug, Clone, PartialEq, Eq)] -pub enum LexerError { - #[fail(display = "An unexpected character {:?} was found.", found)] - UnexpectedCharacter { start: ByteIndex, found: char }, - #[fail(display = "Unexpected end of file.")] - UnexpectedEof { end: ByteIndex }, - #[fail(display = "Unterminated string literal.")] - UnterminatedStringLiteral { span: ByteSpan }, - #[fail(display = "Unterminated character literal.")] - UnterminatedCharLiteral { span: ByteSpan }, - #[fail(display = "Unterminated a binary literal.")] - UnterminatedBinLiteral { span: ByteSpan }, - #[fail(display = "Unterminated a octal literal.")] - UnterminatedOctLiteral { span: ByteSpan }, - #[fail(display = "Unterminated a hexidecimal literal.")] - UnterminatedHexLiteral { span: ByteSpan }, - #[fail(display = "Empty character literal.")] - EmptyCharLiteral { span: ByteSpan }, - #[fail(display = "An unknown escape code \\{} was found.", found)] - UnknownEscapeCode { start: ByteIndex, found: char }, - #[fail( - display = "An integer literal {} was too large for the target type.", - value - )] - IntegerLiteralOverflow { span: ByteSpan, value: String }, -} - -impl LexerError { - /// Return the span of source code that this error originated from - pub fn span(&self) -> ByteSpan { - match *self { - LexerError::UnexpectedCharacter { start, found } - | LexerError::UnknownEscapeCode { start, found } => 
{ - ByteSpan::from_offset(start, ByteOffset::from_char_utf8(found)) - }, - LexerError::UnexpectedEof { end } => ByteSpan::new(end, end), - LexerError::UnterminatedStringLiteral { span } - | LexerError::UnterminatedCharLiteral { span } - | LexerError::UnterminatedBinLiteral { span } - | LexerError::UnterminatedOctLiteral { span } - | LexerError::UnterminatedHexLiteral { span } - | LexerError::EmptyCharLiteral { span } - | LexerError::IntegerLiteralOverflow { span, .. } => span, - } - } - - pub fn to_diagnostic(&self) -> Diagnostic { - match *self { - LexerError::UnexpectedCharacter { start, found } => { - let char_span = ByteSpan::from_offset(start, ByteOffset::from_char_utf8(found)); - Diagnostic::new_error(format!("unexpected character {:?}", found)) - .with_label(Label::new_primary(char_span)) - }, - LexerError::UnexpectedEof { end } => Diagnostic::new_error("unexpected end of file") - .with_label(Label::new_primary(ByteSpan::new(end, end))), - LexerError::UnterminatedStringLiteral { span } => { - Diagnostic::new_error("unterminated string literal") - .with_label(Label::new_primary(span)) - }, - LexerError::UnterminatedCharLiteral { span } => { - Diagnostic::new_error("unterminated character literal") - .with_label(Label::new_primary(span)) - }, - LexerError::UnterminatedBinLiteral { span } => { - Diagnostic::new_error("unterminated binary literal") - .with_label(Label::new_primary(span)) - }, - LexerError::UnterminatedOctLiteral { span } => { - Diagnostic::new_error("unterminated octal literal") - .with_label(Label::new_primary(span)) - }, - LexerError::UnterminatedHexLiteral { span } => { - Diagnostic::new_error("unterminated hexadecimal literal") - .with_label(Label::new_primary(span)) - }, - LexerError::EmptyCharLiteral { span } => { - Diagnostic::new_error("empty character literal") - .with_label(Label::new_primary(span)) - }, - LexerError::UnknownEscapeCode { start, found } => { - let char_span = ByteSpan::from_offset(start, 
ByteOffset::from_char_utf8(found)); - Diagnostic::new_error(format!("unknown escape code \\{}", found)) - .with_label(Label::new_primary(char_span)) - }, - LexerError::IntegerLiteralOverflow { span, ref value } => { - Diagnostic::new_error(format!("integer literal overflow with value `{}`", value)) - .with_label(Label::new_primary(span).with_message("overflowing literal")) - }, - } - } -} - -/// A token in the source file, to be emitted by the `Lexer` -#[derive(Clone, Debug, PartialEq)] -pub enum Token { - // Data - Ident(S), - DocComment(S), - StringLiteral(String), - CharLiteral(char), - BinIntLiteral(u64), - OctIntLiteral(u64), - DecIntLiteral(u64), - HexIntLiteral(u64), - DecFloatLiteral(f64), - - // Keywords - As, // as - Case, // case - Else, // else - If, // if - Import, // import - In, // in - Let, // let - Record, // record - RecordType, // Record - Then, // then - Type, // Type - Where, // where - - // Symbols - BSlash, // \ - Caret, // ^ - Colon, // : - Comma, // , - Dot, // . - DotDot, // .. - Equal, // = - LArrow, // -> - LFatArrow, // => - Question, // ? 
- Semi, // ; - - // Delimiters - LParen, // ( - RParen, // ) - LBrace, // { - RBrace, // } - LBracket, // [ - RBracket, // ] -} - -impl fmt::Display for Token { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Token::Ident(ref name) => write!(f, "{}", name), - Token::DocComment(ref comment) => write!(f, "||| {}", comment), - Token::StringLiteral(ref value) => write!(f, "{:?}", value), - Token::CharLiteral(ref value) => write!(f, "'{:?}'", value), - Token::BinIntLiteral(ref value) => write!(f, "{:b}", value), - Token::OctIntLiteral(ref value) => write!(f, "{:o}", value), - Token::DecIntLiteral(ref value) => write!(f, "{}", value), - Token::HexIntLiteral(ref value) => write!(f, "{:x}", value), - Token::DecFloatLiteral(ref value) => write!(f, "{}", value), - Token::As => write!(f, "as"), - Token::Case => write!(f, "case"), - Token::Else => write!(f, "else"), - Token::If => write!(f, "if"), - Token::Import => write!(f, "import"), - Token::In => write!(f, "in"), - Token::Let => write!(f, "let"), - Token::Record => write!(f, "record"), - Token::RecordType => write!(f, "Record"), - Token::Then => write!(f, "then"), - Token::Type => write!(f, "Type"), - Token::Where => write!(f, "where"), - Token::BSlash => write!(f, "\\"), - Token::Caret => write!(f, "^"), - Token::Colon => write!(f, ":"), - Token::Comma => write!(f, ","), - Token::Dot => write!(f, "."), - Token::DotDot => write!(f, ".."), - Token::Equal => write!(f, "="), - Token::LFatArrow => write!(f, "=>"), - Token::LArrow => write!(f, "->"), - Token::Question => write!(f, "?"), - Token::Semi => write!(f, ";"), - Token::LParen => write!(f, "("), - Token::RParen => write!(f, ")"), - Token::LBrace => write!(f, "{{"), - Token::RBrace => write!(f, "}}"), - Token::LBracket => write!(f, "["), - Token::RBracket => write!(f, "]"), - } - } -} - -impl<'input> From> for Token { - fn from(src: Token<&'input str>) -> Token { - match src { - Token::Ident(name) => Token::Ident(name.to_owned()), - 
Token::DocComment(comment) => Token::DocComment(comment.to_owned()), - Token::StringLiteral(value) => Token::StringLiteral(value), - Token::CharLiteral(value) => Token::CharLiteral(value), - Token::BinIntLiteral(value) => Token::BinIntLiteral(value), - Token::OctIntLiteral(value) => Token::OctIntLiteral(value), - Token::DecIntLiteral(value) => Token::DecIntLiteral(value), - Token::HexIntLiteral(value) => Token::HexIntLiteral(value), - Token::DecFloatLiteral(value) => Token::DecFloatLiteral(value), - Token::As => Token::As, - Token::Case => Token::Case, - Token::Else => Token::Else, - Token::If => Token::If, - Token::Import => Token::Import, - Token::In => Token::In, - Token::Let => Token::Let, - Token::Record => Token::Record, - Token::RecordType => Token::RecordType, - Token::Then => Token::Then, - Token::Type => Token::Type, - Token::Where => Token::Where, - Token::BSlash => Token::BSlash, - Token::Caret => Token::Caret, - Token::Colon => Token::Colon, - Token::Comma => Token::Comma, - Token::Dot => Token::Dot, - Token::DotDot => Token::DotDot, - Token::Equal => Token::Equal, - Token::LFatArrow => Token::LFatArrow, - Token::LArrow => Token::LArrow, - Token::Question => Token::Question, - Token::Semi => Token::Semi, - Token::LParen => Token::LParen, - Token::RParen => Token::RParen, - Token::LBrace => Token::LBrace, - Token::RBrace => Token::RBrace, - Token::LBracket => Token::LBracket, - Token::RBracket => Token::RBracket, - } - } -} - -/// An iterator over a source string that yields `Token`s for subsequent use by -/// the parser -pub struct Lexer<'input> { - filemap: &'input FileMap, - chars: CharIndices<'input>, - lookahead: Option<(usize, char)>, -} - -impl<'input> Lexer<'input> { - /// Create a new lexer from the source string - pub fn new(filemap: &'input FileMap) -> Self { - let mut chars = filemap.src().char_indices(); - - Lexer { - filemap, - lookahead: chars.next(), - chars, - } - } - - /// Returns the index of the end of the file - fn eof(&self) -> 
ByteIndex { - self.filemap.span().end() - } - - /// Return the next character in the source string - fn lookahead(&self) -> Option<(ByteIndex, char)> { - self.lookahead.map(|(index, ch)| { - let off = ByteOffset(index as RawOffset); - let index = self.filemap.span().start() + off; - (index, ch) - }) - } - - /// Bump the current position in the source string by one character, - /// returning the current character and byte position. - fn bump(&mut self) -> Option<(ByteIndex, char)> { - let current = self.lookahead(); - self.lookahead = self.chars.next(); - current - } - - /// Return a slice of the source string - fn slice(&self, start: ByteIndex, end: ByteIndex) -> &'input str { - &self.filemap.src_slice(ByteSpan::new(start, end)).unwrap() - } - - /// Test a predicate against the next character in the source - fn test_lookahead(&self, mut pred: F) -> bool - where - F: FnMut(char) -> bool, - { - self.lookahead.map_or(false, |(_, ch)| pred(ch)) - } - - /// Consume characters while the predicate matches for the current - /// character, then return the consumed slice and the end byte - /// position. - fn take_while(&mut self, start: ByteIndex, mut keep_going: F) -> (ByteIndex, &'input str) - where - F: FnMut(char) -> bool, - { - self.take_until(start, |ch| !keep_going(ch)) - } - - /// Consume characters until the predicate matches for the next character - /// in the lookahead, then return the consumed slice and the end byte - /// position. 
- fn take_until(&mut self, start: ByteIndex, mut terminate: F) -> (ByteIndex, &'input str) - where - F: FnMut(char) -> bool, - { - while let Some((end, ch)) = self.lookahead() { - if terminate(ch) { - return (end, self.slice(start, end)); - } else { - self.bump(); - } - } - - let eof = self.eof(); - (eof, self.slice(start, eof)) - } - - /// Consume a doc comment - fn doc_comment(&mut self, start: ByteIndex) -> SpannedToken<'input> { - let (end, mut comment) = - self.take_until(start + ByteOffset::from_str("|||"), |ch| ch == '\n'); - - // Skip preceding space - if comment.starts_with(' ') { - comment = &comment[1..]; - } - - (start, Token::DocComment(comment), end) - } - - /// Consume an identifier - fn ident(&mut self, start: ByteIndex) -> SpannedToken<'input> { - let (end, ident) = self.take_while(start, is_ident_continue); - - let token = match ident { - "as" => Token::As, - "case" => Token::Case, - "else" => Token::Else, - "if" => Token::If, - "import" => Token::Import, - "in" => Token::In, - "let" => Token::Let, - "record" => Token::Record, - "Record" => Token::RecordType, - "then" => Token::Then, - "Type" => Token::Type, - "where" => Token::Where, - ident => Token::Ident(ident), - }; - - (start, token, end) - } - - /// Consume an escape code - fn escape_code(&mut self, start: ByteIndex) -> Result { - match self.bump() { - Some((_, '\'')) => Ok('\''), - Some((_, '"')) => Ok('"'), - Some((_, '\\')) => Ok('\\'), - Some((_, '/')) => Ok('/'), - Some((_, 'n')) => Ok('\n'), - Some((_, 'r')) => Ok('\r'), - Some((_, 't')) => Ok('\t'), - // TODO: Unicode escape codes - Some((start, ch)) => Err(LexerError::UnknownEscapeCode { start, found: ch }), - None => Err(LexerError::UnexpectedEof { end: start }), - } - } - - /// Consume a string literal - fn string_literal(&mut self, start: ByteIndex) -> Result, LexerError> { - let mut string = String::new(); - let mut end = start; - - while let Some((next, ch)) = self.bump() { - end = next + ByteOffset::from_char_utf8(ch); - match 
ch { - '\\' => string.push(self.escape_code(next)?), - '"' => return Ok((start, Token::StringLiteral(string), end)), - ch => string.push(ch), - } - } - - Err(LexerError::UnterminatedStringLiteral { - span: ByteSpan::new(start, end), - }) - } - - /// Consume a character literal - fn char_literal(&mut self, start: ByteIndex) -> Result, LexerError> { - let ch = match self.bump() { - Some((next, '\\')) => self.escape_code(next)?, - Some((next, '\'')) => { - return Err(LexerError::EmptyCharLiteral { - span: ByteSpan::new(start, next + ByteOffset::from_char_utf8('\'')), - }); - }, - Some((_, ch)) => ch, - None => return Err(LexerError::UnexpectedEof { end: start }), - }; - - match self.bump() { - Some((end, '\'')) => Ok(( - start, - Token::CharLiteral(ch), - end + ByteOffset::from_char_utf8('\''), - )), - Some((next, ch)) => Err(LexerError::UnterminatedCharLiteral { - span: ByteSpan::new(start, next + ByteOffset::from_char_utf8(ch)), - }), - None => Err(LexerError::UnexpectedEof { end: start }), - } - } - - /// Consume a binary literal token - fn bin_literal( - &mut self, - start: ByteIndex, - ) -> Result<(ByteIndex, Token<&'input str>, ByteIndex), LexerError> { - self.bump(); // skip 'b' - let (end, src) = self.take_while(start + ByteOffset(2), is_bin_digit); - if src.is_empty() { - Err(LexerError::UnterminatedBinLiteral { - span: ByteSpan::new(start, end), - }) - } else { - let int = u64::from_str_radix(src, 2).unwrap(); - Ok((start, Token::BinIntLiteral(int), end)) - } - } - - /// Consume a octal literal token - fn oct_literal( - &mut self, - start: ByteIndex, - ) -> Result<(ByteIndex, Token<&'input str>, ByteIndex), LexerError> { - self.bump(); // skip 'o' - let (end, src) = self.take_while(start + ByteOffset(2), is_oct_digit); - if src.is_empty() { - Err(LexerError::UnterminatedOctLiteral { - span: ByteSpan::new(start, end), - }) - } else { - let int = u64::from_str_radix(src, 8).unwrap(); - Ok((start, Token::OctIntLiteral(int), end)) - } - } - - /// Consume a 
decimal literal - fn dec_literal(&mut self, start: ByteIndex) -> Result, LexerError> { - let (end, src) = self.take_while(start, is_dec_digit); - - if let Some((_, '.')) = self.lookahead() { - self.bump(); // skip '.' - let (end, src) = self.take_while(start, is_dec_digit); - - match f64::from_str(src) { - Ok(value) => Ok((start, Token::DecFloatLiteral(value), end)), - Err(_) => unimplemented!(), - } - } else { - match u64::from_str_radix(src, 10) { - Ok(value) => Ok((start, Token::DecIntLiteral(value), end)), - Err(_) => Err(LexerError::IntegerLiteralOverflow { - span: ByteSpan::new(start, end), - value: src.to_string(), - }), - } - } - } - - /// Consume a hexadecimal literal token - fn hex_literal( - &mut self, - start: ByteIndex, - ) -> Result<(ByteIndex, Token<&'input str>, ByteIndex), LexerError> { - self.bump(); // skip 'x' - let (end, src) = self.take_while(start + ByteOffset(2), is_hex_digit); - if src.is_empty() { - Err(LexerError::UnterminatedHexLiteral { - span: ByteSpan::new(start, end), - }) - } else { - let int = u64::from_str_radix(src, 16).unwrap(); - Ok((start, Token::HexIntLiteral(int), end)) - } - } -} - -pub type SpannedToken<'input> = (ByteIndex, Token<&'input str>, ByteIndex); - -impl<'input> Iterator for Lexer<'input> { - type Item = Result<(ByteIndex, Token<&'input str>, ByteIndex), LexerError>; - - #[allow(clippy::cyclomatic_complexity)] - fn next(&mut self) -> Option, LexerError>> { - while let Some((start, ch)) = self.bump() { - let end = start + ByteOffset::from_char_utf8(ch); - - return Some(match ch { - ch if is_symbol(ch) => { - let (end, symbol) = self.take_while(start, is_symbol); - - match symbol { - ":" => Ok((start, Token::Colon, end)), - "^" => Ok((start, Token::Caret, end)), - "," => Ok((start, Token::Comma, end)), - "." => Ok((start, Token::Dot, end)), - ".." 
=> Ok((start, Token::DotDot, end)), - "=" => Ok((start, Token::Equal, end)), - "->" => Ok((start, Token::LArrow, end)), - "=>" => Ok((start, Token::LFatArrow, end)), - "?" => Ok((start, Token::Question, end)), - ";" => Ok((start, Token::Semi, end)), - symbol if symbol.starts_with("|||") => Ok(self.doc_comment(start)), - symbol if symbol.starts_with("--") => { - self.take_until(start, |ch| ch == '\n'); - continue; - }, - _ => Err(LexerError::UnexpectedCharacter { start, found: ch }), - } - }, - '\\' => Ok((start, Token::BSlash, end)), - '(' => Ok((start, Token::LParen, end)), - ')' => Ok((start, Token::RParen, end)), - '{' => Ok((start, Token::LBrace, end)), - '}' => Ok((start, Token::RBrace, end)), - '[' => Ok((start, Token::LBracket, end)), - ']' => Ok((start, Token::RBracket, end)), - '"' => self.string_literal(start), - '\'' => self.char_literal(start), - '0' if self.test_lookahead(|x| x == 'b') => self.bin_literal(start), - '0' if self.test_lookahead(|x| x == 'o') => self.oct_literal(start), - '0' if self.test_lookahead(|x| x == 'x') => self.hex_literal(start), - ch if is_ident_start(ch) => Ok(self.ident(start)), - ch if is_dec_digit(ch) => self.dec_literal(start), - ch if ch.is_whitespace() => continue, - _ => Err(LexerError::UnexpectedCharacter { start, found: ch }), - }); - } - - None - } -} - -#[cfg(test)] -mod tests { - use codespan::RawIndex; - use codespan::{CodeMap, FileName}; - - use super::*; - - /// A handy macro to give us a nice syntax for declaring test cases - /// - /// This was inspired by the tests in the LALRPOP lexer - macro_rules! 
test { - ($src:expr, $($span:expr => $token:expr,)*) => {{ - let mut codemap = CodeMap::new(); - let filemap = codemap.add_filemap(FileName::virtual_("test"), $src.into()); - - let lexed_tokens: Vec<_> = Lexer::new(&filemap).collect(); - let expected_tokens = vec![$({ - let start = ByteIndex($span.find("~").unwrap() as RawIndex + 1); - let end = ByteIndex($span.rfind("~").unwrap() as RawIndex + 2); - Ok((start, $token, end)) - }),*]; - - assert_eq!(lexed_tokens, expected_tokens); - }}; - } - - #[test] - fn data() { - test! { - " hello-hahaha8ABC ", - " ~~~~~~~~~~~~~~~~ " => Token::Ident("hello-hahaha8ABC"), - }; - } - - #[test] - fn comment() { - test! { - " -- hello this is dog\n ", - }; - } - - #[test] - fn doc_comment() { - test! { - " ||| hello this is dog", - " ~~~~~~~~~~~~~~~~~~~~~" => Token::DocComment("hello this is dog"), - }; - } - - #[test] - fn string_literal() { - test! { - r#" "a" "\t" "#, - r#" ~~~ "# => Token::StringLiteral("a".to_owned()), - r#" ~~~~ "# => Token::StringLiteral("\t".to_owned()), - }; - } - - #[test] - fn char_literal() { - test! { - r" 'a' '\t' ", - r" ~~~ " => Token::CharLiteral('a'), - r" ~~~~ " => Token::CharLiteral('\t'), - }; - } - - #[test] - fn bin_literal() { - test! { - " 0b010110 ", - " ~~~~~~~~ " => Token::BinIntLiteral(0b010110), - }; - } - - #[test] - fn oct_literal() { - test! { - " 0o12371 ", - " ~~~~~~~ " => Token::OctIntLiteral(0o12371), - }; - } - - #[test] - fn dec_literal() { - test! { - " 123 ", - " ~~~ " => Token::DecIntLiteral(123), - }; - } - - #[test] - fn hex_literal() { - test! { - " 0x123AF ", - " ~~~~~~~ " => Token::HexIntLiteral(0x123AF), - }; - } - - #[test] - fn float_literal() { - test! { - " 122.345 ", - " ~~~~~~~ " => Token::DecFloatLiteral(122.345), - }; - } - - #[test] - fn keywords() { - test! 
{ - " as case else if import in let record Record then Type where ", - " ~~ " => Token::As, - " ~~~~ " => Token::Case, - " ~~~~ " => Token::Else, - " ~~ " => Token::If, - " ~~~~~~ " => Token::Import, - " ~~ " => Token::In, - " ~~~ " => Token::Let, - " ~~~~~~ " => Token::Record, - " ~~~~~~ " => Token::RecordType, - " ~~~~ " => Token::Then, - " ~~~~ " => Token::Type, - " ~~~~~ " => Token::Where, - }; - } - - #[test] - fn symbols() { - test! { - r" \ ^ : , .. = -> => ? ; ", - r" ~ " => Token::BSlash, - r" ~ " => Token::Caret, - r" ~ " => Token::Colon, - r" ~ " => Token::Comma, - r" ~~ " => Token::DotDot, - r" ~ " => Token::Equal, - r" ~~ " => Token::LArrow, - r" ~~ " => Token::LFatArrow, - r" ~ " => Token::Question, - r" ~ " => Token::Semi, - } - } - - #[test] - fn delimiters() { - test! { - " ( ) { } [ ] ", - " ~ " => Token::LParen, - " ~ " => Token::RParen, - " ~ " => Token::LBrace, - " ~ " => Token::RBrace, - " ~ " => Token::LBracket, - " ~ " => Token::RBracket, - } - } -} diff --git a/crates/pikelet-concrete/src/parse/mod.rs b/crates/pikelet-concrete/src/parse/mod.rs deleted file mode 100644 index ab3544762..000000000 --- a/crates/pikelet-concrete/src/parse/mod.rs +++ /dev/null @@ -1,111 +0,0 @@ -//! Parser utilities - -use codespan::{ByteIndex, ByteSpan, FileMap}; -use lalrpop_util::ParseError as LalrpopError; - -use crate::parse::lexer::Lexer; -use crate::syntax::concrete; - -mod errors; -mod lexer; - -pub use self::errors::{ExpectedTokens, ParseError}; -pub use self::lexer::{LexerError, Token}; - -macro_rules! 
parser { - ($name:ident, $output:ident, $parser_name:ident) => { - pub fn $name<'input>( - filemap: &'input FileMap, - ) -> (concrete::$output, Vec, Vec) { - let mut import_paths = Vec::new(); - let mut errors = Vec::new(); - let lexer = Lexer::new(filemap).map(|x| x.map_err(ParseError::from)); - let value = grammar::$parser_name::new() - .parse(&mut import_paths, &mut errors, filemap, lexer) - .unwrap_or_else(|err| { - errors.push(errors::from_lalrpop(filemap, err)); - concrete::$output::Error(filemap.span()) - }); - - (value, import_paths, errors) - } - }; -} - -parser!(pattern, Pattern, PatternParser); -parser!(term, Term, TermParser); - -mod grammar { - #![allow(clippy::all)] - - include!(concat!(env!("OUT_DIR"), "/parse/grammar.rs")); -} - -/// This is an ugly hack that cobbles together a pi type from a binder term and -/// a body. See the comments on the `PiTerm` rule in the `grammar.lalrpop` for -/// more information. -fn reparse_fun_ty_hack( - span: ByteSpan, - binder: concrete::Term, - body: concrete::Term, -) -> Result> { - use crate::syntax::concrete::Term; - - fn fun_ty_binder( - binder: &Term, - ) -> Result, LalrpopError> { - match *binder { - Term::Parens(_, ref term) => match **term { - Term::Ann(ref params, ref ann) => { - let mut names = Vec::new(); - param_names(&**params, &mut names)?; - Ok(Some((names, (**ann).clone()))) - }, - _ => Ok(None), - }, - _ => Ok(None), - } - } - - fn param_names( - term: &Term, - names: &mut Vec<(ByteIndex, String)>, - ) -> Result<(), LalrpopError> { - match *term { - Term::Name(span, ref name, None) => names.push((span.start(), name.clone())), - Term::FunApp(ref head, ref args) => { - param_names(head, names)?; - for arg in args { - param_names(arg, names)?; - } - }, - _ => { - return Err(LalrpopError::User { - error: ParseError::IdentifierExpectedInPiType { span: term.span() }, - }); - }, - } - Ok(()) - } - - match binder { - Term::FunApp(ref head, ref args) => { - use std::iter; - - let mut binders = 
Vec::with_capacity(args.len() + 1); - - for next in iter::once(&**head).chain(args).map(fun_ty_binder) { - match next? { - Some((names, ann)) => binders.push((names, ann)), - None => return Ok(Term::FunArrow(Box::new(binder.clone()), Box::new(body))), - } - } - - Ok(Term::FunType(span.start(), binders, Box::new(body))) - }, - binder => match fun_ty_binder(&binder)? { - Some(binder) => Ok(Term::FunType(span.start(), vec![binder], Box::new(body))), - None => Ok(Term::FunArrow(binder.into(), Box::new(body))), - }, - } -} diff --git a/crates/pikelet-concrete/src/resugar.rs b/crates/pikelet-concrete/src/resugar.rs deleted file mode 100644 index 010265c97..000000000 --- a/crates/pikelet-concrete/src/resugar.rs +++ /dev/null @@ -1,656 +0,0 @@ -use codespan::{ByteIndex, ByteSpan}; -use im; -use moniker::{Binder, BoundTerm, Embed, FreeVar, Nest, Scope, Var}; - -use pikelet_core::syntax::{core, domain}; -use pikelet_core::syntax::{Label, Level, LevelShift}; - -use crate::syntax::{concrete, FloatFormat, IntFormat}; - -/// The environment used when resugaring from the core to the concrete syntax -#[derive(Debug, Clone)] -pub struct ResugarEnv { - usages: im::HashMap, - renames: im::HashMap, String>, -} - -const KEYWORDS: &[&str] = &[ - "as", "case", "else", "if", "import", "in", "let", "record", "Record", "then", "Type", "where", -]; - -impl ResugarEnv { - pub fn new() -> ResugarEnv { - ResugarEnv { - usages: KEYWORDS.iter().map(|&kw| (kw.to_owned(), 0)).collect(), - renames: im::HashMap::new(), - } - } - - pub fn on_item(&mut self, label: &Label, binder: &Binder) -> String { - let Label(ref name) = *label; - let Binder(ref free_var) = *binder; - - self.renames.get(free_var).cloned().unwrap_or_else(|| { - match self.usages.get(name).cloned() { - Some(count) => { - let count = count + 1; - let mapped_name = format!("{}{}", name, count); - - self.usages.insert(name.clone(), count); - self.usages.insert(mapped_name.clone(), count); - self.renames.insert(free_var.clone(), 
mapped_name.clone()); - - mapped_name - }, - None => { - self.usages.insert(name.clone(), 0); - self.renames.insert(free_var.clone(), name.clone()); - - name.clone() - }, - } - }) - } - - // pub fn on_binder(&mut self, binder: &Binder, free_vars: &HashSet) -> String { - pub fn on_binder(&mut self, binder: &Binder) -> String { - let Binder(ref free_var) = *binder; - - self.renames.get(free_var).cloned().unwrap_or_else(|| { - let pretty_name = match free_var.pretty_name { - Some(ref name) => name.clone(), - None => "a".to_owned(), - }; - - match self.usages.get(&pretty_name).cloned() { - Some(count) => { - let count = count + 1; - let mapped_name = format!("{}{}", pretty_name, count); - - self.usages.insert(pretty_name, count); - self.usages.insert(mapped_name.clone(), count); - self.renames.insert(free_var.clone(), mapped_name.clone()); - - mapped_name - }, - None => { - self.usages.insert(pretty_name.clone(), 0); - self.renames.insert(free_var.clone(), pretty_name.clone()); - - pretty_name - }, - } - }) - } - - pub fn on_free_var(&self, free_var: &FreeVar) -> String { - self.renames.get(free_var).cloned().unwrap_or_else(|| { - panic!( - "on_free_var: expected {} to be bound in resugar environment", - free_var, - ); - }) - } -} - -/// Translate something to the corresponding concrete representation -pub trait Resugar { - fn resugar(&self, env: &ResugarEnv) -> T; -} - -/// The precedence of a term -/// -/// This is used to reconstruct the parentheses needed to reconstruct a valid -/// syntax tree in the concrete syntax -#[derive(Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq)] -pub struct Prec(i8); - -impl Prec { - /// This term will never be wrapped in parentheses - pub const NO_WRAP: Prec = Prec(-1); - /// Precedence corresponding to `Term` in the parser - pub const ANN: Prec = Prec(0); - /// Precedence corresponding to `LamTerm` in the parser - pub const LAM: Prec = Prec(1); - /// Precedence corresponding to `PiTerm` in the parser - pub const PI: Prec = 
Prec(2); - /// Precedence corresponding to `AppTerm` in the parser - pub const APP: Prec = Prec(3); - /// Precedence corresponding to `AtomicTerm` in the parser - pub const ATOMIC: Prec = Prec(4); -} - -fn parens_if(should_wrap: bool, inner: concrete::Term) -> concrete::Term { - if should_wrap { - concrete::Term::Parens(ByteSpan::default(), Box::new(inner)) - } else { - inner - } -} - -fn resugar_pattern( - env: &mut ResugarEnv, - pattern: &core::Pattern, - _prec: Prec, -) -> concrete::Pattern { - match *pattern { - core::Pattern::Ann(ref pattern, Embed(ref ty)) => concrete::Pattern::Ann( - Box::new(resugar_pattern(env, pattern, Prec::NO_WRAP)), - Box::new(resugar_term(env, ty, Prec::LAM)), - ), - core::Pattern::Binder(ref binder) => { - let name = env.on_binder(binder); - concrete::Pattern::Name(ByteSpan::default(), name, None) - }, - core::Pattern::Var(Embed(Var::Free(ref free_var)), shift) => { - let shift = match shift { - LevelShift(0) => None, - LevelShift(shift) => Some(shift), - }; - - let name = env.on_free_var(free_var); - concrete::Pattern::Name(ByteSpan::default(), name, shift) - }, - core::Pattern::Var(Embed(Var::Bound(_)), _) => { - // TODO: Better message - panic!("Tried to convert a term that was not locally closed"); - }, - core::Pattern::Literal(ref literal) => { - use pikelet_core::syntax::Literal; - - use crate::syntax::concrete::Literal::*; - use crate::syntax::concrete::Pattern; - - let span = ByteSpan::default(); - - match *literal { - // FIXME: Draw these names from some environment? 
- Literal::Bool(true) => Pattern::Name(span, "true".to_owned(), None), - Literal::Bool(false) => Pattern::Name(span, "false".to_owned(), None), - - Literal::String(ref val) => Pattern::Literal(String(span, val.clone())), - Literal::Char(val) => Pattern::Literal(Char(span, val)), - - Literal::U8(val) => Pattern::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U16(val) => Pattern::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U32(val) => Pattern::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U64(val) => Pattern::Literal(Int(span, val, IntFormat::Dec)), - - // FIXME: Underflow for negative numbers - Literal::S8(val) => Pattern::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S16(val) => Pattern::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S32(val) => Pattern::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S64(val) => Pattern::Literal(Int(span, val as u64, IntFormat::Dec)), - - Literal::F32(v) => Pattern::Literal(Float(span, f64::from(v), FloatFormat::Dec)), - Literal::F64(v) => Pattern::Literal(Float(span, v, FloatFormat::Dec)), - } - }, - } -} - -fn resugar_fun_ty( - env: &ResugarEnv, - scope: &Scope<(Binder, Embed), core::RcTerm>, - prec: Prec, -) -> concrete::Term { - let mut env = env.clone(); - - let ((binder, Embed(mut ann)), mut body) = scope.clone().unbind(); - let body_fvs = body.free_vars(); - - // Only use explicit parameter names if the body is dependent on - // the parameter or there is a human-readable name given. - // - // We'll be checking for readable names as we go, because if they've - // survived until now they're probably desirable to retain! - if body_fvs.contains(&binder.0) || binder.0.pretty_name.is_some() { - let name = env.on_binder(&binder); - let mut params = vec![( - vec![(ByteIndex::default(), name)], - resugar_term(&env, &ann, Prec::APP), - )]; - - // Parameter resugaring - // - // Share a parameter list if another pi is nested directly inside. 
- // For example: - // - // ``` - // (a : Type) -> (b : Type -> Type) -> ... - // (a : Type) (b : Type -> Type) -> ... - // ``` - while let core::Term::FunType(ref scope) = *body { - let ((next_binder, Embed(next_ann)), next_body) = scope.clone().unbind(); - - if core::Term::term_eq(&ann, &next_ann) && next_binder.0.pretty_name.is_some() { - // Combine the parameters if the type annotations are - // alpha-equivalent. For example: - // - // ``` - // (a : Type) (b : Type) -> ... - // (a b : Type) -> ... - // ``` - let next_name = env.on_binder(&next_binder); - let next_param = (ByteIndex::default(), next_name); - params.last_mut().unwrap().0.push(next_param); - } else if next_body.free_vars().contains(&next_binder.0) - || next_binder.0.pretty_name.is_some() - { - // Add a new parameter if the body is dependent on the parameter - // or there is a human-readable name given - let next_name = env.on_binder(&next_binder); - params.push(( - vec![(ByteIndex::default(), next_name)], - resugar_term(&env, &next_ann, Prec::APP), - )); - } else { - // Stop collapsing parameters if we encounter a non-dependent pi type. - return parens_if( - Prec::PI < prec, - concrete::Term::FunType( - ByteIndex::default(), - params, - Box::new(concrete::Term::FunArrow( - Box::new(resugar_term(&env, &next_ann, Prec::APP)), - Box::new(resugar_term(&env, &next_body, Prec::LAM)), - )), - ), - ); - } - - ann = next_ann; - body = next_body; - } - - parens_if( - Prec::PI < prec, - concrete::Term::FunType( - ByteIndex::default(), - params, - Box::new(resugar_term(&env, &body, Prec::LAM)), - ), - ) - } else { - // The body is not dependent on the parameter - so let's use an arrow - // instead! 
For example: - // - // ``` - // (a : Type) -> Type - // Type -> Type - // ``` - parens_if( - Prec::PI < prec, - concrete::Term::FunArrow( - Box::new(resugar_term(&env, &ann, Prec::APP)), - Box::new(resugar_term(&env, &body, Prec::LAM)), - ), - ) - } -} - -fn resugar_fun_intro( - env: &ResugarEnv, - scope: &Scope<(Binder, Embed), core::RcTerm>, - prec: Prec, -) -> concrete::Term { - let mut env = env.clone(); - - let ((binder, Embed(mut ann)), mut body) = scope.clone().unbind(); - - let name = env.on_binder(&binder); - let mut params = vec![( - vec![(ByteIndex::default(), name)], - Some(Box::new(resugar_term(&env, &ann, Prec::LAM))), - )]; - - // Parameter resugaring - // - // Share a parameter list if another lambda is nested directly inside. - // For example: - // - // ``` - // \(a : Type) => \(b : Type -> Type) => ... - // \(a : Type) (b : Type -> Type) => ... - // ``` - while let core::Term::FunIntro(ref scope) = *body { - let ((next_binder, Embed(next_ann)), next_body) = scope.clone().unbind(); - - // Combine the parameters if the type annotations are alpha-equivalent. - // For example: - // - // ``` - // \(a : Type) (b : Type) => ... - // \(a b : Type) => ... 
- // ``` - let next_name = env.on_binder(&next_binder); - if core::Term::term_eq(&ann, &next_ann) { - let next_param = (ByteIndex::default(), next_name); - params.last_mut().unwrap().0.push(next_param); - } else { - params.push(( - vec![(ByteIndex::default(), next_name)], - Some(Box::new(resugar_term(&env, &next_ann, Prec::LAM))), - )); - } - - ann = next_ann; - body = next_body; - } - - parens_if( - Prec::LAM < prec, - concrete::Term::FunIntro( - ByteIndex::default(), - params, - Box::new(resugar_term(&env, &body, Prec::LAM)), - ), - ) -} - -fn resugar_let( - env: &ResugarEnv, - scope: &Scope, Embed)>, core::RcTerm>, - prec: Prec, -) -> concrete::Term { - let mut env = env.clone(); - - let (bindings, mut body) = scope.clone().unbind(); - let bindings = bindings.unnest(); - - let mut items = Vec::with_capacity(bindings.len() * 2); - - for (binder, Embed(term)) in bindings { - let name = env.on_binder(&binder); - - match *term.inner { - core::Term::Ann(ref term, ref ann) => { - // pull lambda arguments from the body into the definition - let (term_params, term_body) = match resugar_term(&env, term, Prec::NO_WRAP) { - concrete::Term::FunIntro(_, params, term_body) => (params, *term_body), - term_body => (vec![], term_body), - }; - - items.push(concrete::Item::Declaration { - name: (ByteIndex::default(), name.clone()), - ann: resugar_term(&env, &ann, Prec::ANN), - }); - items.push(concrete::Item::Definition { - name: (ByteIndex::default(), name), - params: term_params, - return_ann: None, - body: term_body, - }); - }, - _ => { - // pull lambda arguments from the body into the definition - let (term_params, term_body) = match resugar_term(&env, &term, Prec::NO_WRAP) { - concrete::Term::FunIntro(_, params, term_body) => (params, *term_body), - term_body => (vec![], term_body), - }; - - items.push(concrete::Item::Definition { - name: (ByteIndex::default(), name), - params: term_params, - return_ann: None, - body: term_body, - }); - }, - } - } - - while let 
core::Term::Let(ref scope) = *body { - let (bindings, next_body) = scope.clone().unbind(); - - for (binder, Embed(term)) in bindings.unnest() { - let next_name = env.on_binder(&binder); - match *term.inner { - core::Term::Ann(ref term, ref ann) => { - // pull lambda arguments from the body into the definition - let (term_params, term_body) = match resugar_term(&env, term, Prec::NO_WRAP) { - concrete::Term::FunIntro(_, params, term_body) => (params, *term_body), - term_body => (vec![], term_body), - }; - - items.push(concrete::Item::Declaration { - name: (ByteIndex::default(), next_name.clone()), - ann: resugar_term(&env, &ann, Prec::ANN), - }); - items.push(concrete::Item::Definition { - name: (ByteIndex::default(), next_name), - params: term_params, - return_ann: None, - body: term_body, - }); - }, - _ => { - // pull lambda arguments from the body into the definition - let (term_params, term_body) = match resugar_term(&env, &term, Prec::NO_WRAP) { - concrete::Term::FunIntro(_, params, term_body) => (params, *term_body), - term_body => (vec![], term_body), - }; - - items.push(concrete::Item::Definition { - name: (ByteIndex::default(), next_name), - params: term_params, - return_ann: None, - body: term_body, - }); - }, - } - } - - body = next_body; - } - - parens_if( - Prec::LAM < prec, - concrete::Term::Let( - ByteIndex::default(), - items, - Box::new(resugar_term(&env, &body, Prec::NO_WRAP)), - ), - ) -} - -fn resugar_term(env: &ResugarEnv, term: &core::Term, prec: Prec) -> concrete::Term { - match *term { - core::Term::Ann(ref term, ref ty) => parens_if( - Prec::ANN < prec, - concrete::Term::Ann( - Box::new(resugar_term(env, term, Prec::LAM)), - Box::new(resugar_term(env, ty, Prec::ANN)), - ), - ), - core::Term::Universe(level) => { - let level = match level { - Level(0) => None, - Level(level) => Some(level), - }; - - parens_if( - Prec::APP < prec && level.is_some(), - concrete::Term::Universe(ByteSpan::default(), level), - ) - }, - core::Term::Literal(ref 
literal) => { - use pikelet_core::syntax::Literal; - - use crate::syntax::concrete::Literal::*; - use crate::syntax::concrete::Term; - - let span = ByteSpan::default(); - - match *literal { - // FIXME: Draw these names from some environment? - Literal::Bool(true) => Term::Name(span, "true".to_owned(), None), - Literal::Bool(false) => Term::Name(span, "false".to_owned(), None), - - Literal::String(ref val) => Term::Literal(String(span, val.clone())), - Literal::Char(val) => Term::Literal(Char(span, val)), - - Literal::U8(val) => Term::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U16(val) => Term::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U32(val) => Term::Literal(Int(span, u64::from(val), IntFormat::Dec)), - Literal::U64(val) => Term::Literal(Int(span, val, IntFormat::Dec)), - - // FIXME: Underflow for negative numbers - Literal::S8(val) => Term::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S16(val) => Term::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S32(val) => Term::Literal(Int(span, val as u64, IntFormat::Dec)), - Literal::S64(val) => Term::Literal(Int(span, val as u64, IntFormat::Dec)), - - Literal::F32(val) => Term::Literal(Float(span, f64::from(val), FloatFormat::Dec)), - Literal::F64(val) => Term::Literal(Float(span, val, FloatFormat::Dec)), - } - }, - core::Term::Var(Var::Free(ref free_var), shift) => { - let shift = match shift { - LevelShift(0) => None, - LevelShift(shift) => Some(shift), - }; - - let name = env.on_free_var(free_var); - concrete::Term::Name(ByteSpan::default(), name, shift) - }, - core::Term::Var(Var::Bound(_), _) => { - // TODO: Better message - panic!("Tried to convert a term that was not locally closed"); - }, - core::Term::Import(ref name) => parens_if( - Prec::LAM < prec, - concrete::Term::Import(ByteSpan::default(), ByteSpan::default(), name.clone()), - ), - core::Term::FunType(ref scope) => resugar_fun_ty(env, scope, prec), - core::Term::FunIntro(ref scope) => 
resugar_fun_intro(env, scope, prec), - core::Term::FunApp(ref head, ref arg) => parens_if( - Prec::APP < prec, - concrete::Term::FunApp( - Box::new(resugar_term(env, head, Prec::NO_WRAP)), - vec![resugar_term(env, arg, Prec::NO_WRAP)], // TODO - ), - ), - core::Term::Let(ref scope) => resugar_let(env, scope, prec), - core::Term::RecordType(ref scope) => { - let mut env = env.clone(); - let (scope, ()) = scope.clone().unbind(); - - let fields = scope - .unnest() - .into_iter() - .map(|(label, binder, Embed(ann))| { - let ann = resugar_term(&env, &ann, Prec::NO_WRAP); - let name = env.on_item(&label, &binder); - - concrete::RecordTypeField { - label: (ByteIndex::default(), label.0.clone()), - binder: match binder.0.pretty_name { - Some(ref pretty_name) if *pretty_name == name => None, - None | Some(_) => Some((ByteIndex::default(), name)), - }, - ann, - } - }) - .collect(); - - concrete::Term::RecordType(ByteSpan::default(), fields) - }, - core::Term::RecordIntro(ref fields) => { - let fields = fields - .iter() - .map(|&(ref label, ref term)| { - let (term_params, term_body) = match resugar_term(env, &term, Prec::NO_WRAP) { - concrete::Term::FunIntro(_, params, term_body) => (params, *term_body), - term_body => (vec![], term_body), - }; - - // TODO: use a punned label if possible? - concrete::RecordIntroField::Explicit { - label: (ByteIndex::default(), label.0.clone()), - params: term_params, - return_ann: None, - term: term_body, - } - }) - .collect(); - - // TODO: Add let to rename shadowed globals? - concrete::Term::RecordIntro(ByteSpan::default(), fields) - }, - core::Term::RecordProj(ref expr, Label(ref label), shift) => { - let shift = match shift { - LevelShift(0) => None, - LevelShift(shift) => Some(shift), - }; - - concrete::Term::RecordProj( - ByteSpan::default(), - Box::new(resugar_term(env, expr, Prec::ATOMIC)), - ByteIndex::default(), - label.clone(), - shift, - ) - }, - // TODO: Resugar boolean patterns into if-then-else expressions? 
- core::Term::Case(ref head, ref clauses) => concrete::Term::Case( - ByteSpan::default(), - Box::new(resugar_term(env, head, Prec::NO_WRAP)), - clauses - .iter() - .map(|scope| { - let (pattern, term) = scope.clone().unbind(); - let mut env = env.clone(); - ( - resugar_pattern(&mut env, &pattern, Prec::NO_WRAP), - resugar_term(&env, &term, Prec::NO_WRAP), - ) - }) - .collect(), - ), - core::Term::ArrayIntro(ref elems) => concrete::Term::ArrayIntro( - ByteSpan::default(), - elems - .iter() - .map(|elem| resugar_term(env, elem, Prec::NO_WRAP)) - .collect(), - ), - } -} - -impl Resugar for core::Term { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - resugar_term(env, self, Prec::NO_WRAP) - } -} - -impl Resugar for domain::Value { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - // FIXME: Make this more efficient? - resugar_term(env, &core::Term::from(self), Prec::NO_WRAP) - } -} - -impl Resugar for domain::Neutral { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - // FIXME: Make this more efficient? - resugar_term(env, &core::Term::from(self), Prec::NO_WRAP) - } -} - -impl Resugar for core::RcTerm { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - self.inner.resugar(env) - } -} - -impl Resugar for domain::RcValue { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - self.inner.resugar(env) - } -} - -impl Resugar for domain::RcNeutral { - fn resugar(&self, env: &ResugarEnv) -> concrete::Term { - self.inner.resugar(env) - } -} diff --git a/crates/pikelet-concrete/src/syntax/concrete.rs b/crates/pikelet-concrete/src/syntax/concrete.rs deleted file mode 100644 index c9a3e538c..000000000 --- a/crates/pikelet-concrete/src/syntax/concrete.rs +++ /dev/null @@ -1,620 +0,0 @@ -//! 
The concrete syntax of the language - -use codespan::{ByteIndex, ByteSpan}; -use pretty::{BoxDoc, Doc}; -use std::fmt; - -use crate::syntax::{FloatFormat, IntFormat, PRETTY_FALLBACK_WIDTH, PRETTY_INDENT_WIDTH}; - -/// A group of lambda parameters that share an annotation -pub type FunIntroParamGroup = (Vec<(ByteIndex, String)>, Option>); - -/// The parameters to a lambda abstraction -pub type FunIntroParams = Vec; - -/// A group of parameters to a dependent function that share an annotation -pub type FunTypeParamGroup = (Vec<(ByteIndex, String)>, Term); - -/// The parameters to a dependent function type -pub type FunTypeParams = Vec; - -#[derive(Debug, Clone, PartialEq)] -pub struct RecordTypeField { - pub label: (ByteIndex, String), - pub binder: Option<(ByteIndex, String)>, - pub ann: Term, -} - -#[derive(Debug, Clone, PartialEq)] -pub enum RecordIntroField { - Punned { - label: (ByteIndex, String), - shift: Option, - }, - Explicit { - label: (ByteIndex, String), - params: FunIntroParams, - return_ann: Option>, - term: Term, - }, -} - -/// Top-level items within a module -#[derive(Debug, Clone, PartialEq)] -pub enum Item { - /// Declares the type associated with a name, prior to its definition - /// - /// ```text - /// foo : some-type - /// ``` - Declaration { - name: (ByteIndex, String), - ann: Term, - }, - /// Defines the term that should be associated with a name - /// - /// ```text - /// foo = some-body - /// foo x (y : some-type) = some-body - /// ``` - Definition { - name: (ByteIndex, String), - params: FunIntroParams, - return_ann: Option>, - body: Term, - }, - /// Items that could not be correctly parsed - /// - /// This is used for error recovery - Error(ByteSpan), -} - -impl Item { - /// Return the span of source code that this declaration originated from - pub fn span(&self) -> ByteSpan { - match *self { - Item::Definition { - name: (start, _), - body: ref term, - .. 
- } - | Item::Declaration { - name: (start, _), - ann: ref term, - } => ByteSpan::new(start, term.span().end()), - Item::Error(span) => span, - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Item::Declaration { - name: (_, ref name), - ref ann, - .. - } => Doc::as_string(name) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ann.to_doc()), - Item::Definition { - name: (_, ref name), - ref params, - ref return_ann, - ref body, - } => Doc::as_string(name) - .append(Doc::space()) - .append(match params[..] { - [] => Doc::nil(), - _ => pretty_fun_intro_params(params).append(Doc::space()), - }) - .append(return_ann.as_ref().map_or(Doc::nil(), |return_ann| { - Doc::text(":") - .append(return_ann.to_doc()) - .append(Doc::space()) - })) - .append("=") - .append(Doc::space()) - .append(body.to_doc().nest(PRETTY_INDENT_WIDTH)), - Item::Error(_) => Doc::text(""), - } - .append(";") - } -} - -impl fmt::Display for Item { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// Literals -#[derive(Debug, Clone, PartialEq)] -pub enum Literal { - /// String literals - // TODO: Preserve escapes? - String(ByteSpan, String), - /// Character literals - // TODO: Preserve escapes? - Char(ByteSpan, char), - /// Integer literals - // TODO: Preserve digit separators? - Int(ByteSpan, u64, IntFormat), - /// Floating point literals - // TODO: Preserve digit separators? 
- Float(ByteSpan, f64, FloatFormat), -} - -impl Literal { - /// Return the span of source code that the literal originated from - pub fn span(&self) -> ByteSpan { - match *self { - Literal::String(span, _) - | Literal::Char(span, _) - | Literal::Int(span, _, _) - | Literal::Float(span, _, _) => span, - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Literal::String(_, ref value) => Doc::text(format!("{:?}", value)), - Literal::Char(_, value) => Doc::text(format!("{:?}", value)), - Literal::Int(_, value, IntFormat::Bin) => Doc::text(format!("0b{:b}", value)), - Literal::Int(_, value, IntFormat::Oct) => Doc::text(format!("0o{:o}", value)), - Literal::Int(_, value, IntFormat::Dec) => Doc::text(format!("{}", value)), - Literal::Int(_, value, IntFormat::Hex) => Doc::text(format!("0x{:x}", value)), - Literal::Float(_, value, FloatFormat::Dec) => Doc::text(format!("{}", value)), - } - } -} - -/// Patterns -#[derive(Debug, Clone, PartialEq)] -pub enum Pattern { - /// A term that is surrounded with parentheses - /// - /// ```text - /// (p) - /// ``` - Parens(ByteSpan, Box), - /// Patterns annotated with types - /// - /// ```text - /// p : t - /// ``` - Ann(Box, Box), - /// Literal patterns - Literal(Literal), - /// Patterns that either introduce bound variables, or match by structural - /// equality with a constant in-scope - /// - /// ```text - /// x - /// true - /// false - /// ``` - Name(ByteSpan, String, Option), - /// Terms that could not be correctly parsed - /// - /// This is used for error recovery - Error(ByteSpan), -} - -impl Pattern { - /// Return the span of source code that this pattern originated from - pub fn span(&self) -> ByteSpan { - match *self { - Pattern::Parens(span, _) | Pattern::Name(span, _, _) | Pattern::Error(span) => span, - Pattern::Ann(ref pattern, ref ty) => pattern.span().to(ty.span()), - Pattern::Literal(ref literal) => literal.span(), - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Pattern::Parens(_, ref term) => 
Doc::text("(").append(term.to_doc()).append(")"), - Pattern::Ann(ref term, ref ty) => term - .to_doc() - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc()), - Pattern::Name(_, ref name, None) => Doc::text(format!("{}", name)), - Pattern::Name(_, ref name, Some(shift)) => Doc::text(format!("{}^{}", name, shift)), - Pattern::Literal(ref literal) => literal.to_doc(), - Pattern::Error(_) => Doc::text(""), - } - } -} - -/// Terms -#[derive(Debug, Clone, PartialEq)] -pub enum Term { - /// A term that is surrounded with parentheses - /// - /// ```text - /// (e) - /// ``` - Parens(ByteSpan, Box), - /// A term annotated with a type - /// - /// ```text - /// e : t - /// ``` - Ann(Box, Box), - /// Type of types - /// - /// ```text - /// Type - /// ``` - Universe(ByteSpan, Option), - /// Literals - Literal(Literal), - /// Array literals - ArrayIntro(ByteSpan, Vec), - /// Holes - /// - /// ```text - /// _ - /// ``` - Hole(ByteSpan), - /// Names - /// - /// ```text - /// x - /// x^1 - /// ``` - Name(ByteSpan, String, Option), - /// An imported definition - /// - /// ```text - /// import "prelude" - /// ``` - Import(ByteSpan, ByteSpan, String), - /// Dependent function type - /// - /// ```text - /// (x : t1) -> t2 - /// (x y : t1) -> t2 - /// ``` - FunType(ByteIndex, FunTypeParams, Box), - /// Non-Dependent function type - /// - /// ```text - /// t1 -> t2 - /// ``` - FunArrow(Box, Box), - /// Function introduction - /// - /// ```text - /// \x => t - /// \x y => t - /// \x : t1 => t2 - /// \(x : t1) y (z : t2) => t3 - /// \(x y : t1) => t3 - /// ``` - FunIntro(ByteIndex, FunIntroParams, Box), - /// Function application - /// - /// ```text - /// e1 e2 - /// ``` - FunApp(Box, Vec), - /// Let binding - /// - /// ```text - /// let x : S32 - /// x = 1 - /// in - /// x - /// ``` - Let(ByteIndex, Vec, Box), - /// Where expressions - /// - /// ```text - /// id "hello" - /// where { - /// id : (A : Type) -> A -> A; - /// id A x = x; - /// } - /// ``` - 
Where(Box, Vec, ByteIndex), - /// If expression - /// - /// ```text - /// if t1 then t2 else t3 - /// ``` - If(ByteIndex, Box, Box, Box), - /// Case expression - /// - /// ```text - /// case t1 { pat => t2; .. } - /// ``` - Case(ByteSpan, Box, Vec<(Pattern, Term)>), - /// Record type - /// - /// ```text - /// Record { x : t1, .. } - /// ``` - RecordType(ByteSpan, Vec), - /// Record introduction - /// - /// ```text - /// record { x = t1, .. } - /// record { id (a : Type) (x : a) : a = x, .. } - /// ``` - RecordIntro(ByteSpan, Vec), - /// Record field projection - /// - /// ```text - /// e.l - /// e.l^1 - /// ``` - RecordProj(ByteSpan, Box, ByteIndex, String, Option), - /// Terms that could not be correctly parsed - /// - /// This is used for error recovery - Error(ByteSpan), -} - -impl Term { - /// Return the span of source code that this term originated from - pub fn span(&self) -> ByteSpan { - match *self { - Term::Parens(span, ..) - | Term::Universe(span, ..) - | Term::Hole(span) - | Term::Name(span, ..) - | Term::Import(span, ..) - | Term::Case(span, ..) - | Term::RecordType(span, ..) - | Term::RecordIntro(span, ..) - | Term::RecordProj(span, ..) - | Term::ArrayIntro(span, ..) 
- | Term::Error(span) => span, - Term::Literal(ref literal) => literal.span(), - Term::FunType(start, _, ref body) - | Term::FunIntro(start, _, ref body) - | Term::Let(start, _, ref body) - | Term::If(start, _, _, ref body) => ByteSpan::new(start, body.span().end()), - Term::Where(ref expr, _, end) => ByteSpan::new(expr.span().start(), end), - Term::Ann(ref term, ref ty) => term.span().to(ty.span()), - Term::FunArrow(ref ann, ref body) => ann.span().to(body.span()), - Term::FunApp(ref head, ref arg) => head.span().to(arg.last().unwrap().span()), - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Term::Parens(_, ref term) => Doc::text("(").append(term.to_doc()).append(")"), - Term::Ann(ref term, ref ty) => Doc::nil() - .append(term.to_doc()) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc()), - Term::Universe(_, None) => Doc::text("Type"), - Term::Universe(_, Some(level)) => Doc::text(format!("Type^{}", level)), - Term::Literal(ref literal) => literal.to_doc(), - Term::ArrayIntro(_, ref elems) => Doc::nil() - .append("[") - .append(Doc::intersperse( - elems.iter().map(Term::to_doc), - Doc::text(";").append(Doc::space()), - )) - .append("]"), - Term::Hole(_) => Doc::text("_"), - Term::Name(_, ref name, None) => Doc::text(format!("{}", name)), - Term::Name(_, ref name, Some(shift)) => Doc::text(format!("{}^{}", name, shift)), - Term::Import(_, _, ref name) => Doc::nil() - .append("import") - .append(Doc::space()) - .append(format!("{:?}", name)), - Term::FunIntro(_, ref params, ref body) => Doc::nil() - .append("\\") - .append(pretty_fun_intro_params(params)) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(body.to_doc()), - Term::FunType(_, ref params, ref body) => Doc::nil() - .append(pretty_fun_ty_params(params)) - .append(Doc::space()) - .append("->") - .append(Doc::space()) - .append(body.to_doc()), - Term::FunArrow(ref ann, ref body) => Doc::nil() - .append(ann.to_doc()) - 
.append(Doc::space()) - .append("->") - .append(Doc::space()) - .append(body.to_doc()), - Term::FunApp(ref head, ref args) => head.to_doc().append(Doc::space()).append( - Doc::intersperse(args.iter().map(|arg| arg.to_doc()), Doc::space()), - ), - Term::Let(_, ref items, ref body) => { - Doc::nil() - .append("let") - .append(Doc::space()) - .append(Doc::intersperse( - // FIXME: Indentation - items.iter().map(|item| item.to_doc()), - Doc::newline(), - )) - .append("in") - .append(body.to_doc()) - }, - Term::Where(ref expr, ref items, _) => Doc::nil() - .append(expr.to_doc()) - .append(Doc::newline()) - .append("where {") - .append(Doc::newline()) - .append(Doc::intersperse( - items.iter().map(|item| item.to_doc().group()), - Doc::newline(), - )) - .append(Doc::newline()) - .nest(PRETTY_INDENT_WIDTH) - .append("}"), - Term::If(_, ref cond, ref if_true, ref if_false) => Doc::nil() - .append("if") - .append(Doc::space()) - .append(cond.to_doc()) - .append(Doc::space()) - .append("then") - .append(Doc::space()) - .append(if_true.to_doc()) - .append(Doc::space()) - .append("else") - .append(Doc::space()) - .append(if_false.to_doc()), - Term::Case(_, ref head, ref clauses) => Doc::nil() - .append("case") - .append(Doc::space()) - .append(head.to_doc()) - .append(Doc::space()) - .append("of") - .append(Doc::space()) - .append("{") - .append(Doc::newline()) - .append(Doc::intersperse( - clauses.iter().map(|&(ref pattern, ref body)| { - Doc::nil() - .append(pattern.to_doc()) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(body.to_doc()) - .append(";") - }), - Doc::newline(), - )) - .append(Doc::newline()) - .nest(PRETTY_INDENT_WIDTH) - .append("}"), - Term::RecordType(_, ref fields) if fields.is_empty() => Doc::text("Record {}"), - Term::RecordIntro(_, ref fields) if fields.is_empty() => Doc::text("record {}"), - Term::RecordType(_, ref fields) => Doc::nil() - .append("Record {") - .append(Doc::space()) - .append(Doc::intersperse( - 
fields.iter().map(|field| { - Doc::group( - Doc::nil() - .append(Doc::as_string(&field.label.1)) - .append(match field.binder { - Some((_, ref binder)) => Doc::space() - .append("as") - .append(Doc::space()) - .append(Doc::as_string(binder)), - None => Doc::nil(), - }) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(field.ann.to_doc()), - ) - }), - Doc::text(";").append(Doc::space()), - )) - .nest(PRETTY_INDENT_WIDTH) - .append(Doc::space()) - .append("}"), - Term::RecordIntro(_, ref fields) => Doc::nil() - .append("record {") - .append(Doc::space()) - .append(Doc::intersperse( - fields.iter().map(|field| match field { - RecordIntroField::Punned { - label: (_, ref label), - shift, - } => match shift { - None => Doc::text(format!("{}", label)), - Some(shift) => Doc::text(format!("{}^{}", label, shift)), - }, - RecordIntroField::Explicit { - label: (_, ref label), - ref params, - ref return_ann, - ref term, - } => Doc::group( - Doc::nil() - .append(Doc::as_string(label)) - .append(Doc::space()) - .append(match params[..] 
{ - [] => Doc::nil(), - _ => pretty_fun_intro_params(params).append(Doc::space()), - }) - .append(return_ann.as_ref().map_or(Doc::nil(), |return_ann| { - Doc::text(":") - .append(return_ann.to_doc()) - .append(Doc::space()) - })) - .append("=") - .append(Doc::space()) - .append(term.to_doc()), - ), - }), - Doc::text(";").append(Doc::space()), - )) - .nest(PRETTY_INDENT_WIDTH) - .append(Doc::space()) - .append("}"), - Term::RecordProj(_, ref expr, _, ref label, None) => { - expr.to_doc().append(".").append(format!("{}", label)) - }, - Term::RecordProj(_, ref expr, _, ref label, Some(shift)) => Doc::nil() - .append(expr.to_doc()) - .append(".") - .append(format!("{}^{}", label, shift)), - Term::Error(_) => Doc::text(""), - } - } -} - -impl fmt::Display for Term { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -fn pretty_fun_intro_params(params: &[FunIntroParamGroup]) -> Doc> { - Doc::intersperse( - params.iter().map(|&(ref names, ref ann)| match *ann { - None if names.len() == 1 => Doc::as_string(&names[0].1), - None => unreachable!(), // FIXME - shouldn't be possible in AST - Some(ref ann) => Doc::nil() - .append("(") - .append(Doc::intersperse( - names.iter().map(|name| Doc::as_string(&name.1)), - Doc::space(), - )) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ann.to_doc()) - .append(")"), - }), - Doc::space(), - ) -} - -fn pretty_fun_ty_params(params: &[FunTypeParamGroup]) -> Doc> { - Doc::intersperse( - params.iter().map(|&(ref names, ref ann)| { - Doc::nil() - .append("(") - .append(Doc::intersperse( - names.iter().map(|name| Doc::as_string(&name.1)), - Doc::space(), - )) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ann.to_doc()) - .append(")") - }), - Doc::space(), - ) -} diff --git a/crates/pikelet-concrete/src/syntax/mod.rs b/crates/pikelet-concrete/src/syntax/mod.rs deleted file mode 100644 index f07332765..000000000 --- 
a/crates/pikelet-concrete/src/syntax/mod.rs +++ /dev/null @@ -1,75 +0,0 @@ -use moniker::{Binder, BoundPattern, BoundTerm, OnBoundFn, OnFreeFn, ScopeState, Var}; - -pub mod concrete; -pub mod raw; - -const PRETTY_INDENT_WIDTH: usize = 4; - -/// An effectively 'infinite' line length for when we don't have an explicit -/// width provided for pretty printing. -/// -/// `pretty.rs` seems to bug-out and break on every line when using -/// `usize::MAX`, so we'll just use a really big number instead... -pub const PRETTY_FALLBACK_WIDTH: usize = 1_000_000; - -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub enum IntFormat { - Bin, - Oct, - Dec, - Hex, -} - -impl BoundTerm for IntFormat { - fn term_eq(&self, _: &IntFormat) -> bool { - true - } - - fn close_term(&mut self, _: ScopeState, _: &impl OnFreeFn) {} - fn open_term(&mut self, _: ScopeState, _: &impl OnBoundFn) {} - fn visit_vars(&self, _: &mut impl FnMut(&Var)) {} - fn visit_mut_vars(&mut self, _: &mut impl FnMut(&mut Var)) {} -} - -impl BoundPattern for IntFormat { - fn pattern_eq(&self, _: &IntFormat) -> bool { - true - } - - fn close_pattern(&mut self, _: ScopeState, _: &impl OnFreeFn) {} - fn open_pattern(&mut self, _: ScopeState, _: &impl OnBoundFn) {} - fn visit_vars(&self, _: &mut impl FnMut(&Var)) {} - fn visit_mut_vars(&mut self, _: &mut impl FnMut(&mut Var)) {} - fn visit_binders(&self, _: &mut impl FnMut(&Binder)) {} - fn visit_mut_binders(&mut self, _: &mut impl FnMut(&mut Binder)) {} -} - -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub enum FloatFormat { - Dec, - // TODO: Binary and Hex floats? 
-} - -impl BoundTerm for FloatFormat { - fn term_eq(&self, _: &FloatFormat) -> bool { - true - } - - fn close_term(&mut self, _: ScopeState, _: &impl OnFreeFn) {} - fn open_term(&mut self, _: ScopeState, _: &impl OnBoundFn) {} - fn visit_vars(&self, _: &mut impl FnMut(&Var)) {} - fn visit_mut_vars(&mut self, _: &mut impl FnMut(&mut Var)) {} -} - -impl BoundPattern for FloatFormat { - fn pattern_eq(&self, _: &FloatFormat) -> bool { - true - } - - fn close_pattern(&mut self, _: ScopeState, _: &impl OnFreeFn) {} - fn open_pattern(&mut self, _: ScopeState, _: &impl OnBoundFn) {} - fn visit_vars(&self, _: &mut impl FnMut(&Var)) {} - fn visit_mut_vars(&mut self, _: &mut impl FnMut(&mut Var)) {} - fn visit_binders(&self, _: &mut impl FnMut(&Binder)) {} - fn visit_mut_binders(&mut self, _: &mut impl FnMut(&mut Binder)) {} -} diff --git a/crates/pikelet-concrete/src/syntax/raw.rs b/crates/pikelet-concrete/src/syntax/raw.rs deleted file mode 100644 index 40f27d295..000000000 --- a/crates/pikelet-concrete/src/syntax/raw.rs +++ /dev/null @@ -1,390 +0,0 @@ -//! The syntax of the language, unchecked and with implicit parts that need to -//! be elaborated in a type-directed way during type checking and inference - -use codespan::ByteSpan; -use moniker::{Binder, BoundPattern, BoundTerm, Embed, Nest, Scope, Var}; -use pretty::{BoxDoc, Doc}; -use std::fmt; -use std::ops; -use std::rc::Rc; - -use pikelet_core::syntax::{Label, Level, LevelShift}; - -use crate::syntax::{FloatFormat, IntFormat, PRETTY_FALLBACK_WIDTH}; - -/// Literals -#[derive(Debug, Clone, PartialEq, PartialOrd, BoundTerm, BoundPattern)] -pub enum Literal { - String(ByteSpan, String), - Char(ByteSpan, char), - Int(ByteSpan, u64, IntFormat), - Float(ByteSpan, f64, FloatFormat), -} - -impl Literal { - /// Return the span of source code that the literal originated from - pub fn span(&self) -> ByteSpan { - match *self { - Literal::String(span, ..) - | Literal::Char(span, ..) - | Literal::Int(span, ..) 
- | Literal::Float(span, ..) => span, - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Literal::String(_, ref value) => Doc::text(format!("{:?}", value)), - Literal::Char(_, value) => Doc::text(format!("{:?}", value)), - Literal::Int(_, value, IntFormat::Bin) => Doc::text(format!("0b{:b}", value)), - Literal::Int(_, value, IntFormat::Oct) => Doc::text(format!("0o{:o}", value)), - Literal::Int(_, value, IntFormat::Dec) => Doc::text(format!("{}", value)), - Literal::Int(_, value, IntFormat::Hex) => Doc::text(format!("0x{:x}", value)), - Literal::Float(_, value, FloatFormat::Dec) => Doc::text(format!("{}", value)), - } - } -} - -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -#[derive(Debug, Clone, PartialEq, BoundPattern)] -pub enum Pattern { - /// Patterns annotated with types - Ann(RcPattern, Embed), - /// Patterns that bind variables - Binder(ByteSpan, Binder), - /// Patterns to be compared structurally with a variable in scope - Var(ByteSpan, Embed>, LevelShift), - /// Literal patterns - Literal(Literal), -} - -impl Pattern { - /// Return the span of source code that this pattern originated from - pub fn span(&self) -> ByteSpan { - match *self { - Pattern::Ann(ref pattern, Embed(ref ty)) => pattern.span().to(ty.span()), - Pattern::Var(span, _, _) | Pattern::Binder(span, _) => span, - Pattern::Literal(ref literal) => literal.span(), - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Pattern::Ann(ref pattern, Embed(ref ty)) => Doc::nil() - .append(pattern.to_doc()) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc_expr()), - ref pattern => pattern.to_doc_atomic(), - } - } - - fn to_doc_atomic(&self) -> Doc> { - match *self { - Pattern::Binder(_, ref binder) => Doc::as_string(binder), - Pattern::Var(_, Embed(ref var), shift) => Doc::as_string(format!("{}^{}", var, shift)), - Pattern::Literal(ref 
literal) => literal.to_doc(), - ref pattern => Doc::text("(").append(pattern.to_doc()).append(")"), - } - } -} - -impl fmt::Display for Pattern { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// Reference counted patterns -#[derive(Debug, Clone, PartialEq, BoundPattern)] -pub struct RcPattern { - pub inner: Rc, -} - -impl From for RcPattern { - fn from(src: Pattern) -> RcPattern { - RcPattern { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcPattern { - type Target = Pattern; - - fn deref(&self) -> &Pattern { - &self.inner - } -} - -impl fmt::Display for RcPattern { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.inner, f) - } -} - -/// Terms, unchecked and with implicit syntax that needs to be elaborated -/// -/// For now the only implicit syntax we have is holes and lambdas that lack a -/// type annotation. -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub enum Term { - /// A term annotated with a type - Ann(RcTerm, RcTerm), - /// Universes - Universe(ByteSpan, Level), - /// Literals - Literal(Literal), - /// A hole - Hole(ByteSpan), - /// A variable - Var(ByteSpan, Var, LevelShift), - /// An imported definition - Import(ByteSpan, ByteSpan, String), - /// Dependent function types - FunType(ByteSpan, Scope<(Binder, Embed), RcTerm>), - /// Function introductions - FunIntro(ByteSpan, Scope<(Binder, Embed), RcTerm>), - /// Function application - FunApp(RcTerm, RcTerm), - /// Dependent record types - RecordType( - ByteSpan, - Scope, Embed)>, ()>, - ), - /// Record introductions - RecordIntro(ByteSpan, Vec<(Label, RcTerm)>), - /// Record field projection - RecordProj(ByteSpan, RcTerm, ByteSpan, Label, LevelShift), - /// Case expressions - Case(ByteSpan, RcTerm, Vec>), - /// Array literals - ArrayIntro(ByteSpan, Vec), - /// Let bindings - Let( - ByteSpan, - Scope, Embed)>, RcTerm>, - ), -} - -impl Term { - pub fn span(&self) -> ByteSpan { - 
match *self { - Term::Universe(span, ..) - | Term::Hole(span) - | Term::Var(span, ..) - | Term::Import(span, ..) - | Term::FunType(span, ..) - | Term::FunIntro(span, ..) - | Term::RecordType(span, ..) - | Term::RecordIntro(span, ..) - | Term::RecordProj(span, ..) - | Term::Case(span, ..) - | Term::ArrayIntro(span, ..) - | Term::Let(span, ..) => span, - Term::Literal(ref literal) => literal.span(), - Term::Ann(ref expr, ref ty) => expr.span().to(ty.span()), - Term::FunApp(ref head, ref arg) => head.span().to(arg.span()), - } - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Term::Ann(ref term, ref ty) => Doc::nil() - .append(term.to_doc_expr()) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc_expr()), - ref term => term.to_doc_expr(), - } - } - - fn to_doc_expr(&self) -> Doc> { - match *self { - Term::Import(_, _, ref name) => Doc::nil() - .append("import") - .append(Doc::space()) - .append(format!("{:?}", name)), - Term::FunIntro(_, ref scope) => Doc::nil() - .append("\\") - .append(Doc::as_string(&scope.unsafe_pattern.0)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append((scope.unsafe_pattern.1).0.to_doc_arrow()) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - Term::Case(_, ref head, ref clauses) => Doc::nil() - .append("case") - .append(Doc::space()) - .append(head.to_doc_app()) - .append(Doc::space()) - .append("{") - .append(Doc::space()) - .append(Doc::intersperse( - clauses.iter().map(|scope| { - Doc::nil() - .append(scope.unsafe_pattern.to_doc()) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc()) - .append(";") - }), - Doc::newline(), - )) - .append(Doc::space()) - .append("}"), - Term::Let(_, ref scope) => Doc::nil() - .append("let") - .append(Doc::space()) - .append(Doc::intersperse( - scope.unsafe_pattern.unsafe_patterns.iter().map( - |&(ref binder, Embed(ref term))| { - Doc::nil() 
- .append(Doc::as_string(binder)) - .append(Doc::space()) - .append("=") - .append(Doc::space()) - .append(term.to_doc()) - }, - ), - Doc::newline(), - )) - .append(Doc::space()) - .append("in") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - ref term => term.to_doc_arrow(), - } - } - - fn to_doc_arrow(&self) -> Doc> { - match *self { - Term::FunType(_, ref scope) => Doc::nil() - .append("(") - .append(Doc::as_string(&scope.unsafe_pattern.0)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append((scope.unsafe_pattern.1).0.to_doc_arrow()) - .append(")") - .append(Doc::space()) - .append("->") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - ref term => term.to_doc_app(), - } - } - - fn to_doc_app(&self) -> Doc> { - match *self { - Term::FunApp(ref fun, ref arg) => Doc::nil() - .append(fun.to_doc_atomic()) - .append(Doc::space()) - .append(arg.to_doc_atomic()), - ref term => term.to_doc_atomic(), - } - } - - fn to_doc_atomic(&self) -> Doc> { - match *self { - Term::Universe(_, level) => Doc::text(format!("Type^{}", level)), - Term::ArrayIntro(_, ref elems) => Doc::nil() - .append("[") - .append(Doc::intersperse( - elems.iter().map(|elem| elem.to_doc()), - Doc::text(";").append(Doc::space()), - )) - .append("]"), - Term::Var(_, ref var, ref level) => Doc::text(format!("{}^{}", var, level)), - Term::Hole(_) => Doc::text("_"), - Term::RecordType(_, ref scope) => Doc::nil() - .append("Record {") - .append(Doc::space()) - .append(Doc::intersperse( - scope.unsafe_pattern.unsafe_patterns.iter().map( - |&(ref label, ref binder, Embed(ref ann))| { - Doc::nil() - .append(Doc::as_string(label)) - .append(Doc::space()) - .append("as") - .append(Doc::space()) - .append(Doc::as_string(binder)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ann.to_doc()) - }, - ), - Doc::text(";").append(Doc::space()), - )) - .append(Doc::space()) - .append("}"), - Term::RecordIntro(_, ref fields) => Doc::nil() 
- .append("record {") - .append(Doc::space()) - .append(Doc::intersperse( - fields.iter().map(|&(ref label, ref value)| { - Doc::nil() - .append(Doc::as_string(label)) - .append(Doc::space()) - .append("=") - .append(Doc::space()) - .append(value.to_doc()) - }), - Doc::text(";").append(Doc::space()), - )) - .append(Doc::space()) - .append("}"), - Term::RecordProj(_, ref expr, _, ref label, ref shift) => Doc::nil() - .append(expr.to_doc_atomic()) - .append(".") - .append(format!("{}^{}", label, shift)), - ref term => Doc::text("(").append(term.to_doc()).append(")"), - } - } -} - -impl fmt::Display for Term { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// Reference counted terms -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub struct RcTerm { - pub inner: Rc, -} - -impl From for RcTerm { - fn from(src: Term) -> RcTerm { - RcTerm { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcTerm { - type Target = Term; - - fn deref(&self) -> &Term { - &self.inner - } -} - -impl fmt::Display for RcTerm { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.inner, f) - } -} diff --git a/crates/pikelet-concrete/tests/check.rs b/crates/pikelet-concrete/tests/check.rs deleted file mode 100644 index f449b5f70..000000000 --- a/crates/pikelet-concrete/tests/check.rs +++ /dev/null @@ -1,215 +0,0 @@ -use codespan::CodeMap; - -use pikelet_concrete::desugar::{Desugar, DesugarEnv}; -use pikelet_concrete::elaborate::{self, Context, TypeError}; - -mod support; - -#[test] -fn record_intro() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Record { t : Type; x : String }"; - let given_expr = r#"record { t = String; x = "hello" }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn 
record_intro_field_mismatch_lt() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let expected_ty = r"Record { x : String; y : String }"; - let given_expr = r#"record { x = "hello" }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::check_term(&context, &raw_term, &expected_ty) { - Err(TypeError::RecordSizeMismatch { .. }) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok(term) => panic!("expected error but found: {}", term), - } -} - -#[test] -fn record_intro_field_mismatch_gt() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let expected_ty = r"Record { x : String }"; - let given_expr = r#"record { x = "hello"; y = "hello" }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::check_term(&context, &raw_term, &expected_ty) { - Err(TypeError::RecordSizeMismatch { .. 
}) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok(term) => panic!("expected error but found: {}", term), - } -} - -#[test] -fn record_intro_dependent_record_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Record { t : Type; x : t }"; - let given_expr = r#"record { t = String; x = "hello" }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn record_intro_dependent_record_ty_propagate_types() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Record { t : Type; x : t }"; - let given_expr = r#"record { t = S32; x = 1 }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn case_expr() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"case "helloo" { - "hi" => "haha"; - "hello" => "byee"; - greeting => (import "prim/string/append") greeting "!!"; - }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn case_expr_bad_literal() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let expected_ty = r"String"; - let given_expr = r#"case "helloo" { - "hi" => "haha"; - 1 => "byee"; - }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::check_term(&context, &raw_term, &expected_ty) { - Err(TypeError::LiteralMismatch { .. 
}) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok(term) => panic!("expected error but found: {}", term), - } -} - -#[test] -fn case_expr_wildcard() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"S32"; - let given_expr = r#"case "helloo" { - _ => 123; - }"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn case_expr_empty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"case "helloo" {}"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn array_intro_0_string() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Array 0 String"; - let given_expr = r#"[]"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn array_intro_3_string() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Array 3 String"; - let given_expr = r#"["hello"; "hi"; "byee"]"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - support::parse_check_term(&mut codemap, &context, given_expr, &expected_ty); -} - -#[test] -fn array_intro_len_mismatch() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let expected_ty = r"Array 3 String"; - let given_expr = r#"["hello"; "hi"]"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - 
match elaborate::check_term(&context, &raw_term, &expected_ty) { - Err(TypeError::ArrayLengthMismatch { .. }) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok(term) => panic!("expected error but found: {}", term), - } -} - -#[test] -fn array_intro_elem_ty_mismatch() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let expected_ty = r"Array 3 String"; - let given_expr = r#"["hello"; "hi"; 4]"#; - - let expected_ty = support::parse_nf_term(&mut codemap, &context, expected_ty); - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::check_term(&context, &raw_term, &expected_ty) { - Err(_) => {}, - Ok(term) => panic!("expected error but found: {}", term), - } -} diff --git a/crates/pikelet-concrete/tests/desugar.rs b/crates/pikelet-concrete/tests/desugar.rs deleted file mode 100644 index 7b74e6ac4..000000000 --- a/crates/pikelet-concrete/tests/desugar.rs +++ /dev/null @@ -1,519 +0,0 @@ -use codespan::{ByteSpan, CodeMap, FileName}; -use codespan_reporting::termcolor::{ColorChoice, StandardStream}; -use goldenfile::Mint; -use moniker::{assert_term_eq, Binder, Embed, FreeVar, Scope, Var}; -use pretty_assertions::assert_eq; -use std::io::Write; - -use pikelet_concrete::desugar::{Desugar, DesugarEnv, DesugarError}; -use pikelet_concrete::parse; -use pikelet_concrete::syntax::raw::{RcTerm, Term}; -use pikelet_concrete::syntax::{concrete, raw}; -use pikelet_core::syntax::{Level, LevelShift}; - -fn golden(filename: &str, literal: &str) { - let path = "tests/goldenfiles"; - - let mut mint = Mint::new(path); - let mut file = mint.new_goldenfile(filename).unwrap(); - let env = DesugarEnv::new(im::HashMap::new()); - - let term = parse_desugar_term(&env, literal); - - write!(file, "{:#?}", term).unwrap(); -} - -fn parse_term(codemap: &mut CodeMap, src: &str) -> concrete::Term { - let filemap = 
codemap.add_filemap(FileName::virtual_("test"), src.into()); - let (concrete_term, _import_paths, errors) = parse::term(&filemap); - - if !errors.is_empty() { - let writer = StandardStream::stdout(ColorChoice::Always); - for error in errors { - codespan_reporting::emit(&mut writer.lock(), &codemap, &error.to_diagnostic()).unwrap(); - } - panic!("parse error!") - } - - concrete_term -} - -fn parse_desugar_term(env: &DesugarEnv, src: &str) -> raw::RcTerm { - let mut codemap = CodeMap::new(); - - match parse_term(&mut codemap, src).desugar(env) { - Ok(raw_term) => raw_term, - Err(error) => { - let writer = StandardStream::stdout(ColorChoice::Always); - codespan_reporting::emit(&mut writer.lock(), &codemap, &error.to_diagnostic()).unwrap(); - panic!("type error!"); - }, - } -} - -fn var(x: &FreeVar) -> RcTerm { - RcTerm::from(Term::Var( - ByteSpan::default(), - Var::Free(x.clone()), - LevelShift(0), - )) -} - -fn u0() -> RcTerm { - RcTerm::from(Term::Universe(ByteSpan::default(), Level(0))) -} - -#[test] -fn free_var() { - let env = DesugarEnv::new(im::HashMap::new()); - - match *parse_desugar_term(&env, r"or-elim").inner { - raw::Term::Var(_, Var::Free(ref free_var), LevelShift(0)) => { - assert_eq!(free_var.pretty_name, Some("or-elim".to_owned())); - }, - ref term => panic!("unexpected term: {}", term), - } -} - -#[test] -fn ty() { - golden("ty", r"Type"); -} - -#[test] -fn ty_level() { - golden("ty_level", r"Type^2"); -} - -#[test] -fn ann() { - golden("ann", r"Type : Type"); -} - -#[test] -fn ann_ann_left() { - golden("ann_ann_left", r"Type : Type : Type"); -} - -#[test] -fn ann_ann_right() { - golden("ann_ann_right", r"Type : (Type : Type)"); -} - -#[test] -fn ann_ann_ann() { - golden("ann_ann_ann", r"(Type : Type) : (Type : Type)"); -} - -#[test] -fn fun_intro_ann() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - - assert_term_eq!( - parse_desugar_term(&env, r"\x : Type -> Type => x"), - RcTerm::from(Term::FunIntro( - 
ByteSpan::default(), - Scope::new( - ( - Binder(x.clone()), - Embed(RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(u0())), u0()), - ))), - ), - var(&x), - ), - )), - ); -} - -#[test] -fn fun_intro() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - let hole = || RcTerm::from(Term::Hole(ByteSpan::default())); - - assert_term_eq!( - parse_desugar_term(&env, r"\x : (\y => y) => x"), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new( - ( - Binder(x.clone()), - Embed(RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new((Binder(y.clone()), Embed(hole())), var(&y)), - ))) - ), - var(&x), - ), - )), - ); -} - -#[test] -fn fun_intro2_ann() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - - assert_term_eq!( - parse_desugar_term(&env, r"\(x y : Type) => x"), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new( - (Binder(x.clone()), Embed(u0())), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new((Binder(y.clone()), Embed(u0())), var(&x)), - )), - ), - )), - ); -} - -#[test] -fn arrow() { - let env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"Type -> Type"), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(u0())), u0()), - )), - ); -} - -#[test] -fn fun_ty() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - - assert_term_eq!( - parse_desugar_term(&env, r"(x : Type -> Type) -> x"), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new( - ( - Binder(x.clone()), - Embed(RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(u0())), u0()), - ))), - ), - var(&x), - ), - )), - ); -} - -#[test] -fn fun_ty2() { 
- let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - - assert_term_eq!( - parse_desugar_term(&env, r"(x y : Type) -> x"), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new( - (Binder(x.clone()), Embed(u0())), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(y.clone()), Embed(u0())), var(&x)), - )), - ), - )), - ); -} - -#[test] -fn fun_ty_arrow() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - - assert_term_eq!( - parse_desugar_term(&env, r"(x : Type) -> x -> x"), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new( - (Binder(x.clone()), Embed(u0())), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(var(&x))), var(&x)), - )), - ), - )), - ); -} - -#[test] -fn fun_intro_fun_app() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - - assert_term_eq!( - parse_desugar_term(&env, r"\(x : Type -> Type) (y : Type) => x y"), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new( - ( - Binder(x.clone()), - Embed(RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(u0())), u0()), - ))), - ), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new( - (Binder(y.clone()), Embed(u0())), - RcTerm::from(Term::FunApp(var(&x), var(&y))), - ), - )), - ), - )), - ); -} - -#[test] -fn id() { - let env = DesugarEnv::new(im::HashMap::new()); - - let x = FreeVar::fresh_named("x"); - let a = FreeVar::fresh_named("a"); - - assert_term_eq!( - parse_desugar_term(&env, r"\(a : Type) (x : a) => x"), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new( - (Binder(a.clone()), Embed(u0())), - RcTerm::from(Term::FunIntro( - ByteSpan::default(), - Scope::new((Binder(x.clone()), Embed(var(&a))), var(&x)), - 
)), - ), - )), - ); -} - -#[test] -fn id_ty() { - let env = DesugarEnv::new(im::HashMap::new()); - - let a = FreeVar::fresh_named("a"); - - assert_term_eq!( - parse_desugar_term(&env, r"(a : Type) -> a -> a"), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new( - (Binder(a.clone()), Embed(u0())), - RcTerm::from(Term::FunType( - ByteSpan::default(), - Scope::new((Binder(FreeVar::fresh_unnamed()), Embed(var(&a))), var(&a)), - )), - ), - )), - ); -} - -#[test] -fn let_forward_declarations() { - let mut codemap = CodeMap::new(); - let desugar_env = DesugarEnv::new(im::HashMap::new()); - - let src = " - let - foo : Type; - bar : Type; - bar = Record {}; - foo = Record {}; - in - record {} - "; - - if let Err(err) = parse_term(&mut codemap, src).desugar(&desugar_env) { - let writer = StandardStream::stdout(ColorChoice::Always); - codespan_reporting::emit(&mut writer.lock(), &codemap, &err.to_diagnostic()).unwrap(); - panic!("type error!") - } -} - -// #[test] -// fn let_forward_declarations_forward_ref() { -// let mut codemap = CodeMap::new(); -// let desugar_env = DesugarEnv::new(im::HashMap::new()); - -// let src = " -// let -// foo : Type; -// bar : Type; -// bar = foo; -// foo = Record {}; -// in -// record {} -// "; - -// match parse_term(&mut codemap, src).desugar(&desugar_env) { -// Ok(_) => panic!("expected error"), -// Err(DesugarError::UndefinedName { .. }) => {}, -// Err(err) => panic!("unexpected error: {}", err), -// } -// } - -#[test] -fn let_declaration_after_definition() { - let mut codemap = CodeMap::new(); - let desugar_env = DesugarEnv::new(im::HashMap::new()); - - let src = " - let - foo = Record {}; - foo : Type; - in - record {} - "; - - match parse_term(&mut codemap, src).desugar(&desugar_env) { - Ok(_) => panic!("expected error"), - Err(DesugarError::DeclarationFollowedDefinition { .. 
}) => {}, - Err(err) => panic!("unexpected error: {}", err), - } -} - -#[test] -fn let_duplicate_declarations() { - let mut codemap = CodeMap::new(); - let desugar_env = DesugarEnv::new(im::HashMap::new()); - - let src = " - let - foo : Type; - foo : Type; - in - record {} - "; - - match parse_term(&mut codemap, src).desugar(&desugar_env) { - Ok(_) => panic!("expected error"), - Err(DesugarError::DuplicateDeclarations { .. }) => {}, - Err(err) => panic!("unexpected error: {}", err), - } -} - -#[test] -fn let_duplicate_definitions() { - let mut codemap = CodeMap::new(); - let desugar_env = DesugarEnv::new(im::HashMap::new()); - - let src = " - let - foo = Type; - foo = Type; - in - record {} - "; - - match parse_term(&mut codemap, src).desugar(&desugar_env) { - Ok(_) => panic!("expected error"), - Err(DesugarError::DuplicateDefinitions { .. }) => {}, - Err(err) => panic!("unexpected error: {}", err), - } -} - -mod sugar { - use super::*; - - #[test] - fn fun_intro_params() { - let env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"\x (y : Type) z => x"), - parse_desugar_term(&env, r"\x => \y : Type => \z => x"), - ); - } - - #[test] - fn fun_intro_params_multi() { - let env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"\(x : Type) (y : Type) z => x"), - parse_desugar_term(&env, r"\(x y : Type) z => x"), - ); - } - - #[test] - fn fun_ty_params() { - let env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"(a : Type) -> (x y z : a) -> x"), - parse_desugar_term(&env, r"(a : Type) -> (x : a) -> (y : a) -> (z : a) -> x"), - ); - } - - #[test] - fn fun_ty_params_multi() { - let env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"(a : Type) (x y z : a) (w : x) -> x"), - parse_desugar_term( - &env, - r"(a : Type) -> (x : a) -> (y : a) -> (z : a) -> (w : x) -> x" - ), - ); - } - - #[test] - fn arrow() { - let 
env = DesugarEnv::new(im::HashMap::new()); - - assert_term_eq!( - parse_desugar_term(&env, r"(a : Type) -> a -> a"), - parse_desugar_term(&env, r"(a : Type) -> (x : a) -> a"), - ) - } - - #[test] - fn if_then_else() { - let env = DesugarEnv::new(im::hashmap! { - "true".to_owned() => FreeVar::fresh_named("true"), - "false".to_owned() => FreeVar::fresh_named("false"), - }); - - assert_term_eq!( - parse_desugar_term(&env, r#"if true then "true" else "false""#), - parse_desugar_term(&env, r#"case true { true => "true"; false => "false" }"#), - ) - } - - #[test] - fn record_field_puns() { - let env = DesugarEnv::new(im::hashmap! { - "x".to_owned() => FreeVar::fresh_named("x"), - "y".to_owned() => FreeVar::fresh_named("y"), - }); - - assert_term_eq!( - parse_desugar_term(&env, r#"record { x; y }"#), - parse_desugar_term(&env, r#"record { x = x; y = y }"#), - ) - } -} diff --git a/crates/pikelet-concrete/tests/goldenfiles/ann b/crates/pikelet-concrete/tests/goldenfiles/ann deleted file mode 100644 index 1a698fcf8..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ann +++ /dev/null @@ -1,26 +0,0 @@ -RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(1), - end: ByteIndex(5) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Universe( - Span { - start: ByteIndex(8), - end: ByteIndex(12) - }, - Level( - 0 - ) - ) - } - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_ann b/crates/pikelet-concrete/tests/goldenfiles/ann_ann_ann deleted file mode 100644 index c082bc309..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_ann +++ /dev/null @@ -1,56 +0,0 @@ -RcTerm { - inner: Ann( - RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(2), - end: ByteIndex(6) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Universe( - Span { - start: ByteIndex(9), - end: ByteIndex(13) - }, - Level( - 0 - ) - ) - } - ) - }, - RcTerm { - inner: Ann( - RcTerm { - inner: 
Universe( - Span { - start: ByteIndex(18), - end: ByteIndex(22) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Universe( - Span { - start: ByteIndex(25), - end: ByteIndex(29) - }, - Level( - 0 - ) - ) - } - ) - } - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_left b/crates/pikelet-concrete/tests/goldenfiles/ann_ann_left deleted file mode 100644 index b824c9323..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_left +++ /dev/null @@ -1,41 +0,0 @@ -RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(1), - end: ByteIndex(5) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(8), - end: ByteIndex(12) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Universe( - Span { - start: ByteIndex(15), - end: ByteIndex(19) - }, - Level( - 0 - ) - ) - } - ) - } - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_right b/crates/pikelet-concrete/tests/goldenfiles/ann_ann_right deleted file mode 100644 index a056f88ab..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ann_ann_right +++ /dev/null @@ -1,41 +0,0 @@ -RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(1), - end: ByteIndex(5) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Ann( - RcTerm { - inner: Universe( - Span { - start: ByteIndex(9), - end: ByteIndex(13) - }, - Level( - 0 - ) - ) - }, - RcTerm { - inner: Universe( - Span { - start: ByteIndex(16), - end: ByteIndex(20) - }, - Level( - 0 - ) - ) - } - ) - } - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/goldenfiles/ty b/crates/pikelet-concrete/tests/goldenfiles/ty deleted file mode 100644 index 9dc3d0c93..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ty +++ /dev/null @@ -1,11 +0,0 @@ -RcTerm { - inner: Universe( - Span { - start: ByteIndex(1), - end: ByteIndex(5) - }, - Level( - 0 - 
) - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/goldenfiles/ty_level b/crates/pikelet-concrete/tests/goldenfiles/ty_level deleted file mode 100644 index 53310b9ff..000000000 --- a/crates/pikelet-concrete/tests/goldenfiles/ty_level +++ /dev/null @@ -1,11 +0,0 @@ -RcTerm { - inner: Universe( - Span { - start: ByteIndex(1), - end: ByteIndex(7) - }, - Level( - 2 - ) - ) -} \ No newline at end of file diff --git a/crates/pikelet-concrete/tests/infer.rs b/crates/pikelet-concrete/tests/infer.rs deleted file mode 100644 index 2fd752453..000000000 --- a/crates/pikelet-concrete/tests/infer.rs +++ /dev/null @@ -1,898 +0,0 @@ -use codespan::{ByteIndex, ByteSpan, CodeMap}; -use moniker::{assert_term_eq, FreeVar, Var}; -use pretty_assertions::assert_eq; - -use pikelet_concrete::desugar::{Desugar, DesugarEnv}; -use pikelet_concrete::elaborate::{self, Context, TypeError}; -use pikelet_concrete::syntax::{concrete, raw}; - -mod support; - -#[test] -fn undefined_name() { - use pikelet_core::syntax::LevelShift; - - let context = Context::default(); - - let x = FreeVar::fresh_named("x"); - let given_expr = raw::RcTerm::from(raw::Term::Var( - ByteSpan::default(), - Var::Free(x.clone()), - LevelShift(0), - )); - - assert_eq!( - elaborate::infer_term(&context, &given_expr), - Err(TypeError::UndefinedName { - span: ByteSpan::default(), - free_var: x.clone(), - }), - ); -} - -#[test] -fn import_not_found() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#"import "does-not-exist" : Record {}"#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::UndefinedImport { .. 
}) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok((term, ty)) => panic!("expected error, found {} : {:?}", term, ty), - } -} - -#[test] -fn ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"Type"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn ty_levels() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"Type^0 : Type^1 : Type^2 : Type^3"; //... Type^∞ ...+:ïœĄ(ω) - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn ann_ty_id() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type -> Type"; - let given_expr = r"(\a => a) : Type -> Type"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn ann_arrow_ty_id() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(Type -> Type) -> (Type -> Type)"; - let given_expr = r"(\a => a) : (Type -> Type) -> (Type -> Type)"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn ann_id_as_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r"(\a => a) : Type"; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::UnexpectedFunction { .. 
}) => {}, - other => panic!("unexpected result: {:#?}", other), - } -} - -#[test] -fn fun_app() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"(\a : Type^1 => a) Type"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn fun_app_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r"Type Type"; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - assert_eq!( - elaborate::infer_term(&context, &raw_term), - Err(TypeError::ArgAppliedToNonFunction { - fn_span: ByteSpan::new(ByteIndex(1), ByteIndex(5)), - arg_span: ByteSpan::new(ByteIndex(6), ByteIndex(10)), - found: Box::new(concrete::Term::Universe(ByteSpan::default(), Some(1))), - }), - ) -} - -#[test] -fn fun_intro() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a : Type) -> Type"; - let given_expr = r"\a : Type => a"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn fun_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"(a : Type) -> a"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn id() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a : Type) -> a -> a"; - let given_expr = r"\(a : Type) (x : a) => x"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, 
&context, expected_ty), - ); -} - -#[test] -fn id_ann() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a : Type) -> a -> a"; - let given_expr = r"(\a (x : a) => x) : (A : Type) -> A -> A"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -// Passing `Type` to the polymorphic identity function should yield the type -// identity function -#[test] -fn id_fun_app_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type -> Type"; - let given_expr = r"(\(a : Type^1) (x : a) => x) Type"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -// Passing `Type` to the `Type` identity function should yield `Type` -#[test] -fn id_fun_app_ty_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"(\(a : Type^2) (x : a) => x) (Type^1) Type"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn id_fun_app_ty_arr_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"(\(a : Type^2) (x : a) => x) (Type^1) (Type -> Type)"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn id_fun_app_arr_fun_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type -> Type"; - let given_expr = r"(\(a : Type^1) (x : a) => x) (Type -> Type) (\x => x)"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - 
support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn apply() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a b : Type) -> (a -> b) -> a -> b"; - let given_expr = r"\(a b : Type) (f : a -> b) (x : a) => f x"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn const_() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a b : Type) -> a -> b -> a"; - let given_expr = r"\(a b : Type) (x : a) (y : b) => x"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn const_flipped() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a b : Type) -> a -> b -> b"; - let given_expr = r"\(a b : Type) (x : a) (y : b) => y"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn flip() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a b c : Type) -> (a -> b -> c) -> (b -> a -> c)"; - let given_expr = r"\(a b c : Type) (f : a -> b -> c) (y : b) (x : a) => f x y"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn compose() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a b c : Type) -> (b -> c) -> (a -> b) -> (a -> c)"; - let given_expr = r"\(a b c : Type) (f : b -> c) (g : a -> b) (x : a) => f (g x)"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, 
&context, expected_ty), - ); -} - -#[test] -fn let_expr_1() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#" - let x = "helloo"; - in - x - "#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn let_expr_2() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#" - let x = "helloo"; - y = x; - in - x - "#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn let_shift_universes() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - test1 = id String "hello"; - test2 = id S32 1; - test3 = id^1 Type String; - test4 = id^2 Type String; - test5 = id^2 Type^1 String; - test6 = id^2 Type^1 Type; - - id1 : (a : Type^1) -> a -> a = id^1; - id2 : (a : Type^2) -> a -> a = id^2; - id11 : (a : Type^2) -> a -> a = id1^1; - id22 : (a : Type^4) -> a -> a = id2^2; - in - record {} - "#; - - support::parse_infer_term(&mut codemap, &context, given_expr); -} - -#[test] -fn let_shift_universes_id_self_application() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - // Here is a curious example from the Idris docs: - // http://docs.idris-lang.org/en/v1.3.0/tutorial/miscellany.html#cumulativity - // - // ```idris - // myid : (a : Type) -> a -> a - // myid _ x = x - // - // idid : (a : Type) -> a -> a - // idid = myid _ myid - // ``` - // - // This would cause a cycle in the universe hierarchy in Idris, but is - // perfectly ok when implemented using explicit universe shifting. 
- - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - id-id : (a : Type) -> a -> a; - id-id = id^1 ((a : Type) -> a -> a) id; - in - record {} - "#; - - support::parse_infer_term(&mut codemap, &context, given_expr); -} - -#[test] -fn let_shift_universes_literals() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - test2 = "hello" : id^1 Type String; - in - record {} - "#; - - support::parse_infer_term(&mut codemap, &context, given_expr); -} - -#[test] -fn let_shift_universes_literals_bad() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - test2 = "hello" : id^2 Type^1 String^1; - in - record {} - "#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Ok(_) => panic!("expected error"), - Err(TypeError::LiteralMismatch { .. }) => {}, - Err(err) => panic!("unexpected error: {}", err), - } -} - -#[test] -fn let_shift_universes_too_little() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - test1 = id^1 Type^1 Type; - in - record {} - "#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Ok(_) => panic!("expected error"), - Err(TypeError::Mismatch { .. 
}) => {}, - Err(err) => panic!("unexpected error: {}", err), - } -} - -#[test] -fn let_shift_universes_too_much() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let - id : (a : Type) -> a -> a; - id a x = x; - - test1 = id^2 Type String; - in - record {} - "#; - - support::parse_infer_term(&mut codemap, &context, given_expr); -} - -#[test] -fn case_expr() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"case "helloo" { - "hi" => "haha"; - "hello" => "byee"; - greeting => (import "prim/string/append") greeting "!!"; - }"#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn case_expr_bool() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"case true { - true => "hello"; - false => "hi"; - }"#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn case_expr_bool_bad() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#"case "hello" { - true => "hello"; - false => "hi"; - }"#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::Mismatch { .. 
}) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok((term, ty)) => panic!("expected error, found {} : {:?}", term, ty), - } -} - -#[test] -fn case_expr_wildcard() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"case "helloo" { - test => test; - }"#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn case_expr_empty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#"case "helloo" {}"#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::AmbiguousEmptyCase { .. }) => {}, - other => panic!("unexpected result: {:#?}", other), - } -} - -mod church_encodings { - use super::*; - - #[test] - fn void() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - record { - void; not; - } where { - ||| Logical absurdity - void : Type^1; - void = (a : Type) -> a; - - - ||| Logical negation - not : Type -> Type^1; - not a = a -> void; - } - "; - - support::parse_infer_term(&mut codemap, &context, given_expr); - } - - #[test] - fn unit() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - record { - unit; unit-intro; unit-elim; - } where { - unit : Type^1; - unit = (a : Type) -> a -> a; - - unit-intro : unit; - unit-intro a x = x; - - unit-elim : (a : Type) -> unit -> a -> a; - unit-elim a f x = f a x; - } - "; - - support::parse_infer_term(&mut codemap, &context, given_expr); - } - - #[test] - fn and() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - record { - and; and-elim-left; and-elim-right; - } 
where { - ||| Logical conjunction (Church encoded) - ||| - ||| You could also interpret this as a product type - and : Type -> Type -> Type^1; - and p q = (c : Type) -> (p -> q -> c) -> c; - - ||| Introduce a logical conjunction between two types - and-intro : (p q : Type) -> p -> q -> and p q; - and-intro p q x y c f = f x y; - - and-elim-left : (p q : Type) -> and p q -> p; - and-elim-left p q (pq : and p q) = pq p (\x y => x); - - and-elim-right : (p q : Type) -> and p q -> q; - and-elim-right p q (pq : and p q) = pq q (\x y => y); - } - "; - - support::parse_infer_term(&mut codemap, &context, given_expr); - } - - #[test] - fn or() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - record { - or; or-intro-left; or-intro-right; - } where { - ||| Logical disjunction (Church encoded) - ||| - ||| You could also interpret this as a sum type - or : Type -> Type -> Type^1; - or p q = (c : Type) -> (p -> c) -> (q -> c) -> c; - - or-intro-left : (p q : Type) -> p -> or p q; - or-intro-left p q x = - \(c : Type) (on-p : p -> c) (on-q : q -> c) => on-p x; - - or-intro-right : (p q : Type) -> q -> or p q; - or-intro-right p q y = - \(c : Type) (on-p : p -> c) (on-q : q -> c) => on-q y; - } - "; - - support::parse_infer_term(&mut codemap, &context, given_expr); - } -} - -#[test] -fn empty_record_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type"; - let given_expr = r"Record {}"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn empty_record_intro() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Record {}"; - let given_expr = r"record {}"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - 
-#[test] -fn dependent_record_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^2"; - let given_expr = r"Record { t : Type^1; x : t }"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn record_intro() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Record { t : Type; x : String }"; - let given_expr = r#"record { t = String; x = "Hello" }"#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn record_proj() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"String"; - let given_expr = r#"(record { t = String; x = "hello" } : Record { t : Type; x : String }).x"#; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn record_proj_missing() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#"(record { x = "hello" } : Record { x : String }).bloop"#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::NoFieldInType { .. 
}) => {}, - x => panic!("expected a field lookup error, found {:?}", x), - } -} - -#[test] -fn record_proj_weird1() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"Record { - data : Record { - t : Type; - x : t; - }; - - f : data.t -> Type; - test : f data.x; - }"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn record_proj_weird2() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"Type^1"; - let given_expr = r"Record { - Array : U16 -> Type -> Type; - t : Record { n : U16; x : Array n S8; y : Array n S8 }; - inner-prod : (len : U16) -> Array len S8 -> Array len S8 -> S32; - - test1 : S32 -> Type; - test2 : test1 (inner-prod t.n t.x t.y); - }"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn record_proj_shift() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let expected_ty = r"(a : Type^1) -> a -> a"; - let given_expr = r"record { - id = \(a : Type) (x : a) => x; - }.id^1"; - - assert_term_eq!( - support::parse_infer_term(&mut codemap, &context, given_expr).1, - support::parse_nf_term(&mut codemap, &context, expected_ty), - ); -} - -#[test] -fn array_intro_ambiguous() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - let given_expr = r#"[1; 2 : S32]"#; - - let raw_term = support::parse_term(&mut codemap, given_expr) - .desugar(&desugar_env) - .unwrap(); - - match elaborate::infer_term(&context, &raw_term) { - Err(TypeError::AmbiguousArrayLiteral { .. 
}) => {}, - Err(err) => panic!("unexpected error: {:?}", err), - Ok((term, ty)) => panic!("expected error, found {} : {:?}", term, ty), - } -} diff --git a/crates/pikelet-concrete/tests/normalize.rs b/crates/pikelet-concrete/tests/normalize.rs deleted file mode 100644 index 4b2521451..000000000 --- a/crates/pikelet-concrete/tests/normalize.rs +++ /dev/null @@ -1,395 +0,0 @@ -use codespan::CodeMap; -use moniker::{assert_term_eq, Binder, Embed, FreeVar, Scope, Var}; -use pretty_assertions::assert_eq; - -use pikelet_concrete::elaborate::Context; -use pikelet_core::syntax::core::{RcTerm, Term}; -use pikelet_core::syntax::domain::{Neutral, RcNeutral, RcValue, Value}; - -mod support; - -#[test] -fn var() { - let context = Context::default(); - - let x = FreeVar::fresh_named("x"); - let var = RcTerm::from(Term::var(Var::Free(x.clone()), 0)); - - assert_eq!( - pikelet_core::nbe::nf_term(&context, &var).unwrap(), - RcValue::from(Value::var(Var::Free(x), 0)), - ); -} - -#[test] -fn ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - assert_eq!( - support::parse_nf_term(&mut codemap, &context, r"Type"), - RcValue::from(Value::universe(0)), - ); -} - -#[test] -fn fun_intro() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let x = FreeVar::fresh_named("x"); - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, r"\x : Type => x"), - RcValue::from(Value::FunIntro(Scope::new( - (Binder(x.clone()), Embed(RcValue::from(Value::universe(0)))), - RcValue::from(Value::var(Var::Free(x), 0)), - ))), - ); -} - -#[test] -fn fun_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let x = FreeVar::fresh_named("x"); - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, r"(x : Type) -> x"), - RcValue::from(Value::FunType(Scope::new( - (Binder(x.clone()), Embed(RcValue::from(Value::universe(0)))), - RcValue::from(Value::var(Var::Free(x), 0)), - ))), - ); -} - -#[test] -fn 
fun_intro_fun_app() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"\(x : Type -> Type) (y : Type) => x y"; - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - let ty_arr = RcValue::from(Value::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(RcValue::from(Value::universe(0))), - ), - RcValue::from(Value::universe(0)), - ))); - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr,), - RcValue::from(Value::FunIntro(Scope::new( - (Binder(x.clone()), Embed(ty_arr)), - RcValue::from(Value::FunIntro(Scope::new( - (Binder(y.clone()), Embed(RcValue::from(Value::universe(0)))), - RcValue::from(Value::Neutral( - RcNeutral::from(Neutral::var(Var::Free(x), 0)), - vec![RcValue::from(Value::var(Var::Free(y), 0))], - )), - ))), - ))), - ); -} - -#[test] -fn fun_ty_fun_app() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(x : Type -> Type) -> (y : Type) -> x y"; - - let x = FreeVar::fresh_named("x"); - let y = FreeVar::fresh_named("y"); - let ty_arr = RcValue::from(Value::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(RcValue::from(Value::universe(0))), - ), - RcValue::from(Value::universe(0)), - ))); - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - RcValue::from(Value::FunType(Scope::new( - (Binder(x.clone()), Embed(ty_arr)), - RcValue::from(Value::FunType(Scope::new( - (Binder(y.clone()), Embed(RcValue::from(Value::universe(0)))), - RcValue::from(Value::Neutral( - RcNeutral::from(Neutral::var(Var::Free(x), 0)), - vec![RcValue::from(Value::var(Var::Free(y), 0))], - )), - ))), - ))), - ); -} - -// Passing `Type` to the polymorphic identity function should yield the type -// identity function -#[test] -fn id_fun_app_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(\(a : Type^1) (x : a) => x) Type"; - let 
expected_expr = r"\x : Type => x"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -// Passing `Type` to the `Type` identity function should yield `Type` -#[test] -fn id_fun_app_ty_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(\(a : Type^2) (x : a) => x) (Type^1) Type"; - let expected_expr = r"Type"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -// Passing `Type -> Type` to the `Type` identity function should yield -// `Type -> Type` -#[test] -fn id_fun_app_ty_arr_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(\(a : Type^2) (x : a) => x) (Type^1) (Type -> Type)"; - let expected_expr = r"Type -> Type"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -// Passing the id function to itself should yield the id function -#[test] -fn id_fun_app_id() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - (\(a : Type^1) (x : a) => x) - ((a : Type) -> a -> a) - (\(a : Type) (x : a) => x) - "; - let expected_expr = r"\(a : Type) (x : a) => x"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -// Passing the id function to the 'const' combinator should yield a -// function that always returns the id function -#[test] -fn const_fun_app_id_ty() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r" - (\(a : Type^1) (b : Type^2) (x : a) (y : b) => x) - ((a : Type) -> a -> a) - (Type^1) - (\(a : Type) (x : a) => x) - Type - "; - let expected_expr = 
r"\(a : Type) (x : a) => x"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn horrifying_fun_app_1() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(\(t : Type) (f : (a : Type) -> Type) => f t) String (\(a : Type) => a)"; - let expected_expr = r"String"; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn horrifying_fun_app_2() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#"(\(t: String) (f: String -> String) => f t) "hello""#; - let expected_expr = r#"\(f : String -> String) => f "hello""#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn let_expr_1() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let x = "helloo"; - in - x - "#; - let expected_expr = r#" - "helloo" - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn let_expr_2() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let x = "helloo"; - y = x; - in - x - "#; - let expected_expr = r#" - "helloo" - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn if_true() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - if true then "true" else "false" - "#; - let expected_expr = r#" - "true" - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, 
&context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn if_false() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - if false then "true" else "false" - "#; - let expected_expr = r#" - "false" - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn if_eval_cond() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - let is-hi (greeting : String) = case greeting { - "hi" => true; - _ => false; - }; - in - record { - test-hi = if is-hi "hi" then "true" else "false"; - test-bye = if is-hi "bye" then "true" else "false"; - } - "#; - let expected_expr = r#" - record { - test-hi = "true"; - test-bye = "false"; - } - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn case_expr_bool() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r#" - record { - test-true = case true { - true => "true"; - false => "false"; - }; - test-false = case false { - true => "true"; - false => "false"; - }; - } - "#; - let expected_expr = r#" - record { - test-true = "true"; - test-false = "false"; - } - "#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut codemap, &context, expected_expr), - ); -} - -#[test] -fn record_ty_shadow() { - let mut codemap = CodeMap::new(); - let context = Context::default(); - - let given_expr = r"(\t : Type => Record { String : Type; x : t; y : String }) String"; - let expected_expr = r#"Record { String as String1 : Type; x : String; y : String1 }"#; - - assert_term_eq!( - support::parse_nf_term(&mut codemap, &context, given_expr), - support::parse_nf_term(&mut 
codemap, &context, expected_expr), - ); -} diff --git a/crates/pikelet-concrete/tests/parse.rs b/crates/pikelet-concrete/tests/parse.rs deleted file mode 100644 index ce2583cd5..000000000 --- a/crates/pikelet-concrete/tests/parse.rs +++ /dev/null @@ -1,86 +0,0 @@ -use codespan::{ByteIndex, ByteSpan}; -use codespan::{CodeMap, FileName}; -use pretty_assertions::assert_eq; - -use pikelet_concrete::parse::{self, LexerError, ParseError}; -use pikelet_concrete::syntax::concrete; - -#[test] -fn imports() { - let src = r#" - record { - prims = import "prims.pi"; - prelude = import "prelude.pi"; - } - "#; - let mut codemap = CodeMap::new(); - let filemap = codemap.add_filemap(FileName::virtual_("test"), src.into()); - - let parse_result = parse::term(&filemap); - - assert_eq!( - parse_result.1, - vec!["prims.pi".to_owned(), "prelude.pi".to_owned()], - ); -} - -#[test] -fn fun_ty_bad_ident() { - let src = "((x : Type) : Type) -> Type"; - let mut codemap = CodeMap::new(); - let filemap = codemap.add_filemap(FileName::virtual_("test"), src.into()); - - let parse_result = parse::term(&filemap); - - assert_eq!( - parse_result, - ( - concrete::Term::Error(ByteSpan::new(ByteIndex(1), ByteIndex(28))), - vec![], - vec![ParseError::IdentifierExpectedInPiType { - span: ByteSpan::new(ByteIndex(2), ByteIndex(12)), - }], - ) - ); -} - -#[test] -fn fun_ty_bad_ident_multi() { - let src = "((x : Type) : Type) (x : Type) -> Type"; - let mut codemap = CodeMap::new(); - let filemap = codemap.add_filemap(FileName::virtual_("test"), src.into()); - - let parse_result = parse::term(&filemap); - - assert_eq!( - parse_result, - ( - concrete::Term::Error(ByteSpan::new(ByteIndex(1), ByteIndex(39))), - vec![], - vec![ParseError::IdentifierExpectedInPiType { - span: ByteSpan::new(ByteIndex(2), ByteIndex(12)), - }], - ) - ); -} - -#[test] -fn integer_overflow() { - let src = "Type^111111111111111111111111111111"; - let mut codemap = CodeMap::new(); - let filemap = 
codemap.add_filemap(FileName::virtual_("test"), src.into()); - - let parse_result = parse::term(&filemap); - - assert_eq!( - parse_result, - ( - concrete::Term::Error(ByteSpan::new(ByteIndex(1), ByteIndex(36))), - vec![], - vec![ParseError::Lexer(LexerError::IntegerLiteralOverflow { - span: ByteSpan::new(ByteIndex(6), ByteIndex(36)), - value: "111111111111111111111111111111".to_owned(), - })], - ) - ); -} diff --git a/crates/pikelet-concrete/tests/resugar.rs b/crates/pikelet-concrete/tests/resugar.rs deleted file mode 100644 index 499685770..000000000 --- a/crates/pikelet-concrete/tests/resugar.rs +++ /dev/null @@ -1,332 +0,0 @@ -use codespan::{ByteIndex, ByteSpan}; -use moniker::{Binder, Embed, FreeVar, Nest, Scope, Var}; -use pretty_assertions::assert_eq; - -use pikelet_concrete::resugar::{Resugar, ResugarEnv}; -use pikelet_concrete::syntax::concrete; -use pikelet_core::syntax::{core, Label, LevelShift, Literal}; - -fn span() -> ByteSpan { - ByteSpan::default() -} - -fn index() -> ByteIndex { - ByteIndex::default() -} - -#[test] -fn ann() { - let core_term = core::Term::Ann( - core::RcTerm::from(core::Term::universe(0)), - core::RcTerm::from(core::Term::universe(0)), - ); - - let concrete_term = concrete::Term::Ann( - Box::new(concrete::Term::Universe(span(), None)), - Box::new(concrete::Term::Universe(span(), None)), - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn universe0() { - let core_term = core::Term::universe(0); - let concrete_term = concrete::Term::Universe(span(), None); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn universe1() { - let core_term = core::Term::universe(1); - let concrete_term = concrete::Term::Universe(span(), Some(1)); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -// TODO: core::Term::Literal - -#[test] -fn lit_bool_true() { - let core_term = core::Term::Literal(Literal::Bool(true)); - let concrete_term = 
concrete::Term::Name(span(), "true".to_owned(), None); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn lit_bool_false() { - let core_term = core::Term::Literal(Literal::Bool(false)); - let concrete_term = concrete::Term::Name(span(), "false".to_owned(), None); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn lit_string() { - let core_term = core::Term::Literal(Literal::String("hello".to_owned())); - let concrete_term = - concrete::Term::Literal(concrete::Literal::String(span(), "hello".to_owned())); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn var() { - let free_var = FreeVar::fresh_named("x"); - let mut env = ResugarEnv::new(); - env.on_item(&Label("x".to_owned()), &Binder(free_var.clone())); - - let core_term = core::Term::var(Var::Free(free_var), 0); - let concrete_term = concrete::Term::Name(span(), "x".to_owned(), None); - - assert_eq!(core_term.resugar(&env), concrete_term); -} - -#[test] -fn var_shadow_keyword() { - let free_var = FreeVar::fresh_named("if"); - let mut env = ResugarEnv::new(); - env.on_item(&Label("if".to_owned()), &Binder(free_var.clone())); - - let core_term = core::Term::var(Var::Free(free_var), 0); - let concrete_term = concrete::Term::Name(span(), "if1".to_owned(), None); - - assert_eq!(core_term.resugar(&env), concrete_term); -} - -#[test] -fn import() { - let core_term = core::Term::Import("type".to_owned()); - let concrete_term = concrete::Term::Import(span(), span(), "type".to_owned()); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -// TODO: core::Term::Pi - -#[test] -fn arrow() { - let core_term = core::RcTerm::from(core::Term::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(core::RcTerm::from(core::RcTerm::from( - core::Term::universe(0), - ))), - ), - core::RcTerm::from(core::RcTerm::from(core::Term::universe(0))), - ))); - - let concrete_term = 
concrete::Term::FunArrow( - Box::new(concrete::Term::Universe(span(), None)), - Box::new(concrete::Term::Universe(span(), None)), - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn arrow_parens() { - let core_term = core::Term::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(core::RcTerm::from(core::Term::FunType(Scope::new( - ( - Binder(FreeVar::fresh_unnamed()), - Embed(core::RcTerm::from(core::RcTerm::from( - core::Term::universe(0), - ))), - ), - core::RcTerm::from(core::RcTerm::from(core::Term::universe(0))), - )))), - ), - core::RcTerm::from(core::RcTerm::from(core::Term::universe(1))), - )); - - let concrete_term = concrete::Term::FunArrow( - Box::new(concrete::Term::Parens( - span(), - Box::new(concrete::Term::FunArrow( - Box::new(concrete::Term::Universe(span(), None)), - Box::new(concrete::Term::Universe(span(), None)), - )), - )), - Box::new(concrete::Term::Universe(span(), Some(1))), - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -// TODO: core::Term::Lam -// TODO: core::Term::App - -#[test] -fn let_shadow_keyword() { - let var_else1 = FreeVar::fresh_named("else"); - let var_else2 = FreeVar::fresh_named("else"); - - let core_module = core::Term::Let(Scope::new( - Nest::new(vec![ - ( - Binder(var_else1.clone()), - Embed(core::RcTerm::from(core::Term::universe(0))), - ), - ( - Binder(var_else2.clone()), - Embed(core::RcTerm::from(core::Term::universe(0))), - ), - ]), - core::RcTerm::from(core::Term::RecordIntro(vec![])), - )); - - let concrete_module = concrete::Term::Let( - index(), - vec![ - concrete::Item::Definition { - name: (index(), "else1".to_owned()), - params: vec![], - return_ann: None, - body: concrete::Term::Universe(span(), None), - }, - concrete::Item::Definition { - name: (index(), "else2".to_owned()), - params: vec![], - return_ann: None, - body: concrete::Term::Universe(span(), None), - }, - ], - Box::new(concrete::Term::RecordIntro(span(), 
vec![])), - ); - - assert_eq!(core_module.resugar(&ResugarEnv::new()), concrete_module); -} - -#[test] -fn record_ty_empty() { - let core_term = core::Term::RecordType(Scope::new(Nest::new(vec![]), ())); - let concrete_term = concrete::Term::RecordType(span(), vec![]); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn record_ty() { - let mut env = ResugarEnv::new(); - env.on_item( - &Label("String".to_owned()), - &Binder(FreeVar::fresh_named("String")), - ); - - let var_string = FreeVar::fresh_named("String"); - let var_x = FreeVar::fresh_named("x"); - - let core_term = core::Term::RecordType(Scope::new( - Nest::new(vec![ - ( - Label("String".to_owned()), - Binder(var_string.clone()), - Embed(core::RcTerm::from(core::RcTerm::from( - core::Term::universe(0), - ))), - ), - ( - Label("x".to_owned()), - Binder(var_x.clone()), - Embed(core::RcTerm::from(core::RcTerm::from(core::Term::var( - Var::Free(var_string), - 0, - )))), - ), - ]), - (), - )); - let concrete_term = concrete::Term::RecordType( - span(), - vec![ - concrete::RecordTypeField { - label: (index(), "String".to_owned()), - binder: Some((index(), "String1".to_owned())), - ann: concrete::Term::Universe(span(), None), - }, - concrete::RecordTypeField { - label: (index(), "x".to_owned()), - binder: None, - ann: concrete::Term::Name(span(), "String1".to_owned(), None), - }, - ], - ); - - assert_eq!(core_term.resugar(&env), concrete_term); -} - -// TODO: core::Term::Record - -#[test] -fn record_empty() { - let core_term = core::Term::RecordIntro(vec![]); - let concrete_term = concrete::Term::RecordIntro(span(), vec![]); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn record_proj_atomic() { - let core_term = core::Term::RecordProj( - core::RcTerm::from(core::RcTerm::from(core::Term::universe(0))), - Label("hello".to_owned()), - LevelShift(0), - ); - - let concrete_term = concrete::Term::RecordProj( - span(), - 
Box::new(concrete::Term::Universe(span(), None)), - index(), - "hello".to_owned(), - None, - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -#[test] -fn record_proj_fun_app() { - let core_term = core::Term::RecordProj( - core::RcTerm::from(core::RcTerm::from(core::Term::universe(1))), - Label("hello".to_owned()), - LevelShift(0), - ); - - let concrete_term = concrete::Term::RecordProj( - span(), - Box::new(concrete::Term::Parens( - span(), - Box::new(concrete::Term::Universe(span(), Some(1))), - )), - index(), - "hello".to_owned(), - None, - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} - -// TODO: core::Term::Case - -#[test] -fn array_intro() { - let core_term = core::Term::ArrayIntro(vec![ - core::RcTerm::from(core::RcTerm::from(core::Term::universe(1))), - core::RcTerm::from(core::RcTerm::from(core::Term::universe(1))), - ]); - - let concrete_term = concrete::Term::ArrayIntro( - span(), - vec![ - concrete::Term::Universe(span(), Some(1)), - concrete::Term::Universe(span(), Some(1)), - ], - ); - - assert_eq!(core_term.resugar(&ResugarEnv::new()), concrete_term); -} diff --git a/crates/pikelet-concrete/tests/support/mod.rs b/crates/pikelet-concrete/tests/support/mod.rs deleted file mode 100644 index 2e832d1c9..000000000 --- a/crates/pikelet-concrete/tests/support/mod.rs +++ /dev/null @@ -1,64 +0,0 @@ -#![allow(dead_code)] - -use codespan::{CodeMap, FileName}; -use codespan_reporting; -use codespan_reporting::termcolor::{ColorChoice, StandardStream}; - -use pikelet_concrete::desugar::{Desugar, DesugarEnv}; -use pikelet_concrete::elaborate::{self, Context}; -use pikelet_concrete::parse; -use pikelet_concrete::syntax::concrete; -use pikelet_core::nbe; -use pikelet_core::syntax::core::RcTerm; -use pikelet_core::syntax::domain::{RcType, RcValue}; - -pub fn parse_term(codemap: &mut CodeMap, src: &str) -> concrete::Term { - let filemap = codemap.add_filemap(FileName::virtual_("test"), src.into()); - let 
(concrete_term, _import_paths, errors) = parse::term(&filemap); - - if !errors.is_empty() { - let writer = StandardStream::stdout(ColorChoice::Always); - for error in errors { - codespan_reporting::emit(&mut writer.lock(), &codemap, &error.to_diagnostic()).unwrap(); - } - panic!("parse error!") - } - - concrete_term -} - -pub fn parse_infer_term(codemap: &mut CodeMap, context: &Context, src: &str) -> (RcTerm, RcType) { - let raw_term = parse_term(codemap, src) - .desugar(&DesugarEnv::new(context.mappings())) - .unwrap(); - match elaborate::infer_term(context, &raw_term) { - Ok((term, ty)) => (term, ty), - Err(error) => { - let writer = StandardStream::stdout(ColorChoice::Always); - codespan_reporting::emit(&mut writer.lock(), &codemap, &error.to_diagnostic()).unwrap(); - panic!("type error!"); - }, - } -} - -pub fn parse_nf_term(codemap: &mut CodeMap, context: &Context, src: &str) -> RcValue { - let term = parse_infer_term(codemap, context, src).0; - match nbe::nf_term(context, &term) { - Ok(value) => value, - Err(error) => panic!("normalize error: {}", error), - } -} - -pub fn parse_check_term(codemap: &mut CodeMap, context: &Context, src: &str, expected: &RcType) { - let raw_term = parse_term(codemap, src) - .desugar(&DesugarEnv::new(context.mappings())) - .unwrap(); - match elaborate::check_term(context, &raw_term, expected) { - Ok(_) => {}, - Err(error) => { - let writer = StandardStream::stdout(ColorChoice::Always); - codespan_reporting::emit(&mut writer.lock(), &codemap, &error.to_diagnostic()).unwrap(); - panic!("type error!"); - }, - } -} diff --git a/crates/pikelet-core/Cargo.toml b/crates/pikelet-core/Cargo.toml deleted file mode 100644 index 3d03ed6a8..000000000 --- a/crates/pikelet-core/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "pikelet-core" -version = "0.1.0" -license = "Apache-2.0" -readme = "README.md" -authors = ["Brendan Zabarauskas "] -homepage = "https://github.com/pikelet-lang/pikelet" -repository = 
"https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] -codespan = "0.2.0" -codespan-reporting = "0.2.0" -failure = "0.1.3" -im = "12.2.0" -moniker = { version = "0.5.0", features = ["codespan", "im"] } -pretty = { version = "0.5.2", features = ["termcolor"] } -unicode-xid = "0.1.0" - -[dev-dependencies] -pretty_assertions = "0.5.1" diff --git a/crates/pikelet-core/README.md b/crates/pikelet-core/README.md deleted file mode 100644 index 31a2336bc..000000000 --- a/crates/pikelet-core/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# Pikelet Core Syntax - -This crate is responsible for: - -- defining data structures for: - - core terms - - weak head normal forms - - normal forms (TODO) -- normalization-by-evaluation -- checking the core terms (TODO) diff --git a/crates/pikelet-core/src/lib.rs b/crates/pikelet-core/src/lib.rs deleted file mode 100644 index 2c0a95515..000000000 --- a/crates/pikelet-core/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ -//! The syntax of the language - -pub mod nbe; -pub mod syntax; diff --git a/crates/pikelet-core/src/nbe.rs b/crates/pikelet-core/src/nbe.rs deleted file mode 100644 index e9133230c..000000000 --- a/crates/pikelet-core/src/nbe.rs +++ /dev/null @@ -1,266 +0,0 @@ -use failure::Fail; -use moniker::{Binder, Embed, FreeVar, Nest, Scope, Var}; - -use crate::syntax::core::{Pattern, RcPattern, RcTerm, Term}; -use crate::syntax::domain::{Head, Neutral, RcNeutral, RcValue, Value}; -use crate::syntax::Import; - -/// An error produced during normalization -/// -/// If a term has been successfully type checked prior to evaluation or -/// normalization, then this error should never be produced. 
-#[derive(Debug, Clone, PartialEq, Fail)] -#[fail(display = "{}", message)] -pub struct NbeError { - pub message: String, -} - -impl NbeError { - pub fn new(message: impl Into) -> NbeError { - NbeError { - message: message.into(), - } - } -} - -/// An environment where normalization happens -pub trait Env { - fn get_import(&self, name: &str) -> Option<&Import>; - fn get_definition(&self, free_var: &FreeVar) -> Option<&RcTerm>; -} - -/// Reduce a term to its normal form -pub fn nf_term(env: &dyn Env, term: &RcTerm) -> Result { - match *term.inner { - // E-ANN - Term::Ann(ref expr, _) => nf_term(env, expr), - - // E-TYPE - Term::Universe(level) => Ok(RcValue::from(Value::Universe(level))), - - Term::Literal(ref lit) => Ok(RcValue::from(Value::Literal(lit.clone()))), - - // E-VAR, E-VAR-DEF - Term::Var(ref var, shift) => match *var { - Var::Free(ref name) => match env.get_definition(name) { - Some(term) => { - let mut value = nf_term(env, term)?; - value.shift_universes(shift); - Ok(value) - }, - None => Ok(RcValue::from(Value::var(var.clone(), shift))), - }, - - // We should always be substituting bound variables with fresh - // variables when entering scopes using `unbind`, so if we've - // encountered one here this is definitely a bug! 
- Var::Bound(_) => Err(NbeError::new(format!("unexpected bound var `{}`", var))), - }, - - Term::Import(ref name) => match env.get_import(name) { - Some(&Import::Term(ref term)) => nf_term(env, term), - Some(&Import::Prim(ref interpretation)) => match interpretation(&[]) { - Some(value) => Ok(value), - None => Ok(RcValue::from(Value::from(Neutral::Head(Head::Import( - name.clone(), - ))))), - }, - None => Ok(RcValue::from(Value::from(Neutral::Head(Head::Import( - name.clone(), - ))))), - }, - - // E-PI - Term::FunType(ref scope) => { - let ((name, Embed(ann)), body) = scope.clone().unbind(); - - Ok(RcValue::from(Value::FunType(Scope::new( - (name, Embed(nf_term(env, &ann)?)), - nf_term(env, &body)?, - )))) - }, - - // E-LAM - Term::FunIntro(ref scope) => { - let ((name, Embed(ann)), body) = scope.clone().unbind(); - - Ok(RcValue::from(Value::FunIntro(Scope::new( - (name, Embed(nf_term(env, &ann)?)), - nf_term(env, &body)?, - )))) - }, - - // E-APP - Term::FunApp(ref head, ref arg) => { - match *nf_term(env, head)?.inner { - Value::FunIntro(ref scope) => { - // FIXME: do a local unbind here - let ((Binder(free_var), Embed(_)), body) = scope.clone().unbind(); - nf_term(env, &body.substs(&[(free_var, arg.clone())])) - }, - Value::Neutral(ref neutral, ref spine) => { - let arg = nf_term(env, arg)?; - let mut spine = spine.clone(); - - match *neutral.inner { - Neutral::Head(Head::Import(ref name)) => { - spine.push(arg); - - match env.get_import(name) { - Some(&Import::Term(ref _term)) => { - // nf_term(env, term) - unimplemented!("import applications") - }, - Some(&Import::Prim(ref interpretation)) => { - match interpretation(&spine) { - Some(value) => return Ok(value), - None => {}, - } - }, - None => {}, - } - }, - Neutral::Head(Head::Var(..)) - | Neutral::RecordProj(..) - | Neutral::Case(..) 
=> spine.push(arg), - } - - Ok(RcValue::from(Value::Neutral(neutral.clone(), spine))) - }, - _ => Err(NbeError::new("argument applied to non function")), - } - }, - - // E-LET - Term::Let(ref scope) => { - let (bindings, body) = scope.clone().unbind(); - let mut mappings = Vec::with_capacity(bindings.unsafe_patterns.len()); - - for (Binder(free_var), Embed(term)) in bindings.unnest() { - let value = nf_term(env, &term.substs(&mappings))?; - mappings.push((free_var, RcTerm::from(&*value.inner))); - } - - nf_term(env, &body.substs(&mappings)) - }, - - // E-RECORD-TYPE, E-EMPTY-RECORD-TYPE - Term::RecordType(ref scope) => { - let (fields, ()) = scope.clone().unbind(); - let fields = Nest::new( - fields - .unnest() - .into_iter() - .map(|(label, binder, Embed(ann))| { - Ok((label, binder, Embed(nf_term(env, &ann)?))) - }) - .collect::>()?, - ); - - Ok(RcValue::from(Value::RecordType(Scope::new(fields, ())))) - }, - - // E-RECORD, E-EMPTY-RECORD - Term::RecordIntro(ref fields) => { - let fields = fields - .iter() - .map(|&(ref label, ref term)| Ok((label.clone(), nf_term(env, &term)?))) - .collect::>()?; - - Ok(RcValue::from(Value::RecordIntro(fields))) - }, - - // E-PROJ - Term::RecordProj(ref expr, ref label, shift) => { - match *nf_term(env, expr)? 
{ - Value::Neutral(ref neutral, ref spine) => { - return Ok(RcValue::from(Value::Neutral( - RcNeutral::from(Neutral::RecordProj(neutral.clone(), label.clone(), shift)), - spine.clone(), - ))); - }, - Value::RecordIntro(ref fields) => { - for &(ref current_label, ref current_expr) in fields { - if current_label == label { - return Ok(current_expr.clone()); - } - } - }, - _ => {}, - } - - Err(NbeError::new(format!( - "projected on non existent field `{}`", - label - ))) - }, - - // E-CASE - Term::Case(ref head, ref clauses) => { - let head = nf_term(env, head)?; - - if let Value::Neutral(ref neutral, ref spine) = *head { - Ok(RcValue::from(Value::Neutral( - RcNeutral::from(Neutral::Case( - neutral.clone(), - clauses - .iter() - .map(|clause| { - let (pattern, body) = clause.clone().unbind(); - Ok(Scope::new(pattern, nf_term(env, &body)?)) - }) - .collect::>()?, - )), - spine.clone(), - ))) - } else { - for clause in clauses { - let (pattern, body) = clause.clone().unbind(); - if let Some(mappings) = match_value(env, &pattern, &head)? 
{ - let mappings = mappings - .into_iter() - .map(|(free_var, value)| (free_var, RcTerm::from(&*value.inner))) - .collect::>(); - return nf_term(env, &body.substs(&mappings)); - } - } - Err(NbeError::new("no patterns applicable")) - } - }, - - // E-ARRAY - Term::ArrayIntro(ref elems) => Ok(RcValue::from(Value::ArrayIntro( - elems - .iter() - .map(|elem| nf_term(env, elem)) - .collect::>()?, - ))), - } -} - -/// If the pattern matches the value, this function returns the substitutions -/// needed to apply the pattern to some body expression -pub fn match_value( - env: &dyn Env, - pattern: &RcPattern, - value: &RcValue, -) -> Result, RcValue)>>, NbeError> { - match (&*pattern.inner, &*value.inner) { - (&Pattern::Binder(Binder(ref free_var)), _) => { - Ok(Some(vec![(free_var.clone(), value.clone())])) - }, - (&Pattern::Var(Embed(Var::Free(ref free_var)), _), _) => { - match env.get_definition(free_var).map(|term| nf_term(env, term)) { - Some(Ok(ref term)) if term == value => Ok(Some(vec![])), - Some(Ok(_)) | None => Ok(None), - Some(Err(err)) => Err(err), - } - }, - (&Pattern::Literal(ref pattern_lit), &Value::Literal(ref value_lit)) - if pattern_lit == value_lit => - { - Ok(Some(vec![])) - }, - (_, _) => Ok(None), - } -} diff --git a/crates/pikelet-core/src/syntax/core.rs b/crates/pikelet-core/src/syntax/core.rs deleted file mode 100644 index 450692854..000000000 --- a/crates/pikelet-core/src/syntax/core.rs +++ /dev/null @@ -1,509 +0,0 @@ -//! 
The core syntax of the language - -use moniker::{Binder, BoundPattern, BoundTerm, Embed, FreeVar, Nest, Scope, Var}; -use pretty::{BoxDoc, Doc}; -use std::fmt; -use std::ops; -use std::rc::Rc; - -use crate::syntax::domain::{Head, Neutral, Value}; -use crate::syntax::{Label, Level, LevelShift, Literal, PRETTY_FALLBACK_WIDTH}; - -#[derive(Debug, Clone, PartialEq, BoundPattern)] -pub enum Pattern { - /// Patterns annotated with types - Ann(RcPattern, Embed), - /// Patterns that bind variables - Binder(Binder), - /// Patterns to be compared structurally with a variable in scope - Var(Embed>, LevelShift), - /// Literal patterns - Literal(Literal), -} - -impl Pattern { - pub fn to_doc(&self) -> Doc> { - match *self { - Pattern::Ann(ref pattern, Embed(ref ty)) => Doc::nil() - .append(pattern.to_doc()) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc()), // fun-intro? - ref pattern => pattern.to_doc_atomic(), - } - } - - fn to_doc_atomic(&self) -> Doc> { - match *self { - Pattern::Binder(ref binder) => Doc::as_string(binder), - Pattern::Var(Embed(ref var), shift) => Doc::as_string(format!("{}^{}", var, shift)), - Pattern::Literal(ref literal) => literal.to_doc(), - ref pattern => Doc::text("(").append(pattern.to_doc()).append(")"), - } - } -} - -impl fmt::Display for Pattern { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// Reference counted patterns -#[derive(Debug, Clone, PartialEq, BoundPattern)] -pub struct RcPattern { - pub inner: Rc, -} - -impl From for RcPattern { - fn from(src: Pattern) -> RcPattern { - RcPattern { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcPattern { - type Target = Pattern; - - fn deref(&self) -> &Pattern { - &self.inner - } -} - -impl fmt::Display for RcPattern { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.inner, f) - } -} - -/// The core term syntax -#[derive(Debug, Clone, 
PartialEq, BoundTerm)] -pub enum Term { - /// A term annotated with a type - Ann(RcTerm, RcTerm), - /// Universes - Universe(Level), - /// Literals - Literal(Literal), - /// A variable - Var(Var, LevelShift), - /// An imported definition - Import(String), - /// Dependent function types - FunType(Scope<(Binder, Embed), RcTerm>), - /// Function introductions - FunIntro(Scope<(Binder, Embed), RcTerm>), - /// Function applications - FunApp(RcTerm, RcTerm), - /// Dependent record types - RecordType(Scope, Embed)>, ()>), - /// Record introductions - RecordIntro(Vec<(Label, RcTerm)>), - /// Record field projection - RecordProj(RcTerm, Label, LevelShift), - /// Case expressions - Case(RcTerm, Vec>), - /// Array literals - ArrayIntro(Vec), - /// Let bindings - Let(Scope, Embed)>, RcTerm>), -} - -impl Term { - pub fn universe(level: impl Into) -> Term { - Term::Universe(level.into()) - } - - pub fn var(var: impl Into>, shift: impl Into) -> Term { - Term::Var(var.into(), shift.into()) - } - - pub fn to_doc(&self) -> Doc> { - match *self { - Term::Ann(ref term, ref ty) => Doc::nil() - .append(term.to_doc_expr()) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ty.to_doc_expr()), - ref term => term.to_doc_expr(), - } - } - - fn to_doc_expr(&self) -> Doc> { - match *self { - Term::Import(ref name) => Doc::nil() - .append("import") - .append(Doc::space()) - .append(format!("{:?}", name)), - Term::FunIntro(ref scope) => Doc::nil() - .append("\\") - .append(Doc::as_string(&scope.unsafe_pattern.0)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append((scope.unsafe_pattern.1).0.to_doc_arrow()) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - Term::Case(ref head, ref clauses) => Doc::nil() - .append("case") - .append(Doc::space()) - .append(head.to_doc_app()) - .append(Doc::space()) - .append("{") - .append(Doc::space()) - .append(Doc::intersperse( - clauses.iter().map(|scope| { - 
Doc::nil() - .append(scope.unsafe_pattern.to_doc()) - .append(Doc::space()) - .append("=>") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc()) - .append(";") - }), - Doc::newline(), - )) - .append(Doc::space()) - .append("}"), - Term::Let(ref scope) => Doc::nil() - .append("let") - .append(Doc::space()) - .append(Doc::intersperse( - scope.unsafe_pattern.unsafe_patterns.iter().map( - |&(ref binder, Embed(ref term))| { - Doc::nil() - .append(Doc::as_string(binder)) - .append(Doc::space()) - .append("=") - .append(Doc::space()) - .append(term.to_doc()) - }, - ), - Doc::newline(), - )) - .append(Doc::space()) - .append("in") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - ref term => term.to_doc_arrow(), - } - } - - fn to_doc_arrow(&self) -> Doc> { - match *self { - Term::FunType(ref scope) => Doc::nil() - .append("(") - .append(Doc::as_string(&scope.unsafe_pattern.0)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append((scope.unsafe_pattern.1).0.to_doc_arrow()) - .append(")") - .append(Doc::space()) - .append("->") - .append(Doc::space()) - .append(scope.unsafe_body.to_doc_expr()), - ref term => term.to_doc_app(), - } - } - - fn to_doc_app(&self) -> Doc> { - match *self { - Term::FunApp(ref fun, ref arg) => Doc::nil() - .append(fun.to_doc_atomic()) - .append(Doc::space()) - .append(arg.to_doc_atomic()), - ref term => term.to_doc_atomic(), - } - } - - fn to_doc_atomic(&self) -> Doc> { - match *self { - Term::Universe(level) => Doc::text(format!("Type^{}", level)), - Term::ArrayIntro(ref elems) => Doc::nil() - .append("[") - .append(Doc::intersperse( - elems.iter().map(|elem| elem.to_doc()), - Doc::text(";").append(Doc::space()), - )) - .append("]"), - Term::Var(ref var, ref level) => Doc::text(format!("{}^{}", var, level)), - Term::RecordType(ref scope) => Doc::nil() - .append("Record {") - .append(Doc::space()) - .append(Doc::intersperse( - scope.unsafe_pattern.unsafe_patterns.iter().map( - |&(ref label, ref 
binder, Embed(ref ann))| { - Doc::nil() - .append(Doc::as_string(label)) - .append(Doc::space()) - .append("as") - .append(Doc::space()) - .append(Doc::as_string(binder)) - .append(Doc::space()) - .append(":") - .append(Doc::space()) - .append(ann.to_doc()) - }, - ), - Doc::text(";").append(Doc::space()), - )) - .append(Doc::space()) - .append("}"), - Term::RecordIntro(ref fields) => Doc::nil() - .append("record {") - .append(Doc::space()) - .append(Doc::intersperse( - fields.iter().map(|&(ref label, ref value)| { - Doc::nil() - .append(Doc::as_string(label)) - .append(Doc::space()) - .append("=") - .append(Doc::space()) - .append(value.to_doc()) - }), - Doc::text(";").append(Doc::space()), - )) - .append(Doc::space()) - .append("}"), - Term::RecordProj(ref expr, ref label, ref shift) => Doc::nil() - .append(expr.to_doc_atomic()) - .append(".") - .append(format!("{}^{}", label, shift)), - ref term => Doc::text("(").append(term.to_doc()).append(")"), - } - } -} - -impl fmt::Display for Term { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// Reference counted terms -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub struct RcTerm { - pub inner: Rc, -} - -impl RcTerm { - pub fn substs(&self, mappings: &[(FreeVar, RcTerm)]) -> RcTerm { - match *self.inner { - Term::Ann(ref term, ref ty) => { - RcTerm::from(Term::Ann(term.substs(mappings), ty.substs(mappings))) - }, - Term::Universe(_) | Term::Literal(_) => self.clone(), - Term::Var(ref var, _) => match mappings.iter().find(|&(ref name, _)| var == name) { - Some(&(_, ref term)) => term.clone(), - None => self.clone(), - }, - Term::Import(ref name) => RcTerm::from(Term::Import(name.clone())), - Term::FunType(ref scope) => { - let (ref name, Embed(ref ann)) = scope.unsafe_pattern; - RcTerm::from(Term::FunType(Scope { - unsafe_pattern: (name.clone(), Embed(ann.substs(mappings))), - unsafe_body: scope.unsafe_body.substs(mappings), - })) - }, 
- Term::FunIntro(ref scope) => { - let (ref name, Embed(ref ann)) = scope.unsafe_pattern; - RcTerm::from(Term::FunIntro(Scope { - unsafe_pattern: (name.clone(), Embed(ann.substs(mappings))), - unsafe_body: scope.unsafe_body.substs(mappings), - })) - }, - Term::Let(ref scope) => { - let unsafe_patterns = scope - .unsafe_pattern - .unsafe_patterns - .iter() - .map(|&(ref binder, Embed(ref term))| { - (binder.clone(), Embed(term.substs(mappings))) - }) - .collect(); - - RcTerm::from(Term::Let(Scope { - unsafe_pattern: Nest { unsafe_patterns }, - unsafe_body: scope.unsafe_body.substs(mappings), - })) - }, - Term::FunApp(ref head, ref arg) => { - RcTerm::from(Term::FunApp(head.substs(mappings), arg.substs(mappings))) - }, - Term::RecordType(ref scope) if scope.unsafe_pattern.unsafe_patterns.is_empty() => { - self.clone() - }, - Term::RecordType(ref scope) => { - let unsafe_patterns = scope - .unsafe_pattern - .unsafe_patterns - .iter() - .map(|&(ref label, ref binder, Embed(ref ann))| { - (label.clone(), binder.clone(), Embed(ann.substs(mappings))) - }) - .collect(); - - RcTerm::from(Term::RecordType(Scope { - unsafe_pattern: Nest { unsafe_patterns }, - unsafe_body: (), - })) - }, - Term::RecordIntro(ref fields) if fields.is_empty() => self.clone(), - Term::RecordIntro(ref fields) => { - let fields = fields - .iter() - .map(|&(ref label, ref expr)| (label.clone(), expr.substs(mappings))) - .collect(); - - RcTerm::from(Term::RecordIntro(fields)) - }, - Term::RecordProj(ref expr, ref label, shift) => RcTerm::from(Term::RecordProj( - expr.substs(mappings), - label.clone(), - shift, - )), - Term::Case(ref head, ref clauses) => RcTerm::from(Term::Case( - head.substs(mappings), - clauses - .iter() - .map(|scope| { - Scope { - unsafe_pattern: scope.unsafe_pattern.clone(), // subst? 
- unsafe_body: scope.unsafe_body.substs(mappings), - } - }) - .collect(), - )), - Term::ArrayIntro(ref elems) => RcTerm::from(Term::ArrayIntro( - elems.iter().map(|elem| elem.substs(mappings)).collect(), - )), - } - } -} - -impl From for RcTerm { - fn from(src: Term) -> RcTerm { - RcTerm { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcTerm { - type Target = Term; - - fn deref(&self) -> &Term { - &self.inner - } -} - -impl fmt::Display for RcTerm { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.inner, f) - } -} - -impl<'a> From<&'a Value> for Term { - fn from(src: &'a Value) -> Term { - // Bypassing `Scope::new` and `Scope::unbind` here should be fine - // because we aren't altering the structure of the scopes during this - // transformation. This should save on some traversals of the AST! - match *src { - Value::Universe(level) => Term::Universe(level), - Value::Literal(ref lit) => Term::Literal(lit.clone()), - Value::FunType(ref scope) => { - let (ref name, Embed(ref ann)) = scope.unsafe_pattern; - Term::FunType(Scope { - unsafe_pattern: (name.clone(), Embed(RcTerm::from(&**ann))), - unsafe_body: RcTerm::from(&*scope.unsafe_body), - }) - }, - Value::FunIntro(ref scope) => { - let (ref name, Embed(ref ann)) = scope.unsafe_pattern; - Term::FunIntro(Scope { - unsafe_pattern: (name.clone(), Embed(RcTerm::from(&**ann))), - unsafe_body: RcTerm::from(&*scope.unsafe_body), - }) - }, - Value::RecordType(ref scope) => { - let unsafe_patterns = scope - .unsafe_pattern - .unsafe_patterns - .iter() - .map(|&(ref label, ref binder, Embed(ref ann))| { - (label.clone(), binder.clone(), Embed(RcTerm::from(&**ann))) - }) - .collect(); - - Term::RecordType(Scope { - unsafe_pattern: Nest { unsafe_patterns }, - unsafe_body: (), - }) - }, - Value::RecordIntro(ref fields) => { - let fields = fields - .iter() - .map(|&(ref label, ref expr)| (label.clone(), RcTerm::from(&**expr))) - .collect(); - - Term::RecordIntro(fields) - }, - 
Value::ArrayIntro(ref elems) => { - Term::ArrayIntro(elems.iter().map(|elem| RcTerm::from(&**elem)).collect()) - }, - Value::Neutral(ref neutral, ref spine) => { - spine.iter().fold(Term::from(&*neutral.inner), |acc, arg| { - Term::FunApp(RcTerm::from(acc), RcTerm::from(&**arg)) - }) - }, - } - } -} - -impl<'a> From<&'a Value> for RcTerm { - fn from(src: &'a Value) -> RcTerm { - RcTerm::from(Term::from(src)) - } -} - -impl<'a> From<&'a Neutral> for Term { - fn from(src: &'a Neutral) -> Term { - match *src { - Neutral::Head(ref head) => Term::from(head), - Neutral::RecordProj(ref expr, ref name, shift) => { - Term::RecordProj(RcTerm::from(&**expr), name.clone(), shift) - }, - Neutral::Case(ref head, ref clauses) => Term::Case( - RcTerm::from(&**head), - clauses - .iter() - .map(|clause| Scope { - unsafe_pattern: clause.unsafe_pattern.clone(), - unsafe_body: RcTerm::from(&*clause.unsafe_body), - }) - .collect(), - ), - } - } -} - -impl<'a> From<&'a Neutral> for RcTerm { - fn from(src: &'a Neutral) -> RcTerm { - RcTerm::from(Term::from(src)) - } -} - -impl<'a> From<&'a Head> for Term { - fn from(src: &'a Head) -> Term { - match *src { - Head::Var(ref var, shift) => Term::Var(var.clone(), shift), - Head::Import(ref name) => Term::Import(name.clone()), - } - } -} diff --git a/crates/pikelet-core/src/syntax/domain.rs b/crates/pikelet-core/src/syntax/domain.rs deleted file mode 100644 index 3f08afe50..000000000 --- a/crates/pikelet-core/src/syntax/domain.rs +++ /dev/null @@ -1,244 +0,0 @@ -//! 
The semantic domain of the language - -use moniker::{Binder, BoundPattern, BoundTerm, Embed, FreeVar, Nest, Scope, Var}; -use std::ops; -use std::rc::Rc; - -use crate::syntax::core::{RcPattern, RcTerm, Term}; -use crate::syntax::{Label, Level, LevelShift, Literal}; - -/// Values -/// -/// These are either in _normal form_ (they cannot be reduced further) or are -/// _neutral terms_ (there is a possibility of reducing further depending -/// on the bindings given in the context) -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub enum Value { - /// Universes - Universe(Level), - /// Literals - Literal(Literal), - /// Dependent function types - FunType(Scope<(Binder, Embed), RcValue>), - /// Function introductions - FunIntro(Scope<(Binder, Embed), RcValue>), - /// Dependent record types - RecordType(Scope, Embed)>, ()>), - /// Dependent record introductions - RecordIntro(Vec<(Label, RcValue)>), - /// Array literals - ArrayIntro(Vec), - /// Neutral terms - /// - /// A term whose computation has stopped because of an attempt to compute an - /// application `Head`. - Neutral(RcNeutral, Spine), -} - -impl Value { - pub fn universe(level: impl Into) -> Value { - Value::Universe(level.into()) - } - - pub fn var(var: impl Into>, shift: impl Into) -> Value { - Value::Neutral(RcNeutral::from(Neutral::var(var, shift)), Spine::new()) - } - - pub fn substs(&self, mappings: &[(FreeVar, RcTerm)]) -> RcTerm { - // FIXME: This seems quite wasteful! - RcTerm::from(Term::from(self)).substs(mappings) - } - - /// Returns `true` if the value is in weak head normal form - pub fn is_whnf(&self) -> bool { - match *self { - Value::Universe(_) - | Value::Literal(_) - | Value::FunType(_) - | Value::FunIntro(_) - | Value::RecordType(_) - | Value::RecordIntro(_) - | Value::ArrayIntro(_) => true, - Value::Neutral(_, _) => false, - } - } - - /// Returns `true` if the value is in normal form (ie. 
it contains no neutral terms within it) - pub fn is_nf(&self) -> bool { - match *self { - Value::Universe(_) | Value::Literal(_) => true, - Value::FunType(ref scope) | Value::FunIntro(ref scope) => { - (scope.unsafe_pattern.1).0.is_nf() && scope.unsafe_body.is_nf() - }, - Value::RecordType(ref scope) => scope - .unsafe_pattern - .unsafe_patterns - .iter() - .all(|(_, _, Embed(ref term))| term.is_nf()), - Value::RecordIntro(ref fields) => fields.iter().all(|&(_, ref term)| term.is_nf()), - Value::ArrayIntro(ref elems) => elems.iter().all(|elem| elem.is_nf()), - Value::Neutral(_, _) => false, - } - } - - pub fn head_app(&self) -> Option<(&Head, &Spine)> { - if let Value::Neutral(ref neutral, ref spine) = *self { - if let Neutral::Head(ref head) = **neutral { - return Some((head, spine)); - } - } - None - } - - pub fn free_var_app(&self) -> Option<(&FreeVar, LevelShift, &[RcValue])> { - self.head_app().and_then(|(head, spine)| match *head { - Head::Var(Var::Free(ref free_var), shift) => Some((free_var, shift, &spine[..])), - Head::Import(_) | Head::Var(Var::Bound(_), _) => None, - }) - } -} - -/// Reference counted values -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub struct RcValue { - pub inner: Rc, -} - -impl RcValue { - pub fn shift_universes(&mut self, shift: LevelShift) { - match *Rc::make_mut(&mut self.inner) { - Value::Universe(ref mut level) => *level += shift, - Value::Literal(_) => {}, - Value::FunType(ref mut scope) | Value::FunIntro(ref mut scope) => { - (scope.unsafe_pattern.1).0.shift_universes(shift); - scope.unsafe_body.shift_universes(shift); - }, - Value::RecordType(ref mut scope) => { - for &mut (_, _, Embed(ref mut term)) in &mut scope.unsafe_pattern.unsafe_patterns { - term.shift_universes(shift); - } - }, - Value::RecordIntro(ref mut fields) => { - for &mut (_, ref mut term) in fields { - term.shift_universes(shift); - } - }, - Value::ArrayIntro(ref mut elems) => { - for elem in elems { - elem.shift_universes(shift); - } - }, - 
Value::Neutral(ref mut neutral, ref mut spine) => { - neutral.shift_universes(shift); - for arg in spine { - arg.shift_universes(shift); - } - }, - } - } -} - -impl From for RcValue { - fn from(src: Value) -> RcValue { - RcValue { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcValue { - type Target = Value; - - fn deref(&self) -> &Value { - &self.inner - } -} - -/// The head of an application -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub enum Head { - /// Variables that have not yet been replaced with a definition - Var(Var, LevelShift), - /// Imported definitions - Import(String), - // TODO: Metavariables -} - -/// The spine of a neutral term -/// -/// These are arguments that are awaiting application -pub type Spine = Vec; - -/// Neutral values -/// -/// These might be able to be reduced further depending on the bindings in the -/// context -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub enum Neutral { - /// Head of an application - Head(Head), - /// Field projection - RecordProj(RcNeutral, Label, LevelShift), - /// Case expressions - Case(RcNeutral, Vec>), -} - -impl Neutral { - pub fn var(var: impl Into>, shift: impl Into) -> Neutral { - Neutral::Head(Head::Var(var.into(), shift.into())) - } -} - -/// Reference counted neutral values -#[derive(Debug, Clone, PartialEq, BoundTerm)] -pub struct RcNeutral { - pub inner: Rc, -} - -impl RcNeutral { - pub fn shift_universes(&mut self, shift: LevelShift) { - match *Rc::make_mut(&mut self.inner) { - // Neutral::Head(Head::Var(_, ref mut head_shift)) => { - // *head_shift += shift; // NOTE: Not sure if this is correct! - // }, - Neutral::Head(Head::Var(_, _)) | Neutral::Head(Head::Import(_)) => {}, - Neutral::RecordProj(ref mut expr, _, _) => expr.shift_universes(shift), - Neutral::Case(ref mut expr, ref mut clauses) => { - expr.shift_universes(shift); - for clause in clauses { - // FIXME: implement shifting for patterns as well! 
- // clause.unsafe_pattern.shift_universes(shift); - clause.unsafe_body.shift_universes(shift); - } - }, - } - } -} - -impl From for RcNeutral { - fn from(src: Neutral) -> RcNeutral { - RcNeutral { - inner: Rc::new(src), - } - } -} - -impl ops::Deref for RcNeutral { - type Target = Neutral; - - fn deref(&self) -> &Neutral { - &self.inner - } -} - -/// Types are at the term level, so this is just an alias -pub type Type = Value; - -/// Types are at the term level, so this is just an alias -pub type RcType = RcValue; - -impl From for Value { - fn from(src: Neutral) -> Value { - Value::Neutral(RcNeutral::from(src), Spine::new()) - } -} diff --git a/crates/pikelet-core/src/syntax/mod.rs b/crates/pikelet-core/src/syntax/mod.rs deleted file mode 100644 index 84241d4f6..000000000 --- a/crates/pikelet-core/src/syntax/mod.rs +++ /dev/null @@ -1,161 +0,0 @@ -use moniker::{BoundPattern, BoundTerm}; -use pretty::{BoxDoc, Doc}; -use std::fmt; -use std::ops::{Add, AddAssign}; - -pub mod core; -pub mod domain; - -/// An effectively 'infinite' line length for when we don't have an explicit -/// width provided for pretty printing. -/// -/// `pretty.rs` seems to bug-out and break on every line when using -/// `usize::MAX`, so we'll just use a really big number instead... -pub const PRETTY_FALLBACK_WIDTH: usize = 1_000_000; - -/// Imported definitions -#[derive(Clone)] -pub enum Import { - Term(core::RcTerm), - Prim(for<'a> fn(&'a [domain::RcValue]) -> Option), -} - -impl fmt::Debug for Import { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Import::Term(ref term) => f.debug_tuple("Term").field(term).finish(), - Import::Prim(_) => f.debug_tuple("Prim").field(&"|params| { .. }").finish(), - } - } -} - -/// Literals -/// -/// We could church encode all the things, but that would be prohibitively expensive! 
-#[derive(Debug, Clone, PartialEq, PartialOrd, BoundTerm, BoundPattern)] -pub enum Literal { - Bool(bool), - String(String), - Char(char), - U8(u8), - U16(u16), - U32(u32), - U64(u64), - S8(i8), - S16(i16), - S32(i32), - S64(i64), - F32(f32), - F64(f64), -} - -impl Literal { - pub fn to_doc(&self) -> Doc> { - match *self { - Literal::Bool(true) => Doc::text("true"), - Literal::Bool(false) => Doc::text("false"), - Literal::String(ref value) => Doc::text(format!("{:?}", value)), - Literal::Char(value) => Doc::text(format!("{:?}", value)), - Literal::U8(value) => Doc::as_string(&value), - Literal::U16(value) => Doc::as_string(&value), - Literal::U32(value) => Doc::as_string(&value), - Literal::U64(value) => Doc::as_string(&value), - Literal::S8(value) => Doc::as_string(&value), - Literal::S16(value) => Doc::as_string(&value), - Literal::S32(value) => Doc::as_string(&value), - Literal::S64(value) => Doc::as_string(&value), - Literal::F32(value) => Doc::as_string(&value), - Literal::F64(value) => Doc::as_string(&value), - } - } -} - -impl fmt::Display for Literal { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.to_doc().group().render_fmt(PRETTY_FALLBACK_WIDTH, f) - } -} - -/// A universe level -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, BoundTerm)] -pub struct Level(pub u32); - -impl Level { - pub fn succ(self) -> Level { - Level(self.0 + 1) - } -} - -impl From for Level { - fn from(src: u32) -> Level { - Level(src) - } -} - -impl fmt::Display for Level { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -/// A shift in universe level -#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, BoundTerm, BoundPattern)] -pub struct LevelShift(pub u32); - -impl From for LevelShift { - fn from(src: u32) -> LevelShift { - LevelShift(src) - } -} - -impl fmt::Display for LevelShift { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} - -impl Add for 
LevelShift { - type Output = LevelShift; - - fn add(self, other: LevelShift) -> LevelShift { - LevelShift(self.0 + other.0) - } -} - -impl AddAssign for LevelShift { - fn add_assign(&mut self, other: LevelShift) { - self.0 += other.0; - } -} - -impl Add for Level { - type Output = Level; - - fn add(self, other: LevelShift) -> Level { - Level(self.0 + other.0) - } -} - -impl AddAssign for Level { - fn add_assign(&mut self, other: LevelShift) { - self.0 += other.0; - } -} - -/// A label that describes the name of a field in a record -/// -/// Labels are significant when comparing for alpha-equality -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, BoundPattern, BoundTerm)] -pub struct Label(pub String); - -impl From for Label { - fn from(src: String) -> Label { - Label(src) - } -} - -impl fmt::Display for Label { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.0) - } -} diff --git a/crates/pikelet-driver/Cargo.toml b/crates/pikelet-driver/Cargo.toml deleted file mode 100644 index 9ae3317cb..000000000 --- a/crates/pikelet-driver/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "pikelet-driver" -version = "0.1.0" -license = "Apache-2.0" -readme = "README.md" -authors = ["Brendan Zabarauskas "] -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] -codespan = "0.2.0" -codespan-reporting = "0.2.0" -pikelet-concrete = { version = "0.1.0", path = "../pikelet-concrete" } -pikelet-core = { version = "0.1.0", path = "../pikelet-core" } -pikelet-library = { version = "0.1.0", path = "../pikelet-library" } diff --git a/crates/pikelet-driver/README.md b/crates/pikelet-driver/README.md deleted file mode 100644 index 9ef12edc5..000000000 --- a/crates/pikelet-driver/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Pikelet Driver - -This is the main entry-point for the Pikelet compiler pipeline. 
We eventually -intend to turn this into a database of Pikelet terms that can be incrementally -checked, compiled, and evaluated. diff --git a/crates/pikelet-driver/src/lib.rs b/crates/pikelet-driver/src/lib.rs deleted file mode 100644 index 0b433c385..000000000 --- a/crates/pikelet-driver/src/lib.rs +++ /dev/null @@ -1,276 +0,0 @@ -//! The Pikelet Compiler -//! -//! # Compiler Architecture -//! -//! In order to create a separation of concerns, we break up our compiler into many -//! small stages, beginning with a source string, and ultimately ending up with -//! compiled machine code. -//! -//! Below is a rough flow chart showing how source strings are currently lexed, -//! parsed, desugared, and type checked/elaborated: -//! -//! ```bob -//! .------------. -//! | String | -//! '------------' -//! | -//! - - - - - - - - - - - | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -//! Frontend | -//! | -//! pikelet_concrete::parse::lexer -//! | -//! v -//! .---------------------------------------. -//! | pikelet_concrete::parse::lexer::Token | -//! '---------------------------------------' -//! | -//! pikelet_concrete::parse -//! | -//! v -//! .------------------------------------------. -//! | pikelet_concrete::syntax::concrete::Term |---------> Code formatter (TODO) -//! '------------------------------------------' -//! | -//! pikelet_concrete::desugar -//! | -//! v -//! .-------------------------------------. -//! | pikelet_concrete::syntax::raw::Term | -//! '-------------------------------------' -//! | .-------------------------------------. -//! pikelet_concrete::elaborate::{check,infer} <---------- | pikelet_core::syntax::domain::Value | -//! | '-------------------------------------' -//! v ^ -//! .----------------------------------. | -//! | pikelet_core::syntax::core::Term | -- pikelet_core::normalize -------' -//! '----------------------------------' -//! | -//! | -//! 
- - - - - - - - - - - | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -//! Middle (TODO) | -//! | -//! v -//! A-Normal Form (ANF) -//! | -//! v -//! Closure Conversion (CC) -//! | -//! v -//! Static Single Assignment (SSA) -//! | -//! | -//! - - - - - - - - - - - | - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -//! Backend (TODO) | -//! | -//! v -//! Codegen -//! | -//! *-------> Bytecode? -//! | -//! *-------> WASM? -//! | -//! *-------> Cranelift IR? -//! | -//! '-------> LLVM IR? -//! ``` -//! -//! As you can see we have only built the front-end as of the time of writing. When -//! we begin to build out a [compiler back end](https://github.com/pikelet-lang/pikelet/issues/9), -//! more stages will be added after type checking and elaboration. -//! -//! ## Name binding -//! -//! Name binding is a surprisingly challenging thing to implement in type checkers -//! and compilers. We use the [`moniker` crate](https://github.com/brendanzab/moniker) -//! for this. Unfortunately this uses a quite slow method of name binding, and could -//! result in performance blowouts in the future. This is something to keep an eye on! -//! -//! ## Performance considerations -//! -//! As you can see from the diagram above, this architecture leads to an -//! easy-to-reason about pipeline. It does however result in the creation of lots of -//! intermediate allocations of heap-allocated tree data structures that will -//! ultimately be discarded. This is quite similar to the problem we face with -//! iterators: -//! -//! ```rust,ignore -//! // 'internal' iteration -//! vec![1, 2, 3].map(|x| x * x).filter(|x| x < 3) -//! -//! // 'external' iteration -//! vec![1, 2, 3].iter().map(|x| x * x).filter(|x| x < 3).collect() -//! ``` -//! -//! The first example, which uses 'internal' iteration allocates a new collection -//! after each operation, resulting in three allocated collections. We can improve -//! the performance however by using 'external' iteration - ie. 
returning a series -//! of chained iterator adaptors, that only perform the allocation on the call to -//! `collect`. This emulates the 'fusion' that languages like Haskell perform to -//! reduce intermediate allocations. -//! -//! We could potentially get some fusion between the stages of our compiler by way -//! of the [visitor pattern](https://github.com/pikelet-lang/pikelet/issues/75). -//! -//! ## Support for interactive development -//! -//! It would be interesting to see how Pikelet could be implemented using an -//! [asynchronous, query-based architecture](https://github.com/pikelet-lang/pikelet/issues/103). -//! This will become more important as the demands of interactive development -//! and incremental compilation become more pressing. In this model we would -//! have to think of compilation as less a pure function from source code to -//! machine code, and more as interacting with a database. -//! -//! ### Resources -//! -//! - [Queries: demand-driven compilation (Rustc Book)](https://rust-lang-nursery.github.io/rustc-guide/query.html) -//! 
- [Anders Hejlsberg on Modern Compiler Construction (YouTube)](https://www.youtube.com/watch?v=wSdV1M7n4gQ) - -use codespan::CodeMap; -pub use codespan::FileName; -pub use codespan_reporting::{termcolor, ColorArg, Diagnostic}; -use std::io; - -use pikelet_concrete::desugar::{Desugar, DesugarEnv}; -use pikelet_concrete::elaborate::Context; -use pikelet_concrete::resugar::Resugar; -use pikelet_concrete::syntax::raw; -use pikelet_core::syntax::{core, domain, Import}; - -/// An environment that keeps track of the state of a Pikelet program during -/// compilation or interactive sessions -#[derive(Debug, Clone)] -pub struct Driver { - /// The base type checking context, containing the built-in definitions - context: Context, - /// The base desugar environment, using the definitions from the `context` - desugar_env: DesugarEnv, - /// A codemap that owns the source code for any terms that are currently loaded - code_map: CodeMap, -} - -impl Driver { - /// Create a new Pikelet environment, containing only the built-in definitions - pub fn new() -> Driver { - let context = Context::default(); - let desugar_env = DesugarEnv::new(context.mappings()); - - Driver { - context, - desugar_env, - code_map: CodeMap::new(), - } - } - - /// Create a new Pikelet environment, with the prelude loaded as well - pub fn with_prelude() -> Driver { - let mut pikelet = Driver::new(); - - pikelet - .register_file( - "prim".to_owned(), - FileName::virtual_("prim"), - pikelet_library::PRIM.to_owned(), - ) - .unwrap(); - - pikelet - .register_file( - "prelude".to_owned(), - FileName::virtual_("prelude"), - pikelet_library::PRELUDE.to_owned(), - ) - .unwrap(); - - pikelet - } - - /// Add a binding to the driver's top-level environment - pub fn add_binding(&mut self, name: &str, term: core::RcTerm, ann: domain::RcType) { - let fv = self.desugar_env.on_binding(&name); - self.context.insert_declaration(fv.clone(), ann.clone()); - self.context.insert_definition(fv.clone(), term.clone()); - } - - /// 
Register a file with the driver - pub fn register_file( - &mut self, - path: String, - name: FileName, - src: String, - ) -> Result<(), Vec> { - let (term, ty) = self.infer_file(name, src)?; - // FIXME: Check if import already exists - self.context.insert_import(path, Import::Term(term), ty); - - Ok(()) - } - - /// Infer the type of a file - pub fn infer_file( - &mut self, - name: FileName, - src: String, - ) -> Result<(core::RcTerm, domain::RcType), Vec> { - let file_map = self.code_map.add_filemap(name, src); - // TODO: follow import paths - let (concrete_term, _import_paths, errors) = pikelet_concrete::parse::term(&file_map); - if !errors.is_empty() { - return Err(errors.iter().map(|error| error.to_diagnostic()).collect()); - } - let raw_term = self.desugar(&concrete_term)?; - self.infer_term(&raw_term) - } - - /// Normalize the contents of a file - pub fn normalize_file( - &mut self, - name: FileName, - src: String, - ) -> Result> { - use pikelet_concrete::elaborate::InternalError; - - let (term, _) = self.infer_file(name, src)?; - pikelet_core::nbe::nf_term(&self.context, &term) - .map_err(|err| vec![InternalError::from(err).to_diagnostic()]) - } - - /// Infer the type of a term - pub fn infer_term( - &self, - raw_term: &raw::RcTerm, - ) -> Result<(core::RcTerm, domain::RcType), Vec> { - pikelet_concrete::elaborate::infer_term(&self.context, &raw_term) - .map_err(|err| vec![err.to_diagnostic()]) - } - - /// Normalize a term - pub fn normalize_term(&self, term: &core::RcTerm) -> Result> { - use pikelet_concrete::elaborate::InternalError; - - pikelet_core::nbe::nf_term(&self.context, term) - .map_err(|err| vec![InternalError::from(err).to_diagnostic()]) - } - - /// Desugar a term - pub fn desugar(&self, src: &impl Desugar) -> Result> { - src.desugar(&self.desugar_env) - .map_err(|e| vec![e.to_diagnostic()]) - } - - /// Resugar a term - pub fn resugar(&self, src: &impl Resugar) -> T { - self.context.resugar(src) - } - - /// Emit the diagnostics using the given 
writer - pub fn emit<'a>( - &self, - mut writer: impl termcolor::WriteColor, - diagnostics: impl IntoIterator, - ) -> io::Result<()> { - for diagnostic in diagnostics { - codespan_reporting::emit(&mut writer, &self.code_map, diagnostic)?; - } - Ok(()) - } -} diff --git a/crates/pikelet-driver/tests/prelude.rs b/crates/pikelet-driver/tests/prelude.rs deleted file mode 100644 index d74e1f3ed..000000000 --- a/crates/pikelet-driver/tests/prelude.rs +++ /dev/null @@ -1,31 +0,0 @@ -use pikelet_driver::termcolor::{ColorChoice, StandardStream}; -use pikelet_driver::{Driver, FileName}; - -#[test] -fn with_prelude() { - let _driver = Driver::with_prelude(); -} - -#[test] -fn prelude() { - let mut driver = Driver::new(); - let writer = StandardStream::stdout(ColorChoice::Always); - - if let Err(diagnostics) = driver.register_file( - "prim".to_owned(), - FileName::virtual_("prim"), - pikelet_library::PRIM.to_owned(), - ) { - driver.emit(writer.lock(), &diagnostics).unwrap(); - panic!("load error!") - } - - if let Err(diagnostics) = driver.register_file( - "prelude".to_owned(), - FileName::virtual_("prelude"), - pikelet_library::PRELUDE.to_owned(), - ) { - driver.emit(writer.lock(), &diagnostics).unwrap(); - panic!("load error!") - } -} diff --git a/crates/pikelet-language-server/Cargo.toml b/crates/pikelet-language-server/Cargo.toml deleted file mode 100644 index 352dbd7a4..000000000 --- a/crates/pikelet-language-server/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "pikelet-language-server" -version = "0.1.0" -license = "Apache-2.0" -readme = "README.md" -authors = ["Brendan Zabarauskas "] -description = "Language Server Protocol implementation for Pikelet" -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -publish = false - -[dependencies] -failure = "0.1.2" -languageserver-types = "0.51.0" -pikelet-driver = { version = "0.1.0", path = "../pikelet-driver" } -serde = "1" -serde_derive = "1" -serde_json 
= "1" -structopt = "0.2.12" diff --git a/crates/pikelet-language-server/README.md b/crates/pikelet-language-server/README.md deleted file mode 100644 index 1ac9373d9..000000000 --- a/crates/pikelet-language-server/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Pikelet language server - -This is an implementation of the [Language Server Protocol][lsp] (LSP) for Pikelet. - -[lsp]: https://microsoft.github.io/language-server-protocol/ - -## Clients - -Clients to this language server can currently be found under the [`editors`] -directory in the Pikelet repository. - -[`editors`]: /editors diff --git a/crates/pikelet-language-server/src/lib.rs b/crates/pikelet-language-server/src/lib.rs deleted file mode 100644 index 6dcbe8f3e..000000000 --- a/crates/pikelet-language-server/src/lib.rs +++ /dev/null @@ -1,81 +0,0 @@ -//! A language server for Pikelet - -extern crate failure; -extern crate languageserver_types as lsp_ty; -extern crate pikelet_driver; -extern crate serde; -#[macro_use] -extern crate serde_derive; -extern crate serde_json; -#[macro_use] -extern crate structopt; - -use failure::Error; - -mod rpc; - -#[derive(Debug, StructOpt)] -pub struct Opts { - // TODO -} - -fn server_capabilities() -> lsp_ty::ServerCapabilities { - lsp_ty::ServerCapabilities { - text_document_sync: None, - hover_provider: None, - completion_provider: None, - signature_help_provider: None, - definition_provider: None, - type_definition_provider: None, - implementation_provider: None, - references_provider: None, - document_highlight_provider: None, - document_symbol_provider: None, - workspace_symbol_provider: None, - code_action_provider: None, - code_lens_provider: None, - document_formatting_provider: None, - document_range_formatting_provider: None, - document_on_type_formatting_provider: None, - rename_provider: None, - color_provider: None, - folding_range_provider: None, - execute_command_provider: None, - workspace: None, - } -} - -/// Run `language-server` with the given options 
-pub fn run(_opts: Opts) -> Result<(), Error> { - // TODO: multi-threading - // FIXME: Just sketching this out - this currently doesn't work! :/ - - { - let stdin = std::io::stdin(); - let init_content = rpc::recv_content(&mut stdin.lock())?; - match serde_json::from_str::>(&init_content) { - Ok(rpc::JsonRpc { - result: rpc::LspCommand::Initialize { .. }, - .. - }) => {}, - Ok(_) => unimplemented!(), - Err(_) => unimplemented!(), - } - } - - { - let stdout = std::io::stdout(); - let capabilities = server_capabilities(); - let init_resp = rpc::JsonRpc::new(0, lsp_ty::InitializeResult { capabilities }); - rpc::send_content(&mut stdout.lock(), serde_json::to_string(&init_resp)?)?; - } - - // loop { - // match rpc::recv_content(&mut stdin) { - // Ok(_) => unimplemented!(), - // Err(error) => eprintln!("error: {}", error), - // } - // } - - Ok(()) -} diff --git a/crates/pikelet-language-server/src/rpc.rs b/crates/pikelet-language-server/src/rpc.rs deleted file mode 100644 index f69953ecd..000000000 --- a/crates/pikelet-language-server/src/rpc.rs +++ /dev/null @@ -1,260 +0,0 @@ -//! Base RPC handling for the Language Server Protocol -//! -//! # Example message -//! -//! ```text -//! Content-Length: ...\r\n -//! \r\n -//! { -//! "jsonrpc": "2.0", -//! "id": 1, -//! "method": "textDocument/didOpen", -//! "params": { -//! ... -//! } -//! } -//! ``` -//! -//! # References -//! -//! - [Language Server Protocol Specification: Base Protocol][base-protocol] -//! - [JSON-RPC 2.0 Specification][json-rpc] -//! -//! [base-protocol]: https://microsoft.github.io/language-server-protocol/specification#base-protocol -//! 
[json-rpc]: https://www.jsonrpc.org/specification - -use lsp_ty; -use std::io::{self, BufRead, Write}; - -/// Sends an RPC call containing the given content -#[allow(dead_code)] -pub fn send_content(writer: &mut impl Write, content: String) -> Result<(), io::Error> { - let content_length = content.len(); - let content_type = "application/vscode-jsonrpc; charset=utf-8"; - - // Header part - // - // https://microsoft.github.io/language-server-protocol/specification#header-part - - write!(writer, "Content-Length: {}\r\n", content_length)?; - write!(writer, "Content-Type: {}\r\n", content_type)?; - write!(writer, "\r\n")?; - - // Content part - // - // https://microsoft.github.io/language-server-protocol/specification#content-part - - write!(writer, "{}", content)?; - - writer.flush()?; - - Ok(()) -} - -/// Receives an RPC call from the given reader, returning the content as a string -#[allow(dead_code)] -pub fn recv_content(reader: &mut impl BufRead) -> Result { - // Header part - // - // https://microsoft.github.io/language-server-protocol/specification#header-part - // - // content-length ::= "Content-Length: " length "\r\n" - // content-type ::= "Content-Type: " string "\r\n" - // unknown ::= string ": " string "\r\n" - // header ::= content-length / content-type / unknown - // headers ::= header headers / "\r\n" - - let mut content_len = None::; - let charset = None::<&str>; // TODO - - // Loop through headers, collecting the relevant information - let mut header_buffer = String::new(); - loop { - reader.read_line(&mut header_buffer)?; - { - let mut splits = header_buffer.splitn(2, ": "); - match (splits.next(), splits.next()) { - // Content-Length header - (Some("Content-Length"), Some(value)) => { - if content_len.is_none() { - content_len = Some(value.trim_right().parse().map_err(|err| { - io::Error::new( - io::ErrorKind::InvalidData, - format!("`Content-Length` was not a valid number: {:?}", err), - ) - })?); - } - }, - // Content-Type header - 
(Some("Content-Type"), Some(_)) => {}, // TODO: parse content type? - // Other headers, skipped to ensure forwards compatibility - (Some(name), Some(_)) => eprintln!("Skipping unknown header: {:?}", name), - // End of the headers - (Some("\r\n"), None) => break, - (Some(header), None) => eprintln!("Skipping malformed header: {:?}", header), - (None, _) => eprintln!("Malformed header, skipping"), - } - } - header_buffer.clear(); - } - - // Content part - // - // https://microsoft.github.io/language-server-protocol/specification#content-part - match content_len { - Some(content_len) => { - // Read the content into a pre-allocated buffer - // let mut buffer = vec![0; content_len + 2]; // why do we need to add 2? - let mut buffer = vec![0; content_len]; - reader.read_exact(&mut buffer)?; - - match charset { - // Map `utf8` to `utf-8` for backwards compatibility - // If no charset is given, we'll default to `utf-8` - Some("utf-8") | Some("utf8") | None => String::from_utf8(buffer) - .map_err(|error| io::Error::new(io::ErrorKind::InvalidInput, error)), - // Should be fine to continue after this, because we've already - // consumed the buffer - Some(charset) => Err(io::Error::new( - io::ErrorKind::InvalidData, - format!("Unknown charset: {}", charset), - )), - } - }, - // FIXME: Can we recover from this? 
We'd need to try to skip to the - // next thing that looks like a header :/ - None => Err(io::Error::new( - io::ErrorKind::InvalidData, - "Missing content length", - )), - } -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct JsonRpc { - pub jsonrpc: String, - pub id: usize, - pub result: T, -} - -impl JsonRpc { - #[allow(dead_code)] - pub fn new(id: usize, result: T) -> JsonRpc { - JsonRpc { - jsonrpc: "2.0".into(), - id, - result, - } - } -} - -/// A Command that was sent from the client to the server -#[derive(Debug, Serialize, Deserialize)] -#[serde(tag = "method")] -pub enum LspCommand { - #[serde(rename = "initialize")] - Initialize { - id: usize, - params: lsp_ty::InitializeParams, - }, - #[serde(rename = "initialized")] - Initialized, - #[serde(rename = "textDocument/didOpen")] - DidOpen { - params: lsp_ty::DidOpenTextDocumentParams, - }, - #[serde(rename = "textDocument/didChange")] - DidChange { - params: lsp_ty::DidChangeTextDocumentParams, - }, - #[serde(rename = "textDocument/hover")] - Hover { - id: usize, - params: lsp_ty::TextDocumentPositionParams, - }, - #[serde(rename = "textDocument/completion")] - Completion { - id: usize, - params: lsp_ty::CompletionParams, - }, - #[serde(rename = "$/cancelRequest")] - CancelRequest { params: lsp_ty::CancelParams }, - #[serde(rename = "completionItem/resolve")] - CompletionItemResolve { - id: usize, - params: lsp_ty::CompletionItem, - }, -} - -#[cfg(test)] -mod tests { - use super::*; - - mod recv_content { - use super::*; - - #[test] - fn valid_empty_no_charset() { - let message = "Content-Length: 0\r\n\r\n"; - let mut cursor = io::Cursor::new(message); - assert_eq!(recv_content(&mut cursor).unwrap(), ""); - } - - #[test] - fn valid_no_charset() { - let message = "Content-Length: 13\r\n\r\nhello, world!"; - let mut cursor = io::Cursor::new(message); - assert_eq!(recv_content(&mut cursor).unwrap(), "hello, world!"); - } - - #[test] - fn valid_explicit_charset_utf_8() { - let message = - "Content-Length: 
13\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\nhello, world!"; - let mut cursor = io::Cursor::new(message); - assert_eq!(recv_content(&mut cursor).unwrap(), "hello, world!"); - } - - #[test] - fn valid_explicit_charset_utf8() { - let message = - "Content-Length: 13\r\nContent-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\nhello, world!"; - let mut cursor = io::Cursor::new(message); - assert_eq!(recv_content(&mut cursor).unwrap(), "hello, world!"); - } - - #[test] - fn valid_unknown_header() { - let message = "Content-Length: 13\r\nX-Foo: silly\r\n\r\nhello, world!"; - let mut cursor = io::Cursor::new(message); - assert_eq!(recv_content(&mut cursor).unwrap(), "hello, world!"); - } - - // TODO: test more combinations - - // #[test] - // fn combinations() { - // let things = vec![ - // ("Content-Length: 0\r\n\r\n", Ok("")), - // ("Content-Length: 13\r\n\r\nhello, world!", Ok("hello, world!")), - // ("Content-Length: 13\r\nX-Foo: silly\r\n\r\nhello, world!", Ok("hello, world!")), - // ("Content-Length: 13\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\nhello, world!", Ok("hello, world!")), - // ("Content-Length: 13\r\nContent-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\nhello, world!", Ok("hello, world!")), - // ("Content-Type: application/vscode-jsonrpc; charset=utf-8\r\nContent-Length: 13\r\n\r\nhello, world!", Ok("hello, world!")), - // ("Content-Type: application/vscode-jsonrpc; charset=utf8\r\nContent-Length: 13\r\n\r\nhello, world!", Ok("hello, world!")), - // ("\r\nhello, world!", Err(_)), - // ("Content-Length: 13.0\r\n\r\nhello, world!", Err(_)), - // ("Content-Type: application/vscode-jsonrpc; charset=utf8\r\n\r\nhello, world!", Err(_)), - // ]; - // } - } - - // #[test] - // fn send_content_recv_content_roundtrip() { - // // TODO: finish - // // TODO: quickcheck? 
- // let content = "hello, world!"; - // let result = unimplemented!(); - // assert_eq!(content, result); - // } -} diff --git a/crates/pikelet-library/Cargo.toml b/crates/pikelet-library/Cargo.toml deleted file mode 100644 index b086a329c..000000000 --- a/crates/pikelet-library/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "pikelet-library" -version = "0.1.0" -license = "Apache-2.0" -readme = "README.md" -authors = ["Brendan Zabarauskas "] -description = "Builtin libraries for Pikelet" -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] diff --git a/crates/pikelet-library/README.md b/crates/pikelet-library/README.md deleted file mode 100644 index 06419863f..000000000 --- a/crates/pikelet-library/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Pikelet Core Libraries - -The beginning of our core Pikelet libraries! - -![](https://theawesomer.com/photos/2017/09/pancake_cat_1.jpg) diff --git a/crates/pikelet-library/src/lib.rs b/crates/pikelet-library/src/lib.rs deleted file mode 100644 index 5a812370c..000000000 --- a/crates/pikelet-library/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ -//! Builtin libraries - -pub const PRIM: &str = include_str!("prim.pi"); -pub const PRELUDE: &str = include_str!("prelude.pi"); diff --git a/crates/pikelet-library/src/prelude.pi b/crates/pikelet-library/src/prelude.pi deleted file mode 100644 index ad32b3161..000000000 --- a/crates/pikelet-library/src/prelude.pi +++ /dev/null @@ -1,355 +0,0 @@ --- NOTE: Many of these functions could benefit from implicit arguments for --- improved ergonomics. 
For example, the identity function could be written as: --- --- ``` --- id : {a : Type} -> a -> a; --- id x = x; --- ``` --- --- This would then allow one to write: --- --- ``` --- Pikelet> id "hello" --- "hello" : String --- ``` - -record { - id; const; compose; flip; - - Unit; unit; - - Prod; Sum; - - Eq; eq; - Eq-String; Eq-Char; Eq-Bool; Eq-Unit; - Eq-U8; Eq-U16; Eq-U32; Eq-U64; - Eq-S8; Eq-S16; Eq-S32; Eq-S64; - Eq-F32; Eq-F64; - - Semigroup; append; - Semigroup-String; Semigroup-Unit; - Semigroup-U8-Add; Semigroup-U16-Add; Semigroup-U32-Add; Semigroup-U64-Add; - Semigroup-S8-Add; Semigroup-S16-Add; Semigroup-S32-Add; Semigroup-S64-Add; - Semigroup-F32-Add; Semigroup-F64-Add; - Semigroup-U8-Mul; Semigroup-U16-Mul; Semigroup-U32-Mul; Semigroup-U64-Mul; - Semigroup-S8-Mul; Semigroup-S16-Mul; Semigroup-S32-Mul; Semigroup-S64-Mul; - Semigroup-F32-Mul; Semigroup-F64-Mul; - - Monoid; empty; - Monoid-String; Monoid-Unit; - Monoid-U8-Add; Monoid-U16-Add; Monoid-U32-Add; Monoid-U64-Add; - Monoid-S8-Add; Monoid-S16-Add; Monoid-S32-Add; Monoid-S64-Add; - Monoid-F32-Add; Monoid-F64-Add; - Monoid-U8-Mul; Monoid-U16-Mul; Monoid-U32-Mul; Monoid-U64-Mul; - Monoid-S8-Mul; Monoid-S16-Mul; Monoid-S32-Mul; Monoid-S64-Mul; - Monoid-F32-Mul; Monoid-F64-Mul; - - Group; - - Num; add; zero; mul; one; - Num-U8; Num-U16; Num-U32; Num-U64; - Num-S8; Num-S16; Num-S32; Num-S64; - Num-F32; Num-F64; - - Category; - -- id; - seq; - -- compose; - Category-Function; - - Functor; map; - Endofunctor-Function; -} where { - prim = import "prim"; - - ||| The polymorphic identity function - id : (a : Type) -> a -> a; - id a x = x; - - ||| Creates a function that always returns the same value - const : (a b : Type) -> a -> b -> a; - const a b x y = x; - - ||| Function composition - compose : (a b c : Type) -> (b -> c) -> (a -> b) -> (a -> c); - compose a b c f g x = f (g x); - - ||| Flip the order of the first two arguments to a function - flip : (a b c : Type) -> (a -> b -> c) -> (b -> a -> c); - 
flip a b c f x y = f y x; - - - ||| The unit type - ||| - ||| This is a synonym for the empty record, and can be constructed using the - ||| `unit` function. - Unit : Type; - Unit = Record {}; - - ||| Create an element of the `Unit` type - unit : Unit; - unit = record {}; - - - ||| Dependent products - Prod : (A : Type) (B : A -> Type) -> Type; - Prod A B = (a : A) -> B a; - - ||| Dependent sums (subtypes) - Sum : (A : Type) (B : A -> Type) -> Type; - Sum A B = Record { - val : A; - proof : B val; - }; - - - ||| Module for defining equality between two terms - Eq (a : Type) = Record { - ||| Compare two terms for equality - eq : a -> a -> Bool; - }; - - ||| Compare two terms for equality - eq : (a : Type) (EQ : Eq a) -> a -> a -> Bool; - eq _ EQ = EQ.eq; - - Eq-String : Eq String = record { eq = prim.string.eq }; - Eq-Char : Eq Char = record { eq = prim.char.eq }; - Eq-Bool : Eq Bool = record { eq = prim.bool.eq }; - - Eq-Unit : Eq Unit = record { - eq x y = true; - }; - - Eq-U8 : Eq U8 = record { eq = prim.u8.eq }; - Eq-U16 : Eq U16 = record { eq = prim.u16.eq }; - Eq-U32 : Eq U32 = record { eq = prim.u32.eq }; - Eq-U64 : Eq U64 = record { eq = prim.u64.eq }; - Eq-S8 : Eq S8 = record { eq = prim.i8.eq }; - Eq-S16 : Eq S16 = record { eq = prim.i16.eq }; - Eq-S32 : Eq S32 = record { eq = prim.i32.eq }; - Eq-S64 : Eq S64 = record { eq = prim.i64.eq }; - Eq-F32 : Eq F32 = record { eq = prim.f32.eq }; - Eq-F64 : Eq F64 = record { eq = prim.f64.eq }; - - - ||| An associative operation on `a`. - Semigroup (a : Type) = Record { - ||| The associative operation - append : a -> a -> a; - - -- TODO: Laws via property testing or proofs? 
- -- append-assoc : (x y z : a) -> append x (append y z) = append (append x y) z - }; - - append : (a : Type) (S : Semigroup a) -> a -> a -> a; - append _ S = S.append; - - - Semigroup-String : Semigroup String = record { append = prim.string.append }; - - Semigroup-Unit : Semigroup Unit = record { - append x y = unit; - }; - - Semigroup-U8-Add : Semigroup U8 = record { append = prim.u8.add }; - Semigroup-U16-Add : Semigroup U16 = record { append = prim.u16.add }; - Semigroup-U32-Add : Semigroup U32 = record { append = prim.u32.add }; - Semigroup-U64-Add : Semigroup U64 = record { append = prim.u64.add }; - Semigroup-S8-Add : Semigroup S8 = record { append = prim.i8.add }; - Semigroup-S16-Add : Semigroup S16 = record { append = prim.i16.add }; - Semigroup-S32-Add : Semigroup S32 = record { append = prim.i32.add }; - Semigroup-S64-Add : Semigroup S64 = record { append = prim.i64.add }; - Semigroup-F32-Add : Semigroup F32 = record { append = prim.f32.add }; - Semigroup-F64-Add : Semigroup F64 = record { append = prim.f64.add }; - - Semigroup-U8-Mul : Semigroup U8 = record { append = prim.u8.mul }; - Semigroup-U16-Mul : Semigroup U16 = record { append = prim.u16.mul }; - Semigroup-U32-Mul : Semigroup U32 = record { append = prim.u32.mul }; - Semigroup-U64-Mul : Semigroup U64 = record { append = prim.u64.mul }; - Semigroup-S8-Mul : Semigroup S8 = record { append = prim.i8.mul }; - Semigroup-S16-Mul : Semigroup S16 = record { append = prim.i16.mul }; - Semigroup-S32-Mul : Semigroup S32 = record { append = prim.i32.mul }; - Semigroup-S64-Mul : Semigroup S64 = record { append = prim.i64.mul }; - Semigroup-F32-Mul : Semigroup F32 = record { append = prim.f32.mul }; - Semigroup-F64-Mul : Semigroup F64 = record { append = prim.f64.mul }; - - - ||| A semigroup that also has an identity element. 
- Monoid (a : Type) = Record { - ||| The semigroup - semigroup : Semigroup a; - ||| The identity element of `semigroup.append` - empty : a; - - -- TODO: Laws via property testing or proofs? - -- append-empty : (x : a) -> semigroup.append x empty = x - -- empty-append : (x : a) -> semigroup.append empty x = x - }; - - empty : (a : Type) (M : Monoid a) -> a; - empty _ M = M.empty; - - - Monoid-String : Monoid String = record { semigroup = Semigroup-String; empty = "" }; - - Monoid-Unit : Monoid Unit = record { - semigroup = Semigroup-Unit; - empty = unit; - }; - - Monoid-U8-Add : Monoid U8 = record { semigroup = Semigroup-U8-Add; empty = 0 }; - Monoid-U16-Add : Monoid U16 = record { semigroup = Semigroup-U16-Add; empty = 0 }; - Monoid-U32-Add : Monoid U32 = record { semigroup = Semigroup-U32-Add; empty = 0 }; - Monoid-U64-Add : Monoid U64 = record { semigroup = Semigroup-U64-Add; empty = 0 }; - Monoid-S8-Add : Monoid S8 = record { semigroup = Semigroup-S8-Add; empty = 0 }; - Monoid-S16-Add : Monoid S16 = record { semigroup = Semigroup-S16-Add; empty = 0 }; - Monoid-S32-Add : Monoid S32 = record { semigroup = Semigroup-S32-Add; empty = 0 }; - Monoid-S64-Add : Monoid S64 = record { semigroup = Semigroup-S64-Add; empty = 0 }; - Monoid-F32-Add : Monoid F32 = record { semigroup = Semigroup-F32-Add; empty = 0 }; - Monoid-F64-Add : Monoid F64 = record { semigroup = Semigroup-F64-Add; empty = 0 }; - - Monoid-U8-Mul : Monoid U8 = record { semigroup = Semigroup-U8-Mul; empty = 1 }; - Monoid-U16-Mul : Monoid U16 = record { semigroup = Semigroup-U16-Mul; empty = 1 }; - Monoid-U32-Mul : Monoid U32 = record { semigroup = Semigroup-U32-Mul; empty = 1 }; - Monoid-U64-Mul : Monoid U64 = record { semigroup = Semigroup-U64-Mul; empty = 1 }; - Monoid-S8-Mul : Monoid S8 = record { semigroup = Semigroup-S8-Mul; empty = 1 }; - Monoid-S16-Mul : Monoid S16 = record { semigroup = Semigroup-S16-Mul; empty = 1 }; - Monoid-S32-Mul : Monoid S32 = record { semigroup = Semigroup-S32-Mul; empty = 1 
}; - Monoid-S64-Mul : Monoid S64 = record { semigroup = Semigroup-S64-Mul; empty = 1 }; - Monoid-F32-Mul : Monoid F32 = record { semigroup = Semigroup-F32-Mul; empty = 1 }; - Monoid-F64-Mul : Monoid F64 = record { semigroup = Semigroup-F64-Mul; empty = 1 }; - - - ||| A monoid that also has an inverse element. - Group (a : Type) = Record { - ||| The monoid - monoid : Monoid a; - ||| The inverse of `monoid.semigroup.append` - inverse : a -> a; - - -- TODO: Laws via property testing or proofs? - -- append-left-inverse : (a : Type) -> monoid.semigroup.append (inverse a) a = monoid.empty - }; - - - ||| General trait for numeric types - Num (a : Type) = Record { - ||| The additive monoid - add : Monoid a; - ||| The multiplicative monoid - mul : Monoid a; - - -- TODO: Subtraction? - -- TODO: Division? - -- TODO: Lawfulness? - }; - - add : (a : Type) (N : Num a) -> a -> a -> a; - add a N = append a N.add.semigroup; - - zero : (a : Type) (N : Num a) -> a; - zero a N = empty a N.add; - - mul : (a : Type) (N : Num a) -> a -> a -> a; - mul a N = append a N.mul.semigroup; - - one : (a : Type) (N : Num a) -> a; - one a N = empty a N.mul; - - - Num-U8 : Num U8 = record { add = Monoid-U8-Add; mul = Monoid-U8-Mul }; - Num-U16 : Num U16 = record { add = Monoid-U16-Add; mul = Monoid-U16-Mul }; - Num-U32 : Num U32 = record { add = Monoid-U32-Add; mul = Monoid-U32-Mul }; - Num-U64 : Num U64 = record { add = Monoid-U64-Add; mul = Monoid-U64-Mul }; - Num-S8 : Num S8 = record { add = Monoid-S8-Add; mul = Monoid-S8-Mul }; - Num-S16 : Num S16 = record { add = Monoid-S16-Add; mul = Monoid-S16-Mul }; - Num-S32 : Num S32 = record { add = Monoid-S32-Add; mul = Monoid-S32-Mul }; - Num-S64 : Num S64 = record { add = Monoid-S64-Add; mul = Monoid-S64-Mul }; - Num-F32 : Num F32 = record { add = Monoid-F32-Add; mul = Monoid-F32-Mul }; - Num-F64 : Num F64 = record { add = Monoid-F64-Add; mul = Monoid-F64-Mul }; - - - ||| A category is a very general structure that provides a common way of composing - 
||| units of functionality - ||| - ||| The most common category programmers would be familiar with would be `Type`s - ||| are the objects, and the functions between those types are the arrows. Many - ||| other categories exist though, for example: - ||| - ||| - nodes in a directed graph, and the edges between those nodes. - ||| - etc. - Category = Record { - ||| An object in the category - Object : Type; - ||| Arrows between the objects in the category - Arrow : Object -> Object -> Type; - ||| The identity arrow - id : (a : Object) -> Arrow a a; - ||| The sequencing of two arrows - seq : (a b c : Object) -> Arrow a b -> Arrow b c -> Arrow a c; - - -- TODO: Laws via property testing or proofs? - -- TODO: E-Category - ie. equivalence relation on morphisms? - -- https://gist.github.com/brendanzab/9285eb8dfef5b6d6ccd87d90d6579590#gistcomment-2401643 - -- id-left : (a b : Object) (f : Arrow a b) -> seq id f = f; - -- id-right : (a b : Object) (f : Arrow a b) -> seq f id = f; - -- seq-assoc : (a b c d : Object) (f : Arrow a b) (g : Arrow b c) (h : Arrow c d) -> seq (seq f g) h = seq f (seq g h); - -- seq-cong : (a b c : Object) (f0 f1 : Arrow a b) (g0 g1 : Arrow b c) (p : rel f0 f1) (q : g0 = g1) -> seq f0 g0 = seq f1 g1; - }; - - -- ||| The identity arrow - -- id : (C : Category) (a : C.Object) -> C.Arrow a a; - -- id C = C.id; - - ||| The sequencing of two arrows - seq : (C : Category) (a b c : C.Object) -> C.Arrow a b -> C.Arrow b c -> C.Arrow a c; - seq C = C.seq; - - -- ||| The composition of two arrows - -- compose : (C : Category) (a b c : C.Object) -> C.Arrow b c -> C.Arrow a b -> C.Arrow a c; - -- compose C a b c f g = seq C a b c g f; - - - Category-Function : Category^1 = record { - Object = Type; - Arrow (a : Type) (b : Type) = a -> b; - id (a : Type) (x : a) = x; - seq (a b c : Type) (f : a -> b) (g : b -> c) (x : a) = g (f x); - }; - - - ||| Provides a mapping from objects-to-objects and arrows-to-arrows for two - ||| categories, `Source` and `Target` - ||| 
- ||| Mappings can be anything from applying a function to each element of a - ||| collection, to compiling a source language to a target language. - ||| - ||| Haskell programmers might find this definition a little foreign - this - ||| is because we use general categories in the definition, rather than - ||| specializing it into the category of Pikelet functions - Functor = Record { - ||| The source category - Source : Category; - ||| The target category - Target : Category; - ||| Maps an object in `Source` to an object in `Target` - Map : Source.Object -> Target.Object; - ||| Maps an arrow in `Source` into an arrow in `Target` - map : (a b : Source.Object) -> Source.Arrow a b -> Target.Arrow (Map a) (Map b); - - -- TODO: Laws via property testing or proofs? - }; - - ||| Maps an arrow in `F.Source` into an arrow in `F.Target` - map : (F : Functor) (a b : F.Source.Object) -> F.Source.Arrow a b -> F.Target.Arrow (F.Map a) (F.Map b); - map F = F.map; - - - Endofunctor-Function : Functor^1 = record { - Source = Category-Function; - Target = Category-Function; - Map x = x; - map (a b : Type) (f : a -> b) (x : a) = f x; - }; -} diff --git a/crates/pikelet-library/src/prim.pi b/crates/pikelet-library/src/prim.pi deleted file mode 100644 index 554b4e8b4..000000000 --- a/crates/pikelet-library/src/prim.pi +++ /dev/null @@ -1,170 +0,0 @@ -record { - bool = record { - eq = import "prim/bool/eq"; - ge = import "prim/bool/ge"; - gt = import "prim/bool/gt"; - le = import "prim/bool/le"; - lt = import "prim/bool/lt"; - ne = import "prim/bool/ne"; - }; - - f32 = record { - add = import "prim/f32/add"; - div = import "prim/f32/div"; - eq = import "prim/f32/eq"; - ge = import "prim/f32/ge"; - gt = import "prim/f32/gt"; - le = import "prim/f32/le"; - lt = import "prim/f32/lt"; - mul = import "prim/f32/mul"; - ne = import "prim/f32/ne"; - sub = import "prim/f32/sub"; - to-string = import "prim/f32/to-string"; - }; - - f64 = record { - add = import "prim/f64/add"; - div = import 
"prim/f64/div"; - eq = import "prim/f64/eq"; - ge = import "prim/f64/ge"; - gt = import "prim/f64/gt"; - le = import "prim/f64/le"; - lt = import "prim/f64/lt"; - mul = import "prim/f64/mul"; - ne = import "prim/f64/ne"; - sub = import "prim/f64/sub"; - to-string = import "prim/f64/to-string"; - }; - - i8 = record { - add = import "prim/i8/add"; - div = import "prim/i8/div"; - eq = import "prim/i8/eq"; - ge = import "prim/i8/ge"; - gt = import "prim/i8/gt"; - le = import "prim/i8/le"; - lt = import "prim/i8/lt"; - mul = import "prim/i8/mul"; - ne = import "prim/i8/ne"; - sub = import "prim/i8/sub"; - to-string = import "prim/i8/to-string"; - }; - - i16 = record { - add = import "prim/i16/add"; - div = import "prim/i16/div"; - eq = import "prim/i16/eq"; - ge = import "prim/i16/ge"; - gt = import "prim/i16/gt"; - le = import "prim/i16/le"; - lt = import "prim/i16/lt"; - mul = import "prim/i16/mul"; - ne = import "prim/i16/ne"; - sub = import "prim/i16/sub"; - to-string = import "prim/i16/to-string"; - }; - - i32 = record { - add = import "prim/i32/add"; - div = import "prim/i32/div"; - eq = import "prim/i32/eq"; - ge = import "prim/i32/ge"; - gt = import "prim/i32/gt"; - le = import "prim/i32/le"; - lt = import "prim/i32/lt"; - mul = import "prim/i32/mul"; - ne = import "prim/i32/ne"; - sub = import "prim/i32/sub"; - to-string = import "prim/i32/to-string"; - }; - - i64 = record { - add = import "prim/i64/add"; - div = import "prim/i64/div"; - eq = import "prim/i64/eq"; - ge = import "prim/i64/ge"; - gt = import "prim/i64/gt"; - le = import "prim/i64/le"; - lt = import "prim/i64/lt"; - mul = import "prim/i64/mul"; - ne = import "prim/i64/ne"; - sub = import "prim/i64/sub"; - to-string = import "prim/i64/to-string"; - }; - - u8 = record { - add = import "prim/u8/add"; - div = import "prim/u8/div"; - eq = import "prim/u8/eq"; - ge = import "prim/u8/ge"; - gt = import "prim/u8/gt"; - le = import "prim/u8/le"; - lt = import "prim/u8/lt"; - mul = import "prim/u8/mul"; - 
ne = import "prim/u8/ne"; - sub = import "prim/u8/sub"; - to-string = import "prim/u8/to-string"; - }; - - u16 = record { - add = import "prim/u16/add"; - div = import "prim/u16/div"; - eq = import "prim/u16/eq"; - ge = import "prim/u16/ge"; - gt = import "prim/u16/gt"; - le = import "prim/u16/le"; - lt = import "prim/u16/lt"; - mul = import "prim/u16/mul"; - ne = import "prim/u16/ne"; - sub = import "prim/u16/sub"; - to-string = import "prim/u16/to-string"; - }; - - u32 = record { - add = import "prim/u32/add"; - div = import "prim/u32/div"; - eq = import "prim/u32/eq"; - ge = import "prim/u32/ge"; - gt = import "prim/u32/gt"; - le = import "prim/u32/le"; - lt = import "prim/u32/lt"; - mul = import "prim/u32/mul"; - ne = import "prim/u32/ne"; - sub = import "prim/u32/sub"; - to-string = import "prim/u32/to-string"; - }; - - u64 = record { - add = import "prim/u64/add"; - div = import "prim/u64/div"; - eq = import "prim/u64/eq"; - ge = import "prim/u64/ge"; - gt = import "prim/u64/gt"; - le = import "prim/u64/le"; - lt = import "prim/u64/lt"; - mul = import "prim/u64/mul"; - ne = import "prim/u64/ne"; - sub = import "prim/u64/sub"; - to-string = import "prim/u64/to-string"; - }; - - char = record { - eq = import "prim/char/eq"; - ge = import "prim/char/ge"; - gt = import "prim/char/gt"; - le = import "prim/char/le"; - lt = import "prim/char/lt"; - ne = import "prim/char/ne"; - to-string = import "prim/char/to-string"; - }; - - string = record { - eq = import "prim/string/eq"; - ge = import "prim/string/ge"; - gt = import "prim/string/gt"; - le = import "prim/string/le"; - lt = import "prim/string/lt"; - ne = import "prim/string/ne"; - append = import "prim/string/append"; - }; -} diff --git a/crates/pikelet-repl/Cargo.toml b/crates/pikelet-repl/Cargo.toml deleted file mode 100644 index 4f345fe13..000000000 --- a/crates/pikelet-repl/Cargo.toml +++ /dev/null @@ -1,21 +0,0 @@ -[package] -name = "pikelet-repl" -version = "0.1.0" -license = "Apache-2.0" -readme = 
"README.md" -authors = ["Brendan Zabarauskas "] -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] -codespan = "0.2.0" -combine = "3.6.3" -failure = "0.1.3" -linefeed = "0.5.3" -pikelet-concrete = { version = "0.1.0", path = "../pikelet-concrete" } -pikelet-core = { version = "0.1.0", path = "../pikelet-core" } -pikelet-driver = { version = "0.1.0", path = "../pikelet-driver" } -structopt = "0.2.12" -term_size = "0.3.1" diff --git a/crates/pikelet-repl/README.md b/crates/pikelet-repl/README.md deleted file mode 100644 index 6cce9c49c..000000000 --- a/crates/pikelet-repl/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Pikelet REPL - -Entrypoint for the interactive REPL diff --git a/crates/pikelet-repl/src/lib.rs b/crates/pikelet-repl/src/lib.rs deleted file mode 100644 index 15fb8f543..000000000 --- a/crates/pikelet-repl/src/lib.rs +++ /dev/null @@ -1,346 +0,0 @@ -//! The REPL (Read-Eval-Print-Loop) - -use failure::Error; -use linefeed::{Interface, ReadResult, Signal}; -use std::path::PathBuf; -use std::str::FromStr; - -use pikelet_driver::termcolor::StandardStream; -use pikelet_driver::{ColorArg, Diagnostic, Driver, FileName}; - -/// Options for the `repl` subcommand -#[derive(Debug, structopt::StructOpt)] -pub struct Opts { - /// Configure coloring of output - #[structopt( - long = "color", - parse(try_from_str), - default_value = "auto", - raw(possible_values = "ColorArg::VARIANTS") - )] - pub color: ColorArg, - - /// The prompt to display before expressions - #[structopt(long = "prompt", default_value = "Pikelet> ")] - pub prompt: String, - - /// Disable the welcome banner on startup - #[structopt(long = "no-banner")] - pub no_banner: bool, - - /// Disable saving of command history on exit - #[structopt(long = "no-history")] - pub no_history: bool, - - /// The file to save the command history to - #[structopt( - long = "history-file", - parse(from_os_str), 
- default_value = "repl-history" - )] - pub history_file: PathBuf, - - /// Files to preload into the REPL - #[structopt(name = "FILE", parse(from_os_str))] - pub files: Vec, -} - -fn print_welcome_banner() { - const WELCOME_BANNER: &[&str] = &[ - r" ____ _ __ __ __ ", - r" / __ \(_) /_____ / /__ / /_ ", - r" / /_/ / / //_/ _ \/ / _ \/ __/ ", - r" / ____/ / ,< / __/ / __/ /_ ", - r"/_/ /_/_/|_|\___/_/\___/\__/ ", - r"", - ]; - - for (i, line) in WELCOME_BANNER.iter().enumerate() { - // warning on `env!` is a known issue - #[cfg_attr(feature = "cargo-clippy", allow(print_literal))] - match i { - 2 => println!("{}Version {}", line, env!("CARGO_PKG_VERSION")), - 3 => println!("{}{}", line, env!("CARGO_PKG_HOMEPAGE")), - 4 => println!("{}:? for help", line), - _ => println!("{}", line), - } - } -} - -fn print_help_text() { - const HELP_TEXT: &[&str] = &[ - "", - "Command Arguments Purpose", - "", - " normalize a term", - ":? :h :help display this help text", - ":core print the core representation of a term", - ":let = add a named term to the REPL context", - ":q :quit quit the repl", - ":t :type infer the type of a term", - "", - ]; - - for line in HELP_TEXT { - println!("{}", line); - } -} - -/// Run the `repl` subcommand with the given options -pub fn run(opts: Opts) -> Result<(), Error> { - use std::fs::File; - use std::io::Read; - - let interface = Interface::new("repl")?; - let writer = StandardStream::stderr(opts.color.into()); - let mut driver = Driver::with_prelude(); - - interface.set_prompt(&opts.prompt)?; - interface.set_report_signal(Signal::Interrupt, true); - interface.set_report_signal(Signal::Quit, true); - - if !opts.no_history && interface.load_history(&opts.history_file).is_err() { - // No previous REPL history! 
- } - - if !opts.no_banner { - print_welcome_banner(); - } - - // preload specified files - for path in &opts.files { - // FIXME: allow for customization of internal path - let internal_path = path.to_str().unwrap().to_owned(); - let external_path = FileName::Real(path.clone()); - - let mut file = File::open(path)?; - let mut src = String::new(); - file.read_to_string(&mut src)?; - - if let Err(diagnostics) = driver.register_file(internal_path, external_path, src) { - driver.emit(writer.lock(), &diagnostics).unwrap(); - return Err(failure::format_err!("encountered an error!")); - } - } - - loop { - match interface.read_line()? { - ReadResult::Input(line) => { - if !opts.no_history && !line.trim().is_empty() { - interface.add_history_unique(line.clone()); - } - - let repl_command = match line.parse() { - Ok(repl_command) => repl_command, - Err(diagnostics) => { - driver.emit(writer.lock(), &diagnostics).unwrap(); - continue; - }, - }; - - match eval_print(&mut driver, repl_command) { - Ok(ControlFlow::Continue) => {}, - Ok(ControlFlow::Break) => break, - Err(diagnostics) => driver.emit(writer.lock(), &diagnostics).unwrap(), - } - }, - ReadResult::Signal(Signal::Quit) | ReadResult::Eof => break, - ReadResult::Signal(Signal::Interrupt) => println!("Interrupt"), - ReadResult::Signal(_) => {}, - } - } - - if !opts.no_history { - interface.save_history(&opts.history_file)?; - } - - println!("Bye bye"); - - Ok(()) -} - -#[derive(Clone)] -enum ControlFlow { - Break, - Continue, -} - -/// Commands entered in the REPL -#[derive(Debug, Clone)] -pub enum ReplCommand { - /// Normalize a term - /// - /// ```text - /// - /// ``` - Normalize(String), - /// Show the core representation of a term - /// - /// ```text - /// :core - /// ``` - Core(String), - /// Print some help about using the REPL - /// - /// ```text - /// :? 
- /// :h - /// :help - /// ``` - Help, - /// Add a declaration to the REPL environment - /// - /// ```text - /// :let = - /// ``` - Let(String, String), - /// No command - NoOp, - /// Quit the REPL - /// - /// ```text - /// :q - /// :quit - /// ``` - Quit, - /// Print the type of the term - /// - /// ```text - /// :t - /// :type - /// ``` - TypeOf(String), -} - -impl FromStr for ReplCommand { - type Err = Vec; - - fn from_str(src: &str) -> Result> { - use combine::char::*; - use combine::*; - - let anys1 = || many1(any()); - let spaces1 = || skip_many1(space()); - let ident = || { - value(()) - .with(letter()) - .and(many::(alpha_num())) - .map(|(hd, tl)| format!("{}{}", hd, tl)) - }; - - let cmd = choice(( - token(':').with(choice(( - attempt( - choice(( - attempt(string("help")), - attempt(string("?")), - attempt(string("h")), - )) - .map(|_| ReplCommand::Help), - ), - attempt( - choice((attempt(string("quit")), attempt(string("q")))) - .map(|_| ReplCommand::Quit), - ), - attempt( - string("core") - .with(spaces1()) - .with(anys1()) - .map(ReplCommand::Core), - ), - attempt( - choice((attempt(string("type")), string("t"))) - .with(spaces1()) - .with(anys1()) - .map(ReplCommand::TypeOf), - ), - attempt( - string("let") - .with(spaces1()) - .with(ident()) - .skip(spaces()) - .skip(string("=")) - .skip(spaces()) - .and(anys1()) - .map(|(ident, src)| ReplCommand::Let(ident, src)), - ), - ))), - anys1().map(ReplCommand::Normalize), - )); - - let mut parser = spaces().with(cmd).skip(spaces()); - - match parser.parse(src) { - Ok((cmd, _)) => Ok(cmd), - Err(_) => { - // TODO: better errors here! 
- Err(vec![Diagnostic::new_error("malformed REPL command")]) - }, - } - } -} - -fn eval_print( - driver: &mut Driver, - repl_command: ReplCommand, -) -> Result> { - use codespan::ByteSpan; - - use pikelet_concrete::syntax::concrete::Term; - - fn term_width() -> usize { - term_size::dimensions() - .map(|(width, _)| width) - .unwrap_or(1_000_000) - } - - let file_name = FileName::virtual_("repl"); - - match repl_command { - ReplCommand::Help => print_help_text(), - - ReplCommand::Normalize(term_src) => { - let (term, inferred) = driver.infer_file(file_name, term_src)?; - let evaluated = driver.normalize_term(&term)?; - - let ann_term = Term::Ann( - Box::new(driver.resugar(&evaluated)), - Box::new(driver.resugar(&inferred)), - ); - - println!("{}", ann_term.to_doc().group().pretty(term_width())); - }, - ReplCommand::Core(term_src) => { - use pikelet_core::syntax::core::{RcTerm, Term}; - - let (term, inferred) = driver.infer_file(file_name, term_src)?; - let ann_term = Term::Ann(term, RcTerm::from(Term::from(&*inferred))); - - println!("{}", ann_term.to_doc().group().pretty(term_width())); - }, - ReplCommand::Let(name, term_src) => { - let (term, inferred) = driver.infer_file(file_name, term_src)?; - driver.add_binding(&name, term.clone(), inferred.clone()); - - let ann_term = Term::Ann( - Box::new(Term::Name(ByteSpan::default(), name, None)), - Box::new(driver.resugar(&inferred)), - ); - - println!("{}", ann_term.to_doc().group().pretty(term_width())); - - return Ok(ControlFlow::Continue); - }, - ReplCommand::TypeOf(term_src) => { - let (_, inferred) = driver.infer_file(file_name, term_src)?; - let inferred = driver.resugar(&inferred); - - println!("{}", inferred.to_doc().group().pretty(term_width())); - }, - - ReplCommand::NoOp => {}, - ReplCommand::Quit => return Ok(ControlFlow::Break), - } - - Ok(ControlFlow::Continue) -} diff --git a/crates/pikelet/Cargo.toml b/crates/pikelet/Cargo.toml deleted file mode 100644 index a82113831..000000000 --- 
a/crates/pikelet/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "pikelet" -version = "0.1.0" -readme = "../../README.md" -license = "Apache-2.0" -authors = ["Brendan Zabarauskas "] -description = "An implementation of a small dependently typed lambda calculus in Rust." -homepage = "https://github.com/pikelet-lang/pikelet" -repository = "https://github.com/pikelet-lang/pikelet" -edition = "2018" -publish = false - -[dependencies] -failure = "0.1.3" -pikelet-language-server = { version = "0.1.0", path = "../pikelet-language-server" } -pikelet-repl = { version = "0.1.0", path = "../pikelet-repl" } -structopt = "0.2.12" diff --git a/crates/pikelet/README.md b/crates/pikelet/README.md deleted file mode 100644 index 212dc4676..000000000 --- a/crates/pikelet/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Pikelet - -The top-level command line interface for working with Pikelet diff --git a/crates/pikelet/src/lib.rs b/crates/pikelet/src/lib.rs deleted file mode 100644 index 298fe5709..000000000 --- a/crates/pikelet/src/lib.rs +++ /dev/null @@ -1,31 +0,0 @@ -//! 
The command line interface for Pikelet - -use failure::Error; - -// TODO: test using https://github.com/killercup/assert_cli - -#[derive(Debug, structopt::StructOpt)] -#[structopt(name = "pikelet")] -pub struct Opts { - /// Subcommand to run - #[structopt(subcommand)] - pub command: Command, -} - -#[derive(Debug, structopt::StructOpt)] -pub enum Command { - /// A REPL for running expressions - #[structopt(name = "repl")] - Repl(pikelet_repl::Opts), - /// Start an instance of the the language server - #[structopt(name = "language-server")] - LanguageServer(pikelet_language_server::Opts), -} - -/// Run `pikelet` with the given options -pub fn run(opts: Opts) -> Result<(), Error> { - match opts.command { - Command::LanguageServer(opts) => pikelet_language_server::run(opts), - Command::Repl(opts) => pikelet_repl::run(opts), - } -} diff --git a/crates/pikelet/src/main.rs b/crates/pikelet/src/main.rs deleted file mode 100644 index bc8031471..000000000 --- a/crates/pikelet/src/main.rs +++ /dev/null @@ -1,7 +0,0 @@ -use failure::Error; -use pikelet::Opts; -use structopt::StructOpt; - -fn main() -> Result<(), Error> { - pikelet::run(Opts::from_args()) -} diff --git a/editors/README.md b/editors/README.md deleted file mode 100644 index 9feed8c7d..000000000 --- a/editors/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Editor integrations for Pikelet - -So far we support the following editors: - -- VS Code - -## Language Server - -The language server that these editor integrations currently use can be found -in the [`crates/pikelet-language-server`] directory. 
- -[`crates/pikelet-language-server`]: /crates/pikelet-language-server diff --git a/editors/code/.editorconfig b/editors/code/.editorconfig deleted file mode 100644 index 9e333d260..000000000 --- a/editors/code/.editorconfig +++ /dev/null @@ -1,12 +0,0 @@ -# https://editorconfig.org -root = true - -[*] -charset = utf-8 -end_of_line = lf -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -[*.{ts,json}] -indent_size = 4 diff --git a/editors/code/.gitattributes b/editors/code/.gitattributes deleted file mode 100644 index 70e63fff5..000000000 --- a/editors/code/.gitattributes +++ /dev/null @@ -1,2 +0,0 @@ -# Set default behavior to automatically normalize line endings. -* text=auto diff --git a/editors/code/.gitignore b/editors/code/.gitignore deleted file mode 100644 index 76b5a59d2..000000000 --- a/editors/code/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -out -node_modules -*.vsix diff --git a/editors/code/.vscode/extensions.json b/editors/code/.vscode/extensions.json deleted file mode 100644 index 642092810..000000000 --- a/editors/code/.vscode/extensions.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "recommendations": [ - "editorconfig.editorconfig", - "eg2.tslint", - "esbenp.prettier-vscode", - "rust-lang.rust" - ] -} diff --git a/editors/code/.vscode/launch.json b/editors/code/.vscode/launch.json deleted file mode 100644 index 464404730..000000000 --- a/editors/code/.vscode/launch.json +++ /dev/null @@ -1,16 +0,0 @@ -// A launch configuration that launches the extension inside a new window -// Use IntelliSense to learn about possible attributes. -// Hover to view descriptions of existing attributes. 
-// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 -{ - "version": "0.2.0", - "configurations": [ - { - "name": "Extension", - "type": "extensionHost", - "request": "launch", - "runtimeExecutable": "${execPath}", - "args": ["--extensionDevelopmentPath=${workspaceFolder}"] - } - ] -} diff --git a/editors/code/.vscode/settings.json b/editors/code/.vscode/settings.json deleted file mode 100644 index f89ed5f1d..000000000 --- a/editors/code/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "editor.formatOnSave": true -} diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore deleted file mode 100644 index f369b5e55..000000000 --- a/editors/code/.vscodeignore +++ /dev/null @@ -1,4 +0,0 @@ -.vscode/** -.vscode-test/** -.gitignore -vsc-extension-quickstart.md diff --git a/editors/code/CHANGELOG.md b/editors/code/CHANGELOG.md deleted file mode 100644 index 2b6b0c2e4..000000000 --- a/editors/code/CHANGELOG.md +++ /dev/null @@ -1,7 +0,0 @@ -# Change Log -All notable changes to the "pikelet" extension will be documented in this file. - -Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file. - -## [Unreleased] -- Initial release diff --git a/editors/code/README.md b/editors/code/README.md deleted file mode 100644 index 5505620f8..000000000 --- a/editors/code/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Pikelet support for Visual Studio Code - -Adds language support for Pikelet to Visual Studio Code. Supports: - -- syntax highlighting diff --git a/editors/code/language-configuration.json b/editors/code/language-configuration.json deleted file mode 100644 index b7821ddc0..000000000 --- a/editors/code/language-configuration.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "comments": { - // symbol used for single line comment. Remove this entry if your language does not support line comments - "lineComment": "--" - // symbols used for start and end a block comment. 
Remove this entry if your language does not support block comments - // "blockComment": [ "{-", "-}" ] - }, - // symbols used as brackets - "brackets": [["{", "}"], ["[", "]"], ["(", ")"]], - // symbols that are auto closed when typing - "autoClosingPairs": [ - ["{", "}"], - ["[", "]"], - ["(", ")"], - ["\"", "\""], - ["'", "'"] - ], - // symbols that that can be used to surround a selection - "surroundingPairs": [ - ["{", "}"], - ["[", "]"], - ["(", ")"], - ["\"", "\""], - ["'", "'"] - ] -} diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json deleted file mode 100644 index 40728a5e0..000000000 --- a/editors/code/package-lock.json +++ /dev/null @@ -1,3217 +0,0 @@ -{ - "name": "pikelet", - "version": "0.0.1", - "lockfileVersion": 1, - "requires": true, - "dependencies": { - "@types/node": { - "version": "10.12.10", - "resolved": "https://registry.npmjs.org/@types/node/-/node-10.12.10.tgz", - "integrity": "sha512-8xZEYckCbUVgK8Eg7lf5Iy4COKJ5uXlnIOnePN0WUwSQggy9tolM+tDJf7wMOnT/JT/W9xDYIaYggt3mRV2O5w==", - "dev": true - }, - "ajv": { - "version": "6.5.5", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.5.5.tgz", - "integrity": "sha512-7q7gtRQDJSyuEHjuVgHoUa2VuemFiCMrfQc9Tc08XTAc4Zj/5U1buQJ0HU6i7fKjXU09SVgSmxa4sLvuvS8Iyg==", - "dev": true, - "requires": { - "fast-deep-equal": "^2.0.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - } - }, - "ansi-cyan": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ansi-cyan/-/ansi-cyan-0.1.1.tgz", - "integrity": "sha1-U4rlKK+JgvKK4w2G8vF0VtJgmHM=", - "dev": true, - "requires": { - "ansi-wrap": "0.1.0" - } - }, - "ansi-red": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz", - "integrity": "sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw=", - "dev": true, - "requires": { - "ansi-wrap": "0.1.0" - } - }, - "ansi-regex": { - "version": "2.1.1", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "ansi-wrap": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz", - "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768=", - "dev": true - }, - "append-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/append-buffer/-/append-buffer-1.0.2.tgz", - "integrity": "sha1-2CIM9GYIFSXv6lBhTz3mUU36WPE=", - "dev": true, - "requires": { - "buffer-equal": "^1.0.0" - } - }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } - }, - "arr-diff": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-1.1.0.tgz", - "integrity": "sha1-aHwydYFjWI/vfeezb6vklesaOZo=", - "dev": true, - "requires": { - "arr-flatten": "^1.0.1", - "array-slice": "^0.2.3" - } - }, - "arr-flatten": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", - "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", - "dev": true - }, - "arr-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-2.1.0.tgz", - "integrity": "sha1-IPnqtexw9cfSFbEHexw5Fh0pLH0=", - "dev": true - }, - "array-differ": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-differ/-/array-differ-1.0.0.tgz", - "integrity": "sha1-7/UuN1gknTO+QCuLuOVkuytdQDE=", - "dev": true - }, - "array-slice": { - "version": "0.2.3", - "resolved": 
"https://registry.npmjs.org/array-slice/-/array-slice-0.2.3.tgz", - "integrity": "sha1-3Tz7gO15c6dRF82sabC5nshhhvU=", - "dev": true - }, - "array-union": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", - "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=", - "dev": true, - "requires": { - "array-uniq": "^1.0.1" - } - }, - "array-uniq": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz", - "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=", - "dev": true - }, - "array-unique": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.2.1.tgz", - "integrity": "sha1-odl8yvy8JiXMcPrc6zalDFiwGlM=", - "dev": true - }, - "arrify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", - "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", - "dev": true - }, - "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", - "dev": true, - "requires": { - "safer-buffer": "~2.1.0" - } - }, - "assert-plus": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", - "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", - "dev": true - }, - "asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", - "dev": true - }, - "aws-sign2": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", - "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", - "dev": true - }, - "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", - 
"dev": true - }, - "babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - }, - "dependencies": { - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } - } - }, - "balanced-match": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", - "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=", - "dev": true - }, - "bcrypt-pbkdf": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", - "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", - "dev": true, - "requires": { - "tweetnacl": "^0.14.3" - } - }, - "block-stream": { - "version": "0.0.9", - "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", - "integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", - "dev": true, - "requires": { - "inherits": "~2.0.0" - } - }, - "boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=", - "dev": true - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": 
true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "braces": { - "version": "1.8.5", - "resolved": "https://registry.npmjs.org/braces/-/braces-1.8.5.tgz", - "integrity": "sha1-uneWLhLf+WnWt2cR6RS3N4V79qc=", - "dev": true, - "requires": { - "expand-range": "^1.8.1", - "preserve": "^0.2.0", - "repeat-element": "^1.1.2" - } - }, - "browser-stdout": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.0.tgz", - "integrity": "sha1-81HTKWnTL6XXpVZxVCY9korjvR8=", - "dev": true - }, - "buffer-crc32": { - "version": "0.2.13", - "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", - "integrity": "sha1-DTM+PwDqxQqhRUq9MO+MKl2ackI=", - "dev": true - }, - "buffer-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-1.0.0.tgz", - "integrity": "sha1-WWFrSYME1Var1GaWayLu2j7KX74=", - "dev": true - }, - "buffer-from": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", - "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==", - "dev": true - }, - "builtin-modules": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz", - "integrity": "sha1-Jw8HbFpywC9bZaR9+Uxf46J4iS8=", - "dev": true - }, - "caseless": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", - "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", - "dev": true - }, - "chalk": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz", - "integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.1", - "escape-string-regexp": "^1.0.5", - "supports-color": "^5.3.0" - }, - "dependencies": { - "ansi-styles": { - "version": 
"3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", - "dev": true, - "requires": { - "color-convert": "^1.9.0" - } - }, - "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", - "dev": true - }, - "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", - "dev": true, - "requires": { - "has-flag": "^3.0.0" - } - } - } - }, - "cheerio": { - "version": "1.0.0-rc.2", - "resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.2.tgz", - "integrity": "sha1-S59TqBsn5NXawxwP/Qz6A8xoMNs=", - "dev": true, - "requires": { - "css-select": "~1.2.0", - "dom-serializer": "~0.1.0", - "entities": "~1.1.1", - "htmlparser2": "^3.9.1", - "lodash": "^4.15.0", - "parse5": "^3.0.1" - } - }, - "clone": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/clone/-/clone-0.2.0.tgz", - "integrity": "sha1-xhJqkK1Pctv1rNskPMN3JP6T/B8=", - "dev": true - }, - "clone-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", - "integrity": "sha1-4+JbIHrE5wGvch4staFnksrD3Fg=", - "dev": true - }, - "clone-stats": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-0.0.1.tgz", - "integrity": "sha1-uI+UqCzzi4eR1YBG6kAprYjKmdE=", - "dev": true - }, - "cloneable-readable": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/cloneable-readable/-/cloneable-readable-1.1.2.tgz", - "integrity": "sha512-Bq6+4t+lbM8vhTs/Bef5c5AdEMtapp/iFb6+s4/Hh9MVTt8OLKH7ZOOZSCT+Ys7hsHvqv0GuMPJ1lnQJVHvxpg==", - "dev": true, - "requires": { - "inherits": 
"^2.0.1", - "process-nextick-args": "^2.0.0", - "readable-stream": "^2.3.5" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "color-convert": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, - "requires": { - "color-name": "1.1.3" - } - }, - "color-name": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true - }, - "combined-stream": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.7.tgz", - "integrity": "sha512-brWl9y6vOB1xYPZcpZde3N9zDByXTosAeMDo4p1wzo6UMOX4vumB+TP1RZ76sfE6Md68Q0NJSrE/gbezd4Ul+w==", - "dev": true, - "requires": { - "delayed-stream": "~1.0.0" - } - }, - "commander": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", - "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==", - "dev": true - }, - "concat-map": { - "version": 
"0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", - "dev": true - }, - "convert-source-map": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", - "integrity": "sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.1" - } - }, - "core-util-is": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", - "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=", - "dev": true - }, - "css-select": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", - "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", - "dev": true, - "requires": { - "boolbase": "~1.0.0", - "css-what": "2.1", - "domutils": "1.5.1", - "nth-check": "~1.0.1" - } - }, - "css-what": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.2.tgz", - "integrity": "sha512-wan8dMWQ0GUeF7DGEPVjhHemVW/vy6xUYmFzRY8RYqgA0JtXC9rJmbScBjqSu6dg9q0lwPQy6ZAmJVr3PPTvqQ==", - "dev": true - }, - "dashdash": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", - "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", - "dev": true, - "requires": { - "ms": "2.0.0" - } - }, - "deep-assign": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/deep-assign/-/deep-assign-1.0.0.tgz", - "integrity": "sha1-sJJ0O+hCfcYh6gBnzex+cN0Z83s=", - "dev": true, - "requires": { - "is-obj": "^1.0.0" - } - }, - "define-properties": { - "version": 
"1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "dev": true, - "requires": { - "object-keys": "^1.0.12" - } - }, - "delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", - "dev": true - }, - "denodeify": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/denodeify/-/denodeify-1.2.1.tgz", - "integrity": "sha1-OjYof1A05pnnV3kBBSwubJQlFjE=", - "dev": true - }, - "diff": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/diff/-/diff-3.3.1.tgz", - "integrity": "sha512-MKPHZDMB0o6yHyDryUOScqZibp914ksXwAMYMTHj6KO8UeKsRYNJD3oNCKjTqZon+V488P7N/HzXF8t7ZR95ww==", - "dev": true - }, - "dom-serializer": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz", - "integrity": "sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=", - "dev": true, - "requires": { - "domelementtype": "~1.1.1", - "entities": "~1.1.1" - }, - "dependencies": { - "domelementtype": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz", - "integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=", - "dev": true - } - } - }, - "domelementtype": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.2.1.tgz", - "integrity": "sha512-SQVCLFS2E7G5CRCMdn6K9bIhRj1bS6QBWZfF0TUPh4V/BbqrQ619IdSS3/izn0FZ+9l+uODzaZjb08fjOfablA==", - "dev": true - }, - "domhandler": { - "version": "2.4.2", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz", - "integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==", - "dev": true, - "requires": { - "domelementtype": "1" - } - }, - "domutils": { - "version": "1.5.1", 
- "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", - "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", - "dev": true, - "requires": { - "dom-serializer": "0", - "domelementtype": "1" - } - }, - "duplexer": { - "version": "0.1.1", - "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", - "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=", - "dev": true - }, - "duplexify": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.6.1.tgz", - "integrity": "sha512-vM58DwdnKmty+FSPzT14K9JXb90H+j5emaR4KYbr2KTIz00WHGbWOe5ghQTx233ZCLZtrGDALzKwcjEtSt35mA==", - "dev": true, - "requires": { - "end-of-stream": "^1.0.0", - "inherits": "^2.0.1", - "readable-stream": "^2.0.0", - "stream-shift": "^1.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "ecc-jsbn": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", - "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", - "dev": true, - "requires": { - "jsbn": "~0.1.0", - "safer-buffer": "^2.1.0" - } - }, - "end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": 
"sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "entities": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", - "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", - "dev": true - }, - "escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", - "dev": true - }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true - }, - "esutils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", - "dev": true - }, - "event-stream": { - "version": "3.3.4", - "resolved": "http://registry.npmjs.org/event-stream/-/event-stream-3.3.4.tgz", - "integrity": "sha1-SrTJoPWlTbkzi0w02Gv86PSzVXE=", - "dev": true, - "requires": { - "duplexer": "~0.1.1", - "from": "~0", - "map-stream": "~0.1.0", - "pause-stream": "0.0.11", - "split": "0.3", - "stream-combiner": "~0.0.4", - "through": "~2.3.1" - } - }, - "expand-brackets": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-0.1.5.tgz", - "integrity": "sha1-3wcoTjQqgHzXM6xa9yQR5YHRF3s=", - "dev": true, - "requires": { - "is-posix-bracket": "^0.1.0" - } - }, - "expand-range": { - "version": "1.8.2", - "resolved": "https://registry.npmjs.org/expand-range/-/expand-range-1.8.2.tgz", - "integrity": "sha1-opnv/TNf4nIeuujiV+x5ZE/IUzc=", - "dev": true, - "requires": { - "fill-range": "^2.1.0" - } - }, - "extend": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "dev": true - }, - "extend-shallow": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-1.1.4.tgz", - "integrity": "sha1-Gda/lN/AnXa6cR85uHLSH/TdkHE=", - "dev": true, - "requires": { - "kind-of": "^1.1.0" - } - }, - "extglob": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/extglob/-/extglob-0.3.2.tgz", - "integrity": "sha1-Lhj/PS9JqydlzskCPwEdqo2DSaE=", - "dev": true, - "requires": { - "is-extglob": "^1.0.0" - }, - "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=", - "dev": true - } - } - }, - "extsprintf": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", - "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", - "dev": true - }, - "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", - "dev": true - }, - "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", - "dev": true - }, - "fd-slicer": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", - "integrity": "sha1-JcfInLH5B3+IkbvmHY85Dq4lbx4=", - "dev": true, - "requires": { - "pend": "~1.2.0" - } - }, - "filename-regex": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/filename-regex/-/filename-regex-2.0.1.tgz", - "integrity": "sha1-wcS5vuPglyXdsQa3XB4wH+LxiyY=", - "dev": true - }, - "fill-range": { - "version": "2.2.4", - "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz", - "integrity": "sha512-cnrcCbj01+j2gTG921VZPnHbjmdAf8oQV/iGeV2kZxGSyfYjjTyY79ErsK1WJWMpw6DaApEX72binqJE+/d+5Q==", - "dev": true, - "requires": { - "is-number": "^2.1.0", - "isobject": "^2.0.0", - "randomatic": "^3.0.0", - "repeat-element": "^1.1.2", - "repeat-string": "^1.5.2" - } - }, - "first-chunk-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/first-chunk-stream/-/first-chunk-stream-1.0.0.tgz", - "integrity": "sha1-Wb+1DNkF9g18OUzT2ayqtOatk04=", - "dev": true - }, - "flush-write-stream": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.0.3.tgz", - "integrity": "sha512-calZMC10u0FMUqoiunI2AiGIIUtUIvifNwkHhNupZH4cbNnW1Itkoh/Nf5HFYmDrwWPjrUxpkZT0KhuCq0jmGw==", - "dev": true, - "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.4" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "for-in": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", - "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", - "dev": true - }, - "for-own": { - "version": "0.1.5", - "resolved": 
"https://registry.npmjs.org/for-own/-/for-own-0.1.5.tgz", - "integrity": "sha1-UmXGgaTylNq78XyVCbZ2OqhFEM4=", - "dev": true, - "requires": { - "for-in": "^1.0.1" - } - }, - "forever-agent": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", - "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", - "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "from": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/from/-/from-0.1.7.tgz", - "integrity": "sha1-g8YK/Fi5xWmXAH7Rp2izqzA6RP4=", - "dev": true - }, - "fs-mkdirp-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz", - "integrity": "sha1-C3gV/DIBxqaeFNuYzgmMFpNSWes=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.11", - "through2": "^2.0.3" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true - }, - "getpass": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", - "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0" - } - }, - "glob": { - "version": "7.1.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.3.tgz", - "integrity": 
"sha512-vcfuiIxogLV4DlGBHIUOwI0IbrJ8HWPc4MU7HzviGeNho/UJDfi6B5p3sHeWIQ0KGIU0Jpxi5ZHxemQfLkkAwQ==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "glob-base": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/glob-base/-/glob-base-0.3.0.tgz", - "integrity": "sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q=", - "dev": true, - "requires": { - "glob-parent": "^2.0.0", - "is-glob": "^2.0.0" - }, - "dependencies": { - "glob-parent": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-2.0.0.tgz", - "integrity": "sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg=", - "dev": true, - "requires": { - "is-glob": "^2.0.0" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=", - "dev": true - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "dev": true, - "requires": { - "is-extglob": "^1.0.0" - } - } - } - }, - "glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", - "dev": true, - "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - } - }, - "glob-stream": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-5.3.5.tgz", - "integrity": "sha1-pVZlqajM3EGRWofHAeMtTgFvrSI=", - "dev": true, - "requires": { - "extend": "^3.0.0", - "glob": "^5.0.3", - "glob-parent": "^3.0.0", - "micromatch": "^2.3.7", - "ordered-read-streams": "^0.3.0", - "through2": "^0.6.0", - "to-absolute-glob": "^0.1.1", - "unique-stream": "^2.0.2" - }, - "dependencies": { - "glob": { - "version": "5.0.15", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-5.0.15.tgz", - "integrity": "sha1-G8k2ueAvSmA/zCIuz3Yz0wuLk7E=", - "dev": true, - "requires": { - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "2 || 3", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", - "dev": true - }, - "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", - "dev": true - }, - "through2": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-0.6.5.tgz", - "integrity": "sha1-QaucZ7KdVyCQcUEOHXp6lozTrUg=", - "dev": true, - "requires": { - "readable-stream": ">=1.0.33-1 <1.1.0-0", - "xtend": ">=4.0.0 <4.1.0-0" - } - } - } - }, - "graceful-fs": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.1.15.tgz", - "integrity": "sha512-6uHUhOPEBgQ24HM+r6b/QwWfZq+yiFcipKFrOFiBEnWdy5sdzYoi+pJeQaPI5qOLRFqWmAXUPQNsielzdLoecA==", - "dev": true - }, - "growl": { - "version": "1.10.3", - "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.3.tgz", - "integrity": "sha512-hKlsbA5Vu3xsh1Cg3J7jSmX/WaW6A5oBeqzM88oNbCRQFz+zUaXm6yxS4RVytp1scBoJzSYl4YAEOQIt6O8V1Q==", - "dev": true - }, - "gulp-chmod": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/gulp-chmod/-/gulp-chmod-2.0.0.tgz", - "integrity": "sha1-AMOQuSigeZslGsz2MaoJ4BzGKZw=", - "dev": true, - "requires": { - "deep-assign": "^1.0.0", - 
"stat-mode": "^0.2.0", - "through2": "^2.0.0" - } - }, - "gulp-filter": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/gulp-filter/-/gulp-filter-5.1.0.tgz", - "integrity": "sha1-oF4Rr/sHz33PQafeHLe2OsN4PnM=", - "dev": true, - "requires": { - "multimatch": "^2.0.0", - "plugin-error": "^0.1.2", - "streamfilter": "^1.0.5" - } - }, - "gulp-gunzip": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/gulp-gunzip/-/gulp-gunzip-1.0.0.tgz", - "integrity": "sha1-FbdBFF6Dqcb1CIYkG1fMWHHxUak=", - "dev": true, - "requires": { - "through2": "~0.6.5", - "vinyl": "~0.4.6" - }, - "dependencies": { - "isarray": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", - "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", - "dev": true - }, - "readable-stream": { - "version": "1.0.34", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", - "integrity": "sha1-Elgg40vIQtLyqq+v5MKRbuMsFXw=", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "0.0.1", - "string_decoder": "~0.10.x" - } - }, - "string_decoder": { - "version": "0.10.31", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", - "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", - "dev": true - }, - "through2": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-0.6.5.tgz", - "integrity": "sha1-QaucZ7KdVyCQcUEOHXp6lozTrUg=", - "dev": true, - "requires": { - "readable-stream": ">=1.0.33-1 <1.1.0-0", - "xtend": ">=4.0.0 <4.1.0-0" - } - } - } - }, - "gulp-remote-src-vscode": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/gulp-remote-src-vscode/-/gulp-remote-src-vscode-0.5.1.tgz", - "integrity": "sha512-mw4OGjtC/jlCWJFhbcAlel4YPvccChlpsl3JceNiB/DLJi24/UPxXt53/N26lgI3dknEqd4ErfdHrO8sJ5bATQ==", - "dev": true, - "requires": { - "event-stream": "3.3.4", - "node.extend": "^1.1.2", - "request": 
"^2.79.0", - "through2": "^2.0.3", - "vinyl": "^2.0.1" - }, - "dependencies": { - "clone": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", - "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", - "dev": true - }, - "clone-stats": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", - "integrity": "sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=", - "dev": true - }, - "vinyl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-2.2.0.tgz", - "integrity": "sha512-MBH+yP0kC/GQ5GwBqrTPTzEfiiLjta7hTtvQtbxBgTeSXsmKQRQecjibMbxIXzVT3Y9KJK+drOz1/k+vsu8Nkg==", - "dev": true, - "requires": { - "clone": "^2.1.1", - "clone-buffer": "^1.0.0", - "clone-stats": "^1.0.0", - "cloneable-readable": "^1.0.0", - "remove-trailing-separator": "^1.0.1", - "replace-ext": "^1.0.0" - } - } - } - }, - "gulp-sourcemaps": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/gulp-sourcemaps/-/gulp-sourcemaps-1.6.0.tgz", - "integrity": "sha1-uG/zSdgBzrVuHZ59x7vLS33uYAw=", - "dev": true, - "requires": { - "convert-source-map": "^1.1.1", - "graceful-fs": "^4.1.2", - "strip-bom": "^2.0.0", - "through2": "^2.0.0", - "vinyl": "^1.0.0" - }, - "dependencies": { - "clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", - "dev": true - }, - "replace-ext": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-0.0.1.tgz", - "integrity": "sha1-KbvZIHinOfC8zitO5B6DeVNSKSQ=", - "dev": true - }, - "vinyl": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-1.2.0.tgz", - "integrity": "sha1-XIgDbPVl5d8FVYv8kR+GVt8hiIQ=", - "dev": true, - "requires": { - "clone": "^1.0.0", - "clone-stats": "^0.0.1", - "replace-ext": "0.0.1" - } - } - } - }, - "gulp-symdest": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/gulp-symdest/-/gulp-symdest-1.1.1.tgz", - "integrity": "sha512-UHd3MokfIN7SrFdsbV5uZTwzBpL0ZSTu7iq98fuDqBGZ0dlHxgbQBJwfd6qjCW83snkQ3Hz9IY4sMRMz2iTq7w==", - "dev": true, - "requires": { - "event-stream": "3.3.4", - "mkdirp": "^0.5.1", - "queue": "^3.1.0", - "vinyl-fs": "^2.4.3" - } - }, - "gulp-untar": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/gulp-untar/-/gulp-untar-0.0.7.tgz", - "integrity": "sha512-0QfbCH2a1k2qkTLWPqTX+QO4qNsHn3kC546YhAP3/n0h+nvtyGITDuDrYBMDZeW4WnFijmkOvBWa5HshTic1tw==", - "dev": true, - "requires": { - "event-stream": "~3.3.4", - "streamifier": "~0.1.1", - "tar": "^2.2.1", - "through2": "~2.0.3", - "vinyl": "^1.2.0" - }, - "dependencies": { - "clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", - "dev": true - }, - "replace-ext": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-0.0.1.tgz", - "integrity": "sha1-KbvZIHinOfC8zitO5B6DeVNSKSQ=", - "dev": true - }, - "vinyl": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-1.2.0.tgz", - "integrity": "sha1-XIgDbPVl5d8FVYv8kR+GVt8hiIQ=", - "dev": true, - "requires": { - "clone": "^1.0.0", - "clone-stats": "^0.0.1", - "replace-ext": "0.0.1" - } - } - } - }, - "gulp-vinyl-zip": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/gulp-vinyl-zip/-/gulp-vinyl-zip-2.1.2.tgz", - "integrity": "sha512-wJn09jsb8PyvUeyFF7y7ImEJqJwYy40BqL9GKfJs6UGpaGW9A+N68Q+ajsIpb9AeR6lAdjMbIdDPclIGo1/b7Q==", - "dev": true, - "requires": { - "event-stream": "3.3.4", - "queue": "^4.2.1", - "through2": "^2.0.3", - "vinyl": "^2.0.2", - "vinyl-fs": "^3.0.3", - "yauzl": "^2.2.1", - "yazl": "^2.2.1" - }, - "dependencies": { - "clone": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", - "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", - "dev": true - }, - 
"clone-stats": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", - "integrity": "sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=", - "dev": true - }, - "glob-stream": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-6.1.0.tgz", - "integrity": "sha1-cEXJlBOz65SIjYOrRtC0BMx73eQ=", - "dev": true, - "requires": { - "extend": "^3.0.0", - "glob": "^7.1.1", - "glob-parent": "^3.1.0", - "is-negated-glob": "^1.0.0", - "ordered-read-streams": "^1.0.0", - "pumpify": "^1.3.5", - "readable-stream": "^2.1.5", - "remove-trailing-separator": "^1.0.1", - "to-absolute-glob": "^2.0.0", - "unique-stream": "^2.0.2" - } - }, - "is-valid-glob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz", - "integrity": "sha1-Kb8+/3Ab4tTTFdusw5vDn+j2Aao=", - "dev": true - }, - "ordered-read-streams": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz", - "integrity": "sha1-d8DLN8QVJdZBZtmQ/61+xqDhNj4=", - "dev": true, - "requires": { - "readable-stream": "^2.0.1" - } - }, - "queue": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/queue/-/queue-4.5.1.tgz", - "integrity": "sha512-AMD7w5hRXcFSb8s9u38acBZ+309u6GsiibP4/0YacJeaurRshogB7v/ZcVPxP5gD5+zIw6ixRHdutiYUJfwKHw==", - "dev": true, - "requires": { - "inherits": "~2.0.0" - } - }, - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "to-absolute-glob": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/to-absolute-glob/-/to-absolute-glob-2.0.2.tgz", - "integrity": "sha1-GGX0PZ50sIItufFFt4z/fQ98hJs=", - "dev": true, - "requires": { - "is-absolute": "^1.0.0", - "is-negated-glob": "^1.0.0" - } - }, - "vinyl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-2.2.0.tgz", - "integrity": "sha512-MBH+yP0kC/GQ5GwBqrTPTzEfiiLjta7hTtvQtbxBgTeSXsmKQRQecjibMbxIXzVT3Y9KJK+drOz1/k+vsu8Nkg==", - "dev": true, - "requires": { - "clone": "^2.1.1", - "clone-buffer": "^1.0.0", - "clone-stats": "^1.0.0", - "cloneable-readable": "^1.0.0", - "remove-trailing-separator": "^1.0.1", - "replace-ext": "^1.0.0" - } - }, - "vinyl-fs": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-3.0.3.tgz", - "integrity": "sha512-vIu34EkyNyJxmP0jscNzWBSygh7VWhqun6RmqVfXePrOwi9lhvRs//dOaGOTRUQr4tx7/zd26Tk5WeSVZitgng==", - "dev": true, - "requires": { - "fs-mkdirp-stream": "^1.0.0", - "glob-stream": "^6.1.0", - "graceful-fs": "^4.0.0", - "is-valid-glob": "^1.0.0", - "lazystream": "^1.0.0", - "lead": "^1.0.0", - "object.assign": "^4.0.4", - "pumpify": "^1.3.5", - "readable-stream": "^2.3.3", - "remove-bom-buffer": "^3.0.0", - "remove-bom-stream": "^1.2.0", - "resolve-options": "^1.1.0", - "through2": "^2.0.0", - "to-through": "^2.0.0", - "value-or-function": "^3.0.0", - "vinyl": "^2.0.0", - "vinyl-sourcemap": "^1.1.0" - } - } - } - }, - "har-schema": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", - "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", - "dev": true - }, - "har-validator": { - "version": "5.1.3", - "resolved": 
"https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", - "dev": true, - "requires": { - "ajv": "^6.5.5", - "har-schema": "^2.0.0" - } - }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "has-flag": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz", - "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE=", - "dev": true - }, - "has-symbols": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", - "dev": true - }, - "he": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/he/-/he-1.1.1.tgz", - "integrity": "sha1-k0EP0hsAlzUVH4howvJx80J+I/0=", - "dev": true - }, - "htmlparser2": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.10.0.tgz", - "integrity": "sha512-J1nEUGv+MkXS0weHNWVKJJ+UrLfePxRWpN3C9bEi9fLxL2+ggW94DQvgYVXsaT30PGwYRIZKNZXuyMhp3Di4bQ==", - "dev": true, - "requires": { - "domelementtype": "^1.3.0", - "domhandler": "^2.3.0", - "domutils": "^1.5.1", - "entities": "^1.1.1", - "inherits": "^2.0.1", - "readable-stream": "^3.0.6" - }, - "dependencies": { - "domelementtype": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.0.tgz", - "integrity": "sha1-sXrtguirWeUt2cGbF1bg/BhyBMI=", - "dev": true - } - } - }, - "http-signature": { - 
"version": "1.2.0", - "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", - "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "jsprim": "^1.2.2", - "sshpk": "^1.7.0" - } - }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", - "dev": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", - "dev": true - }, - "is": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/is/-/is-3.2.1.tgz", - "integrity": "sha1-0Kwq1V63sL7JJqUmb2xmKqqD3KU=", - "dev": true - }, - "is-absolute": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz", - "integrity": "sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==", - "dev": true, - "requires": { - "is-relative": "^1.0.0", - "is-windows": "^1.0.1" - } - }, - "is-buffer": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", - "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", - "dev": true - }, - "is-dotfile": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-dotfile/-/is-dotfile-1.0.3.tgz", - "integrity": "sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE=", - "dev": true - }, - "is-equal-shallow": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/is-equal-shallow/-/is-equal-shallow-0.1.3.tgz", - "integrity": "sha1-IjgJj8Ih3gvPpdnqxMRdY4qhxTQ=", - "dev": true, - "requires": { - "is-primitive": "^2.0.0" - } - }, - "is-extendable": { - "version": "0.1.1", - "resolved": 
"https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", - "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", - "dev": true - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", - "dev": true - }, - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, - "requires": { - "is-extglob": "^2.1.0" - } - }, - "is-negated-glob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz", - "integrity": "sha1-aRC8pdqMleeEtXUbl2z1oQ/uNtI=", - "dev": true - }, - "is-number": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-2.1.0.tgz", - "integrity": "sha1-Afy7s5NGOlSPL0ZszhbezknbkI8=", - "dev": true, - "requires": { - "kind-of": "^3.0.2" - }, - "dependencies": { - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "is-obj": { - "version": "1.0.1", - "resolved": "http://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", - "dev": true - }, - "is-posix-bracket": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz", - "integrity": "sha1-MzTceXdDaOkvAW5vvAqI9c1ua8Q=", - "dev": true - }, - "is-primitive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-primitive/-/is-primitive-2.0.0.tgz", - "integrity": "sha1-IHurkWOEmcB7Kt8kCkGochADRXU=", - "dev": true - }, - "is-relative": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", - "integrity": 
"sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", - "dev": true, - "requires": { - "is-unc-path": "^1.0.0" - } - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha1-EtSj3U5o4Lec6428hBc66A2RykQ=", - "dev": true - }, - "is-typedarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", - "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", - "dev": true - }, - "is-unc-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", - "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", - "dev": true, - "requires": { - "unc-path-regex": "^0.1.2" - } - }, - "is-utf8": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", - "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", - "dev": true - }, - "is-valid-glob": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-0.3.0.tgz", - "integrity": "sha1-1LVcafUYhvm2XHDWwmItN+KfSP4=", - "dev": true - }, - "is-windows": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", - "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", - "dev": true - }, - "isarray": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", - "dev": true - }, - "isobject": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", - "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", - "dev": true, - "requires": { - "isarray": "1.0.0" - } - }, - "isstream": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", 
- "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", - "dev": true - }, - "js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", - "dev": true - }, - "js-yaml": { - "version": "3.14.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.0.tgz", - "integrity": "sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A==", - "dev": true, - "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - } - }, - "jsbn": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", - "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", - "dev": true - }, - "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", - "dev": true - }, - "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "json-stable-stringify": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", - "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", - "dev": true, - "requires": { - "jsonify": "~0.0.0" - } - }, - "json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", - "dev": true - }, - "jsonify": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", - "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", - "dev": true - }, - "jsprim": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": 
"sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", - "dev": true, - "requires": { - "assert-plus": "1.0.0", - "extsprintf": "1.3.0", - "json-schema": "0.2.3", - "verror": "1.10.0" - } - }, - "kind-of": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-1.1.0.tgz", - "integrity": "sha1-FAo9LUGjbS78+pN3tiwk+ElaXEQ=", - "dev": true - }, - "lazystream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz", - "integrity": "sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=", - "dev": true, - "requires": { - "readable-stream": "^2.0.5" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "lead": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/lead/-/lead-1.0.0.tgz", - "integrity": "sha1-bxT5mje+Op3XhPVJVpDlkDRm7kI=", - "dev": true, - "requires": { - "flush-write-stream": "^1.0.2" - } - }, - "linkify-it": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.1.0.tgz", - "integrity": "sha512-4REs8/062kV2DSHxNfq5183zrqXMl7WP0WzABH9IeJI+NLm429FgE1PDecltYfnOoFDFlZGh2T8PfZn0r+GTRg==", - "dev": true, - "requires": { - "uc.micro": "^1.0.1" - } - }, - "lodash": { - "version": "4.17.19", - "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", - "integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==", - "dev": true - }, - "lodash.isequal": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha1-QVxEePK8wwEgwizhDtMib30+GOA=", - "dev": true - }, - "map-stream": { - "version": "0.1.0", - "resolved": "http://registry.npmjs.org/map-stream/-/map-stream-0.1.0.tgz", - "integrity": "sha1-5WqpTEyAVaFkBKBnS3jyFffI4ZQ=", - "dev": true - }, - "markdown-it": { - "version": "8.4.2", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-8.4.2.tgz", - "integrity": "sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==", - "dev": true, - "requires": { - "argparse": "^1.0.7", - "entities": "~1.1.1", - "linkify-it": "^2.0.0", - "mdurl": "^1.0.1", - "uc.micro": "^1.0.5" - } - }, - "math-random": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/math-random/-/math-random-1.0.1.tgz", - "integrity": "sha1-izqsWIuKZuSXXjzepn97sylgH6w=", - "dev": true - }, - "mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha1-/oWy7HWlkDfyrf7BAP1sYBdhFS4=", - "dev": true - }, - "merge-stream": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-1.0.1.tgz", - "integrity": "sha1-QEEgLVCKNCugAXQAjfDCUbjBNeE=", - "dev": true, - "requires": { - "readable-stream": "^2.0.1" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - 
"safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "micromatch": { - "version": "2.3.11", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-2.3.11.tgz", - "integrity": "sha1-hmd8l9FyCzY0MdBNDRUpO9OMFWU=", - "dev": true, - "requires": { - "arr-diff": "^2.0.0", - "array-unique": "^0.2.1", - "braces": "^1.8.2", - "expand-brackets": "^0.1.4", - "extglob": "^0.3.1", - "filename-regex": "^2.0.0", - "is-extglob": "^1.0.0", - "is-glob": "^2.0.1", - "kind-of": "^3.0.2", - "normalize-path": "^2.0.1", - "object.omit": "^2.0.0", - "parse-glob": "^3.0.4", - "regex-cache": "^0.4.2" - }, - "dependencies": { - "arr-diff": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-2.0.0.tgz", - "integrity": "sha1-jzuCf5Vai9ZpaX5KQlasPOrjVs8=", - "dev": true, - "requires": { - "arr-flatten": "^1.0.1" - } - }, - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=", - "dev": true - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "dev": true, - "requires": { - "is-extglob": "^1.0.0" - } - }, - "kind-of": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", - "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", - "dev": true, - "requires": { - "is-buffer": "^1.1.5" - } - } - } - }, - "mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": 
"sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true - }, - "mime-db": { - "version": "1.37.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.37.0.tgz", - "integrity": "sha512-R3C4db6bgQhlIhPU48fUtdVmKnflq+hRdad7IyKhtFj06VPNVdk2RhiYL3UjQIlso8L+YxAtFkobT0VK+S/ybg==", - "dev": true - }, - "mime-types": { - "version": "2.1.21", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.21.tgz", - "integrity": "sha512-3iL6DbwpyLzjR3xHSFNFeb9Nz/M8WDkX33t1GFQnFOllWk8pOrh/LSrB5OXlnlW5P9LH73X6loW/eogc+F5lJg==", - "dev": true, - "requires": { - "mime-db": "~1.37.0" - } - }, - "minimatch": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", - "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", - "dev": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "0.0.8", - "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true - }, - "mkdirp": { - "version": "0.5.1", - "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", - "dev": true, - "requires": { - "minimist": "0.0.8" - } - }, - "mocha": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-4.1.0.tgz", - "integrity": "sha512-0RVnjg1HJsXY2YFDoTNzcc1NKhYuXKRrBAG2gDygmJJA136Cs2QlRliZG1mA0ap7cuaT30mw16luAeln+4RiNA==", - "dev": true, - "requires": { - "browser-stdout": "1.3.0", - "commander": "2.11.0", - "debug": "3.1.0", - "diff": "3.3.1", - "escape-string-regexp": "1.0.5", - "glob": "7.1.2", - "growl": "1.10.3", - "he": "1.1.1", - "mkdirp": "0.5.1", - "supports-color": "4.4.0" - }, - "dependencies": { - "commander": { - "version": "2.11.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.11.0.tgz", - "integrity": 
"sha512-b0553uYA5YAEGgyYIGYROzKQ7X5RAqedkfjiZxwi0kL1g3bOaBNNZfYkzt/CL0umgD5wc9Jec2FbB98CjkMRvQ==", - "dev": true - }, - "glob": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz", - "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - } - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true - }, - "multimatch": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/multimatch/-/multimatch-2.1.0.tgz", - "integrity": "sha1-nHkGoi+0wCkZ4vX3UWG0zb1LKis=", - "dev": true, - "requires": { - "array-differ": "^1.0.0", - "array-union": "^1.0.1", - "arrify": "^1.0.0", - "minimatch": "^3.0.0" - } - }, - "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", - "dev": true - }, - "node.extend": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/node.extend/-/node.extend-1.1.8.tgz", - "integrity": "sha512-L/dvEBwyg3UowwqOUTyDsGBU6kjBQOpOhshio9V3i3BMPv5YUb9+mWNN8MK0IbWqT0AqaTSONZf0aTuMMahWgA==", - "dev": true, - "requires": { - "has": "^1.0.3", - "is": "^3.2.1" - } - }, - "normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - "requires": { - "remove-trailing-separator": "^1.0.1" - } - }, - "now-and-later": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/now-and-later/-/now-and-later-2.0.0.tgz", - "integrity": "sha1-vGHLtFbXnLMiB85HygUTb/Ln1u4=", - "dev": true, - "requires": { - "once": "^1.3.2" - } 
- }, - "nth-check": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz", - "integrity": "sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==", - "dev": true, - "requires": { - "boolbase": "~1.0.0" - } - }, - "oauth-sign": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", - "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", - "dev": true - }, - "object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", - "dev": true - }, - "object-keys": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.0.12.tgz", - "integrity": "sha512-FTMyFUm2wBcGHnH2eXmz7tC6IwlqQZ6mVZ+6dm6vZ4IQIHjs6FdNsQBuKGPuUUUY6NfJw2PshC08Tn6LzLDOag==", - "dev": true - }, - "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "dev": true, - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - } - }, - "object.omit": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/object.omit/-/object.omit-2.0.1.tgz", - "integrity": "sha1-Gpx0SCnznbuFjHbKNXmuKlTr0fo=", - "dev": true, - "requires": { - "for-own": "^0.1.4", - "is-extendable": "^0.1.1" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "ordered-read-streams": { - "version": "0.3.0", - "resolved": 
"https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-0.3.0.tgz", - "integrity": "sha1-cTfmmzKYuzQiR6G77jiByA4v14s=", - "dev": true, - "requires": { - "is-stream": "^1.0.1", - "readable-stream": "^2.0.1" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, - "os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "dev": true - }, - "osenv": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", - "dev": true, - "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" - } - }, - "parse-glob": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/parse-glob/-/parse-glob-3.0.4.tgz", - "integrity": "sha1-ssN2z7EfNVE7rdFz7wu246OIORw=", - "dev": true, - "requires": { - "glob-base": "^0.3.0", - "is-dotfile": "^1.0.0", - "is-extglob": 
"^1.0.0", - "is-glob": "^2.0.0" - }, - "dependencies": { - "is-extglob": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-1.0.0.tgz", - "integrity": "sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA=", - "dev": true - }, - "is-glob": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-2.0.1.tgz", - "integrity": "sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM=", - "dev": true, - "requires": { - "is-extglob": "^1.0.0" - } - } - } - }, - "parse-semver": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/parse-semver/-/parse-semver-1.1.1.tgz", - "integrity": "sha1-mkr9bfBj3Egm+T+6SpnPIj9mbLg=", - "dev": true, - "requires": { - "semver": "^5.1.0" - } - }, - "parse5": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-3.0.3.tgz", - "integrity": "sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "path-dirname": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", - "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", - "dev": true - }, - "path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", - "dev": true - }, - "path-parse": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.6.tgz", - "integrity": "sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw==", - "dev": true - }, - "pause-stream": { - "version": "0.0.11", - "resolved": "http://registry.npmjs.org/pause-stream/-/pause-stream-0.0.11.tgz", - "integrity": "sha1-/lo0sMvOErWqaitAPuLnO2AvFEU=", - "dev": true, - "requires": { - "through": "~2.3" - } - }, - "pend": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", - 
"integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", - "dev": true - }, - "performance-now": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", - "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", - "dev": true - }, - "plugin-error": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/plugin-error/-/plugin-error-0.1.2.tgz", - "integrity": "sha1-O5uzM1zPAPQl4HQ34ZJ2ln2kes4=", - "dev": true, - "requires": { - "ansi-cyan": "^0.1.1", - "ansi-red": "^0.1.1", - "arr-diff": "^1.0.1", - "arr-union": "^2.0.1", - "extend-shallow": "^1.1.2" - } - }, - "preserve": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/preserve/-/preserve-0.2.0.tgz", - "integrity": "sha1-gV7R9uvGWSb4ZbMQwHE7yzMVzks=", - "dev": true - }, - "prettier": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.15.2.tgz", - "integrity": "sha512-YgPLFFA0CdKL4Eg2IHtUSjzj/BWgszDHiNQAe0VAIBse34148whfdzLagRL+QiKS+YfK5ftB6X4v/MBw8yCoug==", - "dev": true - }, - "process-nextick-args": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz", - "integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw==", - "dev": true - }, - "psl": { - "version": "1.1.29", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.29.tgz", - "integrity": "sha512-AeUmQ0oLN02flVHXWh9sSJF7mcdFq0ppid/JkErufc3hGIV/AMa8Fo9VgDo/cT2jFdOWoFvHp90qqBH54W+gjQ==", - "dev": true - }, - "pump": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", - "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "pumpify": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz", - "integrity": 
"sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", - "dev": true, - "requires": { - "duplexify": "^3.6.0", - "inherits": "^2.0.3", - "pump": "^2.0.0" - } - }, - "punycode": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", - "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true - }, - "q": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", - "integrity": "sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc=", - "dev": true - }, - "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", - "dev": true - }, - "querystringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.0.tgz", - "integrity": "sha512-sluvZZ1YiTLD5jsqZcDmFyV2EwToyXZBfpoVOmktMmW+VEnhgakFHnasVph65fOjGPTWN0Nw3+XQaSeMayr0kg==", - "dev": true - }, - "queue": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/queue/-/queue-3.1.0.tgz", - "integrity": "sha1-bEnQHwCeIlZ4h4nyv/rGuLmZBYU=", - "dev": true, - "requires": { - "inherits": "~2.0.0" - } - }, - "randomatic": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/randomatic/-/randomatic-3.1.1.tgz", - "integrity": "sha512-TuDE5KxZ0J461RVjrJZCJc+J+zCkTb1MbH9AQUq68sMhOMcy9jLcb3BrZKgp9q9Ncltdg4QVqWrH02W2EFFVYw==", - "dev": true, - "requires": { - "is-number": "^4.0.0", - "kind-of": "^6.0.0", - "math-random": "^1.0.1" - }, - "dependencies": { - "is-number": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", - "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", - "dev": true - }, - "kind-of": { - "version": "6.0.2", - "resolved": 
"https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", - "dev": true - } - } - }, - "read": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", - "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", - "dev": true, - "requires": { - "mute-stream": "~0.0.4" - } - }, - "readable-stream": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.0.6.tgz", - "integrity": "sha512-9E1oLoOWfhSXHGv6QlwXJim7uNzd9EVlWK+21tCU9Ju/kR0/p2AZYPz4qSchgO8PlLIH4FpZYfzwS+rEksZjIg==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "regex-cache": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/regex-cache/-/regex-cache-0.4.4.tgz", - "integrity": "sha512-nVIZwtCjkC9YgvWkpM55B5rBhBYRZhAaJbgcFYXXsHnbZ9UZI9nnVWYZpBlCqv9ho2eZryPnWrZGsOdPwVWXWQ==", - "dev": true, - "requires": { - "is-equal-shallow": "^0.1.3" - } - }, - "remove-bom-buffer": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz", - "integrity": "sha512-8v2rWhaakv18qcvNeli2mZ/TMTL2nEyAKRvzo1WtnZBl15SHyEhrCu2/xKlJyUFKHiHgfXIyuY6g2dObJJycXQ==", - "dev": true, - "requires": { - "is-buffer": "^1.1.5", - "is-utf8": "^0.2.1" - } - }, - "remove-bom-stream": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/remove-bom-stream/-/remove-bom-stream-1.2.0.tgz", - "integrity": "sha1-BfGlk/FuQuH7kOv1nejlaVJflSM=", - "dev": true, - "requires": { - "remove-bom-buffer": "^3.0.0", - "safe-buffer": "^5.1.0", - "through2": "^2.0.3" - } - }, - "remove-trailing-separator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", - "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", - "dev": true - }, - 
"repeat-element": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", - "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", - "dev": true - }, - "repeat-string": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", - "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", - "dev": true - }, - "replace-ext": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.0.tgz", - "integrity": "sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs=", - "dev": true - }, - "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", - "dev": true, - "requires": { - "aws-sign2": "~0.7.0", - "aws4": "^1.8.0", - "caseless": "~0.12.0", - "combined-stream": "~1.0.6", - "extend": "~3.0.2", - "forever-agent": "~0.6.1", - "form-data": "~2.3.2", - "har-validator": "~5.1.0", - "http-signature": "~1.2.0", - "is-typedarray": "~1.0.0", - "isstream": "~0.1.2", - "json-stringify-safe": "~5.0.1", - "mime-types": "~2.1.19", - "oauth-sign": "~0.9.0", - "performance-now": "^2.1.0", - "qs": "~6.5.2", - "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", - "tunnel-agent": "^0.6.0", - "uuid": "^3.3.2" - } - }, - "requires-port": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", - "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=", - "dev": true - }, - "resolve": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.8.1.tgz", - "integrity": "sha512-AicPrAC7Qu1JxPCZ9ZgCZlY35QgFnNqc+0LtbRNxnVw4TXvjQ72wnuL9JQcEBgXkI9JM8MsT9kaQoHcpCRJOYA==", - "dev": true, - "requires": { - "path-parse": "^1.0.5" - } - }, - "resolve-options": { - "version": "1.1.0", - "resolved": 
"https://registry.npmjs.org/resolve-options/-/resolve-options-1.1.0.tgz", - "integrity": "sha1-MrueOcBtZzONyTeMDW1gdFZq0TE=", - "dev": true, - "requires": { - "value-or-function": "^3.0.0" - } - }, - "rimraf": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.2.tgz", - "integrity": "sha512-lreewLK/BlghmxtfH36YYVg1i8IAce4TI7oao75I1g245+6BctqTVQiBP3YUJ9C6DQOXJmkYR9X9fCLtCOJc5w==", - "dev": true, - "requires": { - "glob": "^7.0.5" - } - }, - "safe-buffer": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", - "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", - "dev": true - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "dev": true - }, - "semver": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz", - "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==", - "dev": true - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - }, - "source-map-support": { - "version": "0.5.9", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.9.tgz", - "integrity": "sha512-gR6Rw4MvUlYy83vP0vxoVNzM6t8MUXqNuRsuBmBHQDu1Fh6X015FrLdgoDKcNdkwGubozq0P4N0Q37UyFVr1EA==", - "dev": true, - "requires": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "split": { - "version": "0.3.3", - "resolved": "http://registry.npmjs.org/split/-/split-0.3.3.tgz", - "integrity": "sha1-zQ7qXmOiEd//frDwkcQTPi0N0o8=", - "dev": true, - 
"requires": { - "through": "2" - } - }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, - "sshpk": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.15.2.tgz", - "integrity": "sha512-Ra/OXQtuh0/enyl4ETZAfTaeksa6BXks5ZcjpSUNrjBr0DvrJKX+1fsKDPpT9TBXgHAFsa4510aNVgI8g/+SzA==", - "dev": true, - "requires": { - "asn1": "~0.2.3", - "assert-plus": "^1.0.0", - "bcrypt-pbkdf": "^1.0.0", - "dashdash": "^1.12.0", - "ecc-jsbn": "~0.1.1", - "getpass": "^0.1.1", - "jsbn": "~0.1.0", - "safer-buffer": "^2.0.2", - "tweetnacl": "~0.14.0" - } - }, - "stat-mode": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/stat-mode/-/stat-mode-0.2.2.tgz", - "integrity": "sha1-5sgLYjEj19gM8TLOU480YokHJQI=", - "dev": true - }, - "stream-combiner": { - "version": "0.0.4", - "resolved": "http://registry.npmjs.org/stream-combiner/-/stream-combiner-0.0.4.tgz", - "integrity": "sha1-TV5DPBhSYd3mI8o/RMWGvPXErRQ=", - "dev": true, - "requires": { - "duplexer": "~0.1.1" - } - }, - "stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=", - "dev": true - }, - "streamfilter": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/streamfilter/-/streamfilter-1.0.7.tgz", - "integrity": "sha512-Gk6KZM+yNA1JpW0KzlZIhjo3EaBJDkYfXtYSbOwNIQ7Zd6006E6+sCFlW1NDvFG/vnXhKmw6TJJgiEQg/8lXfQ==", - "dev": true, - "requires": { - "readable-stream": "^2.0.2" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - 
"isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, - "streamifier": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/streamifier/-/streamifier-0.1.1.tgz", - "integrity": "sha1-l+mNj6TRBdYqJpHR3AfoINuN/E8=", - "dev": true - }, - "string_decoder": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.2.0.tgz", - "integrity": "sha512-6YqyX6ZWEYguAxgZzHGL7SsCeGx3V2TtOTqZz1xSTSWnqsbWwbptafNyvf/ACquZUXV3DANr5BDIwNYe1mN42w==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-bom": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", - "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", - "dev": true, - "requires": { - "is-utf8": "^0.2.0" - } - }, - "strip-bom-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-bom-stream/-/strip-bom-stream-1.0.0.tgz", - "integrity": "sha1-5xRDmFd9Uaa+0PoZlPoF9D/ZiO4=", - "dev": true, - "requires": { - "first-chunk-stream": "^1.0.0", - "strip-bom": "^2.0.0" - } - }, - "supports-color": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.4.0.tgz", - "integrity": "sha512-rKC3+DyXWgK0ZLKwmRsrkyHVZAjNkfzeehuFWdGGcqGDTZFH73+RH6S/RDAAxl9GusSjZSUWYLmT9N5pzXFOXQ==", - "dev": true, - "requires": { - 
"has-flag": "^2.0.0" - } - }, - "tar": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz", - "integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==", - "dev": true, - "requires": { - "block-stream": "*", - "fstream": "^1.0.12", - "inherits": "2" - }, - "dependencies": { - "fstream": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", - "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "inherits": "~2.0.0", - "mkdirp": ">=0.5 0", - "rimraf": "2" - } - } - } - }, - "through": { - "version": "2.3.8", - "resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz", - "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", - "dev": true - }, - "through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", - "dev": true, - "requires": { - "readable-stream": "~2.3.6", - "xtend": "~4.0.1" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - 
"safe-buffer": "~5.1.0" - } - } - } - }, - "through2-filter": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/through2-filter/-/through2-filter-2.0.0.tgz", - "integrity": "sha1-YLxVoNrLdghdsfna6Zq0P4PWIuw=", - "dev": true, - "requires": { - "through2": "~2.0.0", - "xtend": "~4.0.0" - } - }, - "tmp": { - "version": "0.0.29", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.29.tgz", - "integrity": "sha1-8lEl/w3Z2jzLDC3Tce4SiLuRKMA=", - "dev": true, - "requires": { - "os-tmpdir": "~1.0.1" - } - }, - "to-absolute-glob": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/to-absolute-glob/-/to-absolute-glob-0.1.1.tgz", - "integrity": "sha1-HN+kcqnvUMI57maZm2YsoOs5k38=", - "dev": true, - "requires": { - "extend-shallow": "^2.0.1" - }, - "dependencies": { - "extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", - "dev": true, - "requires": { - "is-extendable": "^0.1.0" - } - } - } - }, - "to-through": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-through/-/to-through-2.0.0.tgz", - "integrity": "sha1-/JKtq6ByZHvAtn1rA2ZKoZUJOvY=", - "dev": true, - "requires": { - "through2": "^2.0.3" - } - }, - "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", - "dev": true, - "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } - } - }, - "tslib": { - "version": "1.9.3", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.9.3.tgz", - "integrity": 
"sha512-4krF8scpejhaOgqzBEcGM7yDIEfi0/8+8zDRZhNZZ2kjmHJ4hv3zCbQWxoJGz1iw5U0Jl0nma13xzHXcncMavQ==", - "dev": true - }, - "tslint": { - "version": "5.11.0", - "resolved": "https://registry.npmjs.org/tslint/-/tslint-5.11.0.tgz", - "integrity": "sha1-mPMMAurjzecAYgHkwzywi0hYHu0=", - "dev": true, - "requires": { - "babel-code-frame": "^6.22.0", - "builtin-modules": "^1.1.1", - "chalk": "^2.3.0", - "commander": "^2.12.1", - "diff": "^3.2.0", - "glob": "^7.1.1", - "js-yaml": "^3.7.0", - "minimatch": "^3.0.4", - "resolve": "^1.3.2", - "semver": "^5.3.0", - "tslib": "^1.8.0", - "tsutils": "^2.27.2" - }, - "dependencies": { - "commander": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.19.0.tgz", - "integrity": "sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==", - "dev": true - } - } - }, - "tslint-config-prettier": { - "version": "1.17.0", - "resolved": "https://registry.npmjs.org/tslint-config-prettier/-/tslint-config-prettier-1.17.0.tgz", - "integrity": "sha512-NKWNkThwqE4Snn4Cm6SZB7lV5RMDDFsBwz6fWUkTxOKGjMx8ycOHnjIbhn7dZd5XmssW3CwqUjlANR6EhP9YQw==", - "dev": true - }, - "tsutils": { - "version": "2.29.0", - "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-2.29.0.tgz", - "integrity": "sha512-g5JVHCIJwzfISaXpXE1qvNalca5Jwob6FjI4AoPlqMusJ6ftFE7IkkFoMhVLRgK+4Kx3gkzb8UZK5t5yTTvEmA==", - "dev": true, - "requires": { - "tslib": "^1.8.1" - } - }, - "tunnel": { - "version": "0.0.4", - "resolved": "http://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz", - "integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM=", - "dev": true - }, - "tunnel-agent": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", - "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", - "dev": true, - "requires": { - "safe-buffer": "^5.0.1" - } - }, - "tweetnacl": { - "version": "0.14.5", - "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", - 
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", - "dev": true - }, - "typed-rest-client": { - "version": "0.9.0", - "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-0.9.0.tgz", - "integrity": "sha1-92jMDcP06VDwbgSCXDaz54NKofI=", - "dev": true, - "requires": { - "tunnel": "0.0.4", - "underscore": "1.8.3" - }, - "dependencies": { - "underscore": { - "version": "1.8.3", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", - "integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI=", - "dev": true - } - } - }, - "typescript": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-2.9.2.tgz", - "integrity": "sha512-Gr4p6nFNaoufRIY4NMdpQRNmgxVIGMs4Fcu/ujdYk3nAZqk7supzBE9idmvfZIlH/Cuj//dvi+019qEue9lV0w==", - "dev": true - }, - "uc.micro": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.5.tgz", - "integrity": "sha512-JoLI4g5zv5qNyT09f4YAvEZIIV1oOjqnewYg5D38dkQljIzpPT296dbIGvKro3digYI1bkb7W6EP1y4uDlmzLg==", - "dev": true - }, - "unc-path-regex": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", - "integrity": "sha1-5z3T17DXxe2G+6xrCufYxqadUPo=", - "dev": true - }, - "underscore": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.9.1.tgz", - "integrity": "sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg==", - "dev": true - }, - "unique-stream": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/unique-stream/-/unique-stream-2.2.1.tgz", - "integrity": "sha1-WqADz76Uxf+GbE59ZouxxNuts2k=", - "dev": true, - "requires": { - "json-stable-stringify": "^1.0.0", - "through2-filter": "^2.0.0" - } - }, - "uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": 
"sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", - "dev": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "url-join": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/url-join/-/url-join-1.1.0.tgz", - "integrity": "sha1-dBxsL0WWxIMNZxhGCSDQySIC3Hg=", - "dev": true - }, - "url-parse": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.4.4.tgz", - "integrity": "sha512-/92DTTorg4JjktLNLe6GPS2/RvAd/RGr6LuktmWSMLEOa6rjnlrFXNgSbSmkNvCoL2T028A0a1JaJLzRMlFoHg==", - "dev": true, - "requires": { - "querystringify": "^2.0.0", - "requires-port": "^1.0.0" - } - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", - "dev": true - }, - "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", - "dev": true - }, - "vali-date": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/vali-date/-/vali-date-1.0.0.tgz", - "integrity": "sha1-G5BKWWCfsyjvB4E4Qgk09rhnCaY=", - "dev": true - }, - "value-or-function": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/value-or-function/-/value-or-function-3.0.0.tgz", - "integrity": "sha1-HCQ6ULWVwb5Up1S/7OhWO5/42BM=", - "dev": true - }, - "verror": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", - "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", - "dev": true, - "requires": { - "assert-plus": "^1.0.0", - "core-util-is": "1.0.2", - "extsprintf": "^1.2.0" - } - }, - "vinyl": { - "version": "0.4.6", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-0.4.6.tgz", - "integrity": "sha1-LzVsh6VQolVGHza76ypbqL94SEc=", - "dev": true, - "requires": { - "clone": "^0.2.0", - 
"clone-stats": "^0.0.1" - } - }, - "vinyl-fs": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-2.4.4.tgz", - "integrity": "sha1-vm/zJwy1Xf19MGNkDegfJddTIjk=", - "dev": true, - "requires": { - "duplexify": "^3.2.0", - "glob-stream": "^5.3.2", - "graceful-fs": "^4.0.0", - "gulp-sourcemaps": "1.6.0", - "is-valid-glob": "^0.3.0", - "lazystream": "^1.0.0", - "lodash.isequal": "^4.0.0", - "merge-stream": "^1.0.0", - "mkdirp": "^0.5.0", - "object-assign": "^4.0.0", - "readable-stream": "^2.0.4", - "strip-bom": "^2.0.0", - "strip-bom-stream": "^1.0.0", - "through2": "^2.0.0", - "through2-filter": "^2.0.0", - "vali-date": "^1.0.0", - "vinyl": "^1.0.0" - }, - "dependencies": { - "clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", - "dev": true - }, - "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "replace-ext": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-0.0.1.tgz", - "integrity": "sha1-KbvZIHinOfC8zitO5B6DeVNSKSQ=", - "dev": true - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "vinyl": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-1.2.0.tgz", - "integrity": 
"sha1-XIgDbPVl5d8FVYv8kR+GVt8hiIQ=", - "dev": true, - "requires": { - "clone": "^1.0.0", - "clone-stats": "^0.0.1", - "replace-ext": "0.0.1" - } - } - } - }, - "vinyl-source-stream": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vinyl-source-stream/-/vinyl-source-stream-1.1.2.tgz", - "integrity": "sha1-YrU6E1YQqJbpjKlr7jqH8Aio54A=", - "dev": true, - "requires": { - "through2": "^2.0.3", - "vinyl": "^0.4.3" - } - }, - "vinyl-sourcemap": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-1.1.0.tgz", - "integrity": "sha1-kqgAWTo4cDqM2xHYswCtS+Y7PhY=", - "dev": true, - "requires": { - "append-buffer": "^1.0.2", - "convert-source-map": "^1.5.0", - "graceful-fs": "^4.1.6", - "normalize-path": "^2.1.1", - "now-and-later": "^2.0.0", - "remove-bom-buffer": "^3.0.0", - "vinyl": "^2.0.0" - }, - "dependencies": { - "clone": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", - "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", - "dev": true - }, - "clone-stats": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", - "integrity": "sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=", - "dev": true - }, - "vinyl": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-2.2.0.tgz", - "integrity": "sha512-MBH+yP0kC/GQ5GwBqrTPTzEfiiLjta7hTtvQtbxBgTeSXsmKQRQecjibMbxIXzVT3Y9KJK+drOz1/k+vsu8Nkg==", - "dev": true, - "requires": { - "clone": "^2.1.1", - "clone-buffer": "^1.0.0", - "clone-stats": "^1.0.0", - "cloneable-readable": "^1.0.0", - "remove-trailing-separator": "^1.0.1", - "replace-ext": "^1.0.0" - } - } - } - }, - "vsce": { - "version": "1.53.2", - "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.53.2.tgz", - "integrity": "sha512-yo7ctgQPK7hKnez/be3Tj7RG3eZzgkFhx/27y9guwzhMxHfjlU1pusAsFT8wBEZKZlYA5HNJAx8oClw4WDWi+A==", - "dev": true, - "requires": { - "cheerio": "^1.0.0-rc.1", - "commander": "^2.8.1", - 
"denodeify": "^1.2.1", - "glob": "^7.0.6", - "lodash": "^4.17.10", - "markdown-it": "^8.3.1", - "mime": "^1.3.4", - "minimatch": "^3.0.3", - "osenv": "^0.1.3", - "parse-semver": "^1.1.1", - "read": "^1.0.7", - "semver": "^5.1.0", - "tmp": "0.0.29", - "url-join": "^1.1.0", - "vso-node-api": "6.1.2-preview", - "yauzl": "^2.3.1", - "yazl": "^2.2.2" - } - }, - "vscode": { - "version": "1.1.22", - "resolved": "https://registry.npmjs.org/vscode/-/vscode-1.1.22.tgz", - "integrity": "sha512-G/zu7PRAN1yF80wg+l6ebIexDflU3uXXeabacJuLearTIfObKw4JaI8aeHwDEmpnCkc3MkIr3Bclkju2gtEz6A==", - "dev": true, - "requires": { - "glob": "^7.1.2", - "gulp-chmod": "^2.0.0", - "gulp-filter": "^5.0.1", - "gulp-gunzip": "1.0.0", - "gulp-remote-src-vscode": "^0.5.1", - "gulp-symdest": "^1.1.1", - "gulp-untar": "^0.0.7", - "gulp-vinyl-zip": "^2.1.2", - "mocha": "^4.0.1", - "request": "^2.83.0", - "semver": "^5.4.1", - "source-map-support": "^0.5.0", - "url-parse": "^1.4.3", - "vinyl-source-stream": "^1.1.0" - } - }, - "vscode-jsonrpc": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-4.0.0.tgz", - "integrity": "sha512-perEnXQdQOJMTDFNv+UF3h1Y0z4iSiaN9jIlb0OqIYgosPCZGYh/MCUlkFtV2668PL69lRDO32hmvL2yiidUYg==" - }, - "vscode-languageclient": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-4.4.2.tgz", - "integrity": "sha512-9TUzsg1UM6n1UEyPlWbDf7tK1wJAK7UGFRmGDN8sz4KmbbDiVRh6YicaB/5oRSVTpuV47PdJpYlOl3SJ0RiK1Q==", - "requires": { - "vscode-languageserver-protocol": "^3.10.3" - } - }, - "vscode-languageserver-protocol": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.13.0.tgz", - "integrity": "sha512-2ZGKwI+P2ovQll2PGAp+2UfJH+FK9eait86VBUdkPd9HRlm8e58aYT9pV/NYanHOcp3pL6x2yTLVCFMcTer0mg==", - "requires": { - "vscode-jsonrpc": "^4.0.0", - "vscode-languageserver-types": "3.13.0" - } - }, - 
"vscode-languageserver-types": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.13.0.tgz", - "integrity": "sha512-BnJIxS+5+8UWiNKCP7W3g9FlE7fErFw0ofP5BXJe7c2tl0VeWh+nNHFbwAS2vmVC4a5kYxHBjRy0UeOtziemVA==" - }, - "vso-node-api": { - "version": "6.1.2-preview", - "resolved": "https://registry.npmjs.org/vso-node-api/-/vso-node-api-6.1.2-preview.tgz", - "integrity": "sha1-qrNUbfJFHs2JTgcbuZtd8Zxfp48=", - "dev": true, - "requires": { - "q": "^1.0.1", - "tunnel": "0.0.4", - "typed-rest-client": "^0.9.0", - "underscore": "^1.8.3" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", - "dev": true - }, - "xtend": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", - "integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=", - "dev": true - }, - "yauzl": { - "version": "2.10.0", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", - "integrity": "sha1-x+sXyT4RLLEIb6bY5R+wZnt5pfk=", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3", - "fd-slicer": "~1.1.0" - } - }, - "yazl": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/yazl/-/yazl-2.5.0.tgz", - "integrity": "sha512-rgptqKwX/f1/7bIRF1FHb4HGsP5k11QyxBpDl1etUDfNpTa7CNjDOYNPFnIaEzZ9dRq0c47IEJS+sy+T39JCLw==", - "dev": true, - "requires": { - "buffer-crc32": "~0.2.3" - } - } - } -} diff --git a/editors/code/package.json b/editors/code/package.json deleted file mode 100644 index aa346f2b3..000000000 --- a/editors/code/package.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "name": "pikelet", - "displayName": "Pikelet", - "description": "Pikelet language support for VS Code", - "version": "0.0.1", - "publisher": "brendanzab", - "license": "Apache-2.0", - "repository": { - "type": "git", - "url": "https://github.com/pikelet-lang/pikelet/" - }, - "bugs": { - "url": 
"https://github.com/pikelet-lang/pikelet/issues" - }, - "categories": [ - "Programming Languages" - ], - "keywords": [ - "pikelet", - "pikelet-lang" - ], - "preview": true, - "scripts": { - "fix": "prettier **/*.{json,ts} --write && tslint --project . --fix", - "lint": "tslint --config ./tslint.json './src/**/*.ts'", - "compile": "tsc -p ./tsconfig.json", - "watch": "tsc -watch -p ./", - "prettier": "prettier **/*.{json,ts}", - "install-dev-extension": "npm install && vsce package -o ./out/rls-vscode-dev.vsix && code --install-extension ./out/rls-vscode-dev.vsix", - "postinstall": "vscode-install", - "travis": "npm run compile && npm run lint && npm run prettier --list-different", - "update-vscode": "vscode-install", - "vscode:prepublish": "npm run compile" - }, - "dependencies": { - "vscode-languageclient": "^4.1.4" - }, - "devDependencies": { - "prettier": "^1.15.2", - "tslint": "^5.11.0", - "tslint-config-prettier": "^1.17.0", - "typescript": "^2.6.1", - "vsce": "^1.53.2", - "vscode": "^1.1.22" - }, - "prettier": { - "tabWidth": 4, - "singleQuote": true - }, - "engines": { - "vscode": "^1.28.0" - }, - "activationEvents": [ - "onLanguage:pikelet" - ], - "main": "./out/extension", - "contributes": { - "languages": [ - { - "id": "pikelet", - "aliases": [ - "Pikelet", - "pikelet" - ], - "extensions": [ - "pi", - ".đŸ„ž" - ], - "configuration": "./language-configuration.json" - } - ], - "grammars": [ - { - "language": "pikelet", - "scopeName": "source.pikelet", - "path": "./syntaxes/pikelet.tmLanguage.json" - } - ], - "configuration": { - "pikelet.languageServer.path": { - "type": [ - "string" - ], - "default": "pikelet-language-server", - "description": "Specifies the path to the language server executable." 
- } - } - } -} diff --git a/editors/code/pikelet.code-workspace b/editors/code/pikelet.code-workspace deleted file mode 100644 index 90f3950e8..000000000 --- a/editors/code/pikelet.code-workspace +++ /dev/null @@ -1,11 +0,0 @@ -{ - "folders": [ - { - "path": "/Users/brendan/code/pikelet" - }, - { - "path": "." - } - ], - "settings": {} -} diff --git a/editors/code/src/extension.ts b/editors/code/src/extension.ts deleted file mode 100644 index c983fdd7a..000000000 --- a/editors/code/src/extension.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { ExtensionContext, workspace } from 'vscode'; -import { - Executable, - LanguageClient, - LanguageClientOptions, - ServerOptions -} from 'vscode-languageclient'; - -let client: LanguageClient; - -export function activate(context: ExtensionContext) { - // Get the workspace's configuration - const config = workspace.getConfiguration('pikelet'); - - // The server is implemented in node - const serverExecutable: Executable = { - command: config.get('languageServer.path', 'pikelet'), - args: ['language-server'], - options: { - // Enable backtraces so that we can get better reporting for - // any inevitable bugs we might encounter... - env: { - RUST_BACKTRACE: 1 - } - } - }; - - // If the extension is launched in debug mode then the debug server options are used - // Otherwise the run options are used - const serverOptions: ServerOptions = { - run: serverExecutable, - debug: serverExecutable - }; - - // Options to control the language client - const clientOptions: LanguageClientOptions = { - // Register the server for pikelet documents - documentSelector: [ - { - scheme: 'file', - language: 'pikelet' - } - ] - }; - - // Create the language client and start the client. - client = new LanguageClient( - 'pikelet-language-server', - 'Pikelet Language Server', - serverOptions, - clientOptions - ); - - // Start the client. 
This will also launch the server - client.start(); -} - -export function deactivate(): Thenable | undefined { - if (!client) { - return undefined; - } - return client.stop(); -} diff --git a/editors/code/syntaxes/pikelet.tmLanguage.json b/editors/code/syntaxes/pikelet.tmLanguage.json deleted file mode 100644 index 0b17395ad..000000000 --- a/editors/code/syntaxes/pikelet.tmLanguage.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", - "name": "Pikelet", - "patterns": [ - { - "include": "#keywords" - }, - { - "include": "#comments" - }, - { - "include": "#punctuation" - }, - { - "include": "#primitives" - }, - { - "include": "#identifiers" - }, - { - "include": "#strings" - } - ], - "repository": { - "keywords": { - "patterns": [ - { - "name": "keyword.control.pikelet", - "match": "\\b(as|case|else|if|import|in|let|record|Record|then|Type|where)\\b" - } - ] - }, - "comments": { - "patterns": [ - { - "name": "comment.pikelet", - "match": "--.*$" - }, - { - "name": "comment.block.documentation.pikelet", - "match": "\\|\\|\\|.*$" - } - ] - }, - "punctuation": { - "patterns": [ - { - "name": "punctuation.pikelet", - "match": ";|\\(|\\)|\\{|\\}|\\[|\\]" - }, - { - "name": "keyword.operator", - "match": ":|=|\\.|->" - } - ] - }, - "primitives": { - "patterns": [ - { - "name": "support.type.primitive.pikelet", - "match": "\\b(Bool|Char|String|Array|U8|U16|U32|U64|S8|S16|S32|S64|F32|F64)(\\^[0-9]+)?\\b" - }, - { - "name": "support.constant.pikelet", - "match": "\\b(true|false)\\b" - } - ] - }, - "identifiers": { - "patterns": [ - { - "name": "source.pikelet", - "match": "[A-Za-z_\\-][0-9A-Za-z_\\-]*(\\^[0-9]+)?" - } - ] - }, - "strings": { - "name": "string.quoted.double.pikelet", - "begin": "\"", - "end": "\"", - "patterns": [ - { - "name": "constant.character.escape.pikelet", - "match": "\\\\." 
- } - ] - } - }, - "scopeName": "source.pikelet" -} diff --git a/editors/code/tsconfig.json b/editors/code/tsconfig.json deleted file mode 100644 index ef93c52b7..000000000 --- a/editors/code/tsconfig.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "compilerOptions": { - "module": "commonjs", - "target": "es6", - "outDir": "out", - "lib": ["es6"], - "sourceMap": true, - "rootDir": "src", - "strict": true - }, - "exclude": ["node_modules", ".vscode-test"] -} diff --git a/editors/code/tslint.json b/editors/code/tslint.json deleted file mode 100644 index bdeb4895e..000000000 --- a/editors/code/tslint.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "defaultSeverity": "error", - "extends": ["tslint:recommended", "tslint-config-prettier"], - "rules": { - "quotemark": [true, "single"], - "interface-name": false, - "object-literal-sort-keys": false - } -} diff --git a/editors/code/vsc-extension-quickstart.md b/editors/code/vsc-extension-quickstart.md deleted file mode 100644 index b2478da0c..000000000 --- a/editors/code/vsc-extension-quickstart.md +++ /dev/null @@ -1,27 +0,0 @@ -# Welcome to your VS Code Extension - -## What's in the folder -* This folder contains all of the files necessary for your extension. -* `package.json` - this is the manifest file in which you declare your language support and define -the location of the grammar file that has been copied into your extension. -* `syntaxes/pikelet.tmLanguage.json` - this is the Text mate grammar file that is used for tokenization. -* `language-configuration.json` - this the language configuration, defining the tokens that are used for -comments and brackets. - -## Get up and running straight away -* Make sure the language configuration settings in `language-configuration.json` are accurate. -* Press `F5` to open a new window with your extension loaded. -* Create a new file with a file name suffix matching your language. -* Verify that syntax highlighting works and that the language configuration settings are working. 
- -## Make changes -* You can relaunch the extension from the debug toolbar after making changes to the files listed above. -* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes. - -## Add more language features -* To add features such as intellisense, hovers and validators check out the VS Code extenders documentation at -https://code.visualstudio.com/docs - -## Install your extension -* To start using your extension with Visual Studio Code copy it into the `/.vscode/extensions` folder and restart Code. -* To share your extension with the world, read on https://code.visualstudio.com/docs about publishing an extension. diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 000000000..f456f5411 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,7 @@ +# Pikelet examples + +This directory includes a bunch of different example Pikelet programs. + +We make sure to ensure these are kept up-to-date in [`../pikelet/tests/examples.rs`]. 
+ +[`../pikelet/tests/examples.rs`]: ../pikelet/tests/examples.rs diff --git a/examples/comments.pi b/examples/comments.pi new file mode 100644 index 000000000..14be2205e --- /dev/null +++ b/examples/comments.pi @@ -0,0 +1,9 @@ +-- This is a line comment +record { + -- Another line comment + ||| This is a doc comment + x = Record {} +} : Record { + ||| This is another doc comment + x : Type, +} diff --git a/examples/functions.pi b/examples/functions.pi new file mode 100644 index 000000000..917911f3d --- /dev/null +++ b/examples/functions.pi @@ -0,0 +1,13 @@ +record { + id-String = fun a => a, + const-String-S32 = fun a b => a, + + id = fun A a => a, + const = fun A B a b => a, +} : Record { + id-String : String -> String, + const-String-S32 : String -> S32 -> String, + + id : Fun (A : Type) -> A -> A, + const : Fun (A : Type) (B : Type) -> A -> B -> A, +} diff --git a/examples/hello-world.pi b/examples/hello-world.pi new file mode 100644 index 000000000..54262db70 --- /dev/null +++ b/examples/hello-world.pi @@ -0,0 +1 @@ +"Hello beautiful world!" diff --git a/examples/literals.pi b/examples/literals.pi new file mode 100644 index 000000000..7344d207f --- /dev/null +++ b/examples/literals.pi @@ -0,0 +1,37 @@ +record { + b2 = 0b1001_0101, + b8 = 0o01234567, + b10 = 0123456789, + b16 = 0x01234_abcdef_ABCDEF, + + char-tab = '\t', + char-carriage-return = '\r', + char-newline = '\n', + char-null = '\0', + char-backslash = '\\', + char-single-quote = '\'', + char-double-quote = '\"', + char-ascii-escape = '\x32', + char-unicode-escape = '\u{0001}', + + string-utf8 = "Γ ⊱ e ∈ τ ... 
đŸŒ±đŸŒłđŸŒČ🌿", + string-escapes = "\t\r\n\0\\ \'\" \x32 \u{0001}", +} : Record { + b2 : S32, + b8 : S32, + b10 : S32, + b16 : U64, + + char-tab : Char, + char-carriage-return : Char, + char-newline : Char, + char-null : Char, + char-backslash : Char, + char-single-quote : Char, + char-double-quote : Char, + char-ascii-escape : Char, + char-unicode-escape : Char, + + string-utf8 : String, + string-escapes : String, +} diff --git a/examples/prelude.pi b/examples/prelude.pi new file mode 100644 index 000000000..09da09bf0 --- /dev/null +++ b/examples/prelude.pi @@ -0,0 +1,145 @@ +-- TODO: type annotations for record terms +record { + id = fun A a => a, + always = fun A B a b => a, + + dep-compose = fun A B C a-b a-b-c a => + a-b-c a (a-b a), + compose = fun A B C a-b b-c => + dep-compose A (fun a => B) (fun a b => C) a-b (fun a b => b-c b), + + dep-flip = fun A B C a-b-c b a => + a-b-c a b, + flip = fun A B C => + dep-flip A B (fun a b => C), + + dep-subst = fun A B C a-b-c a-b a => + a-b-c a (a-b a), + subst = fun A B C => + dep-subst A (fun a => B) (fun a b => C), + + Unit = Record {}, + unit = record {}, + + Prod = fun A B => Fun (a : A) -> B a, + Sum = fun A B => Record { val : A, proof : B val }, + + Semigroup = Record { + ||| The carrier type + Carrier : Type, + ||| The associative operation + append : Carrier -> Carrier -> Carrier, + }, + + Category = Record { + ||| An object in the category + Object : Type, + ||| Arrows between the objects in the category + Arrow : Object -> Object -> Type, + ||| The identity arrow + id : Fun (A : Object) -> Arrow A A, + ||| The sequencing of two arrows + seq : Fun (A B C : Object) -> Arrow A B -> Arrow B C -> Arrow A C, + }, + + -- FIXME: level shifts for record fields + -- category-pikelet = record { + -- Object = Type, + -- Arrow = fun A B => A -> B, + -- -- TODO: Use combinator definitions + -- id = fun A a => a, + -- seq = fun A B C a-b b-c a => b-c (a-b a), + -- }, +} : Record { + ||| The polymorphic identity function. 
+ id : Fun (A : Type) -> A -> A, + + ||| Creates a function that always returns the same value. + always : Fun (A B : Type) -> A -> B -> A, + + + ||| Dependent function composition. + dep-compose : + Fun (A : Type) + (B : A -> Type) + (C : Fun (a : A) -> B a -> Type) + (a-b : Fun (a : A) -> B a) + (a-b-c : Fun (a : A) (b : B a) -> C a b) + -> (Fun (a : A) -> C a (a-b a)), + + ||| Function composition. + compose : Fun (A B C : Type) -> (A -> B) -> (B -> C) -> (A -> C), + + + ||| Flip the order of the first two inputs to a dependent function. + dep-flip : + Fun (A B : Type) + (C : A -> B -> Type) + (a-b-c : Fun (a : A) (b : B) -> C a b) + -> (Fun (b : B) (a : A) -> C a b), + + ||| Flip the order of the first two inputs to a function + flip : Fun (A B C : Type) -> (A -> B -> C) -> (B -> A -> C), + + + ||| Dependent substitution. + ||| + ||| Takes three inputs and then returns the first input applied to the third, + ||| which is then applied to the result of the second input applied to the third. + ||| + ||| Also known as the 'S Combinator' in the [SKI combinator calculus][ski-wiki]. + ||| + ||| # References + ||| + ||| - [Outrageous but Meaningful Coincidences: Dependent type-safe syntax and evaluation][dep-rep] + ||| (Described in Section 5 as an infix `_Ëą_` operator) + ||| + ||| [ski-wiki]: https://en.wikipedia.org/wiki/SKI_combinator_calculus + ||| [dep-rep]: https://personal.cis.strath.ac.uk/conor.mcbride/pub/DepRep/DepRep.pdf + dep-subst : + Fun (A : Type) + (B : A -> Type) + (C : Fun (a : A) -> B a -> Type) + (a-b-c : Fun (a : A) (b : B a) -> C a b) + (a-b : Fun (a : A) -> B a) + -> (Fun (a : A) -> C a (a-b a)), + + ||| Substitution. + subst : Fun (A B C : Type) -> (A -> B -> C) -> (A -> B) -> (A -> C), + + + ||| The unit type + ||| + ||| This is a synonym for the empty record, and can be constructed using the + ||| `unit` function. 
+ Unit : Type, + + ||| Create an element of the `Unit` type + unit : Unit, + + + ||| Dependent products + Prod : Fun (A : Type) (B : A -> Type) -> Type, + + ||| Dependent sums (subtypes) + Sum : Fun (A : Type) (B : A -> Type) -> Type, + + + ||| A carrier equipped with an associative operation + Semigroup : Type^1, + + ||| A category is a very general structure that provides a common way of + ||| composing units of functionality + ||| + ||| The most common category programmers would be familiar with would be `Type`s + ||| are the objects, and the functions between those types are the arrows. Many + ||| other categories exist though, for example: + ||| + ||| - nodes in a directed graph, and the edges between those nodes. + ||| - etc. + Category : Type^1, + + -- FIXME: level shifts for record fields + -- ||| Category of Pikelet functions and types. + -- category-pikelet : Category^1, +} diff --git a/examples/record-mesh.pi b/examples/record-mesh.pi new file mode 100644 index 000000000..a4b16ec1a --- /dev/null +++ b/examples/record-mesh.pi @@ -0,0 +1,47 @@ +record { + Vertex = Record { + point : Array 3 F32, + normal : Array 3 F32, + color : Array 4 F32, + }, + Mesh = Record { + ||| The index data to use. + index-data : List U16, + ||| The vertex data. 
+ vertex-data : List Vertex, + }, + + examples = record { + cube = record { + index-data = [ + 0, 1, 3, 3, 1, 2, + 1, 4, 2, 2, 4, 7, + 4, 5, 7, 7, 5, 6, + 5, 0, 6, 6, 0, 3, + 3, 2, 6, 6, 2, 7, + 5, 4, 0, 0, 4, 1, + ], + vertex-data = [ + record { point = [0, 0, 0], normal = [0, 0, 1], color = [1, 0, 0, 1] }, + record { point = [1, 0, 0], normal = [0, 0, 1], color = [0, 1, 0, 1] }, + record { point = [1, 1, 0], normal = [0, 0, 1], color = [0, 0, 1, 1] }, + record { point = [0, 1, 0], normal = [0, 0, 1], color = [1, 0, 1, 1] }, + record { point = [1, 0, -1], normal = [0, 0, 1], color = [1, 0, 0, 1] }, + record { point = [0, 0, -1], normal = [0, 0, 1], color = [0, 1, 0, 1] }, + record { point = [0, 1, -1], normal = [0, 0, 1], color = [0, 0, 1, 1] }, + record { point = [1, 1, -1], normal = [0, 0, 1], color = [1, 0, 1, 1] }, + ], + }, + }, +} : Record { + ||| The type of vertex in a mesh. + Vertex : Type, + ||| Mesh data, made up of an index buffer and a vertex buffer. + Mesh : Type, + + ||| Example meshes. + examples : Record { + ||| Cube mesh. 
+ cube : Mesh, + } +} diff --git a/examples/record-term-deps.pi b/examples/record-term-deps.pi new file mode 100644 index 000000000..48aae46c7 --- /dev/null +++ b/examples/record-term-deps.pi @@ -0,0 +1,11 @@ +record { + A = U32, + B = A, + a = 23, + b = a, +} : Record { + A : Type, + B : Type, + a : A, + b : B, +} diff --git a/examples/record-type-deps.pi b/examples/record-type-deps.pi new file mode 100644 index 000000000..063c0567e --- /dev/null +++ b/examples/record-type-deps.pi @@ -0,0 +1,7 @@ +record { + A = U32, + a = 23, +} : Record { + A : Type, + a : A, +} diff --git a/examples/universes.pi b/examples/universes.pi new file mode 100644 index 000000000..5abacc35a --- /dev/null +++ b/examples/universes.pi @@ -0,0 +1,11 @@ +record { + S32 = S32, + Type0 = Type^0, + Type1 = Type^1, + Type256 = Type^256, +} : Record { + S32 : Type^0 : Type^1 : Type^257, + Type0 : Type^1, + Type1 : Type^3, + Type256 : Type^257, +} diff --git a/examples/window-settings.pi b/examples/window-settings.pi new file mode 100644 index 000000000..2a9c5bc13 --- /dev/null +++ b/examples/window-settings.pi @@ -0,0 +1,27 @@ +record { + window = record { + title = "Voyager", + default-size = record { + width = 1280.0, + height = 720.0, + }, + fullscreen = false, + }, + controls = record { + move-speed = 2.0, + look-speed = 15.0, + }, +} : Record { + window : Record { + title : String, + default-size : Record { + width : F32, + height : F32, + }, + fullscreen : Bool, + }, + controls : Record { + move-speed : F32, + look-speed : F32, + }, +} diff --git a/package.json b/package.json new file mode 100644 index 000000000..b6a9a3117 --- /dev/null +++ b/package.json @@ -0,0 +1,6 @@ +{ + "private": true, + "workspaces": [ + "book" + ] +} diff --git a/pikelet-cli/Cargo.toml b/pikelet-cli/Cargo.toml new file mode 100644 index 000000000..051cb4607 --- /dev/null +++ b/pikelet-cli/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "pikelet-cli" +version = "0.1.0" +authors = ["Brendan Zabarauskas "] +edition = 
"2018" +publish = false +description = "Command line interface for interacting with the Pikelet programming language" +homepage = "https://github.com/pikelet-lang/pikelet" +repository = "https://github.com/pikelet-lang/pikelet" +readme = "README.md" +keywords = ["pikelet", "cli"] +categories = ["command-line-utilities"] +license = "Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[[bin]] +name = "pikelet" +path = "src/main.rs" + +[features] +default = ["editor", "language-server"] +editor = ["pikelet-editor"] +language-server = ["pikelet-language-server"] + +[dependencies] +anyhow = "1.0" +codespan-reporting = "0.9.5" +crossbeam-channel = "0.4" +pikelet = { path = "../pikelet" } +pikelet-editor = { path = "../pikelet-editor", optional = true } +pikelet-language-server = { path = "../pikelet-language-server", optional = true } +pretty = "0.10" +rustyline = "6.2" +structopt = "0.3" +term_size = "0.3" +xdg = "2.2" diff --git a/pikelet-cli/README.md b/pikelet-cli/README.md new file mode 100644 index 000000000..bc71b642c --- /dev/null +++ b/pikelet-cli/README.md @@ -0,0 +1,3 @@ +# pikelet-cli + +Command line interface for interacting with the Pikelet programming language diff --git a/pikelet-cli/src/check.rs b/pikelet-cli/src/check.rs new file mode 100644 index 000000000..0d5f33137 --- /dev/null +++ b/pikelet-cli/src/check.rs @@ -0,0 +1,61 @@ +use codespan_reporting::diagnostic::Severity; +use codespan_reporting::files::SimpleFiles; +use codespan_reporting::term::termcolor::{BufferedStandardStream, ColorChoice}; +use pikelet::lang::{core, surface}; +use pikelet::pass::surface_to_core; +use std::io::Write; +use std::path::PathBuf; + +/// Check some Pikelet source files. +#[derive(structopt::StructOpt)] +pub struct Options { + /// Validate the elaborated core language. + #[structopt(long = "validate-core")] + validate_core: bool, + /// The Pikelet source files to be checked. 
+ #[structopt(name = "FILE")] + file_names: Vec, +} + +pub fn run(options: Options) -> anyhow::Result<()> { + let pretty_alloc = pretty::BoxAllocator; + let mut writer = BufferedStandardStream::stderr(ColorChoice::Always); + let reporting_config = codespan_reporting::term::Config::default(); + + let globals = core::Globals::default(); + let (messages_tx, messages_rx) = crossbeam_channel::unbounded(); + let mut files = SimpleFiles::new(); + let mut surface_to_core = surface_to_core::State::new(&globals, messages_tx.clone()); + let mut core_typing = match options.validate_core { + true => Some(core::typing::State::new(&globals, messages_tx.clone())), + false => None, + }; + + let mut is_ok = true; + + for file_name in &options.file_names { + let source = std::fs::read_to_string(file_name)?; + let file_id = files.add(file_name.display().to_string(), source); + let file = files.get(file_id).unwrap(); + + let surface_term = surface::Term::from_str(file_id, file.source(), &messages_tx); + + let (core_term, _) = surface_to_core.synth_type(&surface_term); + if let Some(core_typing) = &mut core_typing { + let _ = core_typing.synth_type(&core_term); + } + + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + is_ok &= diagnostic.severity < Severity::Error; + + codespan_reporting::term::emit(&mut writer, &reporting_config, &files, &diagnostic)?; + writer.flush()?; + } + } + + match is_ok { + true => Ok(()), + false => Err(anyhow::anyhow!("errors found in supplied source files")), + } +} diff --git a/pikelet-cli/src/lib.rs b/pikelet-cli/src/lib.rs new file mode 100644 index 000000000..89832a88c --- /dev/null +++ b/pikelet-cli/src/lib.rs @@ -0,0 +1,46 @@ +use anyhow::anyhow; + +pub mod check; +pub mod repl; + +/// The Pikelet command line interface. +#[derive(structopt::StructOpt)] +pub enum Options { + /// Check some Pikelet source files. + #[structopt(name = "check")] + Check(check::Options), + /// Runs the structured editor. 
+ #[cfg(feature = "editor")] + #[structopt(name = "editor")] + Editor, + /// Runs the language server. + #[cfg(feature = "language-server")] + #[structopt(name = "language-server")] + LanguageServer, + /// Runs the REPL/interactive mode. + #[structopt(name = "repl")] + Repl(repl::Options), +} + +/// Run the CLI with the given options +pub fn run(options: Options) -> anyhow::Result<()> { + match options { + Options::Check(options) => check::run(options), + #[cfg(feature = "editor")] + Options::Editor => { + // FIXME: `iced::Error` is not `Send + Sync`, and so is incompatible with `anyhow::Result`. + // See this issue for more information: https://github.com/hecrj/iced/issues/516 + pikelet_editor::run().map_err(|err| anyhow!("{}", err)) + } + #[cfg(feature = "language-server")] + Options::LanguageServer => pikelet_language_server::run(), + Options::Repl(options) => repl::run(options), + } +} + +fn term_width() -> usize { + match term_size::dimensions() { + Some((width, _)) => width, + None => std::usize::MAX, + } +} diff --git a/pikelet-cli/src/main.rs b/pikelet-cli/src/main.rs new file mode 100644 index 000000000..19013d2bc --- /dev/null +++ b/pikelet-cli/src/main.rs @@ -0,0 +1,6 @@ +use pikelet_cli::Options; +use structopt::StructOpt; + +fn main() -> anyhow::Result<()> { + pikelet_cli::run(Options::from_args()) +} diff --git a/pikelet-cli/src/repl.rs b/pikelet-cli/src/repl.rs new file mode 100644 index 000000000..a463aa676 --- /dev/null +++ b/pikelet-cli/src/repl.rs @@ -0,0 +1,140 @@ +use codespan_reporting::diagnostic::Severity; +use codespan_reporting::files::SimpleFiles; +use codespan_reporting::term::termcolor::{BufferedStandardStream, ColorChoice}; +use pikelet::lang::{core, surface}; +use pikelet::pass::{surface_to_core, surface_to_pretty}; +use rustyline::error::ReadlineError; +use std::io::Write; +use std::sync::Arc; + +const HISTORY_FILE_NAME: &str = "history"; + +/// The Pikelet REPL/interactive mode. 
+#[derive(structopt::StructOpt)] +pub struct Options { + /// The prompt to display before expressions. + #[structopt(long = "prompt", default_value = "> ")] + pub prompt: String, + /// Disable the welcome banner on startup. + #[structopt(long = "no-banner")] + pub no_banner: bool, + /// Disable saving of command history on exit. + #[structopt(long = "no-history")] + pub no_history: bool, +} + +fn print_welcome_banner() { + const WELCOME_BANNER: &[&str] = &[ + r" ____ _ __ __ __ ", + r" / __ \(_) /_____ / /__ / /_ ", + r" / /_/ / / //_/ _ \/ / _ \/ __/ ", + r" / ____/ / ,< / __/ / __/ /_ ", + r"/_/ /_/_/|_|\___/_/\___/\__/ ", + r"", + ]; + + for (i, line) in WELCOME_BANNER.iter().enumerate() { + // warning on `env!` is a known issue + #[allow(clippy::print_literal)] + match i { + 2 => println!("{}Version {}", line, env!("CARGO_PKG_VERSION")), + 3 => println!("{}{}", line, env!("CARGO_PKG_HOMEPAGE")), + 4 => println!("{}:? for help", line), + _ => println!("{}", line.trim_end()), + } + } +} + +pub fn run(options: Options) -> anyhow::Result<()> { + let mut editor = { + let config = rustyline::Config::builder() + .history_ignore_space(true) + .history_ignore_dups(true) + .build(); + + rustyline::Editor::<()>::with_config(config) + }; + + if !options.no_banner { + print_welcome_banner() + } + + // TODO: Use appropriate directory on Windows + let xdg_dirs = xdg::BaseDirectories::with_prefix("pikelet/repl")?; + let history_path = xdg_dirs.get_data_home().join(HISTORY_FILE_NAME); + + if !options.no_history && editor.load_history(&history_path).is_err() { + // No previous REPL history! 
+ } + + let pretty_alloc = pretty::BoxAllocator; + let mut writer = BufferedStandardStream::stderr(ColorChoice::Always); + let reporting_config = codespan_reporting::term::Config::default(); + + let globals = core::Globals::default(); + let (messages_tx, messages_rx) = crossbeam_channel::unbounded(); + let mut files = SimpleFiles::new(); + let mut state = surface_to_core::State::new(&globals, messages_tx.clone()); + + 'repl: loop { + let (file_id, file) = match editor.readline(&options.prompt) { + Ok(line) => { + let file_id = files.add("", line); + (file_id, files.get(file_id).unwrap()) + } + Err(ReadlineError::Interrupted) => { + println!("Interrupted!"); + continue 'repl; + } + Err(ReadlineError::Eof) => break 'repl, + Err(error) => return Err(error.into()), + }; + + if !options.no_history { + editor.add_history_entry(file.source()); + } + + // TODO: Parse REPL commands + // + // Command Arguments Purpose + // + // normalize a term in the context + // :? :h :help display this help text + // :core print the core representation of a term + // :local : define a local assumption in the REPL context + // :local = define a local definition in the REPL context + // :q :quit quit the repl + // :t :type infer the type of a term + let surface_term = surface::Term::from_str(file_id, file.source(), &messages_tx); + let (core_term, r#type) = state.synth_type(&surface_term); + + let mut is_ok = true; + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + is_ok &= diagnostic.severity < Severity::Error; + + codespan_reporting::term::emit(&mut writer, &reporting_config, &files, &diagnostic)?; + writer.flush()?; + } + + if is_ok { + let ann_term = core::Term::generated(core::TermData::Ann( + Arc::new(state.normalize_term(&core_term)), + Arc::new(state.read_back_value(&r#type)), + )); + let term = state.core_to_surface_term(&ann_term); + let doc = surface_to_pretty::from_term(&pretty_alloc, &term); + + println!("{}", 
doc.1.pretty(crate::term_width()));
+ }
+ }
+
+ if !options.no_history && !editor.history().is_empty() {
+ let history_path = xdg_dirs.place_data_file(HISTORY_FILE_NAME)?;
+ editor.save_history(&history_path)?;
+ }
+
+ println!("Bye bye");
+
+ Ok(())
+}
diff --git a/pikelet-editor/Cargo.toml b/pikelet-editor/Cargo.toml
new file mode 100644
index 000000000..fe68f4a45
--- /dev/null
+++ b/pikelet-editor/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "pikelet-editor"
+version = "0.1.0"
+authors = ["Brendan Zabarauskas <bjzaba@yahoo.com.au>"]
+edition = "2018"
+publish = false
+description = "Graphical user interface for interacting with the Pikelet programming language"
+homepage = "https://github.com/pikelet-lang/pikelet"
+repository = "https://github.com/pikelet-lang/pikelet"
+readme = "README.md"
+keywords = ["pikelet", "gui"]
+categories = []
+license = "Apache-2.0"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+iced = "0.2"
+pikelet = { path = "../pikelet" }
diff --git a/pikelet-editor/README.md b/pikelet-editor/README.md
new file mode 100644
index 000000000..d64e61542
--- /dev/null
+++ b/pikelet-editor/README.md
@@ -0,0 +1,54 @@
+# pikelet-editor
+
+This is intended to provide the basis for a structured programming environment for Pikelet.
+While the hopes behind this are ambitious, we will need to proceed carefully, taking care to avoid getting trapped down rabbit holes.
+
+It should be able to be natively compiled, run in a browser, or be embedded in other applications, like game engines.
+For this, libraries like [Iced](https://github.com/hecrj/iced) could prove useful.
+Another possibility could be to embed this within Electron-based IDEs like VS Code.
+We might also want to eventually investigate implementing the programming environment using platform-specific GUI toolkits.
+
+## Inspiration
+
+### Projects
+
+- [Aardappel](http://strlen.com/aardappel-language/)
+- [Alfa](http://www.cse.chalmers.se/~hallgren/Alfa/)
+- [Dark](https://darklang.com/)
+- [Eve](http://witheve.com/)
+- [Factor](https://factorcode.org/)
+- [fructure](https://github.com/disconcision/fructure)
+- [Glamorous Toolkit](https://gtoolkit.com/)
+- [Hazel](https://hazel.org/)
+- [Houdini Networks](https://www.sidefx.com/docs/houdini/network/index.html)
+- [Livelits](https://github.com/hazelgrove/livelits-tyde/blob/master/livelits-tyde.pdf)
+- [medit](https://github.com/molikto/medit)
+- [MPS](https://www.jetbrains.com/mps/)
+- [Pharo](https://pharo.org/)
+- [Self](https://selflanguage.org/)
+- [Symbolics Lisp](https://twitter.com/RainerJoswig/status/1213528401774071813)
+- [TreeSheets](http://strlen.com/treesheets/)
+- [Unreal Blueprints](https://docs.unrealengine.com/en-US/Engine/Blueprints)
+
+### Collections
+
+- [Visual Programming Codex](https://github.com/ivanreese/visual-programming-codex)
+- [The Whole Code Catalog](https://futureofcoding.org/catalog/)
+
+### Papers
+
+- "Tangible Functional Programming"
+ Conal M. Elliott, + [CiteSeer](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.422.6896) +- "An Extensible Proof Text Editor"
+ Thomas Hallgren, Aarne Ranta, + [CiteSeer](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.138.2186) +- "Interactive visual functional programming"
+ Keith Hanna, + [ACM](https://dl.acm.org/doi/10.1145/583852.581493) +- "A Document-Centered Environment for Haskell"
+ Keith Hanna, + [ACM](https://dl.acm.org/doi/10.1007/11964681_12) +- "Accessible AST-Based Programming for Visually-Impaired Programmers"
+ Emmanuel Schanzer, Sina Bahram, Shriram Krishnamurthi, + [PDF](https://cs.brown.edu/~sk/Publications/Papers/Published/sbk-accessible-ast-blocks/paper.pdf) diff --git a/pikelet-editor/src/lib.rs b/pikelet-editor/src/lib.rs new file mode 100644 index 000000000..8df271830 --- /dev/null +++ b/pikelet-editor/src/lib.rs @@ -0,0 +1,138 @@ +use iced::{Column, Container, Element, Row, Sandbox, Settings, Text, TextInput}; + +pub fn run() -> Result<(), iced::Error> { + State::run(Settings::default()) +} + +#[derive(Debug, Clone)] +pub enum Message { + InputChanged(String), + InputSubmit, +} + +pub struct State { + globals: pikelet::lang::core::Globals, + input: iced::text_input::State, + input_value: String, +} + +impl Sandbox for State { + type Message = Message; + + fn new() -> State { + State { + globals: pikelet::lang::core::Globals::default(), + input: iced::text_input::State::default(), + input_value: "".to_owned(), + } + } + + fn title(&self) -> String { + "Pikelet".to_owned() + } + + fn update(&mut self, message: Message) { + match message { + Message::InputChanged(value) => { + self.input_value = value; + } + Message::InputSubmit => { + // TODO: execute expression + self.input_value.clear(); + } + } + } + + fn view(&mut self) -> Element { + let State { + globals, + input, + input_value, + } = self; + + Container::new( + Column::new() + .push(Text::new("Hi this is Pikelet!")) + // TODO: Move to separate window? + .push(Text::new("Globals:")) + .push( + globals + .entries() + .fold(Column::new(), |column, (name, (r#type, term))| { + column.push({ + let entry = Row::new() + .push(Text::new(name)) + .push(Text::new(" : ")) + .push(view_term(r#type)); + + match term { + None => entry.push(Text::new("")).push(Text::new("")), + Some(term) => { + entry.push(Text::new(" = ")).push(view_term(term)) + } + } + }) + }), + ) + .push( + TextInput::new( + input, + "Pikelet expression
", + input_value, + Message::InputChanged, + ) + .on_submit(Message::InputSubmit), + ), + ) + .into() + } +} + +fn view_term(term: &pikelet::lang::core::Term) -> Element { + use pikelet::lang::core::{Constant, TermData, UniverseLevel, UniverseOffset}; + + match &term.data { + TermData::Global(name) => Text::new(name).into(), + TermData::Local(_) => Text::new("todo").into(), + + TermData::Ann(term, r#type) => Row::new() + .push(view_term(term)) + .push(Text::new(" : ")) + .push(view_term(r#type)) + .into(), + + TermData::TypeType(UniverseLevel(level)) => Row::new() + .push(Text::new(format!("Univ^{}", level))) // TODO: superscript? + .into(), + TermData::Lift(term, UniverseOffset(offset)) => Row::new() + .push(view_term(term)) + .push(Text::new(format!("^{}", offset))) + .into(), + + TermData::FunctionType(_, _, _) => Text::new("todo").into(), + TermData::FunctionTerm(_, _) => Text::new("todo").into(), + TermData::FunctionElim(_, _) => Text::new("todo").into(), + + TermData::RecordTerm(_) => Text::new("todo").into(), + TermData::RecordType(_) => Text::new("todo").into(), + TermData::RecordElim(_, _) => Text::new("todo").into(), + + TermData::ArrayTerm(_) => Text::new("todo").into(), + TermData::ListTerm(_) => Text::new("todo").into(), + + TermData::Constant(Constant::U8(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::U16(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::U32(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::U64(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::S8(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::S16(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::S32(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::S64(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::F32(data)) => Text::new(data.to_string()).into(), + 
TermData::Constant(Constant::F64(data)) => Text::new(data.to_string()).into(), + TermData::Constant(Constant::Char(data)) => Text::new(format!("{:?}", data)).into(), + TermData::Constant(Constant::String(data)) => Text::new(format!("{:?}", data)).into(), + + TermData::Error => Text::new("ERROR!").into(), + } +} diff --git a/pikelet-language-server/Cargo.toml b/pikelet-language-server/Cargo.toml new file mode 100644 index 000000000..7a7f08d6f --- /dev/null +++ b/pikelet-language-server/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "pikelet-language-server" +version = "0.1.0" +authors = ["Brendan Zabarauskas "] +edition = "2018" +publish = false +description = "Language server protocol implementation for the Pikelet programming language" +homepage = "https://github.com/pikelet-lang/pikelet" +repository = "https://github.com/pikelet-lang/pikelet" +readme = "README.md" +keywords = ["pikelet"] +categories = [] +license = "Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +anyhow = "1.0" +flexi_logger = "0.15" +log = "0.4" +lsp-server = "0.3" +lsp-types = "0.79" +serde_json = "1.0.57" +serde = { version = "1.0.114", features = ["derive"] } diff --git a/pikelet-language-server/README.md b/pikelet-language-server/README.md new file mode 100644 index 000000000..4fb0fdb8c --- /dev/null +++ b/pikelet-language-server/README.md @@ -0,0 +1,3 @@ +# pikelet-language-server + +Language server protocol implementation for the Pikelet programming language diff --git a/pikelet-language-server/src/lib.rs b/pikelet-language-server/src/lib.rs new file mode 100644 index 000000000..61c5c3e9a --- /dev/null +++ b/pikelet-language-server/src/lib.rs @@ -0,0 +1,50 @@ +use log::info; +use lsp_server::{Connection, Message}; +use lsp_types::{InitializeParams, ServerCapabilities}; + +pub fn run() -> anyhow::Result<()> { + // Set up logging. 
Because `stdio_transport` gets a lock on stdout and stdin, we must have
+ // our logging only write out to stderr.
+ flexi_logger::Logger::with_str("info").start()?;
+ info!("Starting Pikelet LSP server");
+
+ // Create the transport. Includes the stdio (stdin and stdout) versions but this could
+ // also be implemented to use sockets or HTTP.
+ let (connection, io_threads) = Connection::stdio();
+
+ // Run the server and wait for the two threads to end (typically by triggering the LSP Exit event).
+ let server_capabilities = serde_json::to_value(&ServerCapabilities::default())?;
+ let initialization_params = connection.initialize(server_capabilities)?;
+ main_loop(&connection, initialization_params)?;
+ io_threads.join()?;
+
+ // Shut down gracefully.
+ info!("Shutting down server");
+
+ Ok(())
+}
+
+fn main_loop(connection: &Connection, params: serde_json::Value) -> anyhow::Result<()> {
+ let _params: InitializeParams = serde_json::from_value(params).unwrap();
+
+ info!("Starting Pikelet main loop");
+ for msg in &connection.receiver {
+ info!("Received msg: {:?}", msg);
+ match msg {
+ Message::Request(request) => {
+ if connection.handle_shutdown(&request)?
{ + return Ok(()); + } + info!("Got request: {:?}", request); + } + Message::Response(response) => { + info!("Received response: {:?}", response); + } + Message::Notification(notification) => { + info!("Received notification: {:?}", notification); + } + } + } + + Ok(()) +} diff --git a/pikelet/Cargo.toml b/pikelet/Cargo.toml new file mode 100644 index 000000000..be4f0eebc --- /dev/null +++ b/pikelet/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "pikelet" +version = "0.1.0" +authors = ["Brendan Zabarauskas"] +edition = "2018" +publish = false +description = "The Pikelet programming language" +homepage = "https://github.com/pikelet-lang/pikelet" +repository = "https://github.com/pikelet-lang/pikelet" +readme = "../README.md" +keywords = ["pikelet", "language"] +categories = [] +license = "Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +codespan-reporting = "0.9.5" +contracts = "0.6" +crossbeam-channel = "0.4" +im = "15" +itertools = "0.9" +lalrpop-util = "0.19" +logos = "0.11" +num-traits = "0.2" +once_cell = "1.4" +pretty = "0.10" +regex = "1.3" + +[build-dependencies] +lalrpop = "0.19" diff --git a/pikelet/README.md b/pikelet/README.md new file mode 100644 index 000000000..47f6be2df --- /dev/null +++ b/pikelet/README.md @@ -0,0 +1,3 @@ +# pikelet + +See the [top-level README](../README.md) for more information. diff --git a/crates/pikelet-concrete/build.rs b/pikelet/build.rs similarity index 63% rename from crates/pikelet-concrete/build.rs rename to pikelet/build.rs index 8790fa517..4494207d5 100644 --- a/crates/pikelet-concrete/build.rs +++ b/pikelet/build.rs @@ -1,7 +1,6 @@ fn main() { lalrpop::Configuration::new() .always_use_colors() - .use_cargo_dir_conventions() - .process() + .process_current_dir() .unwrap(); } diff --git a/pikelet/src/lang.rs b/pikelet/src/lang.rs new file mode 100644 index 000000000..d3ac9fd41 --- /dev/null +++ b/pikelet/src/lang.rs @@ -0,0 +1,95 @@ +//! 
Intermediate languages of the Pikelet compiler. + +pub mod surface; +// 🠃 +pub mod core; +// 🠃 +pub mod anf; +// 🠃 +pub mod cc; +// 🠃 +// ... + +/// File identifier +pub type FileId = usize; + +/// Location metadata, for diagnostic reporting purposes. +#[derive(Debug, Copy, Clone)] +pub enum Location { + /// Generated code. + Generated, + /// Ranges in a text file. + FileRange(FileId, Range), +} + +impl Location { + pub fn generated() -> Location { + Location::Generated + } + + pub fn file_range(file_id: FileId, range: impl Into) -> Location { + Location::FileRange(file_id, range.into()) + } + + pub fn merge(self, other: Location) -> Location { + match (self, other) { + (Location::Generated, Location::Generated) => Location::Generated, + (Location::FileRange(file_id0, range0), Location::FileRange(file_id1, range1)) => { + assert_eq!( + file_id0, file_id1, + "tried to merge source locations with different file ids" + ); + Location::FileRange(file_id0, Range::merge(range0, range1)) + } + (_, _) => panic!("incompatible source ranges"), + } + } +} + +/// A range of source code. +#[derive(Debug, Copy, Clone)] +pub struct Range { + pub start: usize, + pub end: usize, +} + +impl Range { + pub fn merge(self, other: Range) -> Range { + Range { + start: std::cmp::min(self.start, other.start), + end: std::cmp::max(self.end, other.end), + } + } +} + +impl Into> for Range { + fn into(self) -> std::ops::Range { + self.start..self.end + } +} + +impl From> for Range { + fn from(src: std::ops::Range) -> Range { + Range { + start: src.start, + end: src.end, + } + } +} + +/// Data that covers some range of source code. 
+#[derive(Debug, Clone)] +pub struct Located { + pub location: Location, + pub data: Data, +} + +impl Located { + pub fn new(location: Location, data: Data) -> Located { + Located { location, data } + } + + pub fn generated(data: Data) -> Located { + Located::new(Location::generated(), data) + } +} diff --git a/pikelet/src/lang/anf.rs b/pikelet/src/lang/anf.rs new file mode 100644 index 000000000..3c19f22ee --- /dev/null +++ b/pikelet/src/lang/anf.rs @@ -0,0 +1,86 @@ +//! The A-normal form language, with types preserved. +//! +//! This language makes an explicit distinction between _computations_ and +//! _values_, and makes the evaluation indifferent to the order in which +//! computations are executed (somewhat like [applicative functors in +//! Haskell][applicative-functors]). It does this through alterations to the +//! syntactic structure of the [core language][crate::lang::core], while +//! avoiding making many significant changes to the type structure which +//! would make type preservation more challenging. +//! +//! The main inspiration for this language is William Bowman's dissertation, +//! [Compiling with Dependent Types][wjb-dissertation]. +//! +//! Note: the 'A' in 'A-Normal Form' does not stand for anything, at least +//! [according to one of the original authors, Matthias Felleisen][just-a]. +//! I really wish there was a better name for this language. +//! +//! [applicative-functors]: https://wiki.haskell.org/Applicative_functor +//! [wjb-dissertation]: https://www.williamjbowman.com/resources/wjb-dissertation.pdf +//! [just-a]: https://vimeo.com/387739817 + +pub use crate::lang::core::{Constant, LocalIndex, UniverseLevel, UniverseOffset}; + +/// Values are terms that do not reduce. +pub enum Value { + /// Global variables. + Global(String), + /// Local variables. + Local(LocalIndex), + + /// Annotated values + Ann(Box, Box), + + /// The type of types. + TypeType(UniverseLevel), + /// Lift a value by the given number of universe levels. 
+ Lift(Box, UniverseOffset), + + /// Function types. + /// + /// Also known as: pi type, dependent product type. + FunctionType(Option, Box, Box), + /// Function terms. + /// + /// Also known as: lambda abstraction, anonymous function. + FunctionTerm(String, Box), + + /// Record types. + RecordType(Vec<(String, Box)>), + /// Record terms. + RecordTerm(Vec<(String, Box)>), + + /// Constants. + Constant(Constant), + + /// Error sentinel. + Error, +} + +impl From for Value { + fn from(constant: Constant) -> Value { + Value::Constant(constant) + } +} + +/// Computations eliminate values. +pub enum Computation { + /// Values. + Value(Box), + /// Function eliminations. + /// + /// Also known as: function application. + FunctionElim(Box, Box), + /// Record eliminations. + /// + /// Also known as: record projection, field lookup. + RecordElim(Box, String), +} + +/// Programs that are ready to be executed. +pub struct Configuration { + /// A list of computations to be used when we execute this program. + pub bindings: Vec, + /// The final output of the program. + pub output: Computation, +} diff --git a/pikelet/src/lang/cc.rs b/pikelet/src/lang/cc.rs new file mode 100644 index 000000000..ffc7396e1 --- /dev/null +++ b/pikelet/src/lang/cc.rs @@ -0,0 +1,13 @@ +//! The closure converted language, with types preserved. +//! +//! This language makes an explicit distinction between the _use_ of a +//! computation and the _definition_ of a computation. In doing this we make +//! implicit environment capture explicit through the use of an explicit +//! closure object, which holds the closed code and the local environment. +//! +//! The main inspiration for this language is William Bowman's dissertation, +//! [Compiling with Dependent Types][wjb-dissertation]. +//! +//! 
[wjb-dissertation]: https://www.williamjbowman.com/resources/wjb-dissertation.pdf
+
+// TODO: Define syntax
diff --git a/pikelet/src/lang/core.rs b/pikelet/src/lang/core.rs
new file mode 100644
index 000000000..598fcf15d
--- /dev/null
+++ b/pikelet/src/lang/core.rs
@@ -0,0 +1,316 @@
+//! The core language.
+//!
+//! This is not intended to be used directly by users of the programming
+//! language.
+
+use std::collections::BTreeMap;
+use std::fmt;
+use std::sync::Arc;
+
+use crate::lang::Located;
+
+pub mod marshall;
+pub mod semantics;
+pub mod typing;
+
+/// Constants used in the core language.
+// FIXME: Partial eq for floating point numbers
+#[derive(Clone, Debug, PartialEq)]
+pub enum Constant {
+ /// 8-bit unsigned integers.
+ U8(u8),
+ /// 16-bit unsigned integers.
+ U16(u16),
+ /// 32-bit unsigned integers.
+ U32(u32),
+ /// 64-bit unsigned integers.
+ U64(u64),
+ /// 8-bit signed [two's complement] integers.
+ ///
+ /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement
+ S8(i8),
+ /// 16-bit signed [two's complement] integers.
+ ///
+ /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement
+ S16(i16),
+ /// 32-bit signed [two's complement] integers.
+ ///
+ /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement
+ S32(i32),
+ /// 64-bit signed [two's complement] integers.
+ ///
+ /// [two's complement]: https://en.wikipedia.org/wiki/Two%27s_complement
+ S64(i64),
+ /// 32-bit [IEEE-754] floating point numbers.
+ ///
+ /// [IEEE-754]: https://en.wikipedia.org/wiki/IEEE_754
+ F32(f32),
+ /// 64-bit [IEEE-754] floating point numbers.
+ ///
+ /// [IEEE-754]: https://en.wikipedia.org/wiki/IEEE_754
+ F64(f64),
+ /// [Unicode scalar values](http://www.unicode.org/glossary/#unicode_scalar_value).
+ Char(char),
+ /// [UTF-8] encoded strings.
+ ///
+ /// [UTF-8]: http://www.unicode.org/glossary/#UTF_8
+ String(String),
+}
+
+/// Universe levels.
+#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)] +pub struct UniverseLevel(pub u32); + +impl std::ops::Add for UniverseLevel { + type Output = Option; + + fn add(self, other: UniverseOffset) -> Option { + u32::checked_add(self.0, other.0).map(UniverseLevel) + } +} + +impl From for UniverseLevel { + fn from(level: u32) -> UniverseLevel { + UniverseLevel(level) + } +} + +/// Universe level offsets. +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord)] +pub struct UniverseOffset(pub u32); + +impl std::ops::Add for UniverseOffset { + type Output = Option; + + fn add(self, other: UniverseOffset) -> Option { + u32::checked_add(self.0, other.0).map(UniverseOffset) + } +} + +impl From for UniverseOffset { + fn from(offset: u32) -> UniverseOffset { + UniverseOffset(offset) + } +} + +pub type Term = Located; + +/// Terms in the core language. +#[derive(Clone, Debug)] +pub enum TermData { + /// Global variables. + Global(String), + /// Local variables. + Local(LocalIndex), + + /// Annotated terms + Ann(Arc, Arc), + + /// The type of types. + TypeType(UniverseLevel), + /// Lift a term by the given number of universe levels. + Lift(Arc, UniverseOffset), + + /// Function types. + /// + /// Also known as: pi type, dependent product type. + FunctionType(Option, Arc, Arc), + /// Function terms. + /// + /// Also known as: lambda abstraction, anonymous function. + FunctionTerm(String, Arc), + /// Function eliminations. + /// + /// Also known as: function application. + FunctionElim(Arc, Arc), + + /// Record types. + RecordType(Arc<[(String, Arc)]>), + /// Record terms. + RecordTerm(Arc<[(String, Arc)]>), + /// Record eliminations. + /// + /// Also known as: record projection, field lookup. + RecordElim(Arc, String), + + /// Array terms. + ArrayTerm(Vec>), + /// List terms. + ListTerm(Vec>), + + /// Constants. + Constant(Constant), + + /// Error sentinel. 
+ Error, +} + +impl From for TermData { + fn from(constant: Constant) -> TermData { + TermData::Constant(constant) + } +} + +/// An environment of global definitions. +pub struct Globals { + entries: BTreeMap, Option>)>, +} + +impl Globals { + pub fn new(entries: BTreeMap, Option>)>) -> Globals { + Globals { entries } + } + + pub fn get(&self, name: &str) -> Option<&(Arc, Option>)> { + self.entries.get(name) + } + + pub fn entries(&self) -> impl Iterator, Option>))> { + self.entries.iter() + } +} + +impl Default for Globals { + fn default() -> Globals { + let mut entries = BTreeMap::new(); + + let global = |name: &str| Arc::new(Term::generated(TermData::Global(name.to_owned()))); + let type_type = |level| Arc::new(Term::generated(TermData::TypeType(UniverseLevel(level)))); + let function_type = |input_type, output_type| { + Arc::new(Term::generated(TermData::FunctionType( + None, + input_type, + output_type, + ))) + }; + + entries.insert("Type".to_owned(), (type_type(1), Some(type_type(0)))); + entries.insert("Bool".to_owned(), (global("Type"), None)); + entries.insert("U8".to_owned(), (global("Type"), None)); + entries.insert("U16".to_owned(), (global("Type"), None)); + entries.insert("U32".to_owned(), (global("Type"), None)); + entries.insert("U64".to_owned(), (global("Type"), None)); + entries.insert("S8".to_owned(), (global("Type"), None)); + entries.insert("S16".to_owned(), (global("Type"), None)); + entries.insert("S32".to_owned(), (global("Type"), None)); + entries.insert("S64".to_owned(), (global("Type"), None)); + entries.insert("F32".to_owned(), (global("Type"), None)); + entries.insert("F64".to_owned(), (global("Type"), None)); + entries.insert("Char".to_owned(), (global("Type"), None)); + entries.insert("String".to_owned(), (global("Type"), None)); + entries.insert("true".to_owned(), (global("Bool"), None)); + entries.insert("false".to_owned(), (global("Bool"), None)); + entries.insert( + "Array".to_owned(), + ( + function_type(global("U32"), 
function_type(type_type(0), type_type(0))), + None, + ), + ); + entries.insert( + "List".to_owned(), + (function_type(type_type(0), type_type(0)), None), + ); + + Globals::new(entries) + } +} + +/// A De Bruijn index into the local environment. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct LocalIndex(pub u32); + +impl LocalIndex { + /// Convert a local index to a local level in the current environment. + /// + /// `None` is returned if the local environment is not large enough to + /// contain the local variable. + pub fn to_level(self, size: LocalSize) -> Option { + Some(LocalLevel(u32::checked_sub(size.0, self.0 + 1)?)) + } +} + +/// A De Bruijn level into the local environment. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct LocalLevel(u32); + +impl LocalLevel { + /// Convert a local level to a local index in the current environment. + /// + /// `None` is returned if the local environment is not large enough to + /// contain the local variable. + pub fn to_index(self, size: LocalSize) -> Option { + Some(LocalIndex(u32::checked_sub(size.0, self.0 + 1)?)) + } +} + +/// The size of the local environment, used for index-to-level conversions. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub struct LocalSize(u32); + +impl LocalSize { + pub fn increment(self) -> LocalSize { + LocalSize(self.0 + 1) + } + + /// Return the level of the next variable to be added to the environment. + pub fn next_level(self) -> LocalLevel { + LocalLevel(self.0) + } +} + +/// A local environment. +#[derive(Clone)] +pub struct Locals { + /// The local entries that are currently defined in the environment. + entries: im::Vector, +} + +impl Locals { + /// Create a new local environment. + pub fn new() -> Locals { + Locals { + entries: im::Vector::new(), + } + } + + /// Get the size of the environment. + pub fn size(&self) -> LocalSize { + LocalSize(self.entries.len() as u32) // FIXME: Check for overflow? + } + + /// Lookup an entry in the environment. 
+ pub fn get(&self, index: LocalIndex) -> Option<&Entry> { + self.entries + .get(self.entries.len().checked_sub(index.0 as usize + 1)?) + } + + /// Push an entry onto the environment. + pub fn push(&mut self, entry: Entry) { + self.entries.push_back(entry); + } + + /// Pop an entry off the environment. + pub fn pop(&mut self) -> Option { + self.entries.pop_back() + } + + /// Pop a number of entries off the environment. + pub fn pop_many(&mut self, count: usize) { + self.entries + .truncate(self.entries.len().saturating_sub(count)); + } + + /// Clear the entries from the environment. + pub fn clear(&mut self) { + self.entries.clear(); + } +} + +impl fmt::Debug for Locals { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("Locals") + .field("entries", &self.entries) + .finish() + } +} diff --git a/pikelet/src/lang/core/marshall.rs b/pikelet/src/lang/core/marshall.rs new file mode 100644 index 000000000..82bf1025c --- /dev/null +++ b/pikelet/src/lang/core/marshall.rs @@ -0,0 +1,246 @@ +//! Marshalling API between Rust types and Pikelet's core language. + +use std::sync::Arc; + +use crate::lang::core::{Constant, Term, TermData}; + +pub trait HasType { + fn r#type() -> Arc; +} + +macro_rules! 
impl_has_type { + ($Self:ty, $term:expr) => { + impl HasType for $Self { + fn r#type() -> Arc { + Arc::new($term) + } + } + }; +} + +impl_has_type!(bool, Term::generated(TermData::Global("Bool".to_owned()))); +impl_has_type!(u8, Term::generated(TermData::Global("U8".to_owned()))); +impl_has_type!(u16, Term::generated(TermData::Global("U16".to_owned()))); +impl_has_type!(u32, Term::generated(TermData::Global("U32".to_owned()))); +impl_has_type!(u64, Term::generated(TermData::Global("U64".to_owned()))); +impl_has_type!(i8, Term::generated(TermData::Global("S8".to_owned()))); +impl_has_type!(i16, Term::generated(TermData::Global("S16".to_owned()))); +impl_has_type!(i32, Term::generated(TermData::Global("S32".to_owned()))); +impl_has_type!(i64, Term::generated(TermData::Global("S64".to_owned()))); +impl_has_type!(f32, Term::generated(TermData::Global("F32".to_owned()))); +impl_has_type!(f64, Term::generated(TermData::Global("F64".to_owned()))); +impl_has_type!(char, Term::generated(TermData::Global("Char".to_owned()))); +impl_has_type!( + String, + Term::generated(TermData::Global("String".to_owned())) +); +impl_has_type!(str, Term::generated(TermData::Global("String".to_owned()))); + +impl HasType for Vec { + fn r#type() -> Arc { + Arc::new(Term::generated(TermData::FunctionElim( + Arc::new(Term::generated(TermData::Global("List".to_owned()))), + T::r#type(), + ))) + } +} + +macro_rules! 
impl_has_type_array { + ($($len:expr),*) => { + $(impl HasType for [T; $len] { + fn r#type() -> Arc { + Arc::new(Term::generated(TermData::FunctionElim( + Arc::new(Term::generated(TermData::FunctionElim( + Arc::new(Term::generated(TermData::Global("List".to_owned()))), + Arc::new(Term::generated(TermData::from(Constant::U32($len as u32)))), + ))), + T::r#type(), + ))) + } + })* + }; +} + +impl_has_type_array!( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, 27, 28, 29, 30, 31, 32 +); + +/// Attempt to deserialize something from a `Term`. +/// +/// # Laws +/// +/// ```skipped +/// check_type(&term, &Self::r#type()) && Self::try_from_term(term).is_ok() +/// ``` +// TODO: Make more efficient with visitors +pub trait TryFromTerm: HasType + Sized { + type Error: Sized; + fn try_from_term(term: &Term) -> Result; +} + +macro_rules! impl_try_from_term { + ($Self:ty, |$p:pat| $term:expr) => { + impl TryFromTerm for $Self { + type Error = (); + + fn try_from_term(term: &Term) -> Result<$Self, ()> { + match &term.data { + $p => $term, + _ => Err(()), + } + } + } + }; +} + +impl_try_from_term!(bool, |TermData::Global(name)| match name.as_str() { + "true" => Ok(true), + "false" => Ok(false), + _ => Err(()), +}); +impl_try_from_term!(u8, |TermData::Constant(Constant::U8(value))| Ok(*value)); +impl_try_from_term!(u16, |TermData::Constant(Constant::U16(value))| Ok(*value)); +impl_try_from_term!(u32, |TermData::Constant(Constant::U32(value))| Ok(*value)); +impl_try_from_term!(u64, |TermData::Constant(Constant::U64(value))| Ok(*value)); +impl_try_from_term!(i8, |TermData::Constant(Constant::S8(value))| Ok(*value)); +impl_try_from_term!(i16, |TermData::Constant(Constant::S16(value))| Ok(*value)); +impl_try_from_term!(i32, |TermData::Constant(Constant::S32(value))| Ok(*value)); +impl_try_from_term!(i64, |TermData::Constant(Constant::S64(value))| Ok(*value)); +impl_try_from_term!(f32, |TermData::Constant(Constant::F32(value))| 
Ok(*value)); +impl_try_from_term!(f64, |TermData::Constant(Constant::F64(value))| Ok(*value)); +impl_try_from_term!(char, |TermData::Constant(Constant::Char(value))| Ok(*value)); +impl_try_from_term!(String, |TermData::Constant(Constant::String(value))| Ok( + value.clone(), +)); + +impl TryFromTerm for Vec { + type Error = (); + + fn try_from_term(term: &Term) -> Result, ()> { + match &term.data { + TermData::ListTerm(entry_terms) => entry_terms + .iter() + .map(|entry_term| T::try_from_term(entry_term).map_err(|_| ())) + .collect::, ()>>(), + _ => Err(()), + } + } +} + +macro_rules! impl_try_from_term_array { + ($($len:expr),*) => { + $(impl TryFromTerm for [T; $len] { + type Error = (); + + fn try_from_term(term: &Term) -> Result<[T; $len], ()> { + match &term.data { + TermData::ArrayTerm(entry_terms) if entry_terms.len() == $len => { + use std::mem::MaybeUninit; + + let mut entries: [MaybeUninit::; $len] = unsafe { + MaybeUninit::uninit().assume_init() + }; + for (i, entry_term) in entry_terms.iter().enumerate() { + entries[i] = MaybeUninit::new(T::try_from_term(entry_term).map_err(|_| ())?); + } + + // NOTE: We'd prefer to do the following: + // + // ``` + // Ok(unsafe { std::mem::transmute::<_, [T; $len]>(entries) }) + // ``` + // + // Sadly we run into the following issue: https://github.com/rust-lang/rust/issues/61956 + // For this reason we need to do the following (hideous) workaround: + + let ptr = &mut entries as *mut _ as *mut [T; $len]; + let result = unsafe { ptr.read() }; + core::mem::forget(entries); + Ok(result) + }, + _ => Err(()), + } + } + })* + }; +} + +impl_try_from_term_array!( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, 27, 28, 29, 30, 31, 32 +); + +/// Serialize something to a `Term`. 
+/// +/// # Laws +/// +/// ```skipped +/// check_type(&Self::to_term(&value), &Self::r#type()) == true +/// ``` +// TODO: Make more efficient with visitors +pub trait ToTerm: HasType { + fn to_term(&self) -> Term; +} + +macro_rules! impl_to_term { + ($Self:ty, |$p:pat| $term_data:expr) => { + impl ToTerm for $Self { + fn to_term(&self) -> Term { + let $p = self; + Term::generated($term_data) + } + } + }; +} + +impl_to_term!(bool, |value| match value { + true => TermData::Global("true".to_owned()), + false => TermData::Global("false".to_owned()), +}); +impl_to_term!(u8, |value| TermData::from(Constant::U8(*value))); +impl_to_term!(u16, |value| TermData::from(Constant::U16(*value))); +impl_to_term!(u32, |value| TermData::from(Constant::U32(*value))); +impl_to_term!(u64, |value| TermData::from(Constant::U64(*value))); +impl_to_term!(i8, |value| TermData::from(Constant::S8(*value))); +impl_to_term!(i16, |value| TermData::from(Constant::S16(*value))); +impl_to_term!(i32, |value| TermData::from(Constant::S32(*value))); +impl_to_term!(i64, |value| TermData::from(Constant::S64(*value))); +impl_to_term!(f32, |value| TermData::from(Constant::F32(*value))); +impl_to_term!(f64, |value| TermData::from(Constant::F64(*value))); +impl_to_term!(char, |value| TermData::from(Constant::Char(*value))); +impl_to_term!(String, |value| TermData::from(Constant::String( + value.clone() +))); +impl_to_term!(str, |value| TermData::from(Constant::String( + value.to_owned() +))); + +impl ToTerm for Vec { + fn to_term(&self) -> Term { + Term::generated(TermData::ListTerm( + self.iter() + .map(|entry_term| Arc::new(T::to_term(entry_term))) + .collect(), + )) + } +} + +macro_rules! 
impl_to_term_array { + ($($len:expr),*) => { + $(impl ToTerm for [T; $len] { + fn to_term(&self) -> Term { + Term::generated(TermData::ArrayTerm( + self.iter() + .map(|entry_term| Arc::new(T::to_term(entry_term))) + .collect(), + )) + } + })* + }; +} + +impl_to_term_array!( + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, + 26, 27, 28, 29, 30, 31, 32 +); diff --git a/pikelet/src/lang/core/semantics.rs b/pikelet/src/lang/core/semantics.rs new file mode 100644 index 000000000..d3ec2b33e --- /dev/null +++ b/pikelet/src/lang/core/semantics.rs @@ -0,0 +1,926 @@ +//! The operational semantics of the language, implemented using [normalisation-by-evaluation]. +//! +//! [normalisation-by-evaluation]: https://en.wikipedia.org/wiki/Normalisation_by_evaluation + +use contracts::debug_ensures; +use once_cell::sync::OnceCell; +use std::cell::RefCell; +use std::sync::Arc; + +use crate::lang::core::{ + Constant, Globals, LocalLevel, LocalSize, Locals, Term, TermData, UniverseLevel, UniverseOffset, +}; + +/// Values in the core language. +#[derive(Clone, Debug)] +pub enum Value { + /// A computation that is stuck on a [head value][Head] that cannot be + /// reduced further in the current scope. We maintain a 'spine' of + /// [eliminators][Elim], that can be applied if the head becomes unstuck + /// later on. + /// + /// This is more commonly known as a 'neutral value' in the type theory + /// literature. + Stuck(Head, Vec), + /// A computation that was previously stuck a on [head value][Head], but is + /// now unstuck due to its definition now being known. + /// + /// This is sometimes called a 'glued value'. + /// + /// It's useful to keep the head and spine of eliminations around from the + /// [stuck value][Value::Stuck] in order to reduce the size-blowup that + /// can result from deeply-normalizing terms. 
This can be useful for: + /// + /// - improving the performance of conversion checking + /// - making it easier to understand read-back types in diagnostic messages + /// + /// See the following for more information: + /// + /// - [AndrasKovacs/smalltt](https://github.com/AndrasKovacs/smalltt/) + /// - [ollef/sixty](https://github.com/ollef/sixty/) + /// - [Non-deterministic normalization-by-evaluation](https://gist.github.com/AndrasKovacs/a0e0938113b193d6b9c1c0620d853784) + /// - [Example of the blowup that can occur when reading back values](https://twitter.com/brendanzab/status/1283278258818002944) + Unstuck(Head, Vec, Arc), + + /// The type of types. + TypeType(UniverseLevel), + + /// Function types. + /// + /// Also known as: pi type, dependent product type. + FunctionType(Option, Arc, FunctionClosure), + /// Function terms. + /// + /// Also known as: lambda abstraction, anonymous function. + FunctionTerm(String, FunctionClosure), + + /// Record types. + RecordType(RecordClosure), + /// Record terms. + RecordTerm(RecordClosure), + + /// Array terms. + ArrayTerm(Vec>), + /// List terms. + ListTerm(Vec>), + + /// Constants. + Constant(Constant), + + /// Error sentinel. + Error, +} + +impl Value { + /// Create a type of types at the given level. + pub fn type_type(level: impl Into) -> Value { + Value::TypeType(level.into()) + } + + /// Create a global variable. + pub fn global( + name: impl Into, + offset: impl Into, + elims: impl Into>, + ) -> Value { + Value::Stuck(Head::Global(name.into(), offset.into()), elims.into()) + } + + /// Create a local variable. + pub fn local(level: impl Into, elims: impl Into>) -> Value { + Value::Stuck(Head::Local(level.into()), elims.into()) + } + + /// Attempt to match against a stuck global. + /// + /// This can help to clean up pattern matches in lieu of + /// [`match_default_bindings`](https://github.com/rust-lang/rust/issues/42640). 
+ pub fn try_global(&self) -> Option<(&str, UniverseOffset, &[Elim])> { + match self { + Value::Stuck(Head::Global(name, universe_offset), elims) => { + Some((name, *universe_offset, elims)) + } + _ => None, + } + } + + /// Force any unstuck values. + pub fn force(&self, globals: &Globals) -> &Value { + match self { + Value::Unstuck(_, _, value) => Value::force(LazyValue::force(value, globals), globals), + value => value, + } + } +} + +impl From for Value { + fn from(constant: Constant) -> Value { + Value::Constant(constant) + } +} + +/// The head of a [stuck value][Value::Stuck]. +/// +/// This cannot currently be reduced in the current scope due to its definition +/// not being known. Once it becomes known, the head may be 'remembered' in an +/// [unstuck value][Value::Unstuck]. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Head { + /// Global variables. + Global(String, UniverseOffset), + /// Local variables. + Local(LocalLevel), +} + +/// An eliminator that is part of the spine of a [stuck value][`Value::Stuck`]. +/// +/// It might also be 'remembered' in an [unstuck value][Value::Unstuck]. +#[derive(Clone, Debug)] +pub enum Elim { + /// Function eliminators. + /// + /// This eliminator can be applied to a [`Value`] with the + /// [`apply_function_elim`] function. + /// + /// Also known as: function application. + Function(Arc), + /// Record eliminators. + /// + /// This eliminator can be applied to a [`Value`] with the + /// [`apply_record_elim`] function. + /// + /// Also known as: record projections, field lookup. + Record(String), +} + +/// Function closure, capturing the current universe offset and the current locals in scope. 
+#[derive(Clone, Debug)] +pub struct FunctionClosure { + universe_offset: UniverseOffset, + locals: Locals>, + term: Arc, +} + +impl FunctionClosure { + pub fn new( + universe_offset: UniverseOffset, + locals: Locals>, + term: Arc, + ) -> FunctionClosure { + FunctionClosure { + universe_offset, + locals, + term, + } + } + + /// Apply an input to the function closure. + pub fn apply(&self, globals: &Globals, input: Arc) -> Arc { + let mut locals = self.locals.clone(); + locals.push(input); + eval_term(globals, self.universe_offset, &mut locals, &self.term) + } +} + +/// Record closure, capturing the current universe offset and the current locals in scope. +#[derive(Clone, Debug)] +pub struct RecordClosure { + universe_offset: UniverseOffset, + locals: Locals>, + entries: Arc<[(String, Arc)]>, +} + +impl RecordClosure { + pub fn new( + universe_offset: UniverseOffset, + locals: Locals>, + entries: Arc<[(String, Arc)]>, + ) -> RecordClosure { + RecordClosure { + universe_offset, + locals, + entries, + } + } + + /// Apply a callback to each of the entries in the record closure. + pub fn for_each_entry<'closure>( + &'closure self, + globals: &Globals, + mut on_entry: impl FnMut(&'closure str, Arc) -> Arc, + ) { + let universe_offset = self.universe_offset; + let mut locals = self.locals.clone(); + + for (label, entry_value) in self.entries.iter() { + let entry_value = eval_term(globals, universe_offset, &mut locals, entry_value); + locals.push(on_entry(label, entry_value)); + } + } + + /// Find an entry in the record closure. 
+ pub fn find_entry<'closure, T>( + &'closure self, + globals: &Globals, + mut on_entry: impl FnMut(&'closure str, Arc) -> Result>, + ) -> Option { + let universe_offset = self.universe_offset; + let mut locals = self.locals.clone(); + + for (label, entry_value) in self.entries.iter() { + let entry_value = eval_term(globals, universe_offset, &mut locals, entry_value); + match on_entry(label, entry_value) { + Ok(t) => return Some(t), + Err(entry_value) => locals.push(entry_value), + } + } + + None + } +} + +/// Initialization operation for lazy values. +/// +/// We need to use a [defunctionalized] representation because Rust does not allow +/// closures of type `dyn (Clone + FnOnce() -> Arc)`. +/// +/// [defunctionalized]: https://en.wikipedia.org/wiki/Defunctionalization +#[derive(Clone, Debug)] +enum LazyInit { + EvalTerm(UniverseOffset, Locals>, Arc), + ApplyElim(Arc, Elim), +} + +/// A lazily initialized value. +#[derive(Clone, Debug)] +pub struct LazyValue { + /// Initialization operation. Will be set to `None` if `cell` is forced. + init: RefCell>, + /// A once-cell to hold the lazily initialized value. + cell: OnceCell>, +} + +impl LazyValue { + /// Eagerly construct the lazy value. + pub fn new(value: Arc) -> LazyValue { + LazyValue { + init: RefCell::new(None), + cell: OnceCell::from(value), + } + } + + /// Lazily evaluate a term using the given universe offset and local values. + pub fn eval_term( + universe_offset: UniverseOffset, + locals: Locals>, + term: Arc, + ) -> LazyValue { + LazyValue { + init: RefCell::new(Some(LazyInit::EvalTerm(universe_offset, locals, term))), + cell: OnceCell::new(), + } + } + + /// Lazily apply an elimination. + pub fn apply_elim(head: Arc, elim: Elim) -> LazyValue { + LazyValue { + init: RefCell::new(Some(LazyInit::ApplyElim(head, elim))), + cell: OnceCell::new(), + } + } + + /// Force the evaluation of a lazy value. 
+ pub fn force(&self, globals: &Globals) -> &Arc { + self.cell.get_or_init(|| match self.init.replace(None) { + Some(LazyInit::EvalTerm(universe_offset, mut locals, term)) => { + eval_term(globals, universe_offset, &mut locals, &term) + } + Some(LazyInit::ApplyElim(head, Elim::Record(label))) => { + apply_record_elim(globals, head.force(globals).clone(), &label) + } + Some(LazyInit::ApplyElim(head, Elim::Function(input))) => { + apply_function_elim(globals, head.force(globals).clone(), input) + } + None => panic!("Lazy instance has previously been poisoned"), + }) + } +} + +/// Fully normalize a [`Term`] using [normalization by evaluation]. +/// +/// [`Term`]: crate::lang::core::Term +/// [normalization by evaluation]: https://en.wikipedia.org/wiki/Normalisation_by_evaluation +#[debug_ensures(locals.size() == old(locals.size()))] +pub fn normalize_term( + globals: &Globals, + universe_offset: UniverseOffset, + locals: &mut Locals>, + term: &Term, +) -> Term { + let value = eval_term(globals, universe_offset, locals, term); + read_back_value(globals, locals.size(), Unfold::Always, &value) +} + +/// Evaluate a [`Term`] into a [`Value`]. 
+/// +/// [`Value`]: crate::lang::core::semantics::Value +/// [`Term`]: crate::lang::core::Term +#[debug_ensures(locals.size() == old(locals.size()))] +pub fn eval_term( + globals: &Globals, + universe_offset: UniverseOffset, + locals: &mut Locals>, + term: &Term, +) -> Arc { + match &term.data { + TermData::Global(name) => match globals.get(name) { + Some((_, Some(term))) => { + let head = Head::Global(name.into(), universe_offset); + let value = LazyValue::eval_term(universe_offset, locals.clone(), term.clone()); + Arc::new(Value::Unstuck(head, Vec::new(), Arc::new(value))) + } + Some((_, None)) | None => { + let head = Head::Global(name.into(), universe_offset); + Arc::new(Value::Stuck(head, Vec::new())) + } + }, + TermData::Local(index) => match locals.get(*index) { + Some(value) => value.clone(), + // FIXME: Local gluing is kind of broken right now :( + // Some(value) => { + // let head = Head::Local(index.to_level(locals.size()).unwrap()); // TODO: Handle overflow + // let value = LazyValue::new(value.clone()); // FIXME: Apply universe_offset? 
+ // Arc::new(Value::Unstuck(head, Vec::new(), Arc::new(value))) + // } + None => { + let head = Head::Local(index.to_level(locals.size()).unwrap()); // TODO: Handle overflow + Arc::new(Value::Stuck(head, Vec::new())) + } + }, + + TermData::Ann(term, _) => eval_term(globals, universe_offset, locals, term), + + TermData::TypeType(level) => { + let universe_level = (*level + universe_offset).unwrap(); // FIXME: Handle overflow + Arc::new(Value::type_type(universe_level)) + } + TermData::Lift(term, offset) => { + let universe_offset = (universe_offset + *offset).unwrap(); // FIXME: Handle overflow + eval_term(globals, universe_offset, locals, term) + } + + TermData::RecordType(type_entries) => Arc::new(Value::RecordType(RecordClosure::new( + universe_offset, + locals.clone(), + type_entries.clone(), + ))), + TermData::RecordTerm(term_entries) => Arc::new(Value::RecordTerm(RecordClosure::new( + universe_offset, + locals.clone(), + term_entries.clone(), + ))), + TermData::RecordElim(head, label) => { + let head = eval_term(globals, universe_offset, locals, head); + apply_record_elim(globals, head, label) + } + + TermData::FunctionType(input_name_hint, input_type, output_type) => { + Arc::new(Value::FunctionType( + input_name_hint.clone(), + eval_term(globals, universe_offset, locals, input_type), + FunctionClosure::new(universe_offset, locals.clone(), output_type.clone()), + )) + } + TermData::FunctionTerm(input_name, output_term) => Arc::new(Value::FunctionTerm( + input_name.clone(), + FunctionClosure::new(universe_offset, locals.clone(), output_term.clone()), + )), + TermData::FunctionElim(head, input) => { + let head = eval_term(globals, universe_offset, locals, head); + let input = LazyValue::eval_term(universe_offset, locals.clone(), input.clone()); + apply_function_elim(globals, head, Arc::new(input)) + } + + TermData::ArrayTerm(term_entries) => { + let value_entries = term_entries + .iter() + .map(|entry_term| eval_term(globals, universe_offset, locals, 
entry_term)) + .collect(); + + Arc::new(Value::ArrayTerm(value_entries)) + } + TermData::ListTerm(term_entries) => { + let value_entries = term_entries + .iter() + .map(|entry_term| eval_term(globals, universe_offset, locals, entry_term)) + .collect(); + + Arc::new(Value::ListTerm(value_entries)) + } + + TermData::Constant(constant) => Arc::new(Value::from(constant.clone())), + + TermData::Error => Arc::new(Value::Error), + } +} + +/// Return the type of the record elimination. +pub fn record_elim_type( + globals: &Globals, + head_value: Arc, + label: &str, + closure: &RecordClosure, +) -> Option> { + closure.find_entry(globals, |entry_label, entry_type| { + if entry_label == label { + Ok(entry_type) + } else { + Err(apply_record_elim(globals, head_value.clone(), label)) + } + }) +} + +/// Apply a record term elimination. +fn apply_record_elim(globals: &Globals, mut head_value: Arc, label: &str) -> Arc { + match Arc::make_mut(&mut head_value) { + Value::Stuck(_, spine) => { + spine.push(Elim::Record(label.to_owned())); + head_value + } + Value::Unstuck(_, spine, value) => { + spine.push(Elim::Record(label.to_owned())); + *value = Arc::new(LazyValue::apply_elim( + value.clone(), + Elim::Record(label.to_owned()), + )); + head_value + } + + Value::RecordTerm(closure) => closure + .find_entry(globals, |entry_label, entry_value| { + if entry_label == label { + Ok(entry_value) + } else { + Err(entry_value) + } + }) + .unwrap_or_else(|| Arc::new(Value::Error)), + + _ => Arc::new(Value::Error), + } +} + +/// Apply a function term elimination. 
+fn apply_function_elim( + globals: &Globals, + mut head_value: Arc, + input: Arc, +) -> Arc { + match Arc::make_mut(&mut head_value) { + Value::Stuck(_, spine) => { + spine.push(Elim::Function(input)); + head_value + } + Value::Unstuck(_, spine, value) => { + spine.push(Elim::Function(input.clone())); + *value = Arc::new(LazyValue::apply_elim(value.clone(), Elim::Function(input))); + head_value + } + + Value::FunctionTerm(_, output_closure) => { + output_closure.apply(globals, input.force(globals).clone()) + } + + _ => Arc::new(Value::Error), + } +} + +/// Describes how definitions should be unfolded to when reading back values. +#[derive(Copy, Clone, Debug)] +pub enum Unfold { + /// Never unfold definitions. + /// + /// This avoids generating bloated terms, which can be detrimental for + /// performance and difficult for humans to read. Examples of where this + /// might be useful include: + /// + /// - elaborating partially annotated surface terms into core terms that + /// require explicit type annotations + /// - displaying terms in diagnostic messages to the user + Never, + /// Always unfold global and local definitions. + /// + /// This is useful for fully normalizing terms. + Always, +} + +/// Read-back a spine of eliminators into the term syntax. 
+fn read_back_stuck_value( + globals: &Globals, + local_size: LocalSize, + unfold: Unfold, + head: &Head, + spine: &[Elim], +) -> Term { + let head = match head { + Head::Global(name, shift) => { + let global = Term::generated(TermData::Global(name.clone())); + match shift { + UniverseOffset(0) => global, + shift => Term::generated(TermData::Lift(Arc::new(global), *shift)), + } + } + Head::Local(level) => { + let index = level.to_index(local_size).unwrap(); + Term::generated(TermData::Local(index)) // TODO: Handle overflow + } + }; + + spine.iter().fold(head, |head, elim| match elim { + Elim::Function(input) => { + let input = read_back_value(globals, local_size, unfold, input.force(globals)); + Term::generated(TermData::FunctionElim(Arc::new(head), Arc::new(input))) + } + Elim::Record(label) => Term::generated(TermData::RecordElim(Arc::new(head), label.clone())), + }) +} + +/// Read-back a value into the term syntax. +pub fn read_back_value( + globals: &Globals, + local_size: LocalSize, + unfold: Unfold, + value: &Value, +) -> Term { + match value { + Value::Stuck(head, spine) => { + read_back_stuck_value(globals, local_size, unfold, head, spine) + } + Value::Unstuck(head, spine, value) => match unfold { + Unfold::Never => read_back_stuck_value(globals, local_size, unfold, head, spine), + Unfold::Always => read_back_value(globals, local_size, unfold, value.force(globals)), + }, + + Value::TypeType(level) => Term::generated(TermData::TypeType(*level)), + + Value::FunctionType(input_name_hint, input_type, output_closure) => { + let local = Arc::new(Value::local(local_size.next_level(), [])); + let input_type = Arc::new(read_back_value(globals, local_size, unfold, input_type)); + let output_type = output_closure.apply(globals, local); + let output_type = + read_back_value(globals, local_size.increment(), unfold, &output_type); + + Term::generated(TermData::FunctionType( + input_name_hint.clone(), + input_type, + Arc::new(output_type), + )) + } + 
Value::FunctionTerm(input_name_hint, output_closure) => { + let local = Arc::new(Value::local(local_size.next_level(), [])); + let output_term = output_closure.apply(globals, local); + let output_term = + read_back_value(globals, local_size.increment(), unfold, &output_term); + + Term::generated(TermData::FunctionTerm( + input_name_hint.clone(), + Arc::new(output_term), + )) + } + + Value::RecordType(closure) => { + let mut local_size = local_size; + let mut type_entries = Vec::with_capacity(closure.entries.len()); + + closure.for_each_entry(globals, |label, entry_type| { + let entry_type = read_back_value(globals, local_size, unfold, &entry_type); + type_entries.push((label.to_owned(), Arc::new(entry_type))); + + let local_level = local_size.next_level(); + local_size = local_size.increment(); + + Arc::new(Value::local(local_level, [])) + }); + + Term::generated(TermData::RecordType(type_entries.into())) + } + Value::RecordTerm(closure) => { + let mut local_size = local_size; + let mut term_entries = Vec::with_capacity(closure.entries.len()); + + closure.for_each_entry(globals, |label, entry_term| { + let entry_term = read_back_value(globals, local_size, unfold, &entry_term); + term_entries.push((label.to_owned(), Arc::new(entry_term))); + + let local_level = local_size.next_level(); + local_size = local_size.increment(); + + Arc::new(Value::local(local_level, [])) + }); + + Term::generated(TermData::RecordTerm(term_entries.into())) + } + + Value::ArrayTerm(value_entries) => { + let term_entries = value_entries + .iter() + .map(|value_entry| { + Arc::new(read_back_value(globals, local_size, unfold, value_entry)) + }) + .collect(); + + Term::generated(TermData::ArrayTerm(term_entries)) + } + Value::ListTerm(value_entries) => { + let term_entries = value_entries + .iter() + .map(|value_entry| { + Arc::new(read_back_value(globals, local_size, unfold, value_entry)) + }) + .collect(); + + Term::generated(TermData::ListTerm(term_entries)) + } + + 
Value::Constant(constant) => Term::generated(TermData::from(constant.clone())), + + Value::Error => Term::generated(TermData::Error), + } +} + +/// Check that one stuck value is equal to another stuck value. +fn is_equal_stuck_value( + globals: &Globals, + local_size: LocalSize, + (head0, spine0): (&Head, &[Elim]), + (head1, spine1): (&Head, &[Elim]), +) -> bool { + if head0 != head1 || spine0.len() != spine1.len() { + return false; + } + + for (elim0, elim1) in Iterator::zip(spine0.iter(), spine1.iter()) { + match (elim0, elim1) { + (Elim::Function(input0), Elim::Function(input1)) => { + let input0 = input0.force(globals); + let input1 = input1.force(globals); + + if !is_equal(globals, local_size, input0, input1) { + return false; + } + } + (Elim::Record(label0), Elim::Record(label1)) if label0 == label1 => {} + (_, _) => return false, + } + } + + true +} + +/// Check that one value is [computationally equal] to another value. +/// +/// [computationally equal]: https://ncatlab.org/nlab/show/equality#computational_equality +fn is_equal(globals: &Globals, local_size: LocalSize, value0: &Value, value1: &Value) -> bool { + match (value0, value1) { + (Value::Stuck(head0, spine0), Value::Stuck(head1, spine1)) => { + is_equal_stuck_value(globals, local_size, (head0, spine0), (head1, spine1)) + } + (Value::Unstuck(head0, spine0, value0), Value::Unstuck(head1, spine1, value1)) => { + if is_equal_stuck_value(globals, local_size, (head0, spine0), (head1, spine1)) { + // No need to force computation if the stuck values are the same! 
+ return true; + } + + let value0 = value0.force(globals); + let value1 = value1.force(globals); + is_equal(globals, local_size, value0, value1) + } + (Value::Unstuck(_, _, value0), value1) => { + is_equal(globals, local_size, value0.force(globals), value1) + } + (value0, Value::Unstuck(_, _, value1)) => { + is_equal(globals, local_size, value0, value1.force(globals)) + } + + (Value::TypeType(level0), Value::TypeType(level1)) => level0 == level1, + + ( + Value::FunctionType(_, input_type0, output_closure0), + Value::FunctionType(_, input_type1, output_closure1), + ) => { + if !is_equal(globals, local_size, input_type1, input_type0) { + return false; + } + + let local = Arc::new(Value::local(local_size.next_level(), [])); + is_equal( + globals, + local_size.increment(), + &output_closure0.apply(globals, local.clone()), + &output_closure1.apply(globals, local), + ) + } + (Value::FunctionTerm(_, output_closure0), Value::FunctionTerm(_, output_closure1)) => { + let local = Arc::new(Value::local(local_size.next_level(), [])); + is_equal( + globals, + local_size.increment(), + &output_closure0.apply(globals, local.clone()), + &output_closure1.apply(globals, local), + ) + } + + (Value::RecordType(closure0), Value::RecordType(closure1)) => { + if closure0.entries.len() != closure1.entries.len() { + return false; + } + + let mut local_size = local_size; + let universe_offset0 = closure0.universe_offset; + let universe_offset1 = closure1.universe_offset; + let mut locals0 = closure0.locals.clone(); + let mut locals1 = closure1.locals.clone(); + + for ((label0, entry_type0), (label1, entry_type1)) in + Iterator::zip(closure0.entries.iter(), closure1.entries.iter()) + { + if label0 != label1 { + return false; + } + + let entry_type0 = eval_term(globals, universe_offset0, &mut locals0, entry_type0); + let entry_type1 = eval_term(globals, universe_offset1, &mut locals1, entry_type1); + + if !is_equal(globals, local_size, &entry_type0, &entry_type1) { + return false; + } + + let 
local_level = local_size.next_level(); + locals0.push(Arc::new(Value::local(local_level, []))); + locals1.push(Arc::new(Value::local(local_level, []))); + local_size = local_size.increment(); + } + + true + } + (Value::RecordTerm(closure0), Value::RecordTerm(closure1)) => { + if closure0.entries.len() != closure1.entries.len() { + return false; + } + + let mut local_size = local_size; + let universe_offset0 = closure0.universe_offset; + let universe_offset1 = closure1.universe_offset; + let mut locals0 = closure0.locals.clone(); + let mut locals1 = closure1.locals.clone(); + + for ((label0, entry_type0), (label1, entry_type1)) in + Iterator::zip(closure0.entries.iter(), closure1.entries.iter()) + { + if label0 != label1 { + return false; + } + + let entry_type0 = eval_term(globals, universe_offset0, &mut locals0, entry_type0); + let entry_type1 = eval_term(globals, universe_offset1, &mut locals1, entry_type1); + + if !is_equal(globals, local_size, &entry_type0, &entry_type1) { + return false; + } + + let local_level = local_size.next_level(); + locals0.push(Arc::new(Value::local(local_level, []))); + locals1.push(Arc::new(Value::local(local_level, []))); + local_size = local_size.increment(); + } + + true + } + + (Value::ArrayTerm(value_entries0), Value::ArrayTerm(value_entries1)) + | (Value::ListTerm(value_entries0), Value::ListTerm(value_entries1)) => { + if value_entries0.len() != value_entries1.len() { + return false; + } + + Iterator::zip(value_entries0.iter(), value_entries1.iter()).all( + |(value_entry0, value_entry1)| { + is_equal(globals, local_size, value_entry0, value_entry1) + }, + ) + } + + (Value::Constant(constant0), Value::Constant(constant1)) => constant0 == constant1, + + // Errors are always treated as subtypes, regardless of what they are compared with. + (Value::Error, _) | (_, Value::Error) => true, + // Anything else is not equal! + (_, _) => false, + } +} + +/// Check that one [`Value`] is a subtype of another [`Value`]. 
+/// +/// Returns `false` if either value is not a type. +/// +/// [`Value`]: crate::lang::core::semantics::Value +pub fn is_subtype( + globals: &Globals, + local_size: LocalSize, + value0: &Value, + value1: &Value, +) -> bool { + // FIXME: It would be nice if we could replace this function with a + // [`crate::pass::surface_to_core::coerce`] function in the elaborator. + // This would allow us to remove the dependency on subtyping in the + // [`crate::lang::core::typing`] module. + // + // More information on using coercions for universe subtyping can be found + // in [“Notes on Universes in Type Theory”][notes-on-universes-in-tt] + // by Zhaohui Luo. + // + // [notes-on-universes-in-tt]: http://www.cs.rhul.ac.uk/home/zhaohui/universes.pdf + + match (value0, value1) { + (Value::Stuck(head0, spine0), Value::Stuck(head1, spine1)) => { + is_equal_stuck_value(globals, local_size, (head0, spine0), (head1, spine1)) + } + (Value::Unstuck(head0, spine0, value0), Value::Unstuck(head1, spine1, value1)) => { + if is_equal_stuck_value(globals, local_size, (head0, spine0), (head1, spine1)) { + // No need to force computation if the spines are the same! 
+ return true; + } + + let value0 = value0.force(globals); + let value1 = value1.force(globals); + is_subtype(globals, local_size, value0, value1) + } + (Value::Unstuck(_, _, value0), value1) => { + is_subtype(globals, local_size, value0.force(globals), value1) + } + (value0, Value::Unstuck(_, _, value1)) => { + is_subtype(globals, local_size, value0, value1.force(globals)) + } + + (Value::TypeType(level0), Value::TypeType(level1)) => level0 <= level1, + + ( + Value::FunctionType(_, input_type0, output_closure0), + Value::FunctionType(_, input_type1, output_closure1), + ) => { + if !is_subtype(globals, local_size, input_type1, input_type0) { + return false; + } + + let local = Arc::new(Value::local(local_size.next_level(), [])); + let output_term0 = output_closure0.apply(globals, local.clone()); + let output_term1 = output_closure1.apply(globals, local); + + is_subtype( + globals, + local_size.increment(), + &output_term0, + &output_term1, + ) + } + + (Value::RecordType(closure0), Value::RecordType(closure1)) => { + if closure0.entries.len() != closure1.entries.len() { + return false; + } + + let mut local_size = local_size; + let universe_offset0 = closure0.universe_offset; + let universe_offset1 = closure1.universe_offset; + let mut locals0 = closure0.locals.clone(); + let mut locals1 = closure1.locals.clone(); + + for ((label0, entry_type0), (label1, entry_type1)) in + Iterator::zip(closure0.entries.iter(), closure1.entries.iter()) + { + if label0 != label1 { + return false; + } + + let entry_type0 = eval_term(globals, universe_offset0, &mut locals0, entry_type0); + let entry_type1 = eval_term(globals, universe_offset1, &mut locals1, entry_type1); + + if !is_subtype(globals, local_size, &entry_type0, &entry_type1) { + return false; + } + + let local_level = local_size.next_level(); + locals0.push(Arc::new(Value::local(local_level, []))); + locals1.push(Arc::new(Value::local(local_level, []))); + local_size = local_size.increment(); + } + + true + } + + // Errors 
are always treated as subtypes, regardless of what they are compared with. + (Value::Error, _) | (_, Value::Error) => true, + // Anything else is not equal! + (_, _) => false, + } +} diff --git a/pikelet/src/lang/core/typing.rs b/pikelet/src/lang/core/typing.rs new file mode 100644 index 000000000..2a40ce667 --- /dev/null +++ b/pikelet/src/lang/core/typing.rs @@ -0,0 +1,446 @@ +//! Bidirectional type checker for the [core language]. +//! +//! This is a simpler implementation of type checking than the one found in +//! [pass::surface_to_core], because it only needs to check the (much simpler) +//! core language, and doesn't need to perform any additional elaboration. +//! We can use it as a way to validate that elaborated terms are well-formed +//! for debugging and development purposes. +//! +//! [core language]: crate::lang::core +//! [`pass::surface_to_core`]: crate::pass::surface_to_core + +use contracts::debug_ensures; +use crossbeam_channel::Sender; +use std::sync::Arc; + +use crate::lang::core::semantics::{self, Elim, RecordClosure, Unfold, Value}; +use crate::lang::core::{ + Constant, Globals, LocalLevel, Locals, Term, TermData, UniverseLevel, UniverseOffset, +}; +use crate::reporting::{AmbiguousTerm, CoreTypingMessage, ExpectedType, Message}; + +/// The state of the type checker. +pub struct State<'me> { + /// Global definition environment. + globals: &'me Globals, + /// The current universe offset. + universe_offset: UniverseOffset, + /// Local type environment (used for getting the types of local variables). + local_declarations: Locals>, + /// Local value environment (used for evaluation). + local_definitions: Locals>, + /// The diagnostic messages accumulated during type checking. + message_tx: Sender, +} + +impl<'me> State<'me> { + /// Construct a new type checker state. 
+ pub fn new(globals: &'me Globals, message_tx: Sender) -> State<'me> { + State { + globals, + universe_offset: UniverseOffset(0), + local_declarations: Locals::new(), + local_definitions: Locals::new(), + message_tx, + } + } + + /// Get the next level to be used for a local entry. + fn next_level(&self) -> LocalLevel { + self.local_definitions.size().next_level() + } + + /// Push a local entry. + fn push_local(&mut self, value: Arc, r#type: Arc) { + self.local_declarations.push(r#type); + self.local_definitions.push(value); + } + + /// Push a local parameter. + fn push_local_param(&mut self, r#type: Arc) -> Arc { + let value = Arc::new(Value::local(self.next_level(), [])); + self.push_local(value.clone(), r#type); + value + } + + /// Pop a local entry. + fn pop_local(&mut self) { + self.local_declarations.pop(); + self.local_definitions.pop(); + } + + /// Pop the given number of local entries. + fn pop_many_locals(&mut self, count: usize) { + self.local_declarations.pop_many(count); + self.local_definitions.pop_many(count); + } + + /// Report a diagnostic message. + fn report(&self, message: CoreTypingMessage) { + self.message_tx.send(message.into()).unwrap(); + } + + /// Evaluate a [`Term`] into a [`Value`]. + /// + /// [`Value`]: crate::lang::core::semantics::Value + /// [`Term`]: crate::lang::core::Term + pub fn eval_term(&mut self, term: &Term) -> Arc { + semantics::eval_term( + self.globals, + self.universe_offset, + &mut self.local_definitions, + term, + ) + } + + /// Return the type of the record elimination. + pub fn record_elim_type( + &self, + head_value: Arc, + name: &str, + closure: &RecordClosure, + ) -> Option> { + semantics::record_elim_type(self.globals, head_value, name, closure) + } + + /// Read back a value into a normal form using the current state of the elaborator. 
+ pub fn read_back_value(&self, value: &Value) -> Term { + semantics::read_back_value( + self.globals, + self.local_definitions.size(), + Unfold::Never, + value, + ) + } + + /// Check that one [`Value`] is a subtype of another [`Value`]. + /// + /// Returns `false` if either value is not a type. + /// + /// [`Value`]: crate::lang::core::semantics::Value + pub fn is_subtype(&self, value0: &Value, value1: &Value) -> bool { + semantics::is_subtype(self.globals, self.local_definitions.size(), value0, value1) + } + + /// Check that a term is a type and return the universe level it inhabits. + #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn is_type(&mut self, term: &Term) -> Option { + let r#type = self.synth_type(term); + match r#type.force(self.globals) { + Value::TypeType(level) => Some(*level), + Value::Error => None, + _ => { + self.report(CoreTypingMessage::MismatchedTypes { + found_type: self.read_back_value(&r#type), + expected_type: ExpectedType::Universe, + }); + None + } + } + } + + /// Check that a term is an element of a type. 
+ #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn check_type(&mut self, term: &Term, expected_type: &Arc) { + match (&term.data, expected_type.force(self.globals)) { + (_, Value::Error) => {} + + ( + TermData::FunctionTerm(_, output_term), + Value::FunctionType(_, input_type, output_closure), + ) => { + let input_term = self.push_local_param(input_type.clone()); + let output_type = output_closure.apply(self.globals, input_term); + self.check_type(output_term, &output_type); + self.pop_local(); + } + (TermData::FunctionTerm(_, _), _) => { + self.report(CoreTypingMessage::TooManyInputsInFunctionTerm); + } + + (TermData::RecordTerm(term_entries), Value::RecordType(closure)) => { + let mut pending_term_entries = term_entries.iter(); + let mut missing_labels = Vec::new(); + let mut unexpected_labels = Vec::new(); + let mut term_entry_count = 0; + + closure.for_each_entry(self.globals, |label, entry_type| loop { + match pending_term_entries.next() { + Some((next_label, entry_term)) if next_label == label => { + self.check_type(&entry_term, &entry_type); + let entry_value = self.eval_term(&entry_term); + + self.push_local(entry_value.clone(), entry_type); + term_entry_count += 1; + + break entry_value; + } + Some((next_label, _)) => unexpected_labels.push(next_label.to_owned()), + None => { + missing_labels.push(label.to_owned()); + break Arc::new(Value::Error); + } + } + }); + + self.pop_many_locals(term_entry_count); + unexpected_labels.extend(pending_term_entries.map(|(label, _)| label.clone())); + + if !missing_labels.is_empty() || !unexpected_labels.is_empty() { + self.report(CoreTypingMessage::InvalidRecordTerm { + missing_labels, + unexpected_labels, + }); + } + } + + (TermData::ArrayTerm(entry_terms), forced_type) => match forced_type.try_global() { + 
Some(("Array", _, [Elim::Function(len), Elim::Function(entry_type)])) => { + let forced_entry_type = entry_type.force(self.globals); + for entry_term in entry_terms { + self.check_type(entry_term, forced_entry_type); + } + + match len.force(self.globals).as_ref() { + Value::Constant(Constant::U32(len)) + if *len as usize == entry_terms.len() => {} + _ => { + self.report(CoreTypingMessage::MismatchedTypes { + expected_type: ExpectedType::Type( + self.read_back_value(expected_type), + ), + found_type: self.read_back_value(&Value::global( + "Array", + 0, + [ + Elim::Function(len.clone()), + Elim::Function(entry_type.clone()), + ], + )), + }); + } + } + } + Some(_) | None => { + let expected_type = self.read_back_value(expected_type); + self.report(CoreTypingMessage::UnexpectedArrayTerm { expected_type }) + } + }, + (TermData::ListTerm(entry_terms), forced_type) => match forced_type.try_global() { + Some(("List", _, [Elim::Function(entry_type)])) => { + let forced_entry_type = entry_type.force(self.globals); + for entry_term in entry_terms { + self.check_type(entry_term, forced_entry_type); + } + } + Some(_) | None => { + let expected_type = self.read_back_value(expected_type); + self.report(CoreTypingMessage::UnexpectedListTerm { expected_type }) + } + }, + + (_, _) => match self.synth_type(term) { + found_type if self.is_subtype(&found_type, expected_type) => {} + found_type => self.report(CoreTypingMessage::MismatchedTypes { + found_type: self.read_back_value(&found_type), + expected_type: ExpectedType::Type(self.read_back_value(expected_type)), + }), + }, + } + } + + /// Synthesize the type of a term. 
+ #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn synth_type(&mut self, term: &Term) -> Arc { + match &term.data { + TermData::Global(name) => match self.globals.get(name) { + Some((r#type, _)) => self.eval_term(r#type), + None => { + self.report(CoreTypingMessage::UnboundGlobal { + name: name.to_owned(), + }); + Arc::new(Value::Error) + } + }, + TermData::Local(index) => match self.local_declarations.get(*index) { + Some(r#type) => r#type.clone(), + None => { + self.report(CoreTypingMessage::UnboundLocal); + Arc::new(Value::Error) + } + }, + + TermData::Ann(term, r#type) => { + self.is_type(r#type); + let r#type = self.eval_term(r#type); + self.check_type(term, &r#type); + r#type + } + + TermData::TypeType(level) => match *level + UniverseOffset(1) { + Some(level) => Arc::new(Value::type_type(level)), + None => { + self.report(CoreTypingMessage::MaximumUniverseLevelReached); + Arc::new(Value::Error) + } + }, + TermData::Lift(term, offset) => match self.universe_offset + *offset { + Some(new_offset) => { + let previous_offset = std::mem::replace(&mut self.universe_offset, new_offset); + let r#type = self.synth_type(term); + self.universe_offset = previous_offset; + r#type + } + None => { + self.report(CoreTypingMessage::MaximumUniverseLevelReached); + Arc::new(Value::Error) + } + }, + + TermData::FunctionType(_, input_type, output_type) => { + let input_level = self.is_type(input_type); + let input_type = match input_level { + None => Arc::new(Value::Error), + Some(_) => self.eval_term(input_type), + }; + + self.push_local_param(input_type); + let output_level = self.is_type(output_type); + self.pop_local(); + + match (input_level, output_level) { + (Some(input_level), Some(output_level)) => { + Arc::new(Value::TypeType(std::cmp::max(input_level, output_level))) 
+ } + (_, _) => Arc::new(Value::Error), + } + } + TermData::FunctionTerm(_, _) => { + self.report(CoreTypingMessage::AmbiguousTerm { + term: AmbiguousTerm::FunctionTerm, + }); + Arc::new(Value::Error) + } + TermData::FunctionElim(head_term, input_term) => { + let head_type = self.synth_type(head_term); + match head_type.force(self.globals) { + Value::FunctionType(_, input_type, output_closure) => { + self.check_type(input_term, &input_type); + let input_value = self.eval_term(input_term); + output_closure.apply(self.globals, input_value) + } + Value::Error => Arc::new(Value::Error), + _ => { + let head_type = self.read_back_value(&head_type); + self.report(CoreTypingMessage::TooManyInputsInFunctionElim { head_type }); + Arc::new(Value::Error) + } + } + } + + TermData::RecordTerm(term_entries) => { + if term_entries.is_empty() { + Arc::from(Value::RecordType(RecordClosure::new( + self.universe_offset, + self.local_definitions.clone(), + Arc::new([]), + ))) + } else { + self.report(CoreTypingMessage::AmbiguousTerm { + term: AmbiguousTerm::RecordTerm, + }); + Arc::new(Value::Error) + } + } + TermData::RecordType(type_entries) => { + use std::collections::BTreeSet; + + let mut max_level = UniverseLevel(0); + let mut duplicate_labels = Vec::new(); + let mut seen_labels = BTreeSet::new(); + + for (name, r#type) in type_entries.iter() { + if !seen_labels.insert(name) { + duplicate_labels.push(name.clone()); + } + max_level = match self.is_type(r#type) { + Some(level) => std::cmp::max(max_level, level), + None => { + self.pop_many_locals(seen_labels.len()); + return Arc::new(Value::Error); + } + }; + let r#type = self.eval_term(r#type); + self.push_local_param(r#type); + } + + self.pop_many_locals(seen_labels.len()); + + if !duplicate_labels.is_empty() { + self.report(CoreTypingMessage::InvalidRecordType { duplicate_labels }); + } + + Arc::new(Value::TypeType(max_level)) + } + TermData::RecordElim(head_term, label) => { + let head_type = self.synth_type(head_term); + + 
match head_type.force(self.globals) { + Value::RecordType(closure) => { + let head_value = self.eval_term(head_term); + + if let Some(entry_type) = self.record_elim_type(head_value, label, closure) + { + return entry_type; + } + } + Value::Error => return Arc::new(Value::Error), + _ => {} + } + + let head_type = self.read_back_value(&head_type); + self.report(CoreTypingMessage::LabelNotFound { + expected_label: label.clone(), + head_type, + }); + Arc::new(Value::Error) + } + + TermData::ArrayTerm(_) => { + self.report(CoreTypingMessage::AmbiguousTerm { + term: AmbiguousTerm::Sequence, + }); + Arc::new(Value::Error) + } + TermData::ListTerm(_) => { + self.report(CoreTypingMessage::AmbiguousTerm { + term: AmbiguousTerm::Sequence, + }); + Arc::new(Value::Error) + } + + TermData::Constant(constant) => Arc::new(match constant { + Constant::U8(_) => Value::global("U8", 0, []), + Constant::U16(_) => Value::global("U16", 0, []), + Constant::U32(_) => Value::global("U32", 0, []), + Constant::U64(_) => Value::global("U64", 0, []), + Constant::S8(_) => Value::global("S8", 0, []), + Constant::S16(_) => Value::global("S16", 0, []), + Constant::S32(_) => Value::global("S32", 0, []), + Constant::S64(_) => Value::global("S64", 0, []), + Constant::F32(_) => Value::global("F32", 0, []), + Constant::F64(_) => Value::global("F64", 0, []), + Constant::Char(_) => Value::global("Char", 0, []), + Constant::String(_) => Value::global("String", 0, []), + }), + + TermData::Error => Arc::new(Value::Error), + } + } +} diff --git a/pikelet/src/lang/surface.rs b/pikelet/src/lang/surface.rs new file mode 100644 index 000000000..b4d308782 --- /dev/null +++ b/pikelet/src/lang/surface.rs @@ -0,0 +1,94 @@ +//! The surface language. +//! +//! This is a user-friendly concrete syntax for the language. 
+ +use crossbeam_channel::Sender; + +use crate::lang::{FileId, Located, Location}; +use crate::reporting::Message; + +mod lexer; + +#[allow(clippy::all, unused_parens)] +mod grammar { + include!(concat!(env!("OUT_DIR"), "/lang/surface/grammar.rs")); +} + +/// Entry in a [record type](Term::RecordType). +pub type TypeEntry = (Located, Option>, Term); +/// Entry in a [record term](Term::RecordTerm). +pub type TermEntry = (Located, Option>, Term); +/// A group of function inputs that are elements of the same type. +pub type InputGroup = (Vec>, Term); + +pub type Term = Located; + +/// Terms in the surface language. +#[derive(Debug, Clone)] +pub enum TermData { + /// Names. + Name(String), + + /// Annotated terms. + Ann(Box, Box), + + /// Lift a term by the given number of universe levels. + Lift(Box, u32), + + /// Function types. + /// + /// Also known as: pi type, dependent product type. + FunctionType(Vec, Box), + /// Arrow function types. + /// + /// Also known as: non-dependent function type. + FunctionArrowType(Box, Box), + /// Function terms. + /// + /// Also known as: lambda abstraction, anonymous function. + FunctionTerm(Vec>, Box), + /// Function eliminations. + /// + /// Also known as: function application. + FunctionElim(Box, Vec), + + /// Record types. + RecordType(Vec), + /// Record terms. + RecordTerm(Vec), + /// Record eliminations. + /// + /// Also known as: record projections, field lookup. + RecordElim(Box, Located), + + /// Ordered sequences. + SequenceTerm(Vec), + /// Character literals. + CharTerm(String), + /// String literals. + StringTerm(String), + /// Numeric literals. + NumberTerm(String), + + /// Error sentinel. + Error, +} + +impl<'input> Term { + /// Parse a term from an input string. 
+ #[allow(clippy::should_implement_trait)] + pub fn from_str(file_id: FileId, input: &str, messages_tx: &Sender) -> Term { + let tokens = lexer::tokens(file_id, input); + grammar::TermParser::new() + .parse(file_id, tokens) + .unwrap_or_else(|error| { + messages_tx + .send(Message::from_lalrpop(file_id, error)) + .unwrap(); + Term::new( + Location::file_range(file_id, 0..input.len()), + TermData::Error, + ) + }) + } +} diff --git a/pikelet/src/lang/surface/grammar.lalrpop b/pikelet/src/lang/surface/grammar.lalrpop new file mode 100644 index 000000000..5a41b483b --- /dev/null +++ b/pikelet/src/lang/surface/grammar.lalrpop @@ -0,0 +1,124 @@ +use crate::lang::{FileId, Located, Location}; +use crate::lang::surface::{Term, TermData, TypeEntry, TermEntry}; +use crate::lang::surface::lexer::Token; +use crate::reporting::LexerError; + +grammar<'input>(file_id: FileId); + +extern { + type Location = usize; + type Error = LexerError; + + enum Token<'input> { + "doc comment" => Token::DocComment(<&'input str>), + "character literal" => Token::CharLiteral(<&'input str>), + "string literal" => Token::StringLiteral(<&'input str>), + "numeric literal" => Token::NumericLiteral(<&'input str>), + "name" => Token::Name(<&'input str>), + "shift" => Token::Shift(<&'input str>), + + "as" => Token::As, + "fun" => Token::FunTerm, + "Fun" => Token::FunType, + "record" => Token::RecordTerm, + "Record" => Token::RecordType, + + "->" => Token::Arrow, + "=>" => Token::DArrow, + ":" => Token::Colon, + "," => Token::Comma, + "." 
=> Token::Dot, + + "{" => Token::LBrace, + "}" => Token::RBrace, + "[" => Token::LBrack, + "]" => Token::RBrack, + "(" => Token::LParen, + ")" => Token::RParen, + "=" => Token::Equal, + } +} + +pub Term: Term = Located; +#[inline] ArrowTerm: Term = Located; +#[inline] ExprTerm: Term = Located; +#[inline] AppTerm: Term = Located; +#[inline] AtomicTerm: Term = Located; + +TermData: TermData = { + ExprTermData, + ":" => TermData::Ann(Box::new(term), Box::new(type_)), +}; + +ExprTermData: TermData = { + ArrowTermData, + "fun" +> "=>" => { + TermData::FunctionTerm(input_names, Box::new(output_term)) + }, +}; + +ArrowTermData: TermData = { + AppTermData, + "Fun" +> ":" ")")+> + "->" => + { + TermData::FunctionType(input_type_groups, Box::new(output_type)) + }, + "->" => { + TermData::FunctionArrowType(Box::new(input_type), Box::new(output_type)) + }, +}; + +AppTermData: TermData = { + AtomicTermData, + => { + TermData::FunctionElim(Box::new(head_term), input_terms) + }, +}; + +AtomicTermData: TermData = { + "(" ")" => term, + => TermData::Name(name), + => { + TermData::Lift(Box::new(term), shift[1..].parse().unwrap()) // FIXME: Overflow! + }, + "Record" "{" > "}" => TermData::RecordType(entries), + "record" "{" > "}" => TermData::RecordTerm(entries), + "." 
> => TermData::RecordElim(Box::new(head_term), label), + "[" > "]" => TermData::SequenceTerm(entries), + "character literal" => TermData::CharTerm(<>.to_owned()), + "string literal" => TermData::StringTerm(<>.to_owned()), + "numeric literal" => TermData::NumberTerm(<>.to_owned()), +}; + +#[inline] +List: Vec = { + ",")*> => { + entries.extend(last); + entries + } +} + +#[inline] +TypeEntry: TypeEntry = { + // TODO: Use doc comments + <_docs: "doc comment"*> + > >)?> ":" => (label, name, term), +}; + +#[inline] +TermEntry: TermEntry = { + // TODO: Use doc comments + <_docs: "doc comment"*> + > >)?> "=" => (label, name, term), +}; + +#[inline] +Name: String = { + "name" => (<>).to_owned(), +}; + +#[inline] +Located: Located = { + => Located::new(Location::file_range(file_id, start..end), data), +}; diff --git a/pikelet/src/lang/surface/lexer.rs b/pikelet/src/lang/surface/lexer.rs new file mode 100644 index 000000000..d4984e384 --- /dev/null +++ b/pikelet/src/lang/surface/lexer.rs @@ -0,0 +1,124 @@ +use logos::Logos; +use std::fmt; + +use crate::lang::{FileId, Location}; +use crate::reporting::LexerError; + +/// Tokens in the surface language. 
+#[derive(Debug, Clone, Logos)] +pub enum Token<'a> { + #[regex(r"\|\|\|(.*)\n")] + DocComment(&'a str), + #[regex(r#"'([^'\\]|\\.)*'"#)] + CharLiteral(&'a str), + #[regex(r#""([^"\\]|\\.)*""#)] + StringLiteral(&'a str), + #[regex(r"[-+]?[0-9][a-zA-Z0-9_\.]*")] + NumericLiteral(&'a str), + #[regex(r"[a-zA-Z][a-zA-Z0-9\-]*")] + Name(&'a str), + #[regex(r"\^[0-9]+(\.[0-9]+)?")] + Shift(&'a str), + + #[token("as")] + As, + #[token("fun")] + FunTerm, + #[token("Fun")] + FunType, + #[token("record")] + RecordTerm, + #[token("Record")] + RecordType, + + #[token(":")] + Colon, + #[token(",")] + Comma, + #[token("=>")] + DArrow, + #[token("->")] + Arrow, + #[token(".")] + Dot, + #[token("=")] + Equal, + + #[token("(")] + LParen, + #[token(")")] + RParen, + #[token("[")] + LBrack, + #[token("]")] + RBrack, + #[token("{")] + LBrace, + #[token("}")] + RBrace, + + #[error] + #[regex(r"\p{Whitespace}", logos::skip)] + #[regex(r"--(.*)\n", logos::skip)] + Error, +} + +impl<'a> fmt::Display for Token<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Token::DocComment(s) => write!(f, "{}", s), + Token::CharLiteral(s) => write!(f, "{}", s), + Token::StringLiteral(s) => write!(f, "{}", s), + Token::NumericLiteral(s) => write!(f, "{}", s), + Token::Name(s) => write!(f, "{}", s), + Token::Shift(s) => write!(f, "{}", s), + + Token::As => write!(f, "as"), + Token::FunTerm => write!(f, "fun"), + Token::FunType => write!(f, "Fun"), + Token::RecordTerm => write!(f, "record"), + Token::RecordType => write!(f, "Record"), + + Token::Colon => write!(f, ":"), + Token::Comma => write!(f, ","), + Token::DArrow => write!(f, "=>"), + Token::Arrow => write!(f, "->"), + Token::Equal => write!(f, "="), + Token::Dot => write!(f, "."), + + Token::LParen => write!(f, "("), + Token::RParen => write!(f, ")"), + Token::LBrack => write!(f, "["), + Token::RBrack => write!(f, "]"), + Token::LBrace => write!(f, "{{"), + Token::RBrace => write!(f, "}}"), + + Token::Error => 
write!(f, ""), + } + } +} + +pub type Spanned = Result<(Loc, Tok, Loc), Error>; + +pub fn tokens<'a>( + file_id: FileId, + source: &'a str, +) -> impl 'a + Iterator, usize, LexerError>> { + Token::lexer(source) + .spanned() + .map(move |(token, range)| match token { + Token::Error => Err(LexerError::InvalidToken { + location: Location::file_range(file_id, range), + }), + token => Ok((range.start, token, range.end)), + }) +} + +#[test] +fn behavior_after_error() { + let starts_with_invalid = "@."; + // [Err(...), Some(Token::DOT)] + let from_lex: Vec<_> = tokens(0, starts_with_invalid).collect(); + let result: Vec<_> = from_lex.iter().map(Result::is_ok).collect(); + assert_eq!(result, vec![false, true]); +} diff --git a/pikelet/src/lib.rs b/pikelet/src/lib.rs new file mode 100644 index 000000000..cbb5a1fd8 --- /dev/null +++ b/pikelet/src/lib.rs @@ -0,0 +1,9 @@ +//! A simple language. + +#![allow(clippy::new_without_default, clippy::drop_copy, clippy::drop_ref)] + +pub mod lang; +pub mod pass; + +mod literal; +pub mod reporting; diff --git a/pikelet/src/literal.rs b/pikelet/src/literal.rs new file mode 100644 index 000000000..6d0842523 --- /dev/null +++ b/pikelet/src/literal.rs @@ -0,0 +1,662 @@ +//! Decoding of [literals] in the surface language into Rust datatypes. +//! +//! [literals]: https://en.wikipedia.org/wiki/Literal_%28computer_programming%29 + +use crossbeam_channel::Sender; +use logos::Logos; +use num_traits::{Float, PrimInt, Signed, Unsigned}; + +use crate::lang::Location; +use crate::reporting::LiteralParseMessage::*; +use crate::reporting::Message; + +/// The maximum character code permitted in Unicode escape sequences. +pub const MAX_UNICODE: u32 = 0x10FFFF; +/// The maximum character code permitted in ASCII escape sequences. +pub const MAX_ASCII: u32 = 0x7F; + +/// The sign of a numeric literal. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Sign { + Positive, + Negative, +} + +/// The [base] of a numeric digit. 
+/// +/// [base]: https://en.wikipedia.org/wiki/Radix +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Base { + Binary, + Octal, + Decimal, + Hexadecimal, +} + +impl Base { + pub fn to_u8(self) -> u8 { + match self { + Base::Binary => 2, + Base::Octal => 8, + Base::Decimal => 10, + Base::Hexadecimal => 16, + } + } +} + +/// Convert the first byte of the source string to a digit. +fn ascii_digit<'source, Token>(lexer: &mut logos::Lexer<'source, Token>) -> Option +where + Token: Logos<'source, Source = [u8]>, +{ + match lexer.slice().first()? { + byte @ b'0'..=b'9' => Some(byte - b'0'), + byte @ b'a'..=b'z' => Some(byte - b'a' + 10), + byte @ b'A'..=b'Z' => Some(byte - b'A' + 10), + _ => None, + } +} + +/// Numeric literal tokens. +#[derive(Debug, Clone, Logos)] +enum NumericLiteral { + #[token(b"+", |_| Sign::Positive)] + #[token(b"-", |_| Sign::Negative)] + Sign(Sign), + #[token(b"0b", |_| Base::Binary)] + #[token(b"0o", |_| Base::Octal)] + #[token(b"0x", |_| Base::Hexadecimal)] + Base(Base), + #[regex(b"[0-9]", ascii_digit)] + Digit(u8), + + #[error] + Error, +} + +/// Digits up to base 32. +#[derive(Debug, Clone, Logos)] +enum Digit36 { + #[regex(b"[0-9a-zA-Z]", ascii_digit)] + Digit(u8), + #[regex(b"_+")] + Separator, + + #[error] + Error, +} + +/// Digits up to base 10. 
+#[derive(Debug, Clone, Logos)] +enum Digit10 { + #[regex(b"[0-9]", ascii_digit)] + Digit(u8), + #[regex(b"_+")] + Separator, + #[token(b".")] + StartFractional, + #[token(b"e")] + #[token(b"E")] + StartExponent, + + #[error] + Error, +} + +#[derive(Debug, Copy, Clone, PartialEq)] +enum Quote { + Single, + Double, +} + +impl Quote { + fn to_char(self) -> char { + match self { + Quote::Single => '\'', + Quote::Double => '\"', + } + } +} + +#[derive(Debug, Clone, Logos)] +enum QuotedLiteral { + #[token("\'", |_| Quote::Single)] + #[token("\"", |_| Quote::Double)] + Start(Quote), + + #[error] + Error, +} + +#[derive(Debug, Clone, Logos)] +enum QuotedText<'source> { + #[regex(r#"[^\\"']+"#)] + Utf8Text(&'source str), + #[token("\\")] + StartEscape, + #[token("\'", |_| Quote::Single)] + #[token("\"", |_| Quote::Double)] + End(Quote), + + #[error] + Error, +} + +#[derive(Debug, Clone, Logos)] +enum EscapeSequence { + #[token("\\", |_| '\\')] + #[token("n", |_| '\n')] + #[token("r", |_| '\r')] + #[token("t", |_| '\t')] + #[token("0", |_| '\0')] + #[token("\'", |_| '\'')] + #[token("\"", |_| '\"')] + Single(char), + #[token("u")] + StartUnicodeEscape, + #[token("x")] + StartAsciiEscape, + + #[error] + Error, +} + +#[derive(Debug, Clone, Logos)] +enum UnicodeEscape<'source> { + // TODO: digit separators? + #[regex(r"\{[0-9a-fA-F]*\}", |lexer| { + let len = lexer.slice().len(); + &lexer.slice()[1..(len - 1)] + })] + CharCode(&'source str), + #[regex(r"\{[^0-9a-fA-F]+\}")] + InvalidCharCode, + #[token("\'", |_| Quote::Single)] + #[token("\"", |_| Quote::Double)] + End(Quote), + + #[error] + Error, +} + +#[derive(Debug, Clone, Logos)] +enum AsciiEscape<'source> { + #[regex(r"[0-9a-fA-F][0-9a-fA-F]?")] + CharCode(&'source str), + #[token("\'", |_| Quote::Single)] + #[token("\"", |_| Quote::Double)] + End(Quote), + + #[error] + Error, +} + +/// Literal parser state. 
+pub struct State<'source, 'messages> { + location: Location, + source: &'source str, + message_tx: &'messages Sender, +} + +impl<'source, 'messages> State<'source, 'messages> { + pub fn new( + location: Location, + source: &'source str, + message_tx: &'messages Sender, + ) -> State<'source, 'messages> { + State { + location, + source, + message_tx, + } + } + + /// Report a diagnostic message. + fn report(&self, error: impl Into) -> Option { + self.message_tx.send(error.into()).unwrap(); + None + } + + /// Get the file-relative location of the current token. + fn token_location(&self, lexer: &logos::Lexer<'source, Token>) -> Location + where + Token: Logos<'source>, + { + match self.location { + Location::Generated => Location::Generated, + Location::FileRange(file_id, range) => Location::file_range( + file_id, + (range.start + lexer.span().start)..(range.start + lexer.span().end), + ), + } + } + + /// Parse a numeric literal into an unsigned integer. + /// + /// # Returns + /// + /// - `Some(_)`: If the literal was parsed correctly. + /// - `None`: If a fatal error when parsing the literal. + pub fn number_to_unsigned_int(self) -> Option { + let mut lexer = NumericLiteral::lexer(self.source.as_bytes()); + + let (base, start_digit) = match self.expect_numeric_literal_start(&mut lexer)? 
{ + (Sign::Positive, base, start_digit) => (base, start_digit), + (Sign::Negative, _, _) => return self.report(NegativeUnsignedInteger(self.location)), + }; + + let mut lexer = lexer.morph(); + let mut integer = T::zero(); + let mut num_digits = 0; + + if let Some(digit) = start_digit { + integer = self.add_integer_digit(Sign::Positive, base, integer, digit)?; + num_digits += 1; + } + + while let Some(token) = lexer.next() { + let location = self.token_location(&lexer); + match token { + Digit36::Digit(digit) if digit < base.to_u8() => { + integer = self.add_integer_digit(Sign::Positive, base, integer, digit)?; + num_digits += 1; + } + Digit36::Separator if num_digits != 0 => {} + Digit36::Separator => return self.report(ExpectedDigit(location, base)), + Digit36::Digit(_) | Digit36::Error => match num_digits { + 0 => return self.report(ExpectedDigit(location, base)), + _ => return self.report(ExpectedDigitOrSeparator(location, base)), + }, + } + } + + if num_digits == 0 { + return self.report(UnexpectedEndOfLiteral(self.token_location(&lexer))); + } + + Some(integer) + } + + /// Parse a numeric literal into a signed integer. + /// + /// # Returns + /// + /// - `Some(_)`: If the literal was parsed correctly. + /// - `None`: If a fatal error when parsing the literal. 
+ pub fn number_to_signed_int(self) -> Option { + let mut lexer = NumericLiteral::lexer(self.source.as_bytes()); + + let (sign, base, start_digit) = self.expect_numeric_literal_start(&mut lexer)?; + + let mut lexer = lexer.morph(); + let mut integer = T::zero(); + let mut num_digits = 0; + + if let Some(digit) = start_digit { + integer = self.add_integer_digit(sign, base, integer, digit)?; + num_digits += 1; + } + + while let Some(token) = lexer.next() { + let location = self.token_location(&lexer); + match token { + Digit36::Digit(digit) if digit < base.to_u8() => { + integer = self.add_integer_digit(sign, base, integer, digit)?; + num_digits += 1; + } + Digit36::Separator if num_digits != 0 => {} + Digit36::Separator => return self.report(ExpectedDigit(location, base)), + Digit36::Digit(_) | Digit36::Error => match num_digits { + 0 => return self.report(ExpectedDigit(location, base)), + _ => return self.report(ExpectedDigitOrSeparator(location, base)), + }, + } + } + + if num_digits == 0 { + return self.report(UnexpectedEndOfLiteral(self.token_location(&lexer))); + } + + Some(integer) + } + + /// Parse a numeric literal into a float. + /// + /// # Returns + /// + /// - `Some(_)`: If the literal was parsed correctly. + /// - `None`: If a fatal error when parsing the literal. + pub fn number_to_float>(self) -> Option { + // NOTE: This could probably be improved a great deal. 
+ // It might be worth looking at `lexical-core` crate as an alternative + // to implementing our own parser: https://github.com/Alexhuszagh/rust-lexical/ + + let mut lexer = NumericLiteral::lexer(self.source.as_bytes()); + + let add_digit = |sign, base: Base, float: T, digit: u8| match sign { + Sign::Positive => float * base.to_u8().into() + digit.into(), + Sign::Negative => float * base.to_u8().into() - digit.into(), + }; + + let (sign, base, start_digit) = self.expect_numeric_literal_start(&mut lexer)?; + + let mut float = T::zero(); + let mut num_integer_digits = 0; + + if let Some(digit) = start_digit { + float = add_digit(sign, base, float, digit); + num_integer_digits += 1; + } + + if base == Base::Decimal { + let mut lexer = lexer.morph(); + let mut has_fractional = false; + let mut has_exponent = false; + + while let Some(token) = lexer.next() { + let location = self.token_location(&lexer); + match token { + Digit10::Digit(digit) if digit < base.to_u8() => { + float = add_digit(sign, base, float, digit); + num_integer_digits += 1; + } + Digit10::Separator if num_integer_digits != 0 => {} + Digit10::Separator => return self.report(ExpectedDigit(location, base)), + Digit10::StartFractional => { + has_fractional = true; + break; + } + Digit10::StartExponent => { + has_exponent = true; + break; + } + Digit10::Digit(_) | Digit10::Error => match num_integer_digits { + 0 => return self.report(ExpectedDigit(location, base)), + _ => return self.report(ExpectedDigitSeparatorFracOrExp(location, base)), + }, + } + } + + if num_integer_digits == 0 { + return self.report(ExpectedDigit(self.token_location(&lexer), base)); + } + + if has_fractional { + let mut frac = T::zero(); + let mut num_frac_digits = 0; + + while let Some(token) = lexer.next() { + let location = self.token_location(&lexer); + match token { + Digit10::Digit(digit) if digit < base.to_u8() => { + frac = add_digit(sign, base, frac, digit); + num_frac_digits += 1; + } + Digit10::Separator if 
num_frac_digits != 0 => {} + Digit10::Separator => return self.report(ExpectedDigit(location, base)), + Digit10::StartExponent => { + has_exponent = true; + break; + } + Digit10::Digit(_) | Digit10::StartFractional | Digit10::Error => { + match num_frac_digits { + 0 => return self.report(ExpectedDigit(location, base)), + _ => { + return self.report(ExpectedDigitSeparatorOrExp(location, base)) + } + } + } + } + } + + if num_frac_digits == 0 { + return self.report(ExpectedDigit(self.token_location(&lexer), base)); + } + + float = float + frac / T::powi(base.to_u8().into(), num_frac_digits); + } + + if has_exponent { + return self.report(FloatLiteralExponentNotSupported( + self.token_location(&lexer), + )); + } + + Some(float) + } else { + self.report(UnsupportedFloatLiteralBase(self.location, base)) + } + } + + fn expect_numeric_literal_start( + &self, + lexer: &mut logos::Lexer<'source, NumericLiteral>, + ) -> Option<(Sign, Base, Option)> { + match self.expect_token(lexer)? { + NumericLiteral::Sign(sign) => match self.expect_token(lexer)? { + NumericLiteral::Base(base) => Some((sign, base, None)), + NumericLiteral::Digit(digit) => Some((sign, Base::Decimal, Some(digit))), + NumericLiteral::Sign(_) | NumericLiteral::Error => { + self.report(ExpectedRadixOrDecimalDigit(self.token_location(&lexer))) + } + }, + NumericLiteral::Base(base) => Some((Sign::Positive, base, None)), + NumericLiteral::Digit(digit) => Some((Sign::Positive, Base::Decimal, Some(digit))), + NumericLiteral::Error => { + self.report(ExpectedStartOfNumericLiteral(self.token_location(&lexer))) + } + } + } + + /// Add a new place to the given integer, handling overflow and underflow. 
+ fn add_integer_digit(&self, sign: Sign, base: Base, integer: T, digit: u8) -> Option + where + T: PrimInt, + { + T::checked_mul(&integer, &T::from(base.to_u8()).unwrap()) + .and_then(|place_shifted| match sign { + Sign::Positive => T::checked_add(&place_shifted, &T::from(digit).unwrap()), + Sign::Negative => T::checked_sub(&place_shifted, &T::from(digit).unwrap()), + }) + .or_else(|| self.report(LiteralOutOfRange(self.location))) + } + + /// Parse a quoted literal into a Unicode encoded character. + /// + /// # Returns + /// + /// - `Some(_)`: If the literal was parsed correctly. + /// - `None`: If a fatal error when parsing the literal. + pub fn quoted_to_unicode_char(self) -> Option { + let mut lexer = QuotedLiteral::lexer(self.source); + + let (mut lexer, end_quote) = match self.expect_token(&mut lexer)? { + QuotedLiteral::Start(quote) => (lexer.morph(), quote), + QuotedLiteral::Error => return self.report(InvalidToken(self.token_location(&lexer))), + }; + + let mut character = None; + + 'quoted_text: loop { + match self.expect_token(&mut lexer)? { + QuotedText::Utf8Text(text) => { + for ch in text.chars() { + match character { + None => character = Some(ch), + Some(_) => return self.report(OverlongCharLiteral(self.location)), + } + } + } + QuotedText::StartEscape => match character { + None => match self.expect_escape_sequence(lexer.morph(), end_quote)? 
{ + (_, None) => return None, + (escape, Some(ch)) => { + character = Some(ch); + lexer = escape.morph(); + } + }, + Some(_) => { + return self.report(OverlongCharLiteral(self.token_location(&lexer))) + } + }, + QuotedText::End(quote) if quote == end_quote => match lexer.next() { + None => break 'quoted_text, + Some(_) => { + return self.report(ExpectedEndOfLiteral(self.token_location(&lexer))) + } + }, + QuotedText::End(quote) => match character { + None => character = Some(quote.to_char()), + Some(_) => return self.report(OverlongCharLiteral(self.location)), + }, + + QuotedText::Error => return self.report(InvalidToken(self.token_location(&lexer))), + } + } + + match character { + Some(ch) => Some(ch), + None => self.report(EmptyCharLiteral(self.location)), + } + } + + /// Parse a double quoted literal into a UTF-8 encoded string. + pub fn quoted_to_utf8_string(self) -> Option { + let mut lexer = QuotedLiteral::lexer(self.source); + + let (mut lexer, end_quote) = match self.expect_token(&mut lexer)? { + QuotedLiteral::Start(quote) => (lexer.morph(), quote), + QuotedLiteral::Error => return self.report(InvalidToken(self.token_location(&lexer))), + }; + + let mut string = Some(String::new()); + + 'quoted_text: loop { + match self.expect_token(&mut lexer)? 
{ + QuotedText::Utf8Text(text) => { + if let Some(string) = &mut string { + string.push_str(text); + } + } + QuotedText::StartEscape => { + let (escape_lexer, ch) = + self.expect_escape_sequence(lexer.morph(), end_quote)?; + lexer = escape_lexer.morph(); + + match ch { + None => string = None, + Some(ch) => { + if let Some(string) = &mut string { + string.push(ch); + } + } + } + } + QuotedText::End(quote) if quote == end_quote => match lexer.next() { + None => break 'quoted_text, + Some(_) => { + return self.report(ExpectedEndOfLiteral(self.token_location(&lexer))) + } + }, + QuotedText::End(quote) => { + if let Some(string) = &mut string { + string.push(quote.to_char()); + } + } + + QuotedText::Error => return self.report(InvalidToken(self.token_location(&lexer))), + } + } + + string + } + + /// Expect another token to be present in the lexer, reporting an error if not. + /// + /// # Returns + /// + /// - `Some(_)`: If another token was found in the source stream + /// - `None`: If we reached the end of the source stream and we need to terminate parsing + fn expect_token>( + &self, + lexer: &mut logos::Lexer<'source, Token>, + ) -> Option { + match lexer.next() { + Some(token) => Some(token), + None => self.report(UnexpectedEndOfLiteral(self.token_location(&lexer))), + } + } + + /// Expect an escape sequence. + /// + /// # Returns + /// + /// - `Some(_, Some(_))`: If we succeeded in parsing an escape sequence + /// - `Some(_, None)`: If error ocurred but we may continue parsing in a degraded state + /// - `None`: If a fatal error has ocurred and we need to terminate parsing the literal + fn expect_escape_sequence( + &self, + mut lexer: logos::Lexer<'source, EscapeSequence>, + end_quote: Quote, + ) -> Option<(logos::Lexer<'source, EscapeSequence>, Option)> { + match self.expect_token(&mut lexer)? 
{ + EscapeSequence::Single(ch) => Some((lexer, Some(ch))), + EscapeSequence::StartUnicodeEscape => { + let mut unicode_lexer = lexer.morph(); + let next = self.expect_token(&mut unicode_lexer)?; + let location = self.token_location(&unicode_lexer); + lexer = unicode_lexer.morph(); + + let ch = match next { + UnicodeEscape::CharCode(code) => match code.len() { + 1..=6 => match u32::from_str_radix(code, 16).unwrap() { + code @ 0..=MAX_UNICODE => Some(std::char::from_u32(code).unwrap()), + _ => self.report(OversizedUnicodeEscapeCode(location)), + }, + 0 => self.report(EmptyUnicodeEscapeCode(location)), + _ => self.report(OverlongUnicodeEscapeCode(location)), + }, + UnicodeEscape::InvalidCharCode => { + self.report(InvalidUnicodeEscapeCode(location)) + } + UnicodeEscape::End(quote) if end_quote == quote => { + return self.report(InvalidUnicodeEscape(location)); + } + UnicodeEscape::End(_) | UnicodeEscape::Error => { + self.report(InvalidUnicodeEscape(location)) + } + }; + + Some((lexer, ch)) + } + EscapeSequence::StartAsciiEscape => { + let mut ascii_lexer = lexer.morph(); + let next = self.expect_token(&mut ascii_lexer)?; + let location = self.token_location(&ascii_lexer); + lexer = ascii_lexer.morph(); + + let ch = match next { + AsciiEscape::CharCode(code) if code.len() == 2 => { + match u32::from_str_radix(code, 16).unwrap() { + code @ 0..=MAX_ASCII => Some(std::char::from_u32(code).unwrap()), + _ => self.report(OversizedAsciiEscapeCode(location)), + } + } + AsciiEscape::End(quote) if end_quote == quote => { + return self.report(InvalidUnicodeEscape(location)); + } + AsciiEscape::CharCode(_) | AsciiEscape::End(_) | AsciiEscape::Error => { + self.report(InvalidAsciiEscape(location)) + } + }; + + Some((lexer, ch)) + } + + EscapeSequence::Error => { + let location = self.token_location(&lexer); + Some((lexer, self.report(UnknownEscapeSequence(location)))) + } + } + } +} diff --git a/pikelet/src/pass.rs b/pikelet/src/pass.rs new file mode 100644 index 
000000000..7540b7a36 --- /dev/null +++ b/pikelet/src/pass.rs @@ -0,0 +1,10 @@ +//! Passes between intermediate languages. +//! +//! The most significant step in this process is the [`surface_to_core`] pass, +//! which handles elaboration of the surface language into the core language, +//! and is the source of most user-facing typing diagnostics. + +pub mod core_to_pretty; +pub mod core_to_surface; +pub mod surface_to_core; +pub mod surface_to_pretty; diff --git a/pikelet/src/pass/core_to_pretty.rs b/pikelet/src/pass/core_to_pretty.rs new file mode 100644 index 000000000..66d442ff0 --- /dev/null +++ b/pikelet/src/pass/core_to_pretty.rs @@ -0,0 +1,201 @@ +//! Pretty prints the [core language] to a [pretty] document. +//! +//! [core language]: crate::lang::core + +use pretty::{DocAllocator, DocBuilder}; + +use crate::lang::core::{Constant, Term, TermData}; + +/// The precedence of a term. +#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum Prec { + Term = 0, + Expr, + Arrow, + App, + Atomic, +} + +pub fn from_term<'a, D>(alloc: &'a D, term: &'a Term) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + from_term_prec(alloc, term, Prec::Term) +} + +pub fn from_term_prec<'a, D>(alloc: &'a D, term: &'a Term, prec: Prec) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + match &term.data { + TermData::Global(name) => (alloc.nil()) + .append(alloc.text("global")) + .append(alloc.space()) + .append(alloc.text(name)), + TermData::Local(index) => (alloc.nil()) + .append(alloc.text("local")) + .append(alloc.space()) + .append(alloc.as_string(index.0)), + + TermData::Ann(term, r#type) => paren( + alloc, + prec > Prec::Term, + (alloc.nil()) + .append(from_term_prec(alloc, term, Prec::Expr)) + .append(alloc.space()) + .append(":") + .append( + (alloc.space()) + .append(from_term_prec(alloc, r#type, Prec::Term)) + .group() + .nest(4), + ), + ), + + TermData::TypeType(level) => (alloc.nil()) + .append("Type") + 
.append("^") + .append(alloc.as_string(level.0)), + TermData::Lift(term, shift) => (alloc.nil()) + .append(from_term_prec(alloc, term, Prec::Atomic)) + .append("^") + .append(alloc.as_string(shift.0)), + + TermData::FunctionType(_, input_type, output_type) => paren( + alloc, + prec > Prec::Arrow, + (alloc.nil()) + .append(from_term_prec(alloc, input_type, Prec::App)) + .append(alloc.space()) + .append("->") + .append(alloc.space()) + .append(from_term_prec(alloc, output_type, Prec::Arrow)), + ), + TermData::FunctionTerm(_, output_term) => paren( + alloc, + prec > Prec::Expr, + (alloc.nil()) + .append("fun") + .append(alloc.space()) + .append("_") + .append(alloc.space()) + .append("=>") + .group() + .append(alloc.space()) + .append(from_term_prec(alloc, output_term, Prec::Expr).nest(4)), + ), + TermData::FunctionElim(head_term, input_term) => paren( + alloc, + prec > Prec::App, + from_term_prec(alloc, head_term, Prec::App).append( + (alloc.space()) + .append(from_term_prec(alloc, input_term, Prec::Arrow)) + .group() + .nest(4), + ), + ), + + TermData::RecordType(type_entries) => (alloc.nil()) + .append("Record") + .append(alloc.space()) + .append("{") + .group() + .append(alloc.concat(type_entries.iter().map(|(label, r#type)| { + (alloc.nil()) + .append(alloc.hardline()) + .append(alloc.text(label)) + .append(alloc.space()) + .append(":") + .group() + .append( + (alloc.space()) + .append(from_term_prec(alloc, r#type, Prec::Term)) + .append(",") + .group() + .nest(4), + ) + .nest(4) + .group() + }))) + .append("}"), + TermData::RecordTerm(term_entries) => (alloc.nil()) + .append("record") + .append(alloc.space()) + .append("{") + .group() + .append(alloc.concat(term_entries.iter().map(|(label, term)| { + (alloc.nil()) + .append(alloc.hardline()) + .append(alloc.text(label)) + .append(alloc.space()) + .append("=") + .group() + .append( + (alloc.space()) + .append(from_term_prec(alloc, term, Prec::Term)) + .append(",") + .group() + .nest(4), + ) + .nest(4) + .group() 
+ }))) + .append("}"), + TermData::RecordElim(head_term, label) => (alloc.nil()) + .append(from_term_prec(alloc, head_term, Prec::Atomic)) + .append(".") + .append(alloc.text(label)), + + TermData::ArrayTerm(term_entries) | TermData::ListTerm(term_entries) => (alloc.nil()) + .append("[") + .group() + .append( + alloc.intersperse( + term_entries + .iter() + .map(|term| from_term_prec(alloc, term, Prec::Term).group().nest(4)), + alloc.text(",").append(alloc.space()), + ), + ) + .append("]"), + + TermData::Constant(constant) => from_constant(alloc, constant), + + TermData::Error => alloc.text("!"), + } +} + +pub fn from_constant<'a, D>(alloc: &'a D, constant: &'a Constant) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + match constant { + Constant::U8(value) => alloc.text(format!("{}", value)), + Constant::U16(value) => alloc.text(format!("{}", value)), + Constant::U32(value) => alloc.text(format!("{}", value)), + Constant::U64(value) => alloc.text(format!("{}", value)), + Constant::S8(value) => alloc.text(format!("{}", value)), + Constant::S16(value) => alloc.text(format!("{}", value)), + Constant::S32(value) => alloc.text(format!("{}", value)), + Constant::S64(value) => alloc.text(format!("{}", value)), + Constant::F32(value) => alloc.text(format!("{}", value)), + Constant::F64(value) => alloc.text(format!("{}", value)), + Constant::Char(value) => alloc.text(format!("{:?}", value)), + Constant::String(value) => alloc.text(format!("{:?}", value)), + } +} + +fn paren<'a, D>(alloc: &'a D, b: bool, doc: DocBuilder<'a, D>) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + if b { + alloc.text("(").append(doc).append(")") + } else { + doc + } +} diff --git a/pikelet/src/pass/core_to_surface.rs b/pikelet/src/pass/core_to_surface.rs new file mode 100644 index 000000000..9d69533dc --- /dev/null +++ b/pikelet/src/pass/core_to_surface.rs @@ -0,0 +1,361 @@ +//! Distills the [core language] into the [surface language]. +//! +//! 
This is the inverse of [`pass::surface_to_core`], and is useful for pretty +//! printing terms when presenting them to the user. +//! +//! [surface language]: crate::lang::surface +//! [core language]: crate::lang::core +//! [`pass::surface_to_core`]: crate::pass::surface_to_core + +use contracts::debug_ensures; +use std::collections::HashMap; + +use crate::lang::core::{Constant, Globals, Locals, Term, TermData, UniverseLevel, UniverseOffset}; +use crate::lang::surface; +use crate::lang::Located; + +/// Distillation state. +pub struct State<'me> { + globals: &'me Globals, + usages: HashMap, + local_names: Locals, +} + +struct Usage { + base_name: Option, + count: usize, +} + +impl Usage { + fn new() -> Usage { + Usage { + base_name: None, + count: 1, + } + } +} + +const DEFAULT_NAME: &str = "t"; + +impl<'me> State<'me> { + /// Construct a new distillation state. + pub fn new(globals: &'me Globals) -> State<'me> { + let usages = globals + .entries() + .map(|(name, _)| (name.to_owned(), Usage::new())) + .collect(); + + State { + globals, + usages, + local_names: Locals::new(), + } + } + + // TODO: Find optimal names by using free variables + // TODO: Reduce string allocations + pub fn push_name(&mut self, name_hint: Option<&str>) -> String { + let base_name = name_hint.unwrap_or(DEFAULT_NAME); + let (fresh_name, base_name) = match self.usages.get_mut(base_name) { + // The name has not been used yet + None => (base_name.to_owned(), None), + // The name is in use - find a free one to use! + Some(usage) => { + let mut suffix = usage.count; + // Update the usage count to make finding the next name faster. + usage.count += 1; + // Attempt names with incrementing numeric suffixes until we + // find one that has yet to be used. + loop { + // TODO: Reduce string allocations + match format!("{}-{}", base_name, suffix) { + // Candidate name has been used - try another! 
+ name if self.usages.contains_key(&name) => suffix += 1, + // The candidate has not been used - we're free to use it + name => break (name, Some(base_name.to_owned())), + } + } + } + }; + + let usage = Usage { + base_name, + count: 1, + }; + // TODO: Reduce cloning of names + self.usages.insert(fresh_name.clone(), usage); + self.local_names.push(fresh_name.clone()); + fresh_name + } + + pub fn pop_name(&mut self) { + if let Some(mut name) = self.local_names.pop() { + while let Some(base_name) = self.remove_usage(name) { + name = base_name; + } + } + } + + fn remove_usage(&mut self, name: String) -> Option { + use std::collections::hash_map::Entry; + + match self.usages.entry(name) { + Entry::Occupied(entry) if entry.get().count >= 1 => entry.remove().base_name, + Entry::Occupied(mut entry) => { + entry.get_mut().count -= 1; + None + } + Entry::Vacant(_) => None, + } + } + + pub fn pop_many_names(&mut self, count: usize) { + (0..count).for_each(|_| self.pop_name()); + } + + /// Distill a [`core::Term`] into a [`surface::Term`]. + /// + /// [`core::Term`]: crate::lang::core::Term + /// [`surface::Term`]: crate::lang::surface::Term + #[debug_ensures(self.local_names.size() == old(self.local_names.size()))] + pub fn from_term(&mut self, term: &Term) -> surface::Term { + let term_data = match &term.data { + TermData::Global(name) => match self.globals.get(name) { + Some(_) => surface::TermData::Name(name.to_owned()), + None => surface::TermData::Error, // TODO: Log error? + }, + TermData::Local(index) => match self.local_names.get(*index) { + Some(name) => surface::TermData::Name(name.clone()), + None => surface::TermData::Error, // TODO: Log error? 
+ }, + + TermData::Ann(term, r#type) => surface::TermData::Ann( + Box::new(self.from_term(term)), + Box::new(self.from_term(r#type)), + ), + + TermData::TypeType(level) => { + let universe0 = match self.globals.get("Type") { + Some(_) => surface::TermData::Name("Type".to_owned()), + None => surface::TermData::Error, // TODO: Log error? + }; + match level { + UniverseLevel(0) => universe0, + UniverseLevel(level) => { + surface::TermData::Lift(Box::new(Located::generated(universe0)), *level) + } + } + } + TermData::Lift(term, UniverseOffset(offset)) => { + surface::TermData::Lift(Box::new(self.from_term(term)), *offset) + } + + TermData::FunctionType(input_name_hint, input_type, output_type) => { + // FIXME: properly group inputs! + let input_type = self.from_term(input_type); + let fresh_input_name = self.push_name(input_name_hint.as_ref().map(String::as_str)); + let input_type_groups = + vec![(vec![Located::generated(fresh_input_name)], input_type)]; + let output_type = self.from_term(output_type); + self.pop_many_names(input_type_groups.iter().map(|(ns, _)| ns.len()).sum()); + + surface::TermData::FunctionType(input_type_groups, Box::new(output_type)) + } + TermData::FunctionTerm(input_name_hint, output_term) => { + let mut current_output_term = output_term; + + let fresh_input_name = self.push_name(Some(input_name_hint)); + let mut input_names = vec![Located::generated(fresh_input_name)]; + + while let TermData::FunctionTerm(input_name_hint, output_term) = + ¤t_output_term.data + { + let fresh_input_name = self.push_name(Some(input_name_hint)); + input_names.push(Located::generated(fresh_input_name)); + current_output_term = output_term; + } + + let output_term = self.from_term(current_output_term); + self.pop_many_names(input_names.len()); + + surface::TermData::FunctionTerm(input_names, Box::new(output_term)) + } + TermData::FunctionElim(head_term, input_term) => { + let mut current_head_term = head_term; + + let mut input_terms = 
vec![self.from_term(input_term)]; + while let TermData::FunctionElim(head_term, input_term) = ¤t_head_term.data { + input_terms.push(self.from_term(input_term)); + current_head_term = head_term; + } + input_terms.reverse(); + + let head_term = self.from_term(current_head_term); + surface::TermData::FunctionElim(Box::new(head_term), input_terms) + } + + TermData::RecordType(type_entries) => { + let type_entries = type_entries + .iter() + .map(|(label, entry_type)| { + let entry_type = self.from_term(entry_type); + let label = label.clone(); + match self.push_name(Some(&label)) { + name if name == label => (Located::generated(label), None, entry_type), + name => ( + Located::generated(label), + Some(Located::generated(name)), + entry_type, + ), + } + }) + .collect::>(); + self.pop_many_names(type_entries.len()); + + surface::TermData::RecordType(type_entries) + } + TermData::RecordTerm(term_entries) => { + let term_entries = term_entries + .iter() + .map(|(label, entry_type)| { + let entry_type = self.from_term(entry_type); + let label = label.clone(); + match self.push_name(Some(&label)) { + name if name == label => (Located::generated(label), None, entry_type), + name => ( + Located::generated(label), + Some(Located::generated(name)), + entry_type, + ), + } + }) + .collect::>(); + self.pop_many_names(term_entries.len()); + + surface::TermData::RecordTerm(term_entries) + } + TermData::RecordElim(head_term, label) => surface::TermData::RecordElim( + Box::new(self.from_term(head_term)), + Located::generated(label.clone()), + ), + + TermData::ArrayTerm(entry_terms) | TermData::ListTerm(entry_terms) => { + let core_entry_terms = entry_terms + .iter() + .map(|entry_term| self.from_term(entry_term)) + .collect(); + + surface::TermData::SequenceTerm(core_entry_terms) + } + + TermData::Constant(constant) => match constant { + Constant::U8(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::U16(value) => surface::TermData::NumberTerm(value.to_string()), + 
Constant::U32(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::U64(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::S8(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::S16(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::S32(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::S64(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::F32(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::F64(value) => surface::TermData::NumberTerm(value.to_string()), + Constant::Char(value) => surface::TermData::CharTerm(format!("{:?}", value)), + Constant::String(value) => surface::TermData::StringTerm(format!("{:?}", value)), + }, + + TermData::Error => surface::TermData::Error, + }; + + surface::Term::generated(term_data) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn push_default_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(None), "t"); + assert_eq!(state.push_name(Some("t")), "t-1"); + assert_eq!(state.push_name(None), "t-2"); + } + + #[test] + fn push_and_pop_default_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(None), "t"); + state.pop_name(); + assert_eq!(state.push_name(None), "t"); + assert_eq!(state.push_name(None), "t-1"); + state.pop_name(); + state.pop_name(); + assert_eq!(state.push_name(None), "t"); + assert_eq!(state.push_name(None), "t-1"); + assert_eq!(state.push_name(None), "t-2"); + state.pop_name(); + state.pop_name(); + state.pop_name(); + assert_eq!(state.push_name(None), "t"); + assert_eq!(state.push_name(None), "t-1"); + assert_eq!(state.push_name(None), "t-2"); + } + + #[test] + fn push_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(Some("test")), "test"); + 
assert_eq!(state.push_name(Some("test")), "test-1"); + assert_eq!(state.push_name(Some("test")), "test-2"); + } + + #[test] + fn push_and_pop_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(Some("test")), "test"); + state.pop_name(); + assert_eq!(state.push_name(Some("test")), "test"); + assert_eq!(state.push_name(Some("test")), "test-1"); + state.pop_name(); + state.pop_name(); + assert_eq!(state.push_name(Some("test")), "test"); + assert_eq!(state.push_name(Some("test")), "test-1"); + assert_eq!(state.push_name(Some("test")), "test-2"); + state.pop_name(); + state.pop_name(); + state.pop_name(); + assert_eq!(state.push_name(Some("test")), "test"); + assert_eq!(state.push_name(Some("test")), "test-1"); + assert_eq!(state.push_name(Some("test")), "test-2"); + } + + #[test] + fn push_fresh_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(Some("test")), "test"); + assert_eq!(state.push_name(Some("test")), "test-1"); + assert_eq!(state.push_name(Some("test-1")), "test-1-1"); + assert_eq!(state.push_name(Some("test-1")), "test-1-2"); + assert_eq!(state.push_name(Some("test-1-2")), "test-1-2-1"); + } + + #[test] + fn push_global_name() { + let globals = Globals::default(); + let mut state = State::new(&globals); + + assert_eq!(state.push_name(Some("Type")), "Type-1"); + assert_eq!(state.push_name(Some("Type")), "Type-2"); + } +} diff --git a/pikelet/src/pass/surface_to_core.rs b/pikelet/src/pass/surface_to_core.rs new file mode 100644 index 000000000..e1bd39ca2 --- /dev/null +++ b/pikelet/src/pass/surface_to_core.rs @@ -0,0 +1,790 @@ +//! Elaborates the [surface language] into the [core language]. +//! +//! This translation pass is the main place where user-facing type errors will be returned. +//! +//! [surface language]: crate::lang::surface +//! 
[core language]: crate::lang::core + +use contracts::debug_ensures; +use crossbeam_channel::Sender; +use num_traits::{Float, PrimInt, Signed, Unsigned}; +use std::sync::Arc; + +use crate::lang::core::semantics::{self, Elim, RecordClosure, Unfold, Value}; +use crate::lang::surface::{Term, TermData}; +use crate::lang::{core, Location}; +use crate::literal; +use crate::pass::core_to_surface; +use crate::reporting::{AmbiguousTerm, ExpectedType, Message, SurfaceToCoreMessage}; + +/// The state of the elaborator. +pub struct State<'me> { + /// Global definition environment. + globals: &'me core::Globals, + /// The current universe offset. + universe_offset: core::UniverseOffset, + /// Substitutions from the user-defined names to the level in which they were bound. + local_levels: Vec<(Option, core::LocalLevel)>, + /// Local type environment (used for getting the types of local variables). + local_declarations: core::Locals>, + /// Local value environment (used for evaluation). + local_definitions: core::Locals>, + /// Distillation state (used for pretty printing). + core_to_surface: core_to_surface::State<'me>, + /// The diagnostic messages accumulated during elaboration. + message_tx: Sender, +} + +impl<'me> State<'me> { + /// Construct a new elaborator state. + pub fn new(globals: &'me core::Globals, message_tx: Sender) -> State<'me> { + State { + globals, + universe_offset: core::UniverseOffset(0), + local_levels: Vec::new(), + local_declarations: core::Locals::new(), + local_definitions: core::Locals::new(), + core_to_surface: core_to_surface::State::new(globals), + message_tx, + } + } + + /// Get the next level to be used for a local entry. + fn next_level(&self) -> core::LocalLevel { + self.local_definitions.size().next_level() + } + + /// Get a local entry. 
+ fn get_local(&self, name: &str) -> Option<(core::LocalIndex, &Arc)> { + let (_, level) = self.local_levels.iter().rev().find(|(n, _)| match n { + Some(n) => n == name, + None => false, + })?; + let index = level.to_index(self.local_definitions.size()).unwrap(); // TODO: Handle overflow + let r#type = self.local_declarations.get(index)?; + Some((index, r#type)) + } + + /// Push a local entry. + fn push_local(&mut self, name: Option<&str>, value: Arc, r#type: Arc) { + self.local_levels + .push((name.map(str::to_owned), self.next_level())); + self.local_declarations.push(r#type); + self.local_definitions.push(value); + self.core_to_surface.push_name(name); + } + + /// Push a local parameter. + fn push_local_param(&mut self, name: Option<&str>, r#type: Arc) -> Arc { + let value = Arc::new(Value::local(self.next_level(), [])); + self.push_local(name, value.clone(), r#type); + value + } + + /// Pop a local entry. + fn pop_local(&mut self) { + self.local_levels.pop(); + self.local_declarations.pop(); + self.local_definitions.pop(); + self.core_to_surface.pop_name(); + } + + /// Pop the given number of local entries. + fn pop_many_locals(&mut self, count: usize) { + self.local_levels + .truncate(self.local_levels.len().saturating_sub(count)); + self.local_declarations.pop_many(count); + self.local_definitions.pop_many(count); + self.core_to_surface.pop_many_names(count); + } + + /// Report a diagnostic message. + fn report(&self, error: SurfaceToCoreMessage) { + self.message_tx.send(error.into()).unwrap(); + } + + /// Evaluate a [`core::Term`] into a [`Value`]. + /// + /// [`Value`]: crate::lang::core::semantics::Value + /// [`core::Term`]: crate::lang::core::Term + pub fn eval_term(&mut self, term: &core::Term) -> Arc { + semantics::eval_term( + self.globals, + self.universe_offset, + &mut self.local_definitions, + term, + ) + } + + /// Return the type of the record elimination. 
+ pub fn record_elim_type( + &self, + head_value: Arc, + label: &str, + closure: &RecordClosure, + ) -> Option> { + semantics::record_elim_type(self.globals, head_value, label, closure) + } + + /// Fully normalize a [`core::Term`] using [normalization by evaluation]. + /// + /// [`core::Term`]: crate::lang::core::Term + /// [normalization by evaluation]: https://en.wikipedia.org/wiki/Normalisation_by_evaluation + pub fn normalize_term(&mut self, term: &core::Term) -> core::Term { + semantics::normalize_term( + self.globals, + self.universe_offset, + &mut self.local_definitions, + term, + ) + } + + /// Read back a [`Value`] to a [`core::Term`] using the current + /// state of the elaborator. + /// + /// Unstuck eliminations are not unfolded, making this useful for printing + /// terms and types in user-facing diagnostics. + /// + /// [`Value`]: crate::lang::core::semantics::Value + /// [`core::Term`]: crate::lang::core::Term + pub fn read_back_value(&self, value: &Value) -> core::Term { + semantics::read_back_value( + self.globals, + self.local_definitions.size(), + Unfold::Never, + value, + ) + } + + /// Check that one [`Value`] is a subtype of another [`Value`]. + /// + /// Returns `false` if either value is not a type. + /// + /// [`Value`]: crate::lang::core::semantics::Value + pub fn is_subtype(&self, value0: &Value, value1: &Value) -> bool { + semantics::is_subtype(self.globals, self.local_definitions.size(), value0, value1) + } + + /// Distill a [`core::Term`] into a [`surface::Term`]. + /// + /// [`core::Term`]: crate::lang::core::Term + /// [`surface::Term`]: crate::lang::surface::Term + pub fn core_to_surface_term(&mut self, core_term: &core::Term) -> Term { + self.core_to_surface.from_term(&core_term) + } + + /// Read back a [`Value`] into a [`surface::Term`] using the + /// current state of the elaborator. + /// + /// Unstuck eliminations are not unfolded, making this useful for printing + /// terms and types in user-facing diagnostics. 
+ /// + /// [`Value`]: crate::lang::core::semantics::Value + /// [`surface::Term`]: crate::lang::surface::Term + pub fn read_back_to_surface_term(&mut self, value: &Value) -> Term { + let core_term = self.read_back_value(value); + self.core_to_surface_term(&core_term) + } + + /// Check that a term is a type, and return the elaborated term and the + /// universe level it inhabits. + #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_levels.len() == old(self.local_levels.len()))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn is_type(&mut self, term: &Term) -> (core::Term, Option) { + let (core_term, r#type) = self.synth_type(term); + match r#type.force(self.globals) { + Value::TypeType(level) => (core_term, Some(*level)), + Value::Error => (core::Term::new(term.location, core::TermData::Error), None), + found_type => { + let found_type = self.read_back_to_surface_term(&found_type); + self.report(SurfaceToCoreMessage::MismatchedTypes { + location: term.location, + found_type, + expected_type: ExpectedType::Universe, + }); + (core::Term::new(term.location, core::TermData::Error), None) + } + } + } + + /// Check that a term is an element of a type, and return the elaborated term. 
+ #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_levels.len() == old(self.local_levels.len()))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn check_type(&mut self, term: &Term, expected_type: &Arc) -> core::Term { + match (&term.data, expected_type.force(self.globals)) { + (_, Value::Error) => core::Term::new(term.location, core::TermData::Error), + + (TermData::FunctionTerm(input_names, output_term), _) => { + let mut seen_input_count = 0; + let mut expected_type = expected_type.clone(); + let mut pending_input_names = input_names.iter(); + + while let Some(input_name) = pending_input_names.next() { + match expected_type.force(self.globals) { + Value::FunctionType(_, input_type, output_closure) => { + let input_value = + self.push_local_param(Some(&input_name.data), input_type.clone()); + seen_input_count += 1; + expected_type = output_closure.apply(self.globals, input_value); + } + Value::Error => { + self.pop_many_locals(seen_input_count); + return core::Term::new(term.location, core::TermData::Error); + } + _ => { + self.report(SurfaceToCoreMessage::TooManyInputsInFunctionTerm { + unexpected_inputs: std::iter::once(input_name.location) + .chain( + pending_input_names.map(|input_name| input_name.location), + ) + .collect(), + }); + self.check_type(output_term, &expected_type); + self.pop_many_locals(seen_input_count); + return core::Term::new(term.location, core::TermData::Error); + } + } + } + + let core_output_term = self.check_type(output_term, &expected_type); + self.pop_many_locals(seen_input_count); + (input_names.iter().rev()).fold(core_output_term, |core_output_term, input_name| { + core::Term::new( + Location::merge(input_name.location, core_output_term.location), + core::TermData::FunctionTerm( + input_name.data.clone(), + Arc::new(core_output_term), + ), + ) + }) 
+ } + + (TermData::RecordTerm(term_entries), Value::RecordType(closure)) => { + let mut pending_term_entries = term_entries.iter(); + let mut missing_labels = Vec::new(); + let mut unexpected_labels = Vec::new(); + + let mut core_term_entries = Vec::with_capacity(term_entries.len()); + + closure.for_each_entry(self.globals, |label, entry_type| loop { + match pending_term_entries.next() { + Some((next_label, next_name, entry_term)) if next_label.data == label => { + let next_name = next_name.as_ref().unwrap_or(next_label); + let core_entry_term = self.check_type(entry_term, &entry_type); + let core_entry_value = self.eval_term(&core_entry_term); + + self.push_local( + Some(&next_name.data), + core_entry_value.clone(), + entry_type, + ); + core_term_entries.push((label.to_owned(), Arc::new(core_entry_term))); + + return core_entry_value; + } + Some((next_label, _, _)) => unexpected_labels.push(next_label.location), + None => { + missing_labels.push(label.to_owned()); + return Arc::new(Value::Error); + } + } + }); + + self.pop_many_locals(core_term_entries.len()); + unexpected_labels.extend(pending_term_entries.map(|(label, _, _)| label.location)); + + if !missing_labels.is_empty() || !unexpected_labels.is_empty() { + self.report(SurfaceToCoreMessage::InvalidRecordTerm { + location: term.location, + missing_labels, + unexpected_labels, + }); + } + + core::Term::new( + term.location, + core::TermData::RecordTerm(core_term_entries.into()), + ) + } + + (TermData::SequenceTerm(entry_terms), forced_type) => match forced_type.try_global() { + Some(("Array", _, [Elim::Function(len), Elim::Function(core_entry_type)])) => { + let core_entry_type = core_entry_type.force(self.globals); + let core_entry_terms = entry_terms + .iter() + .map(|entry_term| Arc::new(self.check_type(entry_term, core_entry_type))) + .collect(); + + let len = len.force(self.globals); + match len.as_ref() { + Value::Constant(core::Constant::U32(len)) + if *len as usize == entry_terms.len() => + { + 
core::Term::new( + term.location, + core::TermData::ArrayTerm(core_entry_terms), + ) + } + Value::Error => core::Term::new(term.location, core::TermData::Error), + _ => { + let expected_len = self.read_back_to_surface_term(&len); + self.report(SurfaceToCoreMessage::MismatchedSequenceLength { + location: term.location, + found_len: entry_terms.len(), + expected_len, + }); + core::Term::new(term.location, core::TermData::Error) + } + } + } + Some(("List", _, [Elim::Function(core_entry_type)])) => { + let core_entry_type = core_entry_type.force(self.globals); + let core_entry_terms = entry_terms + .iter() + .map(|entry_term| Arc::new(self.check_type(entry_term, core_entry_type))) + .collect(); + + core::Term::new(term.location, core::TermData::ListTerm(core_entry_terms)) + } + Some(_) | None => { + let expected_type = self.read_back_to_surface_term(expected_type); + self.report(SurfaceToCoreMessage::NoSequenceConversion { + location: term.location, + expected_type, + }); + core::Term::new(term.location, core::TermData::Error) + } + }, + (TermData::NumberTerm(data), forced_type) => { + use crate::lang::core::Constant::*; + + match forced_type.try_global() { + Some(("U8", _, [])) => self.parse_unsigned(term.location, data, U8), + Some(("U16", _, [])) => self.parse_unsigned(term.location, data, U16), + Some(("U32", _, [])) => self.parse_unsigned(term.location, data, U32), + Some(("U64", _, [])) => self.parse_unsigned(term.location, data, U64), + Some(("S8", _, [])) => self.parse_signed(term.location, data, S8), + Some(("S16", _, [])) => self.parse_signed(term.location, data, S16), + Some(("S32", _, [])) => self.parse_signed(term.location, data, S32), + Some(("S64", _, [])) => self.parse_signed(term.location, data, S64), + Some(("F32", _, [])) => self.parse_float(term.location, data, F32), + Some(("F64", _, [])) => self.parse_float(term.location, data, F64), + Some(_) | None => { + let expected_type = self.read_back_to_surface_term(expected_type); + 
self.report(SurfaceToCoreMessage::NoLiteralConversion { + location: term.location, + expected_type, + }); + core::Term::new(term.location, core::TermData::Error) + } + } + } + (TermData::CharTerm(data), forced_type) => match forced_type.try_global() { + Some(("Char", _, [])) => self.parse_char(term.location, data), + Some(_) | None => { + let expected_type = self.read_back_to_surface_term(expected_type); + self.report(SurfaceToCoreMessage::NoLiteralConversion { + location: term.location, + expected_type, + }); + core::Term::new(term.location, core::TermData::Error) + } + }, + (TermData::StringTerm(data), forced_type) => match forced_type.try_global() { + Some(("String", _, [])) => self.parse_string(term.location, data), + Some(_) | None => { + let expected_type = self.read_back_to_surface_term(expected_type); + self.report(SurfaceToCoreMessage::NoLiteralConversion { + location: term.location, + expected_type, + }); + core::Term::new(term.location, core::TermData::Error) + } + }, + + (_, _) => match self.synth_type(term) { + (term, found_type) if self.is_subtype(&found_type, expected_type) => term, + (_, found_type) => { + let found_type = self.read_back_to_surface_term(&found_type); + let expected_type = self.read_back_to_surface_term(expected_type); + self.report(SurfaceToCoreMessage::MismatchedTypes { + location: term.location, + found_type, + expected_type: ExpectedType::Type(expected_type), + }); + core::Term::new(term.location, core::TermData::Error) + } + }, + } + } + + /// Synthesize the type of a surface term, and return the elaborated term. 
+ #[debug_ensures(self.universe_offset == old(self.universe_offset))] + #[debug_ensures(self.local_levels.len() == old(self.local_levels.len()))] + #[debug_ensures(self.local_declarations.size() == old(self.local_declarations.size()))] + #[debug_ensures(self.local_definitions.size() == old(self.local_definitions.size()))] + pub fn synth_type(&mut self, term: &Term) -> (core::Term, Arc) { + use std::collections::BTreeMap; + + let error_term = || core::Term::new(term.location, core::TermData::Error); + + match &term.data { + TermData::Name(name) => { + if let Some((index, r#type)) = self.get_local(name.as_ref()) { + let core_term = core::Term::new(term.location, core::TermData::Local(index)); + return (core_term, r#type.clone()); + } + + if let Some((r#type, _)) = self.globals.get(name.as_ref()) { + let name = name.clone(); + let global = core::Term::new(term.location, core::TermData::Global(name)); + let core_term = match self.universe_offset { + core::UniverseOffset(0) => global, + offset => { + core::Term::generated(core::TermData::Lift(Arc::new(global), offset)) + } + }; + return (core_term, self.eval_term(r#type)); + } + + self.report(SurfaceToCoreMessage::UnboundName { + location: term.location, + name: name.clone(), + }); + (error_term(), Arc::new(Value::Error)) + } + + TermData::Ann(term, r#type) => { + let (core_type, _) = self.is_type(r#type); + let core_type_value = self.eval_term(&core_type); + let core_term = self.check_type(term, &core_type_value); + ( + core::Term::new( + term.location, + core::TermData::Ann(Arc::new(core_term), Arc::new(core_type)), + ), + core_type_value, + ) + } + + TermData::Lift(inner_term, offset) => { + match self.universe_offset + core::UniverseOffset(*offset) { + Some(new_offset) => { + let old_offset = std::mem::replace(&mut self.universe_offset, new_offset); + let (core_term, r#type) = self.synth_type(inner_term); + self.universe_offset = old_offset; + (core_term, r#type) + } + None => { + 
self.report(SurfaceToCoreMessage::MaximumUniverseLevelReached { + location: term.location, + }); + (error_term(), Arc::new(Value::Error)) + } + } + } + + TermData::FunctionType(input_type_groups, output_type) => { + let mut max_level = Some(core::UniverseLevel(0)); + let update_level = |max_level, next_level| match (max_level, next_level) { + (Some(max_level), Some(pl)) => Some(std::cmp::max(max_level, pl)), + (None, _) | (_, None) => None, + }; + let mut core_inputs = Vec::new(); + + for (input_names, input_type) in input_type_groups { + for input_name in input_names { + let (core_input_type, input_level) = self.is_type(input_type); + max_level = update_level(max_level, input_level); + + let core_input_type_value = self.eval_term(&core_input_type); + self.push_local_param(Some(&input_name.data), core_input_type_value); + core_inputs.push((input_name.clone(), core_input_type)); + } + } + + let (core_output_type, output_level) = self.is_type(output_type); + max_level = update_level(max_level, output_level); + + self.pop_many_locals(core_inputs.len()); + + match max_level { + None => (error_term(), Arc::new(Value::Error)), + Some(max_level) => { + let mut core_type = core_output_type; + for (input_name, input_type) in core_inputs.into_iter().rev() { + core_type = core::Term::new( + Location::merge(input_name.location, output_type.location), + core::TermData::FunctionType( + Some(input_name.data), + Arc::new(input_type), + Arc::new(core_type), + ), + ); + } + + (core_type, Arc::new(Value::TypeType(max_level))) + } + } + } + TermData::FunctionArrowType(input_type, output_type) => { + let (core_input_type, input_level) = self.is_type(input_type); + let core_input_type_value = match input_level { + None => Arc::new(Value::Error), + Some(_) => self.eval_term(&core_input_type), + }; + + self.push_local_param(None, core_input_type_value); + let (core_output_type, output_level) = self.is_type(output_type); + self.pop_local(); + + match (input_level, output_level) { + 
(Some(input_level), Some(output_level)) => ( + core::Term::new( + term.location, + core::TermData::FunctionType( + None, + Arc::new(core_input_type), + Arc::new(core_output_type), + ), + ), + Arc::new(Value::TypeType(std::cmp::max(input_level, output_level))), + ), + (_, _) => (error_term(), Arc::new(Value::Error)), + } + } + TermData::FunctionTerm(_, _) => { + self.report(SurfaceToCoreMessage::AmbiguousTerm { + location: term.location, + term: AmbiguousTerm::FunctionTerm, + }); + (error_term(), Arc::new(Value::Error)) + } + TermData::FunctionElim(head_term, input_terms) => { + let mut head_location = head_term.location; + let (mut core_head_term, mut head_type) = self.synth_type(head_term); + let mut input_terms = input_terms.iter(); + + while let Some(input) = input_terms.next() { + match head_type.force(self.globals) { + Value::FunctionType(_, input_type, output_closure) => { + head_location = input.location; + let core_input = self.check_type(input, &input_type); + let core_input_value = self.eval_term(&core_input); + core_head_term = core::Term::new( + Location::merge(head_location, input.location), + core::TermData::FunctionElim( + Arc::new(core_head_term), + Arc::new(core_input), + ), + ); + head_type = output_closure.apply(self.globals, core_input_value); + } + Value::Error => return (error_term(), Arc::new(Value::Error)), + _ => { + let head_type = self.read_back_to_surface_term(&head_type); + let unexpected_input_terms = + input_terms.map(|arg| arg.location).collect(); + self.report(SurfaceToCoreMessage::TooManyInputsInFunctionElim { + head_location, + head_type, + unexpected_input_terms, + }); + return (error_term(), Arc::new(Value::Error)); + } + } + } + + (core_head_term, head_type) + } + + TermData::RecordTerm(term_entries) => { + if term_entries.is_empty() { + ( + core::Term::new(term.location, core::TermData::RecordTerm(Arc::new([]))), + Arc::from(Value::RecordType(RecordClosure::new( + self.universe_offset, + self.local_definitions.clone(), + 
Arc::new([]), + ))), + ) + } else { + self.report(SurfaceToCoreMessage::AmbiguousTerm { + location: term.location, + term: AmbiguousTerm::RecordTerm, + }); + (error_term(), Arc::new(Value::Error)) + } + } + TermData::RecordType(type_entries) => { + use std::collections::btree_map::Entry; + + let mut max_level = core::UniverseLevel(0); + let mut duplicate_labels = Vec::new(); + let mut seen_labels = BTreeMap::new(); + let mut core_type_entries = Vec::new(); + + for (label, name, entry_type) in type_entries { + let name = name.as_ref().unwrap_or(label); + match seen_labels.entry(label.data.as_str()) { + Entry::Vacant(entry) => { + let (core_type, level) = self.is_type(entry_type); + max_level = match level { + Some(level) => std::cmp::max(max_level, level), + None => { + self.pop_many_locals(seen_labels.len()); + return (error_term(), Arc::new(Value::Error)); + } + }; + let core_type = Arc::new(core_type); + let core_type_value = self.eval_term(&core_type); + core_type_entries.push((label.data.clone(), core_type)); + self.push_local_param(Some(&name.data), core_type_value); + entry.insert(label.location); + } + Entry::Occupied(entry) => { + let seen_range = *entry.get(); + let current_range = label.location; + duplicate_labels.push((label.data.clone(), seen_range, current_range)); + self.is_type(entry_type); + } + } + } + + if !duplicate_labels.is_empty() { + self.report(SurfaceToCoreMessage::InvalidRecordType { duplicate_labels }); + } + + self.pop_many_locals(seen_labels.len()); + ( + core::Term::new( + term.location, + core::TermData::RecordType(core_type_entries.into()), + ), + Arc::new(Value::TypeType(max_level)), + ) + } + TermData::RecordElim(head_term, label) => { + let (core_head_term, head_type) = self.synth_type(head_term); + + match head_type.force(self.globals) { + Value::RecordType(closure) => { + let head_value = self.eval_term(&core_head_term); + + if let Some(entry_type) = + self.record_elim_type(head_value, &label.data, closure) + { + let 
core_head_term = Arc::new(core_head_term); + let core_term = core::Term::new( + term.location, + core::TermData::RecordElim(core_head_term, label.data.clone()), + ); + return (core_term, entry_type); + } + } + Value::Error => return (error_term(), Arc::new(Value::Error)), + _ => {} + } + + let head_type = self.read_back_to_surface_term(&head_type); + self.report(SurfaceToCoreMessage::LabelNotFound { + head_location: head_term.location, + label_location: label.location, + expected_label: label.data.clone(), + head_type, + }); + (error_term(), Arc::new(Value::Error)) + } + + TermData::SequenceTerm(_) => { + self.report(SurfaceToCoreMessage::AmbiguousTerm { + location: term.location, + term: AmbiguousTerm::Sequence, + }); + (error_term(), Arc::new(Value::Error)) + } + + TermData::NumberTerm(_) => { + self.report(SurfaceToCoreMessage::AmbiguousTerm { + location: term.location, + term: AmbiguousTerm::NumberLiteral, + }); + (error_term(), Arc::new(Value::Error)) + } + TermData::CharTerm(data) => ( + self.parse_char(term.location, data), + Arc::new(Value::global("Char", 0, [])), + ), + TermData::StringTerm(data) => ( + self.parse_string(term.location, data), + Arc::new(Value::global("String", 0, [])), + ), + + TermData::Error => (error_term(), Arc::new(Value::Error)), + } + } + + fn parse_float>( + &mut self, + location: Location, + data: &str, + make_constant: fn(T) -> core::Constant, + ) -> core::Term { + let term_data = literal::State::new(location, data, &self.message_tx) + .number_to_float() + .map(make_constant) + .map_or(core::TermData::Error, core::TermData::from); + + core::Term::new(location, term_data) + } + + fn parse_unsigned( + &mut self, + location: Location, + source: &str, + make_constant: fn(T) -> core::Constant, + ) -> core::Term { + let term_data = literal::State::new(location, source, &self.message_tx) + .number_to_unsigned_int() + .map(make_constant) + .map_or(core::TermData::Error, core::TermData::from); + + core::Term::new(location, term_data) + } 
+ + fn parse_signed( + &mut self, + location: Location, + source: &str, + make_constant: fn(T) -> core::Constant, + ) -> core::Term { + let term_data = literal::State::new(location, source, &self.message_tx) + .number_to_signed_int() + .map(make_constant) + .map_or(core::TermData::Error, core::TermData::from); + + core::Term::new(location, term_data) + } + + fn parse_char(&mut self, location: Location, source: &str) -> core::Term { + let term_data = literal::State::new(location, source, &self.message_tx) + .quoted_to_unicode_char() + .map(core::Constant::Char) + .map_or(core::TermData::Error, core::TermData::from); + + core::Term::new(location, term_data) + } + + fn parse_string(&mut self, location: Location, source: &str) -> core::Term { + let term_data = literal::State::new(location, source, &self.message_tx) + .quoted_to_utf8_string() + .map(core::Constant::String) + .map_or(core::TermData::Error, core::TermData::from); + + core::Term::new(location, term_data) + } +} diff --git a/pikelet/src/pass/surface_to_pretty.rs b/pikelet/src/pass/surface_to_pretty.rs new file mode 100644 index 000000000..2a175eb52 --- /dev/null +++ b/pikelet/src/pass/surface_to_pretty.rs @@ -0,0 +1,236 @@ +//! Pretty prints the [surface language] to a [pretty] document. +//! +//! [surface language]: crate::lang::surface + +use pretty::{DocAllocator, DocBuilder}; + +use crate::lang::surface::{Term, TermData}; + +/// The precedence of a term. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] +pub enum Prec { + Term = 0, + Expr, + Arrow, + App, + Atomic, +} + +pub fn from_term<'a, D>(alloc: &'a D, term: &'a Term) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + from_term_prec(alloc, term, Prec::Term) +} + +pub fn from_term_prec<'a, D>(alloc: &'a D, term: &'a Term, prec: Prec) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + match &term.data { + TermData::Name(name) => alloc.text(name), + + TermData::Ann(term, r#type) => paren( + alloc, + prec > Prec::Term, + (alloc.nil()) + .append(from_term_prec(alloc, term, Prec::Expr)) + .append(alloc.space()) + .append(":") + .append( + (alloc.space()) + .append(from_term_prec(alloc, r#type, Prec::Term)) + .group() + .nest(4), + ), + ), + + TermData::Lift(term, shift) => (alloc.nil()) + .append(from_term_prec(alloc, term, Prec::Atomic)) + .append("^") + .append(shift.to_string()), + + TermData::FunctionType(input_type_groups, output_type) => paren( + alloc, + prec > Prec::Arrow, + (alloc.nil()) + .append("Fun") + .append(alloc.space()) + .append(alloc.intersperse( + input_type_groups.iter().map(|(input_names, input_type)| { + (alloc.nil()) + .append("(") + .append(alloc.intersperse( + input_names.iter().map(|input_name| &input_name.data), + alloc.space(), + )) + .append(alloc.space()) + .append(":") + .append(alloc.space()) + .append(from_term_prec(alloc, input_type, Prec::Term)) + .append(")") + }), + alloc.space(), + )) + .append(alloc.space()) + .append("->") + .group() + .append( + (alloc.nil()).append(alloc.space()).append( + from_term_prec(alloc, output_type, Prec::Arrow) + .group() + .nest(4), + ), + ), + ), + TermData::FunctionArrowType(input_type, output_type) => paren( + alloc, + prec > Prec::Arrow, + (alloc.nil()) + .append(from_term_prec(alloc, input_type, Prec::App)) + .append(alloc.space()) + .append("->") + .append(alloc.space()) + .append(from_term_prec(alloc, output_type, Prec::Arrow)), + 
), + TermData::FunctionTerm(input_names, output_term) => paren( + alloc, + prec > Prec::Expr, + (alloc.nil()) + .append("fun") + .append(alloc.space()) + .append(alloc.intersperse( + input_names.iter().map(|input_name| &input_name.data), + alloc.space(), + )) + .append(alloc.space()) + .append("=>") + .group() + .append( + (alloc.nil()).append(alloc.space()).append( + from_term_prec(alloc, output_term, Prec::Expr) + .group() + .nest(4), + ), + ), + ), + TermData::FunctionElim(head_term, input_terms) => paren( + alloc, + prec > Prec::App, + from_term_prec(alloc, head_term, Prec::App).append( + (alloc.nil()) + .append(alloc.concat(input_terms.iter().map(|input_term| { + alloc + .space() + .append(from_term_prec(alloc, input_term, Prec::Arrow)) + }))) + .group() + .nest(4), + ), + ), + + TermData::RecordType(type_entries) => (alloc.nil()) + .append("Record") + .append(alloc.space()) + .append("{") + .group() + .append( + alloc.concat(type_entries.iter().map(|(label, name, entry_type)| { + (alloc.nil()) + .append(alloc.hardline()) + .append(match name { + None => alloc.text(&label.data).append(alloc.space()), + Some(name) => alloc + .text(&label.data) + .append(alloc.space()) + .append("as") + .append(alloc.space()) + .append(&name.data) + .append(alloc.space()), + }) + .append(":") + .group() + .append( + (alloc.space()) + .append(from_term_prec(alloc, entry_type, Prec::Term)) + .append(",") + .group() + .nest(4), + ) + .nest(4) + .group() + })), + ) + .append("}"), + TermData::RecordTerm(term_entries) => (alloc.nil()) + .append("record") + .append(alloc.space()) + .append("{") + .group() + .append( + alloc.concat(term_entries.iter().map(|(label, name, entry_term)| { + (alloc.nil()) + .append(alloc.hardline()) + .append(match name { + None => alloc.text(&label.data).append(alloc.space()), + Some(name) => alloc + .text(&label.data) + .append(alloc.space()) + .append("as") + .append(alloc.space()) + .append(&name.data) + .append(alloc.space()), + }) + .append("=") + 
.group() + .append( + (alloc.space()) + .append(from_term_prec(alloc, entry_term, Prec::Term)) + .append(",") + .group() + .nest(4), + ) + .nest(4) + .group() + })), + ) + .append("}"), + TermData::RecordElim(head_term, label) => (alloc.nil()) + .append(from_term_prec(alloc, head_term, Prec::Atomic)) + .append(".") + .append(&label.data), + + TermData::SequenceTerm(term_entries) => (alloc.nil()) + .append("[") + .group() + .append( + alloc.intersperse( + term_entries + .iter() + .map(|term| from_term_prec(alloc, term, Prec::Term).group().nest(4)), + alloc.text(",").append(alloc.space()), + ), + ) + .append("]"), + + TermData::CharTerm(text) | TermData::StringTerm(text) | TermData::NumberTerm(text) => { + alloc.text(text) + } + + TermData::Error => alloc.text("!"), + } +} + +fn paren<'a, D>(alloc: &'a D, b: bool, doc: DocBuilder<'a, D>) -> DocBuilder<'a, D> +where + D: DocAllocator<'a>, + D::Doc: Clone, +{ + if b { + alloc.text("(").append(doc).append(")") + } else { + doc + } +} diff --git a/pikelet/src/reporting.rs b/pikelet/src/reporting.rs new file mode 100644 index 000000000..570881142 --- /dev/null +++ b/pikelet/src/reporting.rs @@ -0,0 +1,777 @@ +//! Reporting diagnostic messages. + +use codespan_reporting::diagnostic::{Diagnostic, Label}; +use pretty::DocAllocator; + +use crate::lang::{core, surface, FileId, Location}; +use crate::literal; + +/// Global diagnostic messages +#[derive(Clone, Debug)] +pub enum Message { + /// Errors produced during lexing. + Lexer(LexerError), + /// Errors produced during parsing. + Parse(ParseError), + /// Messages produced when parsing literals. + LiteralParse(LiteralParseMessage), + /// Messages produced from [`lang::core::typing`]. + /// + /// [`lang::core::typing`]: crate::lang::core::typing + CoreTyping(CoreTypingMessage), + /// Messages produced from [`pass::surface_to_core`]. 
+ /// + /// [`pass::surface_to_core`]: crate::pass::surface_to_core + SurfaceToCore(SurfaceToCoreMessage), +} + +impl From for Message { + fn from(error: LexerError) -> Self { + Message::Lexer(error) + } +} + +impl From for Message { + fn from(error: ParseError) -> Self { + Message::Parse(error) + } +} + +impl From for Message { + fn from(message: LiteralParseMessage) -> Self { + Message::LiteralParse(message) + } +} + +impl From for Message { + fn from(message: CoreTypingMessage) -> Self { + Message::CoreTyping(message) + } +} + +impl From for Message { + fn from(message: SurfaceToCoreMessage) -> Self { + Message::SurfaceToCore(message) + } +} + +impl Message { + pub fn from_lalrpop( + file_id: FileId, + error: lalrpop_util::ParseError, + ) -> Message { + use lalrpop_util::ParseError::*; + + match error { + InvalidToken { location } => Message::from(LexerError::InvalidToken { + location: Location::file_range(file_id, location..location), + }), + UnrecognizedEOF { location, expected } => Message::from(ParseError::UnrecognizedEOF { + location: Location::file_range(file_id, location..location), + expected, + }), + UnrecognizedToken { + token: (start, token, end), + expected, + } => Message::from(ParseError::UnrecognizedToken { + location: Location::file_range(file_id, start..end), + token: token.to_string(), + expected, + }), + ExtraToken { + token: (start, token, end), + } => Message::from(ParseError::ExtraToken { + location: Location::file_range(file_id, start..end), + token: token.to_string(), + }), + User { error } => Message::from(error), + } + } + + pub fn to_diagnostic<'a, D>(&'a self, pretty_alloc: &'a D) -> Diagnostic + where + D: DocAllocator<'a>, + D::Doc: Clone, + { + match self { + Message::Lexer(error) => error.to_diagnostic(), + Message::Parse(error) => error.to_diagnostic(), + Message::LiteralParse(message) => message.to_diagnostic(), + Message::CoreTyping(message) => message.to_diagnostic(pretty_alloc), + Message::SurfaceToCore(message) => 
message.to_diagnostic(pretty_alloc), + } + } +} + +/// Lexer errors +#[derive(Debug, Clone)] +pub enum LexerError { + InvalidToken { location: Location }, +} + +impl LexerError { + pub fn to_diagnostic(&self) -> Diagnostic { + match self { + LexerError::InvalidToken { location } => Diagnostic::error() + .with_message("invalid token") + .with_labels(option_to_vec(primary(location))), + } + } +} + +/// Parse errors +#[derive(Clone, Debug)] +pub enum ParseError { + UnrecognizedEOF { + location: Location, + expected: Vec, + }, + UnrecognizedToken { + location: Location, + token: String, + expected: Vec, + }, + ExtraToken { + location: Location, + token: String, + }, +} + +impl ParseError { + pub fn to_diagnostic(&self) -> Diagnostic { + match self { + ParseError::UnrecognizedEOF { location, expected } => Diagnostic::error() + .with_message("unexpected end of file") + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("unexpected end of file")), + )) + .with_notes(format_expected(expected).map_or(Vec::new(), |message| vec![message])), + ParseError::UnrecognizedToken { + location, + token, + expected, + } => Diagnostic::error() + .with_message(format!("unexpected token {}", token)) + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("unexpected token")), + )) + .with_notes(format_expected(expected).map_or(Vec::new(), |message| vec![message])), + ParseError::ExtraToken { location, token } => Diagnostic::error() + .with_message(format!("extra token {}", token)) + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("extra token")), + )), + } + } +} + +#[derive(Clone, Debug)] +pub enum LiteralParseMessage { + ExpectedRadixOrDecimalDigit(Location), + ExpectedStartOfNumericLiteral(Location), + NegativeUnsignedInteger(Location), + ExpectedDigit(Location, literal::Base), + ExpectedDigitOrSeparator(Location, literal::Base), + ExpectedDigitSeparatorOrExp(Location, literal::Base), + 
ExpectedDigitSeparatorFracOrExp(Location, literal::Base), + FloatLiteralExponentNotSupported(Location), + UnsupportedFloatLiteralBase(Location, literal::Base), + LiteralOutOfRange(Location), + OverlongCharLiteral(Location), + EmptyCharLiteral(Location), + OversizedUnicodeEscapeCode(Location), + EmptyUnicodeEscapeCode(Location), + OverlongUnicodeEscapeCode(Location), + InvalidUnicodeEscapeCode(Location), + InvalidUnicodeEscape(Location), + OversizedAsciiEscapeCode(Location), + InvalidAsciiEscape(Location), + UnknownEscapeSequence(Location), + InvalidToken(Location), + ExpectedEndOfLiteral(Location), + UnexpectedEndOfLiteral(Location), +} + +impl LiteralParseMessage { + pub fn to_diagnostic(&self) -> Diagnostic { + match self { + LiteralParseMessage::ExpectedRadixOrDecimalDigit(location) => Diagnostic::error() + .with_message("expected a radix or decimal digit") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::ExpectedStartOfNumericLiteral(location) => Diagnostic::error() + .with_message("expected the start of a numeric literal") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::NegativeUnsignedInteger(location) => Diagnostic::error() + .with_message("unsigned integer literals cannot be negative") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::ExpectedDigit(location, base) => Diagnostic::error() + .with_message(format!("expected a base {} digit", base.to_u8())) + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::ExpectedDigitOrSeparator(location, base) => Diagnostic::error() + .with_message(format!( + "expected a base {} digit or digit separator", + base.to_u8(), + )) + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::ExpectedDigitSeparatorOrExp(location, base) => Diagnostic::error() + .with_message(format!( + "expected a base {} digit, digit separator, or exponent", + base.to_u8(), + )) + .with_labels(option_to_vec(primary(location))), + 
LiteralParseMessage::ExpectedDigitSeparatorFracOrExp(location, base) => { + Diagnostic::error() + .with_message(format!( + "expected a base {} digit, digit separator, period, or exponent", + base.to_u8(), + )) + .with_labels(option_to_vec(primary(location))) + } + LiteralParseMessage::FloatLiteralExponentNotSupported(location) => Diagnostic::error() + .with_message("exponents are not yet supported for float literals") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::UnsupportedFloatLiteralBase(location, base) => Diagnostic::error() + .with_message(format!( + "base {} float literals are not yet supported", + base.to_u8(), + )) + .with_labels(option_to_vec(primary(location))) + .with_notes(vec![ + "only base 10 float literals are currently supported".to_owned() + ]), + LiteralParseMessage::LiteralOutOfRange(location) => Diagnostic::error() + .with_message("literal out of range") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::OverlongCharLiteral(location) => Diagnostic::error() + .with_message("too many codepoints in character literal") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec![ + "character literals may only contain one codepoint".to_owned() + ]), + LiteralParseMessage::EmptyCharLiteral(location) => Diagnostic::error() + .with_message("empty character literal") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec!["character literals must not be empty".to_owned()]), + LiteralParseMessage::OversizedUnicodeEscapeCode(location) => Diagnostic::error() + .with_message("unicode escape code exceeds maximum allowed range") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec![format!("must be at most {:X} ", literal::MAX_UNICODE)]), + LiteralParseMessage::EmptyUnicodeEscapeCode(location) => Diagnostic::error() + .with_message("empty unicode character code") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec!["must contain at least one hex 
digit".to_owned()]), + LiteralParseMessage::OverlongUnicodeEscapeCode(location) => Diagnostic::error() + .with_message("too many digits in unicode character code") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec!["must contain at most six hex digits".to_owned()]), + LiteralParseMessage::InvalidUnicodeEscapeCode(location) => Diagnostic::error() + .with_message("invalid unicode escape code") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec!["must contain only hex digits".to_owned()]), + LiteralParseMessage::InvalidUnicodeEscape(location) => Diagnostic::error() + .with_message("invalid unicode escape sequence") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec![ + "must be followed with a braced sequence of hex digits".to_owned(), + "for example: `\\u{..}`".to_owned(), + ]), + LiteralParseMessage::OversizedAsciiEscapeCode(location) => Diagnostic::error() + .with_message("ACII escape code exceeds maximum allowed range") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec![format!("must be at most {:X} ", literal::MAX_ASCII)]), + LiteralParseMessage::InvalidAsciiEscape(location) => Diagnostic::error() + .with_message("invalid ASCII escape") + .with_labels(option_to_vec(primary(location))) + .with_notes(vec!["must contain exactly two hex digits ".to_owned()]), + LiteralParseMessage::UnknownEscapeSequence(location) => Diagnostic::error() + .with_message("unknown escape sequence") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::InvalidToken(location) => Diagnostic::error() + .with_message("invalid token") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::ExpectedEndOfLiteral(location) => Diagnostic::error() + .with_message("expected end of literal") + .with_labels(option_to_vec(primary(location))), + LiteralParseMessage::UnexpectedEndOfLiteral(location) => Diagnostic::error() + .with_message("unexpected end of literal") + 
.with_labels(option_to_vec(primary(location))), + } + } +} + +#[derive(Clone, Debug)] +pub enum AmbiguousTerm { + NumberLiteral, + Sequence, + FunctionTerm, + RecordTerm, +} + +impl AmbiguousTerm { + fn description(&self) -> &'static str { + match self { + AmbiguousTerm::NumberLiteral => "numeric literal", + AmbiguousTerm::Sequence => "sequence", + AmbiguousTerm::FunctionTerm => "function term", + AmbiguousTerm::RecordTerm => "record term", + } + } +} + +#[derive(Clone, Debug)] +pub enum ExpectedType { + Universe, + Type(T), +} + +/// Message produced from [lang::core::typing] +#[derive(Clone, Debug)] +pub enum CoreTypingMessage { + MaximumUniverseLevelReached, + UnboundGlobal { + name: String, + }, + UnboundLocal, + InvalidRecordType { + duplicate_labels: Vec, + }, + InvalidRecordTerm { + missing_labels: Vec, + unexpected_labels: Vec, + }, + LabelNotFound { + expected_label: String, + head_type: core::Term, + }, + TooManyInputsInFunctionTerm, + TooManyInputsInFunctionElim { + head_type: core::Term, + }, + UnexpectedArrayTerm { + expected_type: core::Term, + }, + UnexpectedListTerm { + expected_type: core::Term, + }, + AmbiguousTerm { + term: AmbiguousTerm, + }, + MismatchedTypes { + found_type: core::Term, + expected_type: ExpectedType, + }, +} + +impl CoreTypingMessage { + pub fn to_diagnostic<'a, D>(&'a self, pretty_alloc: &'a D) -> Diagnostic + where + D: DocAllocator<'a>, + D::Doc: Clone, + { + use itertools::Itertools; + + use crate::pass::core_to_pretty; + + let to_doc = |term| core_to_pretty::from_term(pretty_alloc, term).1; + + match self { + CoreTypingMessage::MaximumUniverseLevelReached => { + Diagnostic::bug().with_message("maximum universe level reached") + } + CoreTypingMessage::UnboundGlobal { name } => { + Diagnostic::bug().with_message(format!("unbound global variable `{}`", name)) + } + CoreTypingMessage::UnboundLocal => { + Diagnostic::bug().with_message("unbound local variable") + } + CoreTypingMessage::InvalidRecordType { duplicate_labels } => 
Diagnostic::bug() + .with_message("invalid record type") + .with_notes( + duplicate_labels + .iter() + .map(|name| format!("label `{}` was used more than once", name)) + .collect(), + ), + CoreTypingMessage::InvalidRecordTerm { + missing_labels, + unexpected_labels, + } => Diagnostic::bug() + .with_message("invalid record term") + .with_notes({ + let mut notes = Vec::with_capacity( + unexpected_labels.len() + if missing_labels.is_empty() { 0 } else { 1 }, + ); + + for label in unexpected_labels { + notes.push(format!("unexpected label `{}`", label)); + } + + if !missing_labels.is_empty() { + notes.push(format!( + "missing the labels {} in this record term", + missing_labels + .iter() + // TODO: reduce string allocations + .map(|label| format!("`{}`", label)) + .format(", "), + )); + } + + notes + }), + CoreTypingMessage::LabelNotFound { + expected_label, + head_type, + } => Diagnostic::bug() + .with_message(format!("label `{}` not found", expected_label)) + .with_notes(vec![format!( + "eliminating a term of type `{}`", + to_doc(head_type).pretty(std::usize::MAX), + )]), + CoreTypingMessage::TooManyInputsInFunctionTerm => { + Diagnostic::bug().with_message("too many inputs in function term") + } + CoreTypingMessage::TooManyInputsInFunctionElim { head_type } => Diagnostic::bug() + .with_message("too many inputs in function elimination") + .with_notes(vec![format!( + "eliminating a term of type `{}`", + to_doc(head_type).pretty(std::usize::MAX), + )]), + CoreTypingMessage::UnexpectedArrayTerm { expected_type } => Diagnostic::bug() + .with_message("unexpected array term") + .with_notes(vec![format!( + "expected `{}`, found an array", + to_doc(&expected_type).pretty(std::usize::MAX), + )]), + CoreTypingMessage::UnexpectedListTerm { expected_type } => Diagnostic::bug() + .with_message("unexpected list term") + .with_notes(vec![format!( + "expected `{}`, found a list", + to_doc(&expected_type).pretty(std::usize::MAX), + )]), + CoreTypingMessage::AmbiguousTerm { term } => 
{ + Diagnostic::bug().with_message(format!("ambiguous {}", term.description(),)) + } + CoreTypingMessage::MismatchedTypes { + found_type, + expected_type, + } => Diagnostic::bug() + .with_message("mismatched types") + .with_notes(vec![match expected_type { + ExpectedType::Universe => format!( + "expected a type, found `{}`", + to_doc(&found_type).pretty(std::usize::MAX), + ), + ExpectedType::Type(expected_type) => format!( + "expected `{}`, found `{}`", + to_doc(&expected_type).pretty(std::usize::MAX), + to_doc(&found_type).pretty(std::usize::MAX), + ), + }]), + } + } +} + +/// Message produced from [pass::surface_to_core] +#[derive(Clone, Debug)] +pub enum SurfaceToCoreMessage { + MaximumUniverseLevelReached { + location: Location, + }, + UnboundName { + location: Location, + name: String, + }, + InvalidRecordType { + duplicate_labels: Vec<(String, Location, Location)>, + }, + InvalidRecordTerm { + location: Location, + missing_labels: Vec, + unexpected_labels: Vec, + }, + LabelNotFound { + head_location: Location, + label_location: Location, + expected_label: String, + head_type: surface::Term, + }, + TooManyInputsInFunctionTerm { + unexpected_inputs: Vec, + }, + TooManyInputsInFunctionElim { + head_location: Location, + head_type: surface::Term, + unexpected_input_terms: Vec, + }, + NoLiteralConversion { + location: Location, + expected_type: surface::Term, + }, + MismatchedSequenceLength { + location: Location, + found_len: usize, + expected_len: surface::Term, + }, + NoSequenceConversion { + location: Location, + expected_type: surface::Term, + }, + AmbiguousTerm { + location: Location, + term: AmbiguousTerm, + }, + MismatchedTypes { + location: Location, + found_type: surface::Term, + expected_type: ExpectedType, + }, +} + +impl SurfaceToCoreMessage { + pub fn to_diagnostic<'a, D>(&'a self, pretty_alloc: &'a D) -> Diagnostic + where + D: DocAllocator<'a>, + D::Doc: Clone, + { + use itertools::Itertools; + + use crate::pass::surface_to_pretty; + + let to_doc = 
|term| surface_to_pretty::from_term(pretty_alloc, term).1; + + match self { + SurfaceToCoreMessage::MaximumUniverseLevelReached { location } => Diagnostic::error() + .with_message("maximum universe level reached") + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("overflowing universe level")), + )), + + SurfaceToCoreMessage::UnboundName { location, name } => Diagnostic::error() + .with_message(format!("cannot find `{}` in this scope", name)) + // TODO: name suggestions? + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("not found in this scope")), + )), + + SurfaceToCoreMessage::InvalidRecordType { duplicate_labels } => Diagnostic::error() + .with_message("invalid record type") + .with_labels({ + let mut labels = Vec::with_capacity(duplicate_labels.len() * 2); + + for (label_name, label_location1, label_location2) in duplicate_labels { + labels.extend(secondary(label_location1).map(|label| { + label.with_message(format!("first use of `{}`", label_name)) + })); + labels + .extend(primary(label_location2).map(|label| { + label.with_message("entry label used more than once") + })); + } + + labels + }), + + SurfaceToCoreMessage::InvalidRecordTerm { + location, + missing_labels, + unexpected_labels, + } => Diagnostic::error() + .with_message("invalid record term") + .with_labels({ + let mut labels = Vec::with_capacity( + unexpected_labels.len() + if missing_labels.is_empty() { 0 } else { 1 }, + ); + + for label_location in unexpected_labels { + labels.extend( + primary(label_location) + .map(|label| label.with_message("unexpected entry label")), + ); + } + + if !missing_labels.is_empty() { + labels.extend(primary(location).map(|label| { + label.with_message(format!( + "missing the labels {} in this record term", + missing_labels + .iter() + // TODO: reduce string allocations + .map(|label| format!("`{}`", label)) + .format(", "), + )) + })); + } + + labels + }), + + SurfaceToCoreMessage::LabelNotFound { + 
head_location, + label_location, + expected_label, + head_type, + } => Diagnostic::error() + .with_message(format!( + "no entry with label `{}` in type `{}`", + expected_label, + to_doc(&head_type).pretty(std::usize::MAX), + )) + .with_labels( + primary(label_location) + .map(|label| label.with_message("unknown entry label")) + .into_iter() + .chain(secondary(head_location).map(|label| { + label.with_message(format!( + "the type here is `{}`", + to_doc(&head_type).pretty(std::usize::MAX), + )) + })) + .collect(), + ), + + SurfaceToCoreMessage::TooManyInputsInFunctionTerm { unexpected_inputs } => { + Diagnostic::error() + .with_message("too many inputs given for function term") + .with_labels( + unexpected_inputs + .iter() + .flat_map(|input_location| { + primary(input_location) + .map(|label| label.with_message("unexpected input")) + }) + .collect(), + ) + } + + SurfaceToCoreMessage::TooManyInputsInFunctionElim { + head_location, + head_type, + unexpected_input_terms, + } => Diagnostic::error() + .with_message("term was applied to too many inputs") + .with_labels( + primary(head_location) + .map(|label| { + label.with_message(format!( + // TODO: multi-line? + "expected a function, found `{}`", + to_doc(&head_type).pretty(std::usize::MAX), + )) + }) + .into_iter() + .chain(unexpected_input_terms.iter().flat_map(|input_location| { + primary(input_location) + .map(|label| label.with_message("unexpected input".to_owned())) + })) + .collect(), + ), + + SurfaceToCoreMessage::NoLiteralConversion { + location, + expected_type, + } => Diagnostic::error() + .with_message("no known literal conversion") + .with_labels(option_to_vec(primary(location).map(|label| { + label.with_message(format!( + // TODO: multi-line? 
+ "expected `{}`, found a literal", + to_doc(&expected_type).pretty(std::usize::MAX), + )) + }))), + + SurfaceToCoreMessage::MismatchedSequenceLength { + location, + found_len, + expected_len, + } => Diagnostic::error() + .with_message("mismatched sequence length") + .with_labels(option_to_vec(primary(location).map(|label| { + label.with_message(format!( + // TODO: multi-line? + "expected `{}` entries, found `{}` entries", + to_doc(&expected_len).pretty(std::usize::MAX), + found_len, + )) + }))), + + SurfaceToCoreMessage::NoSequenceConversion { + location, + expected_type, + } => Diagnostic::error() + .with_message("no known sequence conversion") + .with_labels(option_to_vec(primary(location).map(|label| { + label.with_message(format!( + // TODO: multi-line? + "expected `{}`, found a sequence", + to_doc(&expected_type).pretty(std::usize::MAX), + )) + }))), + + SurfaceToCoreMessage::AmbiguousTerm { location, term } => Diagnostic::error() + .with_message(format!("ambiguous {}", term.description())) + .with_labels(option_to_vec( + primary(location).map(|label| label.with_message("type annotations needed")), + )), + + SurfaceToCoreMessage::MismatchedTypes { + location, + found_type, + expected_type, + } => Diagnostic::error() + .with_message("mismatched types") + .with_labels(option_to_vec(primary(location).map(|label| { + label.with_message(match expected_type { + ExpectedType::Universe => format!( + // TODO: multi-line? + "expected a type, found `{}`", + to_doc(&found_type).pretty(std::usize::MAX), + ), + ExpectedType::Type(expected_type) => format!( + // TODO: multi-line? + "expected `{}`, found `{}`", + to_doc(&expected_type).pretty(std::usize::MAX), + to_doc(&found_type).pretty(std::usize::MAX), + ), + }) + }))), + } + } +} + +/// Create a new label with a style of [`LabelStyle::Primary`]. 
+/// +/// [`LabelStyle::Primary`]: LabelStyle::Primary +fn primary(location: &Location) -> Option> { + match location { + Location::Generated => None, + Location::FileRange(file_id, range) => Some(Label::primary(*file_id, *range)), + } +} + +/// Create a new label with a style of [`LabelStyle::Secondary`]. +/// +/// [`LabelStyle::Secondary`]: LabelStyle::Secondary +fn secondary(location: &Location) -> Option> { + match location { + Location::Generated => None, + Location::FileRange(file_id, range) => Some(Label::secondary(*file_id, *range)), + } +} + +fn option_to_vec(option: Option) -> Vec { + match option { + None => Vec::new(), + Some(elem) => vec![elem], + } +} + +fn format_expected(expected: &[String]) -> Option { + use itertools::Itertools; + + expected.split_last().map(|items| match items { + // TODO: Improve token formatting + (last, []) => format!("expected {}", last), + (last, expected) => format!("expected {} or {}", expected.iter().format(", "), last), + }) +} diff --git a/pikelet/tests/examples.rs b/pikelet/tests/examples.rs new file mode 100644 index 000000000..7ed425392 --- /dev/null +++ b/pikelet/tests/examples.rs @@ -0,0 +1,100 @@ +//! Integration tests against the language samples directory. 
+ +use codespan_reporting::files::SimpleFiles; +use codespan_reporting::term::termcolor::{BufferedStandardStream, ColorChoice}; +use pikelet::lang::{core, surface}; +use pikelet::pass::surface_to_core; +use std::io::Write; + +fn run_test(path: &str, source: &str) -> Result<(), Box> { + let mut is_failed = false; + + let mut writer = BufferedStandardStream::stdout(ColorChoice::Always); + let globals = core::Globals::default(); + let pretty_alloc = pretty::BoxAllocator; + let config = codespan_reporting::term::Config::default(); + let mut files = SimpleFiles::new(); + let (messages_tx, messages_rx) = crossbeam_channel::unbounded(); + + let file_id = files.add(path, source); + let file = files.get(file_id).unwrap(); + let surface_term = surface::Term::from_str(file_id, file.source(), &messages_tx); + if !messages_rx.is_empty() { + is_failed = true; + writeln!(writer, "surface::Term::from_str messages:")?; + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + codespan_reporting::term::emit(&mut writer, &config, &files, &diagnostic)?; + writer.flush()?; + } + writeln!(writer)?; + } + + let mut state = surface_to_core::State::new(&globals, messages_tx.clone()); + let (core_term, r#type) = state.synth_type(&surface_term); + if !messages_rx.is_empty() { + is_failed = true; + writeln!(writer, "surface_to_core::State::synth_type messages:")?; + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + codespan_reporting::term::emit(&mut writer, &config, &files, &diagnostic)?; + writer.flush()?; + } + writeln!(writer)?; + } + + let mut state = core::typing::State::new(&globals, messages_tx.clone()); + + state.synth_type(&core_term); + if !messages_rx.is_empty() { + is_failed = true; + writeln!(writer, "core::typing::State::synth_term messages:")?; + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + codespan_reporting::term::emit(&mut writer, 
&config, &files, &diagnostic)?; + writer.flush()?; + } + writeln!(writer)?; + } + + state.check_type(&core_term, &r#type); + if !messages_rx.is_empty() { + is_failed = true; + writeln!(writer, "core::typing::State::check_term messages:")?; + for message in messages_rx.try_iter() { + let diagnostic = message.to_diagnostic(&pretty_alloc); + codespan_reporting::term::emit(&mut writer, &config, &files, &diagnostic)?; + writer.flush()?; + } + writeln!(writer)?; + } + + if is_failed { + Err("failed sample".into()) + } else { + Ok(()) + } +} + +macro_rules! example_test { + ($test_name:ident, $path:literal) => { + #[test] + fn $test_name() -> Result<(), Box> { + run_test( + concat!("examples/", $path, ".pi"), + include_str!(concat!("../../examples/", $path, ".pi")), + ) + } + }; +} + +example_test!(comments, "comments"); +example_test!(functions, "functions"); +example_test!(hello_world, "hello-world"); +example_test!(literals, "literals"); +example_test!(prelude, "prelude"); +example_test!(record_mesh, "record-mesh"); +example_test!(record_term_deps, "record-term-deps"); +example_test!(record_type_deps, "record-type-deps"); +example_test!(universes, "universes"); +example_test!(window_settings, "window-settings"); diff --git a/rustfmt.toml b/rustfmt.toml deleted file mode 100644 index cf11cbaa1..000000000 --- a/rustfmt.toml +++ /dev/null @@ -1,2 +0,0 @@ -unstable_features = true -match_block_trailing_comma = true diff --git a/tools/build-book b/tools/build-book deleted file mode 100755 index a8cf67d1b..000000000 --- a/tools/build-book +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -set -euo pipefail - -main() { - mdbook build book -} - -main diff --git a/tools/build-highlight-js b/tools/build-highlight-js deleted file mode 100755 index 6385f8a08..000000000 --- a/tools/build-highlight-js +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -set -euo pipefail - -main() { - local book_dir=./book - local highlightjs_dir=$book_dir/highlight.js - local theme_dir=$book_dir/theme - - echo 
"Preparing highlight.js submodule" - git submodule update --init $highlightjs_dir - - ( - cd $highlightjs_dir - - echo "Updating NPM dependencies" - npm install - - echo "Running highlight.js build script" - node tools/build.js pikelet pikelet-repl rust bash - ) - - echo "Copying minified sources to the theme directory" - mkdir -p $theme_dir - cp $highlightjs_dir/build/highlight.pack.js $theme_dir/highlight.js -} - -main diff --git a/tools/install-cargo-updates b/tools/install-cargo-updates deleted file mode 100755 index da7f4cef6..000000000 --- a/tools/install-cargo-updates +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash -set -euo pipefail - -main() { - (test -x "$HOME/.cargo/bin/cargo-install-update" || cargo install cargo-update) - (test -x "$HOME/.cargo/bin/mdbook" || cargo install --vers "^0.1.7" mdbook) # https://github.com/rust-lang-nursery/mdBook#installation - cargo install-update --all -} - -main diff --git a/tools/install-code b/tools/install-code deleted file mode 100755 index ef3448f04..000000000 --- a/tools/install-code +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -set -euo pipefail - -main() { - local pikelet_dir=./crates/pikelet - local extension_dir=./editors/code - - echo "Installing Pikelet executable" - cargo install --path $pikelet_dir --force - - ( - cd $extension_dir - echo "Installing Pikelet VS Code extension" - npm run install-dev-extension - ) -} - -main diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 000000000..48432be17 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,5183 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" + integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== + dependencies: + "@babel/highlight" "^7.10.4" + +"@babel/compat-data@^7.10.4", "@babel/compat-data@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.11.0.tgz#e9f73efe09af1355b723a7f39b11bad637d7c99c" + integrity sha512-TPSvJfv73ng0pfnEOh17bYMPQbI95+nGWc71Ss4vZdRBHTDqmM9Z8ZV4rYz8Ks7sfzc95n30k6ODIq5UGnXcYQ== + dependencies: + browserslist "^4.12.0" + invariant "^2.2.4" + semver "^5.5.0" + +"@babel/core@^7.4.4": + version "7.11.6" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.11.6.tgz#3a9455dc7387ff1bac45770650bc13ba04a15651" + integrity sha512-Wpcv03AGnmkgm6uS6k8iwhIwTrcP0m17TL1n1sy7qD0qelDu4XNeW0dN0mHfa+Gei211yDaLoEe/VlbXQzM4Bg== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.11.6" + "@babel/helper-module-transforms" "^7.11.0" + "@babel/helpers" "^7.10.4" + "@babel/parser" "^7.11.5" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.11.5" + "@babel/types" "^7.11.5" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.1" + json5 "^2.1.2" + lodash "^4.17.19" + resolve "^1.3.2" + semver "^5.4.1" + source-map "^0.5.0" + +"@babel/generator@^7.11.5", "@babel/generator@^7.11.6", "@babel/generator@^7.4.4": + version "7.11.6" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.11.6.tgz#b868900f81b163b4d464ea24545c61cbac4dc620" + integrity sha512-DWtQ1PV3r+cLbySoHrwn9RWEgKMBLLma4OBQloPRyDYvc5msJM9kvTLo1YnlJd1P/ZuKbdli3ijr5q3FvAF3uA== + dependencies: + "@babel/types" "^7.11.5" + jsesc "^2.5.1" + source-map "^0.5.0" + +"@babel/helper-annotate-as-pure@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.10.4.tgz#5bf0d495a3f757ac3bda48b5bf3b3ba309c72ba3" + integrity sha512-XQlqKQP4vXFB7BN8fEEerrmYvHp3fK/rBkRFz9jaJbzK0B1DSfej9Kc7ZzE8Z/OnId1jpJdNAZ3BFQjWG68rcA== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-builder-binary-assignment-operator-visitor@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.10.4.tgz#bb0b75f31bf98cbf9ff143c1ae578b87274ae1a3" + integrity sha512-L0zGlFrGWZK4PbT8AszSfLTM5sDU1+Az/En9VrdT8/LmEiJt4zXt+Jve9DCAnQcbqDhCI+29y/L93mrDzddCcg== + dependencies: + "@babel/helper-explode-assignable-expression" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-builder-react-jsx-experimental@^7.10.4": + version "7.11.5" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx-experimental/-/helper-builder-react-jsx-experimental-7.11.5.tgz#4ea43dd63857b0a35cd1f1b161dc29b43414e79f" + integrity sha512-Vc4aPJnRZKWfzeCBsqTBnzulVNjABVdahSPhtdMD3Vs80ykx4a87jTHtF/VR+alSrDmNvat7l13yrRHauGcHVw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-module-imports" "^7.10.4" + "@babel/types" "^7.11.5" + +"@babel/helper-builder-react-jsx@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.10.4.tgz#8095cddbff858e6fa9c326daee54a2f2732c1d5d" + integrity sha512-5nPcIZ7+KKDxT1427oBivl9V9YTal7qk0diccnh7RrcgrT/pGFOjgGw1dgryyx1GvHEpXVfoDF6Ak3rTiWh8Rg== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-compilation-targets@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.10.4.tgz#804ae8e3f04376607cc791b9d47d540276332bd2" + integrity 
sha512-a3rYhlsGV0UHNDvrtOXBg8/OpfV0OKTkxKPzIplS1zpx7CygDcWWxckxZeDd3gzPzC4kUT0A4nVFDK0wGMh4MQ== + dependencies: + "@babel/compat-data" "^7.10.4" + browserslist "^4.12.0" + invariant "^2.2.4" + levenary "^1.1.1" + semver "^5.5.0" + +"@babel/helper-create-class-features-plugin@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.10.5.tgz#9f61446ba80e8240b0a5c85c6fdac8459d6f259d" + integrity sha512-0nkdeijB7VlZoLT3r/mY3bUkw3T8WG/hNw+FATs/6+pG2039IJWjTYL0VTISqsNHMUTEnwbVnc89WIJX9Qed0A== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-member-expression-to-functions" "^7.10.5" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + +"@babel/helper-create-regexp-features-plugin@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.10.4.tgz#fdd60d88524659a0b6959c0579925e425714f3b8" + integrity sha512-2/hu58IEPKeoLF45DBwx3XFqsbCXmkdAay4spVr2x0jYgRxrSNp+ePwvSsy9g6YSaNDcKIQVPXk1Ov8S2edk2g== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-regex" "^7.10.4" + regexpu-core "^4.7.0" + +"@babel/helper-define-map@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.10.5.tgz#b53c10db78a640800152692b13393147acb9bb30" + integrity sha512-fMw4kgFB720aQFXSVaXr79pjjcW5puTCM16+rECJ/plGS+zByelE8l9nCpV1GibxTnFVmUuYG9U8wYfQHdzOEQ== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/types" "^7.10.5" + lodash "^4.17.19" + +"@babel/helper-explode-assignable-expression@^7.10.4": + version "7.11.4" + resolved 
"https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.11.4.tgz#2d8e3470252cc17aba917ede7803d4a7a276a41b" + integrity sha512-ux9hm3zR4WV1Y3xXxXkdG/0gxF9nvI0YVmKVhvK9AfMoaQkemL3sJpXw+Xbz65azo8qJiEz2XVDUpK3KYhH3ZQ== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a" + integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== + dependencies: + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-get-function-arity@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2" + integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-hoist-variables@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.10.4.tgz#d49b001d1d5a68ca5e6604dda01a6297f7c9381e" + integrity sha512-wljroF5PgCk2juF69kanHVs6vrLwIPNp6DLD+Lrl3hoQ3PpPPikaDRNFA+0t81NOoMt2DL6WW/mdU8k4k6ZzuA== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-member-expression-to-functions@^7.10.4", "@babel/helper-member-expression-to-functions@^7.10.5": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.11.0.tgz#ae69c83d84ee82f4b42f96e2a09410935a8f26df" + integrity sha512-JbFlKHFntRV5qKw3YC0CvQnDZ4XMwgzzBbld7Ly4Mj4cbFy3KywcR8NtNctRToMWJOVvLINJv525Gd6wwVEx/Q== + dependencies: + "@babel/types" "^7.11.0" + +"@babel/helper-module-imports@^7.10.4": + 
version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.4.tgz#4c5c54be04bd31670a7382797d75b9fa2e5b5620" + integrity sha512-nEQJHqYavI217oD9+s5MUBzk6x1IlvoS9WTPfgG43CbMEeStE0v+r+TucWdx8KFGowPGvyOkDT9+7DHedIDnVw== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-module-transforms@^7.10.4", "@babel/helper-module-transforms@^7.10.5", "@babel/helper-module-transforms@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.11.0.tgz#b16f250229e47211abdd84b34b64737c2ab2d359" + integrity sha512-02EVu8COMuTRO1TAzdMtpBPbe6aQ1w/8fePD2YgQmxZU4gpNWaL9gK3Jp7dxlkUlUCJOTaSeA+Hrm1BRQwqIhg== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-simple-access" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.11.0" + "@babel/template" "^7.10.4" + "@babel/types" "^7.11.0" + lodash "^4.17.19" + +"@babel/helper-optimise-call-expression@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.4.tgz#50dc96413d594f995a77905905b05893cd779673" + integrity sha512-n3UGKY4VXwXThEiKrgRAoVPBMqeoPgHVqiHZOanAJCG9nQUL2pLRQirUzl0ioKclHGpGqRgIOkgcIJaIWLpygg== + dependencies: + "@babel/types" "^7.10.4" + +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + +"@babel/helper-regex@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.10.5.tgz#32dfbb79899073c415557053a19bd055aae50ae0" + 
integrity sha512-68kdUAzDrljqBrio7DYAEgCoJHxppJOERHOgOrDN7WjOzP0ZQ1LsSDRXcemzVZaLvjaJsJEESb6qt+znNuENDg== + dependencies: + lodash "^4.17.19" + +"@babel/helper-remap-async-to-generator@^7.10.4": + version "7.11.4" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.11.4.tgz#4474ea9f7438f18575e30b0cac784045b402a12d" + integrity sha512-tR5vJ/vBa9wFy3m5LLv2faapJLnDFxNWff2SAYkSE4rLUdbp7CdObYFgI7wK4T/Mj4UzpjPwzR8Pzmr5m7MHGA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-wrap-function" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-replace-supers@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.10.4.tgz#d585cd9388ea06e6031e4cd44b6713cbead9e6cf" + integrity sha512-sPxZfFXocEymYTdVK1UNmFPBN+Hv5mJkLPsYWwGBxZAxaWfFu+xqp7b6qWD0yjNuNL2VKc6L5M18tOXUP7NU0A== + dependencies: + "@babel/helper-member-expression-to-functions" "^7.10.4" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-simple-access@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.10.4.tgz#0f5ccda2945277a2a7a2d3a821e15395edcf3461" + integrity sha512-0fMy72ej/VEvF8ULmX6yb5MtHG4uH4Dbd6I/aHDb/JVg0bbivwt9Wg+h3uMvX+QSFtwr5MeItvazbrc4jtRAXw== + dependencies: + "@babel/template" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helper-skip-transparent-expression-wrappers@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.11.0.tgz#eec162f112c2f58d3af0af125e3bb57665146729" + integrity sha512-0XIdiQln4Elglgjbwo9wuJpL/K7AGCY26kmEt0+pRP0TAj4jjyNq1MjoRvikrTVqKcx4Gysxt4cXvVFXP/JO2Q== + dependencies: + "@babel/types" "^7.11.0" + 
+"@babel/helper-split-export-declaration@^7.10.4", "@babel/helper-split-export-declaration@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.11.0.tgz#f8a491244acf6a676158ac42072911ba83ad099f" + integrity sha512-74Vejvp6mHkGE+m+k5vHY93FX2cAtrw1zXrZXRlG4l410Nm9PxfEiVTn1PjDPV5SnmieiueY4AFg2xqhNFuuZg== + dependencies: + "@babel/types" "^7.11.0" + +"@babel/helper-validator-identifier@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2" + integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw== + +"@babel/helper-wrap-function@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.10.4.tgz#8a6f701eab0ff39f765b5a1cfef409990e624b87" + integrity sha512-6py45WvEF0MhiLrdxtRjKjufwLL1/ob2qDJgg5JgNdojBAZSAKnAjkyOCNug6n+OBl4VW76XjvgSFTdaMcW0Ug== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/helpers@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.10.4.tgz#2abeb0d721aff7c0a97376b9e1f6f65d7a475044" + integrity sha512-L2gX/XeUONeEbI78dXSrJzGdz4GQ+ZTA/aazfUsFaWjSe95kiCuOZ5HsXvkiw3iwF+mFHSRUfJU8t6YavocdXA== + dependencies: + "@babel/template" "^7.10.4" + "@babel/traverse" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/highlight@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143" + integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA== + dependencies: + "@babel/helper-validator-identifier" "^7.10.4" + chalk "^2.0.0" + js-tokens "^4.0.0" + 
+"@babel/parser@^7.10.4", "@babel/parser@^7.11.5", "@babel/parser@^7.4.4": + version "7.11.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.11.5.tgz#c7ff6303df71080ec7a4f5b8c003c58f1cf51037" + integrity sha512-X9rD8qqm695vgmeaQ4fvz/o3+Wk4ZzQvSHkDBgpYKxpD4qTAUm88ZKtHkVqIOsYFFbIQ6wQYhC6q7pjqVK0E0Q== + +"@babel/plugin-proposal-async-generator-functions@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.10.5.tgz#3491cabf2f7c179ab820606cec27fed15e0e8558" + integrity sha512-cNMCVezQbrRGvXJwm9fu/1sJj9bHdGAgKodZdLqOQIpfoH3raqmRPBM17+lh7CzhiKRRBrGtZL9WcjxSoGYUSg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-remap-async-to-generator" "^7.10.4" + "@babel/plugin-syntax-async-generators" "^7.8.0" + +"@babel/plugin-proposal-class-properties@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.10.4.tgz#a33bf632da390a59c7a8c570045d1115cd778807" + integrity sha512-vhwkEROxzcHGNu2mzUC0OFFNXdZ4M23ib8aRRcJSsW8BZK9pQMD7QB7csl97NBbgGZO7ZyHUyKDnxzOaP4IrCg== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-proposal-dynamic-import@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.10.4.tgz#ba57a26cb98b37741e9d5bca1b8b0ddf8291f17e" + integrity sha512-up6oID1LeidOOASNXgv/CFbgBqTuKJ0cJjz6An5tWD+NVBNlp3VNSBxv2ZdU7SYl3NxJC7agAQDApZusV6uFwQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-dynamic-import" "^7.8.0" + +"@babel/plugin-proposal-export-namespace-from@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.10.4.tgz#570d883b91031637b3e2958eea3c438e62c05f54" + integrity sha512-aNdf0LY6/3WXkhh0Fdb6Zk9j1NMD8ovj3F6r0+3j837Pn1S1PdNtcwJ5EG9WkVPNHPxyJDaxMaAOVq4eki0qbg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + +"@babel/plugin-proposal-json-strings@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.10.4.tgz#593e59c63528160233bd321b1aebe0820c2341db" + integrity sha512-fCL7QF0Jo83uy1K0P2YXrfX11tj3lkpN7l4dMv9Y9VkowkhkQDwFHFd8IiwyK5MZjE8UpbgokkgtcReH88Abaw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-json-strings" "^7.8.0" + +"@babel/plugin-proposal-logical-assignment-operators@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.11.0.tgz#9f80e482c03083c87125dee10026b58527ea20c8" + integrity sha512-/f8p4z+Auz0Uaf+i8Ekf1iM7wUNLcViFUGiPxKeXvxTSl63B875YPiVdUDdem7hREcI0E0kSpEhS8tF5RphK7Q== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + +"@babel/plugin-proposal-nullish-coalescing-operator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.10.4.tgz#02a7e961fc32e6d5b2db0649e01bf80ddee7e04a" + integrity sha512-wq5n1M3ZUlHl9sqT2ok1T2/MTt6AXE0e1Lz4WzWBr95LsAZ5qDXe4KnFuauYyEyLiohvXFMdbsOTMyLZs91Zlw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" + +"@babel/plugin-proposal-numeric-separator@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.10.4.tgz#ce1590ff0a65ad12970a609d78855e9a4c1aef06" + integrity sha512-73/G7QoRoeNkLZFxsoCCvlg4ezE4eM+57PnOqgaPOozd5myfj7p0muD1mRVJvbUWbOzD+q3No2bWbaKy+DJ8DA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-numeric-separator" "^7.10.4" + +"@babel/plugin-proposal-object-rest-spread@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.11.0.tgz#bd81f95a1f746760ea43b6c2d3d62b11790ad0af" + integrity sha512-wzch41N4yztwoRw0ak+37wxwJM2oiIiy6huGCoqkvSTA9acYWcPfn9Y4aJqmFFJ70KTJUu29f3DQ43uJ9HXzEA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-transform-parameters" "^7.10.4" + +"@babel/plugin-proposal-optional-catch-binding@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.10.4.tgz#31c938309d24a78a49d68fdabffaa863758554dd" + integrity sha512-LflT6nPh+GK2MnFiKDyLiqSqVHkQnVf7hdoAvyTnnKj9xB3docGRsdPuxp6qqqW19ifK3xgc9U5/FwrSaCNX5g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" + +"@babel/plugin-proposal-optional-chaining@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.11.0.tgz#de5866d0646f6afdaab8a566382fe3a221755076" + integrity sha512-v9fZIu3Y8562RRwhm1BbMRxtqZNFmFA2EG+pT2diuU8PT3H6T/KXoZ54KgYisfOFZHV6PfvAiBIZ9Rcz+/JCxA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.0" + +"@babel/plugin-proposal-private-methods@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.10.4.tgz#b160d972b8fdba5c7d111a145fc8c421fc2a6909" + integrity sha512-wh5GJleuI8k3emgTg5KkJK6kHNsGEr0uBTDBuQUBJwckk9xs1ez79ioheEVVxMLyPscB0LfkbVHslQqIzWV6Bw== + dependencies: + "@babel/helper-create-class-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-proposal-unicode-property-regex@^7.10.4", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.10.4.tgz#4483cda53041ce3413b7fe2f00022665ddfaa75d" + integrity sha512-H+3fOgPnEXFL9zGYtKQe4IDOPKYlZdF1kqFDQRRb8PK4B8af1vAGK04tF5iQAAsui+mHNBQSAtd2/ndEDe9wuA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-async-generators@^7.8.0": + version "7.8.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" + integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-class-properties@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.10.4.tgz#6644e6a0baa55a61f9e3231f6c9eeb6ee46c124c" + integrity sha512-GCSBF7iUle6rNugfURwNmCGG3Z/2+opxAMLs1nND4bhEG5PuxTIggDBoeYYSujAlLtsupzOHYJQgPS3pivwXIA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-dynamic-import@^7.8.0": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" + integrity 
sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-export-namespace-from@^7.8.3": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" + integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.3" + +"@babel/plugin-syntax-flow@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-flow/-/plugin-syntax-flow-7.10.4.tgz#53351dd7ae01995e567d04ce42af1a6e0ba846a6" + integrity sha512-yxQsX1dJixF4qEEdzVbst3SZQ58Nrooz8NV9Z9GL4byTE25BvJgl5lf0RECUf0fh28rZBb/RYTWn/eeKwCMrZQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-json-strings@^7.8.0": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" + integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-jsx@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.10.4.tgz#39abaae3cbf710c4373d8429484e6ba21340166c" + integrity sha512-KCg9mio9jwiARCB7WAcQ7Y1q+qicILjoK8LP/VkPkEKaf5dkaZZK1EcTe91a3JJlZ3qy6L5s9X52boEYi8DM9g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" + integrity 
sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" + integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-numeric-separator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" + integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-syntax-object-rest-spread@^7.8.0": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-catch-binding@^7.8.0": + version "7.8.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" + integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-optional-chaining@^7.8.0": + version "7.8.3" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" + integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== + dependencies: + "@babel/helper-plugin-utils" "^7.8.0" + +"@babel/plugin-syntax-top-level-await@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.10.4.tgz#4bbeb8917b54fcf768364e0a81f560e33a3ef57d" + integrity sha512-ni1brg4lXEmWyafKr0ccFWkJG0CeMt4WV1oyeBW6EFObF4oOHclbkj5cARxAPQyAQ2UTuplJyK4nfkXIMMFvsQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-arrow-functions@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.10.4.tgz#e22960d77e697c74f41c501d44d73dbf8a6a64cd" + integrity sha512-9J/oD1jV0ZCBcgnoFWFq1vJd4msoKb/TCpGNFyyLt0zABdcvgK3aYikZ8HjzB14c26bc7E3Q1yugpwGy2aTPNA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-async-to-generator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.10.4.tgz#41a5017e49eb6f3cda9392a51eef29405b245a37" + integrity sha512-F6nREOan7J5UXTLsDsZG3DXmZSVofr2tGNwfdrVwkDWHfQckbQXnXSPfD7iO+c/2HGqycwyLST3DnZ16n+cBJQ== + dependencies: + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-remap-async-to-generator" "^7.10.4" + +"@babel/plugin-transform-block-scoped-functions@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.10.4.tgz#1afa595744f75e43a91af73b0d998ecfe4ebc2e8" + integrity 
sha512-WzXDarQXYYfjaV1szJvN3AD7rZgZzC1JtjJZ8dMHUyiK8mxPRahynp14zzNjU3VkPqPsO38CzxiWO1c9ARZ8JA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-block-scoping@^7.10.4": + version "7.11.1" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.11.1.tgz#5b7efe98852bef8d652c0b28144cd93a9e4b5215" + integrity sha512-00dYeDE0EVEHuuM+26+0w/SCL0BH2Qy7LwHuI4Hi4MH5gkC8/AqMN5uWFJIsoXZrAphiMm1iXzBw6L2T+eA0ew== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-classes@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.10.4.tgz#405136af2b3e218bc4a1926228bc917ab1a0adc7" + integrity sha512-2oZ9qLjt161dn1ZE0Ms66xBncQH4In8Sqw1YWgBUZuGVJJS5c0OFZXL6dP2MRHrkU/eKhWg8CzFJhRQl50rQxA== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-define-map" "^7.10.4" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-optimise-call-expression" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.10.4" + globals "^11.1.0" + +"@babel/plugin-transform-computed-properties@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.10.4.tgz#9ded83a816e82ded28d52d4b4ecbdd810cdfc0eb" + integrity sha512-JFwVDXcP/hM/TbyzGq3l/XWGut7p46Z3QvqFMXTfk6/09m7xZHJUN9xHfsv7vqqD4YnfI5ueYdSJtXqqBLyjBw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-destructuring@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.10.4.tgz#70ddd2b3d1bea83d01509e9bb25ddb3a74fc85e5" + integrity sha512-+WmfvyfsyF603iPa6825mq6Qrb7uLjTOsa3XOFzlYcYDHSS4QmpOWOL0NNBY5qMbvrcf3tq0Cw+v4lxswOBpgA== + 
dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-dotall-regex@^7.10.4", "@babel/plugin-transform-dotall-regex@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.10.4.tgz#469c2062105c1eb6a040eaf4fac4b488078395ee" + integrity sha512-ZEAVvUTCMlMFAbASYSVQoxIbHm2OkG2MseW6bV2JjIygOjdVv8tuxrCTzj1+Rynh7ODb8GivUy7dzEXzEhuPaA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-duplicate-keys@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.10.4.tgz#697e50c9fee14380fe843d1f306b295617431e47" + integrity sha512-GL0/fJnmgMclHiBTTWXNlYjYsA7rDrtsazHG6mglaGSTh0KsrW04qml+Bbz9FL0LcJIRwBWL5ZqlNHKTkU3xAA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-exponentiation-operator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.10.4.tgz#5ae338c57f8cf4001bdb35607ae66b92d665af2e" + integrity sha512-S5HgLVgkBcRdyQAHbKj+7KyuWx8C6t5oETmUuwz1pt3WTWJhsUV0WIIXuVvfXMxl/QQyHKlSCNNtaIamG8fysw== + dependencies: + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-flow-strip-types@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-flow-strip-types/-/plugin-transform-flow-strip-types-7.10.4.tgz#c497957f09e86e3df7296271e9eb642876bf7788" + integrity sha512-XTadyuqNst88UWBTdLjM+wEY7BFnY2sYtPyAidfC7M/QaZnSuIZpMvLxqGT7phAcnGyWh/XQFLKcGf04CnvxSQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-flow" "^7.10.4" + +"@babel/plugin-transform-for-of@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.10.4.tgz#c08892e8819d3a5db29031b115af511dbbfebae9" + integrity sha512-ItdQfAzu9AlEqmusA/65TqJ79eRcgGmpPPFvBnGILXZH975G0LNjP1yjHvGgfuCxqrPPueXOPe+FsvxmxKiHHQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-function-name@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.10.4.tgz#6a467880e0fc9638514ba369111811ddbe2644b7" + integrity sha512-OcDCq2y5+E0dVD5MagT5X+yTRbcvFjDI2ZVAottGH6tzqjx/LKpgkUepu3hp/u4tZBzxxpNGwLsAvGBvQ2mJzg== + dependencies: + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-literals@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.10.4.tgz#9f42ba0841100a135f22712d0e391c462f571f3c" + integrity sha512-Xd/dFSTEVuUWnyZiMu76/InZxLTYilOSr1UlHV+p115Z/Le2Fi1KXkJUYz0b42DfndostYlPub3m8ZTQlMaiqQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-member-expression-literals@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.10.4.tgz#b1ec44fcf195afcb8db2c62cd8e551c881baf8b7" + integrity sha512-0bFOvPyAoTBhtcJLr9VcwZqKmSjFml1iVxvPL0ReomGU53CX53HsM4h2SzckNdkQcHox1bpAqzxBI1Y09LlBSw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-modules-amd@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.10.5.tgz#1b9cddaf05d9e88b3aad339cb3e445c4f020a9b1" + integrity sha512-elm5uruNio7CTLFItVC/rIzKLfQ17+fX7EVz5W0TMgIHFo1zY0Ozzx+lgwhL4plzl8OzVn6Qasx5DeEFyoNiRw== + dependencies: + "@babel/helper-module-transforms" "^7.10.5" + 
"@babel/helper-plugin-utils" "^7.10.4" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-commonjs@^7.10.4", "@babel/plugin-transform-modules-commonjs@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.10.4.tgz#66667c3eeda1ebf7896d41f1f16b17105a2fbca0" + integrity sha512-Xj7Uq5o80HDLlW64rVfDBhao6OX89HKUmb+9vWYaLXBZOma4gA6tw4Ni1O5qVDoZWUV0fxMYA0aYzOawz0l+1w== + dependencies: + "@babel/helper-module-transforms" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-simple-access" "^7.10.4" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-systemjs@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.10.5.tgz#6270099c854066681bae9e05f87e1b9cadbe8c85" + integrity sha512-f4RLO/OL14/FP1AEbcsWMzpbUz6tssRaeQg11RH1BP/XnPpRoVwgeYViMFacnkaw4k4wjRSjn3ip1Uw9TaXuMw== + dependencies: + "@babel/helper-hoist-variables" "^7.10.4" + "@babel/helper-module-transforms" "^7.10.5" + "@babel/helper-plugin-utils" "^7.10.4" + babel-plugin-dynamic-import-node "^2.3.3" + +"@babel/plugin-transform-modules-umd@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.10.4.tgz#9a8481fe81b824654b3a0b65da3df89f3d21839e" + integrity sha512-mohW5q3uAEt8T45YT7Qc5ws6mWgJAaL/8BfWD9Dodo1A3RKWli8wTS+WiQ/knF+tXlPirW/1/MqzzGfCExKECA== + dependencies: + "@babel/helper-module-transforms" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-named-capturing-groups-regex@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.10.4.tgz#78b4d978810b6f3bcf03f9e318f2fc0ed41aecb6" + integrity 
sha512-V6LuOnD31kTkxQPhKiVYzYC/Jgdq53irJC/xBSmqcNcqFGV+PER4l6rU5SH2Vl7bH9mLDHcc0+l9HUOe4RNGKA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + +"@babel/plugin-transform-new-target@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.10.4.tgz#9097d753cb7b024cb7381a3b2e52e9513a9c6888" + integrity sha512-YXwWUDAH/J6dlfwqlWsztI2Puz1NtUAubXhOPLQ5gjR/qmQ5U96DY4FQO8At33JN4XPBhrjB8I4eMmLROjjLjw== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-object-super@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.10.4.tgz#d7146c4d139433e7a6526f888c667e314a093894" + integrity sha512-5iTw0JkdRdJvr7sY0vHqTpnruUpTea32JHmq/atIWqsnNussbRzjEDyWep8UNztt1B5IusBYg8Irb0bLbiEBCQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-replace-supers" "^7.10.4" + +"@babel/plugin-transform-parameters@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.10.5.tgz#59d339d58d0b1950435f4043e74e2510005e2c4a" + integrity sha512-xPHwUj5RdFV8l1wuYiu5S9fqWGM2DrYc24TMvUiRrPVm+SM3XeqU9BcokQX/kEUe+p2RBwy+yoiR1w/Blq6ubw== + dependencies: + "@babel/helper-get-function-arity" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-property-literals@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.10.4.tgz#f6fe54b6590352298785b83edd815d214c42e3c0" + integrity sha512-ofsAcKiUxQ8TY4sScgsGeR2vJIsfrzqvFb9GvJ5UdXDzl+MyYCaBj/FGzXuv7qE0aJcjWMILny1epqelnFlz8g== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-react-jsx@^7.0.0": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.10.4.tgz#673c9f913948764a4421683b2bef2936968fddf2" + integrity sha512-L+MfRhWjX0eI7Js093MM6MacKU4M6dnCRa/QPDwYMxjljzSCzzlzKzj9Pk4P3OtrPcxr2N3znR419nr3Xw+65A== + dependencies: + "@babel/helper-builder-react-jsx" "^7.10.4" + "@babel/helper-builder-react-jsx-experimental" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-jsx" "^7.10.4" + +"@babel/plugin-transform-regenerator@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.10.4.tgz#2015e59d839074e76838de2159db421966fd8b63" + integrity sha512-3thAHwtor39A7C04XucbMg17RcZ3Qppfxr22wYzZNcVIkPHfpM9J0SO8zuCV6SZa265kxBJSrfKTvDCYqBFXGw== + dependencies: + regenerator-transform "^0.14.2" + +"@babel/plugin-transform-reserved-words@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.10.4.tgz#8f2682bcdcef9ed327e1b0861585d7013f8a54dd" + integrity sha512-hGsw1O6Rew1fkFbDImZIEqA8GoidwTAilwCyWqLBM9f+e/u/sQMQu7uX6dyokfOayRuuVfKOW4O7HvaBWM+JlQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-shorthand-properties@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.10.4.tgz#9fd25ec5cdd555bb7f473e5e6ee1c971eede4dd6" + integrity sha512-AC2K/t7o07KeTIxMoHneyX90v3zkm5cjHJEokrPEAGEy3UCp8sLKfnfOIGdZ194fyN4wfX/zZUWT9trJZ0qc+Q== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-spread@^7.11.0": + version "7.11.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.11.0.tgz#fa84d300f5e4f57752fe41a6d1b3c554f13f17cc" + integrity sha512-UwQYGOqIdQJe4aWNyS7noqAnN2VbaczPLiEtln+zPowRNlD+79w3oi2TWfYe0eZgd+gjZCbsydN7lzWysDt+gw== + 
dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" + +"@babel/plugin-transform-sticky-regex@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.10.4.tgz#8f3889ee8657581130a29d9cc91d7c73b7c4a28d" + integrity sha512-Ddy3QZfIbEV0VYcVtFDCjeE4xwVTJWTmUtorAJkn6u/92Z/nWJNV+mILyqHKrUxXYKA2EoCilgoPePymKL4DvQ== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-regex" "^7.10.4" + +"@babel/plugin-transform-template-literals@^7.10.4": + version "7.10.5" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.10.5.tgz#78bc5d626a6642db3312d9d0f001f5e7639fde8c" + integrity sha512-V/lnPGIb+KT12OQikDvgSuesRX14ck5FfJXt6+tXhdkJ+Vsd0lDCVtF6jcB4rNClYFzaB2jusZ+lNISDk2mMMw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-typeof-symbol@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.10.4.tgz#9509f1a7eec31c4edbffe137c16cc33ff0bc5bfc" + integrity sha512-QqNgYwuuW0y0H+kUE/GWSR45t/ccRhe14Fs/4ZRouNNQsyd4o3PG4OtHiIrepbM2WKUBDAXKCAK/Lk4VhzTaGA== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-unicode-escapes@^7.10.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.10.4.tgz#feae523391c7651ddac115dae0a9d06857892007" + integrity sha512-y5XJ9waMti2J+e7ij20e+aH+fho7Wb7W8rNuu72aKRwCHFqQdhkdU2lo3uZ9tQuboEJcUFayXdARhcxLQ3+6Fg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/plugin-transform-unicode-regex@^7.10.4": + version "7.10.4" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.10.4.tgz#e56d71f9282fac6db09c82742055576d5e6d80a8" + integrity sha512-wNfsc4s8N2qnIwpO/WP2ZiSyjfpTamT2C9V9FDH/Ljub9zw6P3SjkXcFmc0RQUt96k2fmIvtla2MMjgTwIAC+A== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + +"@babel/preset-env@^7.4.4": + version "7.11.5" + resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.11.5.tgz#18cb4b9379e3e92ffea92c07471a99a2914e4272" + integrity sha512-kXqmW1jVcnB2cdueV+fyBM8estd5mlNfaQi6lwLgRwCby4edpavgbFhiBNjmWA3JpB/yZGSISa7Srf+TwxDQoA== + dependencies: + "@babel/compat-data" "^7.11.0" + "@babel/helper-compilation-targets" "^7.10.4" + "@babel/helper-module-imports" "^7.10.4" + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-proposal-async-generator-functions" "^7.10.4" + "@babel/plugin-proposal-class-properties" "^7.10.4" + "@babel/plugin-proposal-dynamic-import" "^7.10.4" + "@babel/plugin-proposal-export-namespace-from" "^7.10.4" + "@babel/plugin-proposal-json-strings" "^7.10.4" + "@babel/plugin-proposal-logical-assignment-operators" "^7.11.0" + "@babel/plugin-proposal-nullish-coalescing-operator" "^7.10.4" + "@babel/plugin-proposal-numeric-separator" "^7.10.4" + "@babel/plugin-proposal-object-rest-spread" "^7.11.0" + "@babel/plugin-proposal-optional-catch-binding" "^7.10.4" + "@babel/plugin-proposal-optional-chaining" "^7.11.0" + "@babel/plugin-proposal-private-methods" "^7.10.4" + "@babel/plugin-proposal-unicode-property-regex" "^7.10.4" + "@babel/plugin-syntax-async-generators" "^7.8.0" + "@babel/plugin-syntax-class-properties" "^7.10.4" + "@babel/plugin-syntax-dynamic-import" "^7.8.0" + "@babel/plugin-syntax-export-namespace-from" "^7.8.3" + "@babel/plugin-syntax-json-strings" "^7.8.0" + "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" + "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" + 
"@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/plugin-syntax-object-rest-spread" "^7.8.0" + "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" + "@babel/plugin-syntax-optional-chaining" "^7.8.0" + "@babel/plugin-syntax-top-level-await" "^7.10.4" + "@babel/plugin-transform-arrow-functions" "^7.10.4" + "@babel/plugin-transform-async-to-generator" "^7.10.4" + "@babel/plugin-transform-block-scoped-functions" "^7.10.4" + "@babel/plugin-transform-block-scoping" "^7.10.4" + "@babel/plugin-transform-classes" "^7.10.4" + "@babel/plugin-transform-computed-properties" "^7.10.4" + "@babel/plugin-transform-destructuring" "^7.10.4" + "@babel/plugin-transform-dotall-regex" "^7.10.4" + "@babel/plugin-transform-duplicate-keys" "^7.10.4" + "@babel/plugin-transform-exponentiation-operator" "^7.10.4" + "@babel/plugin-transform-for-of" "^7.10.4" + "@babel/plugin-transform-function-name" "^7.10.4" + "@babel/plugin-transform-literals" "^7.10.4" + "@babel/plugin-transform-member-expression-literals" "^7.10.4" + "@babel/plugin-transform-modules-amd" "^7.10.4" + "@babel/plugin-transform-modules-commonjs" "^7.10.4" + "@babel/plugin-transform-modules-systemjs" "^7.10.4" + "@babel/plugin-transform-modules-umd" "^7.10.4" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.10.4" + "@babel/plugin-transform-new-target" "^7.10.4" + "@babel/plugin-transform-object-super" "^7.10.4" + "@babel/plugin-transform-parameters" "^7.10.4" + "@babel/plugin-transform-property-literals" "^7.10.4" + "@babel/plugin-transform-regenerator" "^7.10.4" + "@babel/plugin-transform-reserved-words" "^7.10.4" + "@babel/plugin-transform-shorthand-properties" "^7.10.4" + "@babel/plugin-transform-spread" "^7.11.0" + "@babel/plugin-transform-sticky-regex" "^7.10.4" + "@babel/plugin-transform-template-literals" "^7.10.4" + "@babel/plugin-transform-typeof-symbol" "^7.10.4" + "@babel/plugin-transform-unicode-escapes" "^7.10.4" + "@babel/plugin-transform-unicode-regex" "^7.10.4" + "@babel/preset-modules" 
"^0.1.3" + "@babel/types" "^7.11.5" + browserslist "^4.12.0" + core-js-compat "^3.6.2" + invariant "^2.2.2" + levenary "^1.1.1" + semver "^5.5.0" + +"@babel/preset-modules@^0.1.3": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.4.tgz#362f2b68c662842970fdb5e254ffc8fc1c2e415e" + integrity sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg== + dependencies: + "@babel/helper-plugin-utils" "^7.0.0" + "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" + "@babel/plugin-transform-dotall-regex" "^7.4.4" + "@babel/types" "^7.4.4" + esutils "^2.0.2" + +"@babel/runtime@^7.4.4", "@babel/runtime@^7.8.4": + version "7.11.2" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736" + integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw== + dependencies: + regenerator-runtime "^0.13.4" + +"@babel/template@^7.10.4", "@babel/template@^7.4.4": + version "7.10.4" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.4.tgz#3251996c4200ebc71d1a8fc405fba940f36ba278" + integrity sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/parser" "^7.10.4" + "@babel/types" "^7.10.4" + +"@babel/traverse@^7.10.4", "@babel/traverse@^7.11.5", "@babel/traverse@^7.4.4": + version "7.11.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.11.5.tgz#be777b93b518eb6d76ee2e1ea1d143daa11e61c3" + integrity sha512-EjiPXt+r7LiCZXEfRpSJd+jUMnBd4/9OUv7Nx3+0u9+eimMwJmG0Q98lw4/289JCoxSE8OolDMNZaaF/JZ69WQ== + dependencies: + "@babel/code-frame" "^7.10.4" + "@babel/generator" "^7.11.5" + "@babel/helper-function-name" "^7.10.4" + "@babel/helper-split-export-declaration" "^7.11.0" + "@babel/parser" "^7.11.5" + "@babel/types" "^7.11.5" + debug "^4.1.0" + globals "^11.1.0" 
+ lodash "^4.17.19" + +"@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.11.0", "@babel/types@^7.11.5", "@babel/types@^7.4.4": + version "7.11.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.11.5.tgz#d9de577d01252d77c6800cee039ee64faf75662d" + integrity sha512-bvM7Qz6eKnJVFIn+1LPtjlBFPVN5jNDc1XmN15vWe7Q3DPBufWWsLiIvUu7xW87uTG6QoggpIDnUgLQvPheU+Q== + dependencies: + "@babel/helper-validator-identifier" "^7.10.4" + lodash "^4.17.19" + to-fast-properties "^2.0.0" + +"@iarna/toml@^2.2.0": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@iarna/toml/-/toml-2.2.5.tgz#b32366c89b43c6f8cefbdefac778b9c828e3ba8c" + integrity sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg== + +"@mrmlnc/readdir-enhanced@^2.2.1": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz#524af240d1a360527b730475ecfa1344aa540dde" + integrity sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g== + dependencies: + call-me-maybe "^1.0.1" + glob-to-regexp "^0.3.0" + +"@nodelib/fs.stat@^1.1.2": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b" + integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== + +"@parcel/fs@^1.11.0": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@parcel/fs/-/fs-1.11.0.tgz#fb8a2be038c454ad46a50dc0554c1805f13535cd" + integrity sha512-86RyEqULbbVoeo8OLcv+LQ1Vq2PKBAvWTU9fCgALxuCTbbs5Ppcvll4Vr+Ko1AnmMzja/k++SzNAwJfeQXVlpA== + dependencies: + "@parcel/utils" "^1.11.0" + mkdirp "^0.5.1" + rimraf "^2.6.2" + +"@parcel/logger@^1.11.1": + version "1.11.1" + resolved "https://registry.yarnpkg.com/@parcel/logger/-/logger-1.11.1.tgz#c55b0744bcbe84ebc291155627f0ec406a23e2e6" + integrity 
sha512-9NF3M6UVeP2udOBDILuoEHd8VrF4vQqoWHEafymO1pfSoOMfxrSJZw1MfyAAIUN/IFp9qjcpDCUbDZB+ioVevA== + dependencies: + "@parcel/workers" "^1.11.0" + chalk "^2.1.0" + grapheme-breaker "^0.3.2" + ora "^2.1.0" + strip-ansi "^4.0.0" + +"@parcel/utils@^1.11.0": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@parcel/utils/-/utils-1.11.0.tgz#539e08fff8af3b26eca11302be80b522674b51ea" + integrity sha512-cA3p4jTlaMeOtAKR/6AadanOPvKeg8VwgnHhOyfi0yClD0TZS/hi9xu12w4EzA/8NtHu0g6o4RDfcNjqN8l1AQ== + +"@parcel/watcher@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-1.12.1.tgz#b98b3df309fcab93451b5583fc38e40826696dad" + integrity sha512-od+uCtCxC/KoNQAIE1vWx1YTyKYY+7CTrxBJPRh3cDWw/C0tCtlBMVlrbplscGoEpt6B27KhJDCv82PBxOERNA== + dependencies: + "@parcel/utils" "^1.11.0" + chokidar "^2.1.5" + +"@parcel/workers@^1.11.0": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@parcel/workers/-/workers-1.11.0.tgz#7b8dcf992806f4ad2b6cecf629839c41c2336c59" + integrity sha512-USSjRAAQYsZFlv43FUPdD+jEGML5/8oLF0rUzPQTtK4q9kvaXr49F5ZplyLz5lox78cLZ0TxN2bIDQ1xhOkulQ== + dependencies: + "@parcel/utils" "^1.11.0" + physical-cpu-count "^2.0.0" + +"@types/q@^1.5.1": + version "1.5.4" + resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.4.tgz#15925414e0ad2cd765bfef58842f7e26a7accb24" + integrity sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug== + +abab@^2.0.0: + version "2.0.5" + resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.5.tgz#c0b678fb32d60fc1219c784d6a826fe385aeb79a" + integrity sha512-9IK9EadsbHo6jLWIpxpR6pL0sazTXV6+SQv25ZB+F7Bj9mJNaOc4nCRabwd5M/JwmUa8idz6Eci6eKfJryPs6Q== + +acorn-globals@^4.3.0: + version "4.3.4" + resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-4.3.4.tgz#9fa1926addc11c97308c4e66d7add0d40c3272e7" + integrity sha512-clfQEh21R+D0leSbUdWf3OcfqyaCSAQ8Ryq00bofSekfr9W8u1jyYZo6ir0xu9Gtcf7BjcHJpnbZH7JOCpP60A== + dependencies: + acorn "^6.0.1" + 
acorn-walk "^6.0.1" + +acorn-walk@^6.0.1: + version "6.2.0" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-6.2.0.tgz#123cb8f3b84c2171f1f7fb252615b1c78a6b1a8c" + integrity sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA== + +acorn@^6.0.1, acorn@^6.0.4: + version "6.4.2" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6" + integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ== + +acorn@^7.1.1: + version "7.4.1" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" + integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== + +ajv@^6.12.3: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + +alphanum-sort@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/alphanum-sort/-/alphanum-sort-1.0.2.tgz#97a1119649b211ad33691d9f9f486a8ec9fbe0a3" + integrity sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM= + +ansi-regex@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" + integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= + +ansi-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" + integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= + +ansi-styles@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-2.2.1.tgz#b432dd3358b634cf75e1e4664368240533c1ddbe" + integrity 
sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4= + +ansi-styles@^3.2.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== + dependencies: + color-convert "^1.9.0" + +ansi-to-html@^0.6.4: + version "0.6.14" + resolved "https://registry.yarnpkg.com/ansi-to-html/-/ansi-to-html-0.6.14.tgz#65fe6d08bba5dd9db33f44a20aec331e0010dad8" + integrity sha512-7ZslfB1+EnFSDO5Ju+ue5Y6It19DRnZXWv8jrGHgIlPna5Mh4jz7BV5jCbQneXNFurQcKoolaaAjHtgSBfOIuA== + dependencies: + entities "^1.1.2" + +anymatch@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" + integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== + dependencies: + micromatch "^3.1.4" + normalize-path "^2.1.1" + +argparse@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== + dependencies: + sprintf-js "~1.0.2" + +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= + +arr-flatten@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" + integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== + +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= + +array-equal@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" + integrity sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM= + +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= + +asn1.js@^5.2.0: + version "5.4.1" + resolved "https://registry.yarnpkg.com/asn1.js/-/asn1.js-5.4.1.tgz#11a980b84ebb91781ce35b0fdc2ee294e3783f07" + integrity sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA== + dependencies: + bn.js "^4.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + safer-buffer "^2.1.0" + +asn1@~0.2.3: + version "0.2.4" + resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" + integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== + dependencies: + safer-buffer "~2.1.0" + +assert-plus@1.0.0, assert-plus@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" + integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= + +assert@^1.1.1: + version "1.5.0" + resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb" + integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA== + dependencies: + object-assign "^4.1.1" + util "0.10.3" + +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= + +async-each@^1.0.1: + version "1.0.3" + resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" + integrity 
sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== + +async-limiter@~1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd" + integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ== + +asynckit@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= + +atob@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" + integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== + +aws-sign2@~0.7.0: + version "0.7.0" + resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" + integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= + +aws4@^1.8.0: + version "1.10.1" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.10.1.tgz#e1e82e4f3e999e2cfd61b161280d16a111f86428" + integrity sha512-zg7Hz2k5lI8kb7U32998pRRFin7zJlkfezGJjUc2heaD4Pw2wObakCDVzkKztTm/Ln7eiVvYsjqak0Ed4LkMDA== + +babel-plugin-dynamic-import-node@^2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" + integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== + dependencies: + object.assign "^4.1.0" + +babel-runtime@^6.11.6, babel-runtime@^6.26.0: + version "6.26.0" + resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe" + integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4= + dependencies: + core-js "^2.4.0" + regenerator-runtime "^0.11.0" + +babel-types@^6.15.0: + version "6.26.0" + resolved 
"https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497" + integrity sha1-o7Bz+Uq0nrb6Vc1lInozQ4BjJJc= + dependencies: + babel-runtime "^6.26.0" + esutils "^2.0.2" + lodash "^4.17.4" + to-fast-properties "^1.0.3" + +babylon-walk@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/babylon-walk/-/babylon-walk-1.0.2.tgz#3b15a5ddbb482a78b4ce9c01c8ba181702d9d6ce" + integrity sha1-OxWl3btIKni0zpwByLoYFwLZ1s4= + dependencies: + babel-runtime "^6.11.6" + babel-types "^6.15.0" + lodash.clone "^4.5.0" + +balanced-match@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" + integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= + +base64-js@^1.0.2: + version "1.3.1" + resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.3.1.tgz#58ece8cb75dd07e71ed08c736abc5fac4dbf8df1" + integrity sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g== + +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + +bcrypt-pbkdf@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" + integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= + dependencies: + tweetnacl "^0.14.3" + +binary-extensions@^1.0.0: + version "1.13.1" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" + integrity 
sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== + +bindings@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" + integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== + dependencies: + file-uri-to-path "1.0.0" + +bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.4.0: + version "4.11.9" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-4.11.9.tgz#26d556829458f9d1e81fc48952493d0ba3507828" + integrity sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw== + +bn.js@^5.1.1: + version "5.1.3" + resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.1.3.tgz#beca005408f642ebebea80b042b4d18d2ac0ee6b" + integrity sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ== + +boolbase@^1.0.0, boolbase@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/boolbase/-/boolbase-1.0.0.tgz#68dff5fbe60c51eb37725ea9e3ed310dcc1e776e" + integrity sha1-aN/1++YMUes3cl6p4+0xDcwed24= + +brace-expansion@^1.1.7: + version "1.1.11" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== + dependencies: + balanced-match "^1.0.0" + concat-map "0.0.1" + +braces@^2.3.1, braces@^2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex 
"^3.0.1" + +brfs@^1.2.0: + version "1.6.1" + resolved "https://registry.yarnpkg.com/brfs/-/brfs-1.6.1.tgz#b78ce2336d818e25eea04a0947cba6d4fb8849c3" + integrity sha512-OfZpABRQQf+Xsmju8XE9bDjs+uU4vLREGolP7bDgcpsI17QREyZ4Bl+2KLxxx1kCgA0fAIhKQBaBYh+PEcCqYQ== + dependencies: + quote-stream "^1.0.1" + resolve "^1.1.5" + static-module "^2.2.0" + through2 "^2.0.0" + +brorand@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/brorand/-/brorand-1.1.0.tgz#12c25efe40a45e3c323eb8675a0a0ce57b22371f" + integrity sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8= + +browser-process-hrtime@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" + integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== + +browserify-aes@^1.0.0, browserify-aes@^1.0.4: + version "1.2.0" + resolved "https://registry.yarnpkg.com/browserify-aes/-/browserify-aes-1.2.0.tgz#326734642f403dabc3003209853bb70ad428ef48" + integrity sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA== + dependencies: + buffer-xor "^1.0.3" + cipher-base "^1.0.0" + create-hash "^1.1.0" + evp_bytestokey "^1.0.3" + inherits "^2.0.1" + safe-buffer "^5.0.1" + +browserify-cipher@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/browserify-cipher/-/browserify-cipher-1.0.1.tgz#8d6474c1b870bfdabcd3bcfcc1934a10e94f15f0" + integrity sha512-sPhkz0ARKbf4rRQt2hTpAHqn47X3llLkUGn+xEJzLjwY8LRs2p0v7ljvI5EyoRO/mexrNunNECisZs+gw2zz1w== + dependencies: + browserify-aes "^1.0.4" + browserify-des "^1.0.0" + evp_bytestokey "^1.0.0" + +browserify-des@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/browserify-des/-/browserify-des-1.0.2.tgz#3af4f1f59839403572f1c66204375f7a7f703e9c" + integrity sha512-BioO1xf3hFwz4kc6iBhI3ieDFompMhrMlnDFC4/0/vd5MokpuAc3R+LYbwTA9A5Yc9pq9UYPqffKpW2ObuwX5A== + dependencies: + cipher-base 
"^1.0.1" + des.js "^1.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +browserify-rsa@^4.0.0, browserify-rsa@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/browserify-rsa/-/browserify-rsa-4.0.1.tgz#21e0abfaf6f2029cf2fafb133567a701d4135524" + integrity sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ= + dependencies: + bn.js "^4.1.0" + randombytes "^2.0.1" + +browserify-sign@^4.0.0: + version "4.2.1" + resolved "https://registry.yarnpkg.com/browserify-sign/-/browserify-sign-4.2.1.tgz#eaf4add46dd54be3bb3b36c0cf15abbeba7956c3" + integrity sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg== + dependencies: + bn.js "^5.1.1" + browserify-rsa "^4.0.1" + create-hash "^1.2.0" + create-hmac "^1.1.7" + elliptic "^6.5.3" + inherits "^2.0.4" + parse-asn1 "^5.1.5" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" + +browserify-zlib@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f" + integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA== + dependencies: + pako "~1.0.5" + +browserslist@^4.0.0, browserslist@^4.1.0, browserslist@^4.12.0, browserslist@^4.8.5: + version "4.14.5" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.5.tgz#1c751461a102ddc60e40993639b709be7f2c4015" + integrity sha512-Z+vsCZIvCBvqLoYkBFTwEYH3v5MCQbsAjp50ERycpOjnPmolg1Gjy4+KaWWpm8QOJt9GHkhdqAl14NpCX73CWA== + dependencies: + caniuse-lite "^1.0.30001135" + electron-to-chromium "^1.3.571" + escalade "^3.1.0" + node-releases "^1.1.61" + +buffer-equal@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-0.0.1.tgz#91bc74b11ea405bc916bc6aa908faafa5b4aac4b" + integrity sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs= + +buffer-from@^1.0.0: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" + integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== + +buffer-xor@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/buffer-xor/-/buffer-xor-1.0.3.tgz#26e61ed1422fb70dd42e6e36729ed51d855fe8d9" + integrity sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk= + +buffer@^4.3.0: + version "4.9.2" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + dependencies: + base64-js "^1.0.2" + ieee754 "^1.1.4" + isarray "^1.0.0" + +builtin-status-codes@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8" + integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= + +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + integrity sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + +call-me-maybe@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" + integrity sha1-JtII6onje1y95gJQoV8DHBak1ms= + +caller-callsite@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-2.0.0.tgz#847e0fce0a223750a9a027c54b33731ad3154134" + integrity sha1-hH4PzgoiN1CpoCfFSzNzGtMVQTQ= + dependencies: + callsites "^2.0.0" + +caller-path@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/caller-path/-/caller-path-2.0.0.tgz#468f83044e369ab2010fac5f06ceee15bb2cb1f4" + integrity sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ= + dependencies: + caller-callsite "^2.0.0" + +callsites@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-2.0.0.tgz#06eb84f00eea413da86affefacbffb36093b3c50" + integrity sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA= + +caniuse-api@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/caniuse-api/-/caniuse-api-3.0.0.tgz#5e4d90e2274961d46291997df599e3ed008ee4c0" + integrity sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw== + dependencies: + browserslist "^4.0.0" + caniuse-lite "^1.0.0" + lodash.memoize "^4.1.2" + lodash.uniq "^4.5.0" + +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001135: + version "1.0.30001148" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001148.tgz#dc97c7ed918ab33bf8706ddd5e387287e015d637" + integrity sha512-E66qcd0KMKZHNJQt9hiLZGE3J4zuTqE1OnU53miEVtylFbwOEmeA5OsRu90noZful+XGSQOni1aT2tiqu/9yYw== + +caseless@~0.12.0: + version "0.12.0" + resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" + integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= + +chalk@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98" + integrity sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg= + dependencies: + ansi-styles "^2.2.1" + escape-string-regexp "^1.0.2" + has-ansi "^2.0.0" + strip-ansi "^3.0.0" + supports-color "^2.0.0" + +chalk@^2.0.0, chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.1, chalk@^2.4.1, chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp 
"^1.0.5" + supports-color "^5.3.0" + +chokidar@^2.1.5: + version "2.1.8" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" + integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== + dependencies: + anymatch "^2.0.0" + async-each "^1.0.1" + braces "^2.3.2" + glob-parent "^3.1.0" + inherits "^2.0.3" + is-binary-path "^1.0.0" + is-glob "^4.0.0" + normalize-path "^3.0.0" + path-is-absolute "^1.0.0" + readdirp "^2.2.1" + upath "^1.1.1" + optionalDependencies: + fsevents "^1.2.7" + +cipher-base@^1.0.0, cipher-base@^1.0.1, cipher-base@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/cipher-base/-/cipher-base-1.0.4.tgz#8760e4ecc272f4c363532f926d874aae2c1397de" + integrity sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +class-utils@^0.3.5: + version "0.3.6" + resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + +cli-cursor@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5" + integrity sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU= + dependencies: + restore-cursor "^2.0.0" + +cli-spinners@^1.1.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" + integrity sha512-1QL4544moEsDVH9T/l6Cemov/37iv1RtoKf7NJ04A60+4MREXNfx/QvavbH6QoGdsD4N4Mwy49cmaINR/o2mdg== + +clone@^1.0.2: + version "1.0.4" + resolved "https://registry.yarnpkg.com/clone/-/clone-1.0.4.tgz#da309cc263df15994c688ca902179ca3c7cd7c7e" + 
integrity sha1-2jCcwmPfFZlMaIypAheco8fNfH4= + +clone@^2.1.1: + version "2.1.2" + resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18= + +coa@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3" + integrity sha512-q5/jG+YQnSy4nRTV4F7lPepBJZ8qBNJJDBuJdoejDyLXgmL7IEo+Le2JDZudFTFt7mrCqIRaSjws4ygRCTCAXA== + dependencies: + "@types/q" "^1.5.1" + chalk "^2.4.1" + q "^1.1.2" + +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + +color-convert@^1.9.0, color-convert@^1.9.1: + version "1.9.3" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== + dependencies: + color-name "1.1.3" + +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= + +color-name@^1.0.0: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +color-string@^1.5.4: + version "1.5.4" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.4.tgz#dd51cd25cfee953d138fe4002372cc3d0e504cb6" + integrity sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw== + dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + +color@^3.0.0: + version "3.1.3" + 
resolved "https://registry.yarnpkg.com/color/-/color-3.1.3.tgz#ca67fb4e7b97d611dcde39eceed422067d91596e" + integrity sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ== + dependencies: + color-convert "^1.9.1" + color-string "^1.5.4" + +combined-stream@^1.0.6, combined-stream@~1.0.6: + version "1.0.8" + resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== + dependencies: + delayed-stream "~1.0.0" + +command-exists@^1.2.6: + version "1.2.9" + resolved "https://registry.yarnpkg.com/command-exists/-/command-exists-1.2.9.tgz#c50725af3808c8ab0260fd60b01fbfa25b954f69" + integrity sha512-LTQ/SGc+s0Xc0Fu5WaKnR0YiygZkm9eKFvyS+fRsU7/ZWFF8ykFM6Pc9aCVf1+xasOOZpO3BAVgVrKvsqKHV7w== + +commander@^2.11.0, commander@^2.19.0, commander@^2.20.0: + version "2.20.3" + resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" + integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== + +component-emitter@^1.2.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" + integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== + +concat-map@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= + +concat-stream@~1.6.0: + version "1.6.2" + 
resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== + dependencies: + buffer-from "^1.0.0" + inherits "^2.0.3" + readable-stream "^2.2.2" + typedarray "^0.0.6" + +console-browserify@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336" + integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA== + +constants-browserify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75" + integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U= + +convert-source-map@^1.5.1, convert-source-map@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" + integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== + dependencies: + safe-buffer "~5.1.1" + +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= + +core-js-compat@^3.6.2: + version "3.6.5" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.6.5.tgz#2a51d9a4e25dfd6e690251aa81f99e3c05481f1c" + integrity sha512-7ItTKOhOZbznhXAQ2g/slGg1PJV5zDO/WdkTwi7UEOJmkvsE32PWvx6mKtDjiMpjnR2CNf6BAD6sSxIlv7ptng== + dependencies: + browserslist "^4.8.5" + semver "7.0.0" + +core-js@^2.4.0, core-js@^2.6.5: + version "2.6.11" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.11.tgz#38831469f9922bded8ee21c9dc46985e0399308c" + integrity 
sha512-5wjnpaT/3dV+XB4borEsnAYQchn00XSgTAWKDkEqv+K8KevjbzmofK6hfJ9TZIlpj2N0xQpazy7PiRQiWHqzWg== + +core-util-is@1.0.2, core-util-is@~1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" + integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= + +cosmiconfig@^5.0.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-5.2.1.tgz#040f726809c591e77a17c0a3626ca45b4f168b1a" + integrity sha512-H65gsXo1SKjf8zmrJ67eJk8aIRKV5ff2D4uKZIBZShbhGSpEmsQOPW/SKMKYhSTrqR7ufy6RP69rPogdaPh/kA== + dependencies: + import-fresh "^2.0.0" + is-directory "^0.3.1" + js-yaml "^3.13.1" + parse-json "^4.0.0" + +create-ecdh@^4.0.0: + version "4.0.4" + resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e" + integrity sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A== + dependencies: + bn.js "^4.1.0" + elliptic "^6.5.3" + +create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" + integrity sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg== + dependencies: + cipher-base "^1.0.1" + inherits "^2.0.1" + md5.js "^1.3.4" + ripemd160 "^2.0.1" + sha.js "^2.4.0" + +create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: + version "1.1.7" + resolved "https://registry.yarnpkg.com/create-hmac/-/create-hmac-1.1.7.tgz#69170c78b3ab957147b2b8b04572e47ead2243ff" + integrity sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg== + dependencies: + cipher-base "^1.0.3" + create-hash "^1.1.0" + inherits "^2.0.1" + ripemd160 "^2.0.0" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +cross-spawn@^6.0.4: + version "6.0.5" + resolved 
"https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" + integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== + dependencies: + nice-try "^1.0.4" + path-key "^2.0.1" + semver "^5.5.0" + shebang-command "^1.2.0" + which "^1.2.9" + +crypto-browserify@^3.11.0: + version "3.12.0" + resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec" + integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg== + dependencies: + browserify-cipher "^1.0.0" + browserify-sign "^4.0.0" + create-ecdh "^4.0.0" + create-hash "^1.1.0" + create-hmac "^1.1.0" + diffie-hellman "^5.0.0" + inherits "^2.0.1" + pbkdf2 "^3.0.3" + public-encrypt "^4.0.0" + randombytes "^2.0.0" + randomfill "^1.0.3" + +css-color-names@0.0.4, css-color-names@^0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0" + integrity sha1-gIrcLnnPhHOAabZGyyDsJ762KeA= + +css-declaration-sorter@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-4.0.1.tgz#c198940f63a76d7e36c1e71018b001721054cb22" + integrity sha512-BcxQSKTSEEQUftYpBVnsH4SF05NTuBokb19/sBt6asXGKZ/6VP7PLG1CBCkFDYOnhXhPh0jMhO6xZ71oYHXHBA== + dependencies: + postcss "^7.0.1" + timsort "^0.3.0" + +css-modules-loader-core@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/css-modules-loader-core/-/css-modules-loader-core-1.1.0.tgz#5908668294a1becd261ae0a4ce21b0b551f21d16" + integrity sha1-WQhmgpShvs0mGuCkziGwtVHyHRY= + dependencies: + icss-replace-symbols "1.1.0" + postcss "6.0.1" + postcss-modules-extract-imports "1.1.0" + postcss-modules-local-by-default "1.2.0" + postcss-modules-scope "1.1.0" + postcss-modules-values "1.3.0" + +css-select-base-adapter@^0.1.1: + version 
"0.1.1" + resolved "https://registry.yarnpkg.com/css-select-base-adapter/-/css-select-base-adapter-0.1.1.tgz#3b2ff4972cc362ab88561507a95408a1432135d7" + integrity sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w== + +css-select@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/css-select/-/css-select-2.1.0.tgz#6a34653356635934a81baca68d0255432105dbef" + integrity sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ== + dependencies: + boolbase "^1.0.0" + css-what "^3.2.1" + domutils "^1.7.0" + nth-check "^1.0.2" + +css-selector-tokenizer@^0.7.0: + version "0.7.3" + resolved "https://registry.yarnpkg.com/css-selector-tokenizer/-/css-selector-tokenizer-0.7.3.tgz#735f26186e67c749aaf275783405cf0661fae8f1" + integrity sha512-jWQv3oCEL5kMErj4wRnK/OPoBi0D+P1FR2cDCKYPaMeD2eW3/mttav8HT4hT1CKopiJI/psEULjkClhvJo4Lvg== + dependencies: + cssesc "^3.0.0" + fastparse "^1.1.2" + +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" + +css-tree@1.0.0-alpha.39: + version "1.0.0-alpha.39" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.39.tgz#2bff3ffe1bb3f776cf7eefd91ee5cba77a149eeb" + integrity sha512-7UvkEYgBAHRG9Nt980lYxjsTrCyHFN53ky3wVsDkiMdVqylqRt+Zc+jm5qw7/qyOvN2dHSYtX0e4MbCCExSvnA== + dependencies: + mdn-data "2.0.6" + source-map "^0.6.1" + +css-what@^3.2.1: + version "3.4.2" + resolved "https://registry.yarnpkg.com/css-what/-/css-what-3.4.2.tgz#ea7026fcb01777edbde52124e21f327e7ae950e4" + integrity sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ== + +cssesc@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +cssnano-preset-default@^4.0.7: + version "4.0.7" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-4.0.7.tgz#51ec662ccfca0f88b396dcd9679cdb931be17f76" + integrity sha512-x0YHHx2h6p0fCl1zY9L9roD7rnlltugGu7zXSKQx6k2rYw0Hi3IqxcoAGF7u9Q5w1nt7vK0ulxV8Lo+EvllGsA== + dependencies: + css-declaration-sorter "^4.0.1" + cssnano-util-raw-cache "^4.0.1" + postcss "^7.0.0" + postcss-calc "^7.0.1" + postcss-colormin "^4.0.3" + postcss-convert-values "^4.0.1" + postcss-discard-comments "^4.0.2" + postcss-discard-duplicates "^4.0.2" + postcss-discard-empty "^4.0.1" + postcss-discard-overridden "^4.0.1" + postcss-merge-longhand "^4.0.11" + postcss-merge-rules "^4.0.3" + postcss-minify-font-values "^4.0.2" + postcss-minify-gradients "^4.0.2" + postcss-minify-params "^4.0.2" + postcss-minify-selectors "^4.0.2" + postcss-normalize-charset "^4.0.1" + postcss-normalize-display-values "^4.0.2" + postcss-normalize-positions "^4.0.2" + postcss-normalize-repeat-style "^4.0.2" + postcss-normalize-string "^4.0.2" + postcss-normalize-timing-functions "^4.0.2" + postcss-normalize-unicode "^4.0.1" + postcss-normalize-url "^4.0.1" + postcss-normalize-whitespace "^4.0.2" + postcss-ordered-values "^4.1.2" + postcss-reduce-initial "^4.0.3" + postcss-reduce-transforms "^4.0.2" + postcss-svgo "^4.0.2" + postcss-unique-selectors "^4.0.1" + +cssnano-util-get-arguments@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/cssnano-util-get-arguments/-/cssnano-util-get-arguments-4.0.0.tgz#ed3a08299f21d75741b20f3b81f194ed49cc150f" + integrity sha1-7ToIKZ8h11dBsg87gfGU7UnMFQ8= + +cssnano-util-get-match@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/cssnano-util-get-match/-/cssnano-util-get-match-4.0.0.tgz#c0e4ca07f5386bb17ec5e52250b4f5961365156d" + integrity sha1-wOTKB/U4a7F+xeUiULT1lhNlFW0= + +cssnano-util-raw-cache@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/cssnano-util-raw-cache/-/cssnano-util-raw-cache-4.0.1.tgz#b26d5fd5f72a11dfe7a7846fb4c67260f96bf282" + integrity sha512-qLuYtWK2b2Dy55I8ZX3ky1Z16WYsx544Q0UWViebptpwn/xDBmog2TLg4f+DBMg1rJ6JDWtn96WHbOKDWt1WQA== + dependencies: + postcss "^7.0.0" + +cssnano-util-same-parent@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/cssnano-util-same-parent/-/cssnano-util-same-parent-4.0.1.tgz#574082fb2859d2db433855835d9a8456ea18bbf3" + integrity sha512-WcKx5OY+KoSIAxBW6UBBRay1U6vkYheCdjyVNDm85zt5K9mHoGOfsOsqIszfAqrQQFIIKgjh2+FDgIj/zsl21Q== + +cssnano@^4.0.0, cssnano@^4.1.10: + version "4.1.10" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-4.1.10.tgz#0ac41f0b13d13d465487e111b778d42da631b8b2" + integrity sha512-5wny+F6H4/8RgNlaqab4ktc3e0/blKutmq8yNlBFXA//nSFFAqAngjNVRzUvCgYROULmZZUoosL/KSoZo5aUaQ== + dependencies: + cosmiconfig "^5.0.0" + cssnano-preset-default "^4.0.7" + is-resolvable "^1.0.0" + postcss "^7.0.0" + +csso@^4.0.2: + version "4.0.3" + resolved "https://registry.yarnpkg.com/csso/-/csso-4.0.3.tgz#0d9985dc852c7cc2b2cacfbbe1079014d1a8e903" + integrity sha512-NL3spysxUkcrOgnpsT4Xdl2aiEiBG6bXswAABQVHcMrfjjBisFOKwLDOmf4wf32aPdcJws1zds2B0Rg+jqMyHQ== + dependencies: + css-tree "1.0.0-alpha.39" + +cssom@0.3.x, cssom@^0.3.4: + version "0.3.8" + resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" + integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== + +cssstyle@^1.1.1: + version "1.4.0" + resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-1.4.0.tgz#9d31328229d3c565c61e586b02041a28fccdccf1" + integrity 
sha512-GBrLZYZ4X4x6/QEoBnIrqb8B/f5l4+8me2dkom/j1Gtbxy0kBv6OGzKuAsGM75bkGwGAFkt56Iwg28S3XTZgSA== + dependencies: + cssom "0.3.x" + +dashdash@^1.12.0: + version "1.14.1" + resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" + integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= + dependencies: + assert-plus "^1.0.0" + +data-urls@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-1.1.0.tgz#15ee0582baa5e22bb59c77140da8f9c76963bbfe" + integrity sha512-YTWYI9se1P55u58gL5GkQHW4P6VJBJ5iBT+B5a7i2Tjadhv52paJG0qHX4A0OR6/t52odI64KP2YvFpkDOi3eQ== + dependencies: + abab "^2.0.0" + whatwg-mimetype "^2.2.0" + whatwg-url "^7.0.0" + +deasync@^0.1.14: + version "0.1.20" + resolved "https://registry.yarnpkg.com/deasync/-/deasync-0.1.20.tgz#546fd2660688a1eeed55edce2308c5cf7104f9da" + integrity sha512-E1GI7jMI57hL30OX6Ht/hfQU8DO4AuB9m72WFm4c38GNbUD4Q03//XZaOIHZiY+H1xUaomcot5yk2q/qIZQkGQ== + dependencies: + bindings "^1.5.0" + node-addon-api "^1.7.1" + +debug@2.6.9, debug@^2.2.0, debug@^2.3.3: + version "2.6.9" + resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.2.0.tgz#7f150f93920e94c58f5574c2fd01a3110effe7f1" + integrity sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg== + dependencies: + ms "2.1.2" + +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= + +deep-is@~0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" + 
integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= + +defaults@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" + integrity sha1-xlYFHpgX2f8I7YgUd/P+QBnz730= + dependencies: + clone "^1.0.2" + +define-properties@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" + integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== + dependencies: + object-keys "^1.0.12" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= + +depd@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" + integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= + +des.js@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843" + integrity sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA== + dependencies: + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + +destroy@~1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" + integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= + +diffie-hellman@^5.0.0: + version "5.0.3" + resolved "https://registry.yarnpkg.com/diffie-hellman/-/diffie-hellman-5.0.3.tgz#40e8ee98f55a2149607146921c63e1ae5f3d2875" + integrity sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg== + dependencies: + bn.js "^4.1.0" + miller-rabin "^4.0.0" + randombytes "^2.0.0" + +dom-serializer@0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.2.2.tgz#1afb81f533717175d478655debc5e332d9f9bb51" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +domain-browser@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda" + integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA== + +domelementtype@1, domelementtype@^1.3.1: + version "1.3.1" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-1.3.1.tgz#d048c44b37b0d10a7f2a3d5fee3f4333d790481f" + integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== + +domelementtype@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/domelementtype/-/domelementtype-2.0.2.tgz#f3b6e549201e46f588b59463dd77187131fe6971" + integrity 
sha512-wFwTwCVebUrMgGeAwRL/NhZtHAUyT9n9yg4IMDwf10+6iCMxSkVq9MGCVEH+QZWo1nNidy8kNvwmv4zWHDTqvA== + +domexception@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/domexception/-/domexception-1.0.1.tgz#937442644ca6a31261ef36e3ec677fe805582c90" + integrity sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug== + dependencies: + webidl-conversions "^4.0.2" + +domhandler@^2.3.0: + version "2.4.2" + resolved "https://registry.yarnpkg.com/domhandler/-/domhandler-2.4.2.tgz#8805097e933d65e85546f726d60f5eb88b44f803" + integrity sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA== + dependencies: + domelementtype "1" + +domutils@^1.5.1, domutils@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/domutils/-/domutils-1.7.0.tgz#56ea341e834e06e6748af7a1cb25da67ea9f8c2a" + integrity sha512-Lgd2XcJ/NjEw+7tFvfKxOzCYKZsdct5lczQ2ZaQY8Djz7pfAD3Gbp8ySJWtreII/vDlMVmxwa6pHmdxIYgttDg== + dependencies: + dom-serializer "0" + domelementtype "1" + +dot-prop@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" + integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== + dependencies: + is-obj "^2.0.0" + +dotenv-expand@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-5.1.0.tgz#3fbaf020bfd794884072ea26b1e9791d45a629f0" + integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== + +dotenv@^5.0.0: + version "5.0.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-5.0.1.tgz#a5317459bd3d79ab88cff6e44057a6a3fbb1fcef" + integrity sha512-4As8uPrjfwb7VXC+WnLCbXK7y+Ueb2B3zgNCePYfhxS1PYeaO1YTeplffTEcbfLhvFNGLAz90VvJs9yomG7bow== + +duplexer2@~0.1.4: + version "0.1.4" + resolved 
"https://registry.yarnpkg.com/duplexer2/-/duplexer2-0.1.4.tgz#8b12dab878c0d69e3e7891051662a32fc6bddcc1" + integrity sha1-ixLauHjA1p4+eJEFFmKjL8a93ME= + dependencies: + readable-stream "^2.0.2" + +ecc-jsbn@~0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" + integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= + dependencies: + jsbn "~0.1.0" + safer-buffer "^2.1.0" + +ee-first@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= + +electron-to-chromium@^1.3.571: + version "1.3.578" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.578.tgz#e6671936f4571a874eb26e2e833aa0b2c0b776e0" + integrity sha512-z4gU6dA1CbBJsAErW5swTGAaU2TBzc2mPAonJb00zqW1rOraDo2zfBMDRvaz9cVic+0JEZiYbHWPw/fTaZlG2Q== + +elliptic@^6.5.3: + version "6.5.3" + resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.5.3.tgz#cb59eb2efdaf73a0bd78ccd7015a62ad6e0f93d6" + integrity sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw== + dependencies: + bn.js "^4.4.0" + brorand "^1.0.1" + hash.js "^1.0.0" + hmac-drbg "^1.0.0" + inherits "^2.0.1" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.0" + +encodeurl@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= + +entities@^1.1.1, entities@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56" + integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w== + +entities@^2.0.0: + version "2.0.3" + resolved 
"https://registry.yarnpkg.com/entities/-/entities-2.0.3.tgz#5c487e5742ab93c15abb5da22759b8590ec03b7f" + integrity sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ== + +envinfo@^7.3.1: + version "7.7.3" + resolved "https://registry.yarnpkg.com/envinfo/-/envinfo-7.7.3.tgz#4b2d8622e3e7366afb8091b23ed95569ea0208cc" + integrity sha512-46+j5QxbPWza0PB1i15nZx0xQ4I/EfQxg9J8Had3b408SV63nEtor2e+oiY63amTo9KTuh2a3XLObNwduxYwwA== + +error-ex@^1.3.1: + version "1.3.2" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" + integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== + dependencies: + is-arrayish "^0.2.1" + +es-abstract@^1.17.0-next.1, es-abstract@^1.17.2, es-abstract@^1.17.5: + version "1.17.7" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.7.tgz#a4de61b2f66989fc7421676c1cb9787573ace54c" + integrity sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" + is-regex "^1.1.1" + object-inspect "^1.8.0" + object-keys "^1.1.1" + object.assign "^4.1.1" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + +es-abstract@^1.18.0-next.0: + version "1.18.0-next.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.0-next.1.tgz#6e3a0a4bda717e5023ab3b8e90bec36108d22c68" + integrity sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA== + dependencies: + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + has "^1.0.3" + has-symbols "^1.0.1" + is-callable "^1.2.2" + is-negative-zero "^2.0.0" + is-regex "^1.1.1" + object-inspect "^1.8.0" + object-keys "^1.1.1" + object.assign "^4.1.1" + string.prototype.trimend "^1.0.1" + string.prototype.trimstart "^1.0.1" + 
+es-to-primitive@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== + dependencies: + is-callable "^1.1.4" + is-date-object "^1.0.1" + is-symbol "^1.0.2" + +escalade@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.0.tgz#e8e2d7c7a8b76f6ee64c2181d6b8151441602d4e" + integrity sha512-mAk+hPSO8fLDkhV7V0dXazH5pDc6MrjBTPyD3VeKzxnVFjH1MIxbCdqGZB9O8+EwWakZs3ZCbDS4IpRt79V1ig== + +escape-html@~1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= + +escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= + +escodegen@^1.11.0, escodegen@^1.11.1: + version "1.14.3" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503" + integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw== + dependencies: + esprima "^4.0.1" + estraverse "^4.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +escodegen@~1.9.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.9.1.tgz#dbae17ef96c8e4bedb1356f4504fa4cc2f7cb7e2" + integrity sha512-6hTjO1NAWkHnDk3OqQ4YrCuwwmGHL9S3nPlzBOUG/R44rda3wLNrfvQ5fkSGjyhHFKM7ALPKcKGrwvCLe0lC7Q== + dependencies: + esprima "^3.1.3" + estraverse "^4.2.0" + esutils "^2.0.2" + optionator "^0.8.1" + optionalDependencies: + source-map "~0.6.1" + +esprima@^3.1.3: + version "3.1.3" + resolved 
"https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633" + integrity sha1-/cpRzuYTOJXjyI1TXOSdv/YqRjM= + +esprima@^4.0.0, esprima@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== + +estraverse@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" + integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== + +esutils@^2.0.2: + version "2.0.3" + resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== + +etag@~1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= + +events@^3.0.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/events/-/events-3.2.0.tgz#93b87c18f8efcd4202a461aec4dfc0556b639379" + integrity sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg== + +evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz#7fcbdb198dc71959432efe13842684e0525acb02" + integrity sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA== + dependencies: + md5.js "^1.3.4" + safe-buffer "^5.1.1" + +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= + dependencies: + debug "^2.3.3" + 
define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extend-shallow@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + +extend@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" + integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== + +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +extsprintf@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" + integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= + +extsprintf@^1.2.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" + integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= + +falafel@^2.1.0: + version "2.2.4" + resolved "https://registry.yarnpkg.com/falafel/-/falafel-2.2.4.tgz#b5d86c060c2412a43166243cb1bce44d1abd2819" + integrity 
sha512-0HXjo8XASWRmsS0X1EkhwEMZaD3Qvp7FfURwjLKjG1ghfRm/MGZl2r4cWUTv41KdNghTw4OUMmVtdGQp3+H+uQ== + dependencies: + acorn "^7.1.1" + foreach "^2.0.5" + isarray "^2.0.1" + object-keys "^1.0.6" + +fast-deep-equal@^3.1.1: + version "3.1.3" + resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== + +fast-glob@^2.2.2: + version "2.2.7" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-2.2.7.tgz#6953857c3afa475fff92ee6015d52da70a4cd39d" + integrity sha512-g1KuQwHOZAmOZMuBtHdxDtju+T2RT8jgCC9aANsbpdiDDTSnjgfuVsIBNKbUeJI3oKMRExcfNDtJl4OhbffMsw== + dependencies: + "@mrmlnc/readdir-enhanced" "^2.2.1" + "@nodelib/fs.stat" "^1.1.2" + glob-parent "^3.1.0" + is-glob "^4.0.0" + merge2 "^1.2.3" + micromatch "^3.1.10" + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@~2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= + +fastparse@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/fastparse/-/fastparse-1.1.2.tgz#91728c5a5942eced8531283c79441ee4122c35a9" + integrity sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ== + +file-uri-to-path@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" + integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== + +filesize@^3.6.0: + 
version "3.6.1" + resolved "https://registry.yarnpkg.com/filesize/-/filesize-3.6.1.tgz#090bb3ee01b6f801a8a8be99d31710b3422bb317" + integrity sha512-7KjR1vv6qnicaPMi1iiTcI85CyYwRO/PSFCu6SvqL8jN2Wjt/NIYQTFtFs7fSDCYOstUkEWIQGFUg5YZQfjlcg== + +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + +for-in@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" + integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= + +foreach@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/foreach/-/foreach-2.0.5.tgz#0bee005018aeb260d0a3af3ae658dd0136ec1b99" + integrity sha1-C+4AUBiusmDQo6865ljdATbsG5k= + +forever-agent@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" + integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= + +form-data@~2.3.2: + version "2.3.3" + resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" + integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== + dependencies: + asynckit "^0.4.0" + combined-stream "^1.0.6" + mime-types "^2.1.12" + +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= + dependencies: + map-cache "^0.2.2" + +fresh@0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= + +fs.realpath@^1.0.0: + version "1.0.0" + resolved 
"https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= + +fsevents@^1.2.7: + version "1.2.13" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" + integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== + dependencies: + bindings "^1.5.0" + nan "^2.12.1" + +function-bind@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== + +gensync@^1.0.0-beta.1: + version "1.0.0-beta.1" + resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" + integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg== + +get-port@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/get-port/-/get-port-3.2.0.tgz#dd7ce7de187c06c8bf353796ac71e099f0980ebc" + integrity sha1-3Xzn3hh8Bsi/NTeWrHHgmfCYDrw= + +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= + +getpass@^0.1.1: + version "0.1.7" + resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" + integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= + dependencies: + assert-plus "^1.0.0" + +glob-parent@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" + integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= + dependencies: + is-glob "^3.1.0" + path-dirname "^1.0.0" + +glob-to-regexp@^0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/glob-to-regexp/-/glob-to-regexp-0.3.0.tgz#8c5a1494d2066c570cc3bfe4496175acc4d502ab" + integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs= + +glob@^7.0.0, glob@^7.1.3, glob@^7.1.4: + version "7.1.6" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + +globals@^11.1.0: + version "11.12.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== + +graceful-fs@^4.1.11: + version "4.2.4" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" + integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== + +grapheme-breaker@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/grapheme-breaker/-/grapheme-breaker-0.3.2.tgz#5b9e6b78c3832452d2ba2bb1cb830f96276410ac" + integrity sha1-W55reMODJFLSuiuxy4MPlidkEKw= + dependencies: + brfs "^1.2.0" + unicode-trie "^0.3.1" + +har-schema@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" + integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= + +har-validator@~5.1.3: + version "5.1.5" + resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" + integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== + dependencies: + ajv "^6.12.3" + har-schema "^2.0.0" + +has-ansi@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91" + integrity sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE= + dependencies: + ansi-regex "^2.0.0" + +has-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa" + integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo= + +has-flag@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= + +has-symbols@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" + integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== + +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= + dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + +has@^1.0.0, has@^1.0.1, has@^1.0.3: + version "1.0.3" + resolved 
"https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== + dependencies: + function-bind "^1.1.1" + +hash-base@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/hash-base/-/hash-base-3.1.0.tgz#55c381d9e06e1d2997a883b4a3fddfe7f0d3af33" + integrity sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA== + dependencies: + inherits "^2.0.4" + readable-stream "^3.6.0" + safe-buffer "^5.2.0" + +hash.js@^1.0.0, hash.js@^1.0.3: + version "1.1.7" + resolved "https://registry.yarnpkg.com/hash.js/-/hash.js-1.1.7.tgz#0babca538e8d4ee4a0f8988d68866537a003cf42" + integrity sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA== + dependencies: + inherits "^2.0.3" + minimalistic-assert "^1.0.1" + +hex-color-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/hex-color-regex/-/hex-color-regex-1.1.0.tgz#4c06fccb4602fe2602b3c93df82d7e7dbf1a8a8e" + integrity sha512-l9sfDFsuqtOqKDsQdqrMRk0U85RZc0RtOR9yPI7mRVOa4FsR/BVnZ0shmQRM96Ji99kYZP/7hn1cedc1+ApsTQ== + +highlight.js@^10.2.0: + version "10.2.1" + resolved "https://registry.yarnpkg.com/highlight.js/-/highlight.js-10.2.1.tgz#09784fe2e95612abbefd510948945d4fe6fa9668" + integrity sha512-A+sckVPIb9zQTUydC9lpRX1qRFO/N0OKEh0NwIr65ckvWA/oMY8v9P3+kGRK3w2ULSh9E8v5MszXafodQ6039g== + +hmac-drbg@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/hmac-drbg/-/hmac-drbg-1.0.1.tgz#d2745701025a6c775a6c545793ed502fc0c649a1" + integrity sha1-0nRXAQJabHdabFRXk+1QL8DGSaE= + dependencies: + hash.js "^1.0.3" + minimalistic-assert "^1.0.0" + minimalistic-crypto-utils "^1.0.1" + +hsl-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hsl-regex/-/hsl-regex-1.0.0.tgz#d49330c789ed819e276a4c0d272dffa30b18fe6e" + integrity sha1-1JMwx4ntgZ4nakwNJy3/owsY/m4= + 
+hsla-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/hsla-regex/-/hsla-regex-1.0.0.tgz#c1ce7a3168c8c6614033a4b5f7877f3b225f9c38" + integrity sha1-wc56MWjIxmFAM6S194d/OyJfnDg= + +html-comment-regex@^1.1.0: + version "1.1.2" + resolved "https://registry.yarnpkg.com/html-comment-regex/-/html-comment-regex-1.1.2.tgz#97d4688aeb5c81886a364faa0cad1dda14d433a7" + integrity sha512-P+M65QY2JQ5Y0G9KKdlDpo0zK+/OHptU5AaBwUfAIDJZk1MYf32Frm84EcOytfJE0t5JvkAnKlmjsXDnWzCJmQ== + +html-encoding-sniffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" + integrity sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw== + dependencies: + whatwg-encoding "^1.0.1" + +html-tags@^1.0.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-1.2.0.tgz#c78de65b5663aa597989dd2b7ab49200d7e4db98" + integrity sha1-x43mW1Zjqll5id0rerSSANfk25g= + +htmlnano@^0.2.2: + version "0.2.6" + resolved "https://registry.yarnpkg.com/htmlnano/-/htmlnano-0.2.6.tgz#d36e39729faa1dd5f8709d8963c67c7502e578b1" + integrity sha512-HUY/99maFsWX2LRoGJpZ/8QRLCkyY0UU1El3wgLLFAHQlD3mCxCJJNcWJk5SBqaU49MLhIWVDW6cGBeuemvaPQ== + dependencies: + cssnano "^4.1.10" + normalize-html-whitespace "^1.0.0" + posthtml "^0.13.1" + posthtml-render "^1.2.2" + purgecss "^2.3.0" + svgo "^1.3.2" + terser "^4.8.0" + uncss "^0.17.3" + +htmlparser2@^3.9.2: + version "3.10.1" + resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-3.10.1.tgz#bd679dc3f59897b6a34bb10749c855bb53a9392f" + integrity sha512-IgieNijUMbkDovyoKObU1DUhm1iwNYE/fuifEoEHfd1oZKZDaONBSkal7Y01shxsM49R4XaMdGez3WnF9UfiCQ== + dependencies: + domelementtype "^1.3.1" + domhandler "^2.3.0" + domutils "^1.5.1" + entities "^1.1.1" + inherits "^2.0.1" + readable-stream "^3.1.1" + +http-errors@~1.7.2: + version "1.7.3" + resolved 
"https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" + integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== + dependencies: + depd "~1.1.2" + inherits "2.0.4" + setprototypeof "1.1.1" + statuses ">= 1.5.0 < 2" + toidentifier "1.0.0" + +http-signature@~1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" + integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= + dependencies: + assert-plus "^1.0.0" + jsprim "^1.2.2" + sshpk "^1.7.0" + +https-browserify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" + integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= + +iconv-lite@0.4.24: + version "0.4.24" + resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== + dependencies: + safer-buffer ">= 2.1.2 < 3" + +icss-replace-symbols@1.1.0, icss-replace-symbols@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz#06ea6f83679a7749e386cfe1fe812ae5db223ded" + integrity sha1-Bupvg2ead0njhs/h/oEq5dsiPe0= + +ieee754@^1.1.4: + version "1.1.13" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" + integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + +import-fresh@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546" + integrity sha1-2BNVwVYS04bGH53dOSLUMEgipUY= + dependencies: + caller-path "^2.0.0" + resolve-from "^3.0.0" + +indexes-of@^1.0.1: + version "1.0.1" + 
resolved "https://registry.yarnpkg.com/indexes-of/-/indexes-of-1.0.1.tgz#f30f716c8e2bd346c7b67d3df3915566a7c05607" + integrity sha1-8w9xbI4r00bHtn0985FVZqfAVgc= + +inflight@^1.0.4: + version "1.0.6" + resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= + dependencies: + once "^1.3.0" + wrappy "1" + +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== + +inherits@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1" + integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE= + +inherits@2.0.3: + version "2.0.3" + resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" + integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= + +invariant@^2.2.2, invariant@^2.2.4: + version "2.2.4" + resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" + integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== + dependencies: + loose-envify "^1.0.0" + +is-absolute-url@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" + integrity sha1-UFMN+4T8yap9vnhS6Do3uTufKqY= + +is-absolute-url@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698" + integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q== + 
+is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + integrity sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== + dependencies: + kind-of "^6.0.0" + +is-arrayish@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" + integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= + +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + +is-binary-path@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" + integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= + dependencies: + binary-extensions "^1.0.0" + +is-buffer@^1.1.5: + version "1.1.6" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" + integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== + +is-callable@^1.1.4, is-callable@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.2.tgz#c7c6715cd22d4ddb48d3e19970223aceabb080d9" + integrity sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA== + +is-color-stop@^1.0.0: + version "1.1.0" + resolved 
"https://registry.yarnpkg.com/is-color-stop/-/is-color-stop-1.1.0.tgz#cfff471aee4dd5c9e158598fbe12967b5cdad345" + integrity sha1-z/9HGu5N1cnhWFmPvhKWe1za00U= + dependencies: + css-color-names "^0.0.4" + hex-color-regex "^1.1.0" + hsl-regex "^1.0.0" + hsla-regex "^1.0.0" + rgb-regex "^1.0.1" + rgba-regex "^1.0.0" + +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== + dependencies: + kind-of "^6.0.0" + +is-date-object@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" + integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + +is-directory@^0.3.1: + version "0.3.1" + resolved 
"https://registry.yarnpkg.com/is-directory/-/is-directory-0.3.1.tgz#61339b6f2475fc772fd9c9d83f5c8575dc154ae1" + integrity sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE= + +is-extendable@^0.1.0, is-extendable@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" + integrity sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= + +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== + dependencies: + is-plain-object "^2.0.4" + +is-extglob@^2.1.0, is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= + +is-glob@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" + integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= + dependencies: + is-extglob "^2.1.0" + +is-glob@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" + integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + dependencies: + is-extglob "^2.1.1" + +is-html@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-html/-/is-html-1.1.0.tgz#e04f1c18d39485111396f9a0273eab51af218464" + integrity sha1-4E8cGNOUhRETlvmgJz6rUa8hhGQ= + dependencies: + html-tags "^1.0.0" + +is-negative-zero@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.0.tgz#9553b121b0fac28869da9ed459e20c7543788461" + integrity sha1-lVOxIbD6wohp2p7UWeIMdUN4hGE= + +is-number@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" + integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= + dependencies: + kind-of "^3.0.2" + +is-obj@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-2.0.0.tgz#473fb05d973705e3fd9620545018ca8e22ef4982" + integrity sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w== + +is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== + dependencies: + isobject "^3.0.1" + +is-regex@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.1.tgz#c6f98aacc546f6cec5468a07b7b153ab564a57b9" + integrity sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg== + dependencies: + has-symbols "^1.0.1" + +is-resolvable@^1.0.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" + integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== + +is-svg@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-svg/-/is-svg-3.0.0.tgz#9321dbd29c212e5ca99c4fa9794c714bcafa2f75" + integrity sha512-gi4iHK53LR2ujhLVVj+37Ykh9GLqYHX6JOVXbLAucaG/Cqw9xwdFOjDM2qeifLs1sF1npXXFvDu0r5HNgCMrzQ== + dependencies: + html-comment-regex "^1.1.0" + +is-symbol@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" + integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== + dependencies: + has-symbols "^1.0.1" + +is-typedarray@~1.0.0: + version "1.0.0" + 
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= + +is-url@^1.2.2: + version "1.2.4" + resolved "https://registry.yarnpkg.com/is-url/-/is-url-1.2.4.tgz#04a4df46d28c4cff3d73d01ff06abeb318a1aa52" + integrity sha512-ITvGim8FhRiYe4IQ5uHSkj7pVaPDrCTkNd3yq3cV7iZAcJdHTUMPMEHcqSOy9xZ9qFenQCvi+2wjH9a1nXqHww== + +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== + +is-wsl@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-1.1.0.tgz#1f16e4aa22b04d1336b66188a66af3c600c3a66d" + integrity sha1-HxbkqiKwTRM2tmGIpmrzxgDDpm0= + +isarray@1.0.0, isarray@^1.0.0, isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= + +isarray@^2.0.1: + version "2.0.5" + resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= + +isobject@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" + integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= + dependencies: + isarray "1.0.0" + +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= + 
+isstream@~0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" + integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= + +"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== + +js-yaml@^3.10.0, js-yaml@^3.13.1: + version "3.14.0" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" + integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +jsbn@~0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" + integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= + +jsdom@^14.1.0: + version "14.1.0" + resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-14.1.0.tgz#916463b6094956b0a6c1782c94e380cd30e1981b" + integrity sha512-O901mfJSuTdwU2w3Sn+74T+RnDVP+FuV5fH8tcPWyqrseRAb0s5xOtPgCFiPOtLcyK7CLIJwPyD83ZqQWvA5ng== + dependencies: + abab "^2.0.0" + acorn "^6.0.4" + acorn-globals "^4.3.0" + array-equal "^1.0.0" + cssom "^0.3.4" + cssstyle "^1.1.1" + data-urls "^1.1.0" + domexception "^1.0.1" + escodegen "^1.11.0" + html-encoding-sniffer "^1.0.2" + nwsapi "^2.1.3" + parse5 "5.1.0" + pn "^1.1.0" + request "^2.88.0" + request-promise-native "^1.0.5" + saxes "^3.1.9" + symbol-tree "^3.2.2" + tough-cookie "^2.5.0" + w3c-hr-time "^1.0.1" + w3c-xmlserializer "^1.1.2" + webidl-conversions "^4.0.2" + whatwg-encoding "^1.0.5" + whatwg-mimetype "^2.3.0" + whatwg-url "^7.0.0" + ws "^6.1.2" + xml-name-validator "^3.0.0" + +jsesc@^2.5.1: + version "2.5.2" + resolved 
"https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== + +jsesc@~0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" + integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= + +json-parse-better-errors@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" + integrity sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw== + +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + +json-schema@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" + integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= + +json-stringify-safe@~5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= + +json5@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" + integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + dependencies: + minimist "^1.2.0" + +json5@^2.1.2: + version "2.1.3" + resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.3.tgz#c9b0f7fa9233bfe5807fe66fcf3a5617ed597d43" + integrity sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA== + dependencies: + minimist 
"^1.2.5" + +jsprim@^1.2.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" + integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= + dependencies: + assert-plus "1.0.0" + extsprintf "1.3.0" + json-schema "0.2.3" + verror "1.10.0" + +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: + version "3.2.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" + integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= + dependencies: + is-buffer "^1.1.5" + +kind-of@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" + integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= + dependencies: + is-buffer "^1.1.5" + +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.3" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" + integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== + +leven@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" + integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== + +levenary@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/levenary/-/levenary-1.1.1.tgz#842a9ee98d2075aa7faeedbe32679e9205f46f77" + integrity sha512-mkAdOIt79FD6irqjYSs4rdbnlT5vRonMEvBVPVb3XmevfS8kgRXwfes0dhPdEtzTWD/1eNE/Bm/G1iRt6DcnQQ== + dependencies: + leven "^3.1.0" + +levn@~0.3.0: + version "0.3.0" + resolved 
"https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" + integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= + dependencies: + prelude-ls "~1.1.2" + type-check "~0.3.2" + +lodash.clone@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.clone/-/lodash.clone-4.5.0.tgz#195870450f5a13192478df4bc3d23d2dea1907b6" + integrity sha1-GVhwRQ9aExkkeN9Lw9I9LeoZB7Y= + +lodash.memoize@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" + integrity sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4= + +lodash.sortby@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= + +lodash.uniq@^4.5.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M= + +lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.4: + version "4.17.20" + resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" + integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== + +log-symbols@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg== + dependencies: + chalk "^2.0.1" + +loose-envify@^1.0.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== + dependencies: + js-tokens "^3.0.0 || ^4.0.0" + +magic-string@^0.22.4: + version "0.22.5" + 
resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.5.tgz#8e9cf5afddf44385c1da5bc2a6a0dbd10b03657e" + integrity sha512-oreip9rJZkzvA8Qzk9HFs8fZGF/u7H/gtrE8EN6RjKJ9kh2HlC+yQ2QezifqTZfGyiuAV0dRv5a+y/8gBb1m9w== + dependencies: + vlq "^0.2.2" + +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= + +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= + dependencies: + object-visit "^1.0.0" + +md5.js@^1.3.4: + version "1.3.5" + resolved "https://registry.yarnpkg.com/md5.js/-/md5.js-1.3.5.tgz#b5d07b8e3216e3e27cd728d72f70d1e6a342005f" + integrity sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + safe-buffer "^5.1.2" + +mdn-data@2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b" + integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA== + +mdn-data@2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.6.tgz#852dc60fcaa5daa2e8cf6c9189c440ed3e042978" + integrity sha512-rQvjv71olwNHgiTbfPZFkJtjNMciWgswYeciZhtvWLO8bmX3TnhyA62I6sTWOyZssWHJJjY6/KiWwqQsWWsqOA== + +merge-source-map@1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/merge-source-map/-/merge-source-map-1.0.4.tgz#a5de46538dae84d4114cc5ea02b4772a6346701f" + integrity sha1-pd5GU42uhNQRTMXqArR3KmNGcB8= + dependencies: + source-map "^0.5.6" + +merge2@^1.2.3: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity 
sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^3.0.4, micromatch@^3.1.10, micromatch@^3.1.4: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + +miller-rabin@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/miller-rabin/-/miller-rabin-4.0.1.tgz#f080351c865b0dc562a8462966daa53543c78a4d" + integrity sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA== + dependencies: + bn.js "^4.0.0" + brorand "^1.0.1" + +mime-db@1.44.0: + version "1.44.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92" + integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg== + +mime-types@^2.1.12, mime-types@~2.1.19: + version "2.1.27" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f" + integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w== + dependencies: + mime-db "1.44.0" + +mime@1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== + +mimic-fn@^1.0.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-1.2.0.tgz#820c86a39334640e99516928bd03fca88057d022" + integrity sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ== + +minimalistic-assert@^1.0.0, minimalistic-assert@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz#2e194de044626d4a10e7f7fbc00ce73e83e4d5c7" + integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== + +minimalistic-crypto-utils@^1.0.0, minimalistic-crypto-utils@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" + integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= + +minimatch@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + +minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.5: + version "1.2.5" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" + integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== + +mixin-deep@^1.2.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" + integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + +mkdirp@^0.5.1, mkdirp@~0.5.1: + version "0.5.5" + resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" + integrity 
sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== + dependencies: + minimist "^1.2.5" + +ms@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= + +ms@2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" + integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== + +ms@2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + +nan@^2.12.1: + version "2.14.1" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01" + integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw== + +nanomatch@^1.2.9: + version "1.2.13" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" + integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + +nice-try@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" + integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== + +node-addon-api@^1.7.1: + version "1.7.2" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-1.7.2.tgz#3df30b95720b53c24e59948b49532b662444f54d" + 
integrity sha512-ibPK3iA+vaY1eEjESkQkM0BbCqFOaZMiXRTtdB0u7b4djtY6JnsjvPdUHVMg6xQt3B8fpTTWHI9A+ADjM9frzg== + +node-forge@^0.7.1: + version "0.7.6" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.7.6.tgz#fdf3b418aee1f94f0ef642cd63486c77ca9724ac" + integrity sha512-sol30LUpz1jQFBjOKwbjxijiE3b6pjd74YwfD0fJOKPjF+fONKb2Yg8rYgS6+bK6VDl+/wfr4IYpC7jDzLUIfw== + +node-libs-browser@^2.0.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425" + integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q== + dependencies: + assert "^1.1.1" + browserify-zlib "^0.2.0" + buffer "^4.3.0" + console-browserify "^1.1.0" + constants-browserify "^1.0.0" + crypto-browserify "^3.11.0" + domain-browser "^1.1.1" + events "^3.0.0" + https-browserify "^1.0.0" + os-browserify "^0.3.0" + path-browserify "0.0.1" + process "^0.11.10" + punycode "^1.2.4" + querystring-es3 "^0.2.0" + readable-stream "^2.3.3" + stream-browserify "^2.0.1" + stream-http "^2.7.2" + string_decoder "^1.0.0" + timers-browserify "^2.0.4" + tty-browserify "0.0.0" + url "^0.11.0" + util "^0.11.0" + vm-browserify "^1.0.1" + +node-releases@^1.1.61: + version "1.1.61" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.61.tgz#707b0fca9ce4e11783612ba4a2fcba09047af16e" + integrity sha512-DD5vebQLg8jLCOzwupn954fbIiZht05DAZs0k2u8NStSe6h9XdsuIQL8hSRKYiU8WUQRznmSDrKGbv3ObOmC7g== + +normalize-html-whitespace@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/normalize-html-whitespace/-/normalize-html-whitespace-1.0.0.tgz#5e3c8e192f1b06c3b9eee4b7e7f28854c7601e34" + integrity sha512-9ui7CGtOOlehQu0t/OhhlmDyc71mKVlv+4vF+me4iZLPrNtRL2xoquEdfZxasC/bdQi/Hr3iTrpyRKIG+ocabA== + +normalize-path@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" + 
integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= + dependencies: + remove-trailing-separator "^1.0.1" + +normalize-path@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-url@^3.0.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" + integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== + +nth-check@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + +nwsapi@^2.1.3: + version "2.2.0" + resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" + integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== + +oauth-sign@~0.9.0: + version "0.9.0" + resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" + integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== + +object-assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= + +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-inspect@^1.8.0: + 
version "1.8.0" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.8.0.tgz#df807e5ecf53a609cc6bfe93eac3cc7be5b3a9d0" + integrity sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA== + +object-inspect@~1.4.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.4.1.tgz#37ffb10e71adaf3748d05f713b4c9452f402cbc4" + integrity sha512-wqdhLpfCUbEsoEwl3FXwGyv8ief1k/1aUdIPCqVnupM6e8l63BEJdiF/0swtn04/8p05tG/T0FrpTlfwvljOdw== + +object-keys@^1.0.12, object-keys@^1.0.6, object-keys@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= + dependencies: + isobject "^3.0.0" + +object.assign@^4.1.0, object.assign@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.1.tgz#303867a666cdd41936ecdedfb1f8f3e32a478cdd" + integrity sha512-VT/cxmx5yaoHSOTSyrCygIDFco+RsibY2NM0a4RdEeY/4KgqezwFtK1yr3U67xYhqJSlASm2pKhLVzPj2lr4bA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.18.0-next.0" + has-symbols "^1.0.1" + object-keys "^1.1.1" + +object.getownpropertydescriptors@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649" + integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.1" + +object.pick@^1.3.0: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= + dependencies: + isobject "^3.0.1" + +object.values@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.1.tgz#68a99ecde356b7e9295a3c5e0ce31dc8c953de5e" + integrity sha512-WTa54g2K8iu0kmS/us18jEmdv1a4Wi//BZ/DTVYEcH0XhLM5NYdpDHja3gt57VrZLcNAO2WGA+KpWsDBaHt6eA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.0-next.1" + function-bind "^1.1.1" + has "^1.0.3" + +on-finished@~2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" + integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= + dependencies: + ee-first "1.1.1" + +once@^1.3.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= + dependencies: + wrappy "1" + +onetime@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/onetime/-/onetime-2.0.1.tgz#067428230fd67443b2794b22bba528b6867962d4" + integrity sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ= + dependencies: + mimic-fn "^1.0.0" + +opn@^5.1.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/opn/-/opn-5.5.0.tgz#fc7164fab56d235904c51c3b27da6758ca3b9bfc" + integrity sha512-PqHpggC9bLV0VeWcdKhkpxY+3JTzetLSqTCWL/z/tFIbI6G8JCjondXklT1JinczLz2Xib62sSp0T/gKT4KksA== + dependencies: + is-wsl "^1.1.0" + +optionator@^0.8.1: + version "0.8.3" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" + integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== + dependencies: + deep-is "~0.1.3" + fast-levenshtein "~2.0.6" + levn "~0.3.0" + prelude-ls "~1.1.2" + type-check "~0.3.2" + word-wrap "~1.2.3" + +ora@^2.1.0: + version "2.1.0" + resolved 
"https://registry.yarnpkg.com/ora/-/ora-2.1.0.tgz#6caf2830eb924941861ec53a173799e008b51e5b" + integrity sha512-hNNlAd3gfv/iPmsNxYoAPLvxg7HuPozww7fFonMZvL84tP6Ox5igfk5j/+a9rtJJwqMgKK+JgWsAQik5o0HTLA== + dependencies: + chalk "^2.3.1" + cli-cursor "^2.1.0" + cli-spinners "^1.1.0" + log-symbols "^2.2.0" + strip-ansi "^4.0.0" + wcwidth "^1.0.1" + +os-browserify@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27" + integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc= + +pako@^0.2.5: + version "0.2.9" + resolved "https://registry.yarnpkg.com/pako/-/pako-0.2.9.tgz#f3f7522f4ef782348da8161bad9ecfd51bf83a75" + integrity sha1-8/dSL073gjSNqBYbrZ7P1Rv4OnU= + +pako@~1.0.5: + version "1.0.11" + resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== + +parcel-bundler@^1.12.4: + version "1.12.4" + resolved "https://registry.yarnpkg.com/parcel-bundler/-/parcel-bundler-1.12.4.tgz#31223f4ab4d00323a109fce28d5e46775409a9ee" + integrity sha512-G+iZGGiPEXcRzw0fiRxWYCKxdt/F7l9a0xkiU4XbcVRJCSlBnioWEwJMutOCCpoQmaQtjB4RBHDGIHN85AIhLQ== + dependencies: + "@babel/code-frame" "^7.0.0" + "@babel/core" "^7.4.4" + "@babel/generator" "^7.4.4" + "@babel/parser" "^7.4.4" + "@babel/plugin-transform-flow-strip-types" "^7.4.4" + "@babel/plugin-transform-modules-commonjs" "^7.4.4" + "@babel/plugin-transform-react-jsx" "^7.0.0" + "@babel/preset-env" "^7.4.4" + "@babel/runtime" "^7.4.4" + "@babel/template" "^7.4.4" + "@babel/traverse" "^7.4.4" + "@babel/types" "^7.4.4" + "@iarna/toml" "^2.2.0" + "@parcel/fs" "^1.11.0" + "@parcel/logger" "^1.11.1" + "@parcel/utils" "^1.11.0" + "@parcel/watcher" "^1.12.1" + "@parcel/workers" "^1.11.0" + ansi-to-html "^0.6.4" + babylon-walk "^1.0.2" + browserslist "^4.1.0" + chalk "^2.1.0" + clone "^2.1.1" + command-exists "^1.2.6" + 
commander "^2.11.0" + core-js "^2.6.5" + cross-spawn "^6.0.4" + css-modules-loader-core "^1.1.0" + cssnano "^4.0.0" + deasync "^0.1.14" + dotenv "^5.0.0" + dotenv-expand "^5.1.0" + envinfo "^7.3.1" + fast-glob "^2.2.2" + filesize "^3.6.0" + get-port "^3.2.0" + htmlnano "^0.2.2" + is-glob "^4.0.0" + is-url "^1.2.2" + js-yaml "^3.10.0" + json5 "^1.0.1" + micromatch "^3.0.4" + mkdirp "^0.5.1" + node-forge "^0.7.1" + node-libs-browser "^2.0.0" + opn "^5.1.0" + postcss "^7.0.11" + postcss-value-parser "^3.3.1" + posthtml "^0.11.2" + posthtml-parser "^0.4.0" + posthtml-render "^1.1.3" + resolve "^1.4.0" + semver "^5.4.1" + serialize-to-js "^3.0.0" + serve-static "^1.12.4" + source-map "0.6.1" + terser "^3.7.3" + v8-compile-cache "^2.0.0" + ws "^5.1.1" + +parse-asn1@^5.0.0, parse-asn1@^5.1.5: + version "5.1.6" + resolved "https://registry.yarnpkg.com/parse-asn1/-/parse-asn1-5.1.6.tgz#385080a3ec13cb62a62d39409cb3e88844cdaed4" + integrity sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw== + dependencies: + asn1.js "^5.2.0" + browserify-aes "^1.0.0" + evp_bytestokey "^1.0.0" + pbkdf2 "^3.0.3" + safe-buffer "^5.1.1" + +parse-json@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-4.0.0.tgz#be35f5425be1f7f6c747184f98a788cb99477ee0" + integrity sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA= + dependencies: + error-ex "^1.3.1" + json-parse-better-errors "^1.0.1" + +parse5@5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.0.tgz#c59341c9723f414c452975564c7c00a68d58acd2" + integrity sha512-fxNG2sQjHvlVAYmzBZS9YlDp6PTSSDwa98vkD4QgVDDCAo84z5X1t5XyJQ62ImdLXx5NdIIfihey6xpum9/gRQ== + +parseurl@~1.3.3: + version "1.3.3" + resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== + +pascalcase@^0.1.1: + version "0.1.1" + 
resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= + +path-browserify@0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a" + integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ== + +path-dirname@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" + integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= + +path-is-absolute@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= + +path-key@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" + integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= + +path-parse@^1.0.6: + version "1.0.6" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" + integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== + +pbkdf2@^3.0.3: + version "3.1.1" + resolved "https://registry.yarnpkg.com/pbkdf2/-/pbkdf2-3.1.1.tgz#cb8724b0fada984596856d1a6ebafd3584654b94" + integrity sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg== + dependencies: + create-hash "^1.1.2" + create-hmac "^1.1.4" + ripemd160 "^2.0.1" + safe-buffer "^5.0.1" + sha.js "^2.4.8" + +performance-now@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" + integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= + +physical-cpu-count@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/physical-cpu-count/-/physical-cpu-count-2.0.0.tgz#18de2f97e4bf7a9551ad7511942b5496f7aba660" + integrity sha1-GN4vl+S/epVRrXURlCtUlverpmA= + +pn@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb" + integrity sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA== + +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= + +postcss-calc@^7.0.1: + version "7.0.5" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-7.0.5.tgz#f8a6e99f12e619c2ebc23cf6c486fdc15860933e" + integrity sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg== + dependencies: + postcss "^7.0.27" + postcss-selector-parser "^6.0.2" + postcss-value-parser "^4.0.2" + +postcss-colormin@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-4.0.3.tgz#ae060bce93ed794ac71264f08132d550956bd381" + integrity sha512-WyQFAdDZpExQh32j0U0feWisZ0dmOtPl44qYmJKkq9xFWY3p+4qnRzCHeNrkeRhwPHz9bQ3mo0/yVkaply0MNw== + dependencies: + browserslist "^4.0.0" + color "^3.0.0" + has "^1.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-convert-values@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-4.0.1.tgz#ca3813ed4da0f812f9d43703584e449ebe189a7f" + integrity sha512-Kisdo1y77KUC0Jmn0OXU/COOJbzM8cImvw1ZFsBgBgMgb1iL23Zs/LXRe3r+EZqM3vGYKdQ2YJVQ5VkJI+zEJQ== + dependencies: + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-discard-comments@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-4.0.2.tgz#1fbabd2c246bff6aaad7997b2b0918f4d7af4033" + 
integrity sha512-RJutN259iuRf3IW7GZyLM5Sw4GLTOH8FmsXBnv8Ab/Tc2k4SR4qbV4DNbyyY4+Sjo362SyDmW2DQ7lBSChrpkg== + dependencies: + postcss "^7.0.0" + +postcss-discard-duplicates@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-4.0.2.tgz#3fe133cd3c82282e550fc9b239176a9207b784eb" + integrity sha512-ZNQfR1gPNAiXZhgENFfEglF93pciw0WxMkJeVmw8eF+JZBbMD7jp6C67GqJAXVZP2BWbOztKfbsdmMp/k8c6oQ== + dependencies: + postcss "^7.0.0" + +postcss-discard-empty@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-4.0.1.tgz#c8c951e9f73ed9428019458444a02ad90bb9f765" + integrity sha512-B9miTzbznhDjTfjvipfHoqbWKwd0Mj+/fL5s1QOz06wufguil+Xheo4XpOnc4NqKYBCNqqEzgPv2aPBIJLox0w== + dependencies: + postcss "^7.0.0" + +postcss-discard-overridden@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-4.0.1.tgz#652aef8a96726f029f5e3e00146ee7a4e755ff57" + integrity sha512-IYY2bEDD7g1XM1IDEsUT4//iEYCxAmP5oDSFMVU/JVvT7gh+l4fmjciLqGgwjdWpQIdb0Che2VX00QObS5+cTg== + dependencies: + postcss "^7.0.0" + +postcss-merge-longhand@^4.0.11: + version "4.0.11" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-4.0.11.tgz#62f49a13e4a0ee04e7b98f42bb16062ca2549e24" + integrity sha512-alx/zmoeXvJjp7L4mxEMjh8lxVlDFX1gqWHzaaQewwMZiVhLo42TEClKaeHbRf6J7j82ZOdTJ808RtN0ZOZwvw== + dependencies: + css-color-names "0.0.4" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + stylehacks "^4.0.0" + +postcss-merge-rules@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-4.0.3.tgz#362bea4ff5a1f98e4075a713c6cb25aefef9a650" + integrity sha512-U7e3r1SbvYzO0Jr3UT/zKBVgYYyhAz0aitvGIYOYK5CPmkNih+WDSsS5tvPrJ8YMQYlEMvsZIiqmn7HdFUaeEQ== + dependencies: + browserslist "^4.0.0" + caniuse-api "^3.0.0" + cssnano-util-same-parent "^4.0.0" + postcss "^7.0.0" 
+ postcss-selector-parser "^3.0.0" + vendors "^1.0.0" + +postcss-minify-font-values@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-4.0.2.tgz#cd4c344cce474343fac5d82206ab2cbcb8afd5a6" + integrity sha512-j85oO6OnRU9zPf04+PZv1LYIYOprWm6IA6zkXkrJXyRveDEuQggG6tvoy8ir8ZwjLxLuGfNkCZEQG7zan+Hbtg== + dependencies: + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-minify-gradients@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-4.0.2.tgz#93b29c2ff5099c535eecda56c4aa6e665a663471" + integrity sha512-qKPfwlONdcf/AndP1U8SJ/uzIJtowHlMaSioKzebAXSG4iJthlWC9iSWznQcX4f66gIWX44RSA841HTHj3wK+Q== + dependencies: + cssnano-util-get-arguments "^4.0.0" + is-color-stop "^1.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-minify-params@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-4.0.2.tgz#6b9cef030c11e35261f95f618c90036d680db874" + integrity sha512-G7eWyzEx0xL4/wiBBJxJOz48zAKV2WG3iZOqVhPet/9geefm/Px5uo1fzlHu+DOjT+m0Mmiz3jkQzVHe6wxAWg== + dependencies: + alphanum-sort "^1.0.0" + browserslist "^4.0.0" + cssnano-util-get-arguments "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + uniqs "^2.0.0" + +postcss-minify-selectors@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-4.0.2.tgz#e2e5eb40bfee500d0cd9243500f5f8ea4262fbd8" + integrity sha512-D5S1iViljXBj9kflQo4YutWnJmwm8VvIsU1GeXJGiG9j8CIg9zs4voPMdQDUmIxetUOh60VilsNzCiAFTOqu3g== + dependencies: + alphanum-sort "^1.0.0" + has "^1.0.0" + postcss "^7.0.0" + postcss-selector-parser "^3.0.0" + +postcss-modules-extract-imports@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.1.0.tgz#b614c9720be6816eaee35fb3a5faa1dba6a05ddb" + integrity 
sha1-thTJcgvmgW6u41+zpfqh26agXds= + dependencies: + postcss "^6.0.1" + +postcss-modules-local-by-default@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz#f7d80c398c5a393fa7964466bd19500a7d61c069" + integrity sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk= + dependencies: + css-selector-tokenizer "^0.7.0" + postcss "^6.0.1" + +postcss-modules-scope@1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz#d6ea64994c79f97b62a72b426fbe6056a194bb90" + integrity sha1-1upkmUx5+XtipytCb75gVqGUu5A= + dependencies: + css-selector-tokenizer "^0.7.0" + postcss "^6.0.1" + +postcss-modules-values@1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz#ecffa9d7e192518389f42ad0e83f72aec456ea20" + integrity sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA= + dependencies: + icss-replace-symbols "^1.1.0" + postcss "^6.0.1" + +postcss-normalize-charset@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-4.0.1.tgz#8b35add3aee83a136b0471e0d59be58a50285dd4" + integrity sha512-gMXCrrlWh6G27U0hF3vNvR3w8I1s2wOBILvA87iNXaPvSNo5uZAMYsZG7XjCUf1eVxuPfyL4TJ7++SGZLc9A3g== + dependencies: + postcss "^7.0.0" + +postcss-normalize-display-values@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-4.0.2.tgz#0dbe04a4ce9063d4667ed2be476bb830c825935a" + integrity sha512-3F2jcsaMW7+VtRMAqf/3m4cPFhPD3EFRgNs18u+k3lTJJlVe7d0YPO+bnwqo2xg8YiRpDXJI2u8A0wqJxMsQuQ== + dependencies: + cssnano-util-get-match "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-positions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-4.0.2.tgz#05f757f84f260437378368a91f8932d4b102917f" 
+ integrity sha512-Dlf3/9AxpxE+NF1fJxYDeggi5WwV35MXGFnnoccP/9qDtFrTArZ0D0R+iKcg5WsUd8nUYMIl8yXDCtcrT8JrdA== + dependencies: + cssnano-util-get-arguments "^4.0.0" + has "^1.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-repeat-style@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-4.0.2.tgz#c4ebbc289f3991a028d44751cbdd11918b17910c" + integrity sha512-qvigdYYMpSuoFs3Is/f5nHdRLJN/ITA7huIoCyqqENJe9PvPmLhNLMu7QTjPdtnVf6OcYYO5SHonx4+fbJE1+Q== + dependencies: + cssnano-util-get-arguments "^4.0.0" + cssnano-util-get-match "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-string@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-4.0.2.tgz#cd44c40ab07a0c7a36dc5e99aace1eca4ec2690c" + integrity sha512-RrERod97Dnwqq49WNz8qo66ps0swYZDSb6rM57kN2J+aoyEAJfZ6bMx0sx/F9TIEX0xthPGCmeyiam/jXif0eA== + dependencies: + has "^1.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-timing-functions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-4.0.2.tgz#8e009ca2a3949cdaf8ad23e6b6ab99cb5e7d28d9" + integrity sha512-acwJY95edP762e++00Ehq9L4sZCEcOPyaHwoaFOhIwWCDfik6YvqsYNxckee65JHLKzuNSSmAdxwD2Cud1Z54A== + dependencies: + cssnano-util-get-match "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-unicode@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-4.0.1.tgz#841bd48fdcf3019ad4baa7493a3d363b52ae1cfb" + integrity sha512-od18Uq2wCYn+vZ/qCOeutvHjB5jm57ToxRaMeNuf0nWVHaP9Hua56QyMF6fs/4FSUnVIw0CBPsU0K4LnBPwYwg== + dependencies: + browserslist "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-url@^4.0.1: + version "4.0.1" + resolved 
"https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-4.0.1.tgz#10e437f86bc7c7e58f7b9652ed878daaa95faae1" + integrity sha512-p5oVaF4+IHwu7VpMan/SSpmpYxcJMtkGppYf0VbdH5B6hN8YNmVyJLuY9FmLQTzY3fag5ESUUHDqM+heid0UVA== + dependencies: + is-absolute-url "^2.0.0" + normalize-url "^3.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-normalize-whitespace@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-4.0.2.tgz#bf1d4070fe4fcea87d1348e825d8cc0c5faa7d82" + integrity sha512-tO8QIgrsI3p95r8fyqKV+ufKlSHh9hMJqACqbv2XknufqEDhDvbguXGBBqxw9nsQoXWf0qOqppziKJKHMD4GtA== + dependencies: + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-ordered-values@^4.1.2: + version "4.1.2" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-4.1.2.tgz#0cf75c820ec7d5c4d280189559e0b571ebac0eee" + integrity sha512-2fCObh5UanxvSxeXrtLtlwVThBvHn6MQcu4ksNT2tsaV2Fg76R2CV98W7wNSlX+5/pFwEyaDwKLLoEV7uRybAw== + dependencies: + cssnano-util-get-arguments "^4.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + +postcss-reduce-initial@^4.0.3: + version "4.0.3" + resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-4.0.3.tgz#7fd42ebea5e9c814609639e2c2e84ae270ba48df" + integrity sha512-gKWmR5aUulSjbzOfD9AlJiHCGH6AEVLaM0AV+aSioxUDd16qXP1PCh8d1/BGVvpdWn8k/HiK7n6TjeoXN1F7DA== + dependencies: + browserslist "^4.0.0" + caniuse-api "^3.0.0" + has "^1.0.0" + postcss "^7.0.0" + +postcss-reduce-transforms@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-4.0.2.tgz#17efa405eacc6e07be3414a5ca2d1074681d4e29" + integrity sha512-EEVig1Q2QJ4ELpJXMZR8Vt5DQx8/mo+dGWSR7vWXqcob2gQLyQGsionYcGKATXvQzMPn6DSN1vTN7yFximdIAg== + dependencies: + cssnano-util-get-match "^4.0.0" + has "^1.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + 
+postcss-selector-parser@6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.2.tgz#934cf799d016c83411859e09dcecade01286ec5c" + integrity sha512-36P2QR59jDTOAiIkqEprfJDsoNrvwFei3eCqKd1Y0tUsBimsq39BLp7RD+JWny3WgB1zGhJX8XVePwm9k4wdBg== + dependencies: + cssesc "^3.0.0" + indexes-of "^1.0.1" + uniq "^1.0.1" + +postcss-selector-parser@^3.0.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz#b310f5c4c0fdaf76f94902bbaa30db6aa84f5270" + integrity sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA== + dependencies: + dot-prop "^5.2.0" + indexes-of "^1.0.1" + uniq "^1.0.1" + +postcss-selector-parser@^6.0.2: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz#56075a1380a04604c38b063ea7767a129af5c2b3" + integrity sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw== + dependencies: + cssesc "^3.0.0" + indexes-of "^1.0.1" + uniq "^1.0.1" + util-deprecate "^1.0.2" + +postcss-svgo@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-4.0.2.tgz#17b997bc711b333bab143aaed3b8d3d6e3d38258" + integrity sha512-C6wyjo3VwFm0QgBy+Fu7gCYOkCmgmClghO+pjcxvrcBKtiKt0uCF+hvbMO1fyv5BMImRK90SMb+dwUnfbGd+jw== + dependencies: + is-svg "^3.0.0" + postcss "^7.0.0" + postcss-value-parser "^3.0.0" + svgo "^1.0.0" + +postcss-unique-selectors@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-4.0.1.tgz#9446911f3289bfd64c6d680f073c03b1f9ee4bac" + integrity sha512-+JanVaryLo9QwZjKrmJgkI4Fn8SBgRO6WXQBJi7KiAVPlmxikB5Jzc4EvXMT2H0/m0RjrVVm9rGNhZddm/8Spg== + dependencies: + alphanum-sort "^1.0.0" + postcss "^7.0.0" + uniqs "^2.0.0" + +postcss-value-parser@^3.0.0, postcss-value-parser@^3.3.1: + version "3.3.1" + resolved 
"https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" + integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== + +postcss-value-parser@^4.0.2: + version "4.1.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb" + integrity sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== + +postcss@6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.1.tgz#000dbd1f8eef217aa368b9a212c5fc40b2a8f3f2" + integrity sha1-AA29H47vIXqjaLmiEsX8QLKo8/I= + dependencies: + chalk "^1.1.3" + source-map "^0.5.6" + supports-color "^3.2.3" + +postcss@7.0.32: + version "7.0.32" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.32.tgz#4310d6ee347053da3433db2be492883d62cec59d" + integrity sha512-03eXong5NLnNCD05xscnGKGDZ98CyzoqPSMjOe6SuoQY7Z2hIj0Ld1g/O/UQRuOle2aRtiIRDg9tDcTGAkLfKw== + dependencies: + chalk "^2.4.2" + source-map "^0.6.1" + supports-color "^6.1.0" + +postcss@^6.0.1: + version "6.0.23" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.23.tgz#61c82cc328ac60e677645f979054eb98bc0e3324" + integrity sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag== + dependencies: + chalk "^2.4.1" + source-map "^0.6.1" + supports-color "^5.4.0" + +postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.17, postcss@^7.0.27: + version "7.0.35" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.35.tgz#d2be00b998f7f211d8a276974079f2e92b970e24" + integrity sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg== + dependencies: + chalk "^2.4.2" + source-map "^0.6.1" + supports-color "^6.1.0" + +posthtml-parser@^0.4.0, posthtml-parser@^0.4.1: + version "0.4.2" + resolved 
"https://registry.yarnpkg.com/posthtml-parser/-/posthtml-parser-0.4.2.tgz#a132bbdf0cd4bc199d34f322f5c1599385d7c6c1" + integrity sha512-BUIorsYJTvS9UhXxPTzupIztOMVNPa/HtAm9KHni9z6qEfiJ1bpOBL5DfUOL9XAc3XkLIEzBzpph+Zbm4AdRAg== + dependencies: + htmlparser2 "^3.9.2" + +posthtml-parser@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/posthtml-parser/-/posthtml-parser-0.5.0.tgz#571058a3b63c1704964ffc25bbe69ffda213244e" + integrity sha512-BsZFAqOeX9lkJJPKG2JmGgtm6t++WibU7FeS40FNNGZ1KS2szRSRQ8Wr2JLvikDgAecrQ/9V4sjugTAin2+KVw== + dependencies: + htmlparser2 "^3.9.2" + +posthtml-render@^1.1.3, posthtml-render@^1.1.5, posthtml-render@^1.2.2, posthtml-render@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/posthtml-render/-/posthtml-render-1.2.3.tgz#da1cf7ba4efb42cfe9c077f4f41669745de99b6d" + integrity sha512-rGGayND//VwTlsYKNqdILsA7U/XP0WJa6SMcdAEoqc2WRM5QExplGg/h9qbTuHz7mc2PvaXU+6iNxItvr5aHMg== + +posthtml@^0.11.2: + version "0.11.6" + resolved "https://registry.yarnpkg.com/posthtml/-/posthtml-0.11.6.tgz#e349d51af7929d0683b9d8c3abd8166beecc90a8" + integrity sha512-C2hrAPzmRdpuL3iH0TDdQ6XCc9M7Dcc3zEW5BLerY65G4tWWszwv6nG/ksi6ul5i2mx22ubdljgktXCtNkydkw== + dependencies: + posthtml-parser "^0.4.1" + posthtml-render "^1.1.5" + +posthtml@^0.13.1: + version "0.13.3" + resolved "https://registry.yarnpkg.com/posthtml/-/posthtml-0.13.3.tgz#9702d745108d532a9d5808985e0dafd81b09f7bd" + integrity sha512-5NL2bBc4ihAyoYnY0EAQrFQbJNE1UdvgC1wjYts0hph7jYeU2fa5ki3/9U45ce9V6M1vLMEgUX2NXe/bYL+bCQ== + dependencies: + posthtml-parser "^0.5.0" + posthtml-render "^1.2.3" + +prelude-ls@~1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" + integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= + +process-nextick-args@~2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + integrity 
sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== + +process@^0.11.10: + version "0.11.10" + resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182" + integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI= + +psl@^1.1.28: + version "1.8.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" + integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + +public-encrypt@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/public-encrypt/-/public-encrypt-4.0.3.tgz#4fcc9d77a07e48ba7527e7cbe0de33d0701331e0" + integrity sha512-zVpa8oKZSz5bTMTFClc1fQOnyyEzpl5ozpi1B5YcvBrdohMjH2rfsBtyXcuNuwjsDIXmBYlF2N5FlJYhR29t8Q== + dependencies: + bn.js "^4.1.0" + browserify-rsa "^4.0.0" + create-hash "^1.1.0" + parse-asn1 "^5.0.0" + randombytes "^2.0.1" + safe-buffer "^5.1.2" + +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= + +punycode@^1.2.4: + version "1.4.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e" + integrity sha1-wNWmOycYgArY4esPpSachN1BhF4= + +punycode@^2.1.0, punycode@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== + +purgecss@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/purgecss/-/purgecss-2.3.0.tgz#5327587abf5795e6541517af8b190a6fb5488bb3" + integrity sha512-BE5CROfVGsx2XIhxGuZAT7rTH9lLeQx/6M0P7DTXQH4IUc3BBzs9JUzt4yzGf3JrH9enkeq6YJBe9CTtkm1WmQ== + dependencies: + commander "^5.0.0" + glob "^7.0.0" + postcss "7.0.32" + postcss-selector-parser 
"^6.0.2" + +q@^1.1.2: + version "1.5.1" + resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" + integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= + +qs@~6.5.2: + version "6.5.2" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" + integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + +querystring-es3@^0.2.0: + version "0.2.1" + resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73" + integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM= + +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= + +quote-stream@^1.0.1, quote-stream@~1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/quote-stream/-/quote-stream-1.0.2.tgz#84963f8c9c26b942e153feeb53aae74652b7e0b2" + integrity sha1-hJY/jJwmuULhU/7rU6rnRlK34LI= + dependencies: + buffer-equal "0.0.1" + minimist "^1.1.3" + through2 "^2.0.0" + +randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5: + version "2.1.0" + resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== + dependencies: + safe-buffer "^5.1.0" + +randomfill@^1.0.3: + version "1.0.4" + resolved "https://registry.yarnpkg.com/randomfill/-/randomfill-1.0.4.tgz#c92196fc86ab42be983f1bf31778224931d61458" + integrity sha512-87lcbR8+MhcWcUiQ+9e+Rwx8MyR2P7qnt15ynUlbm3TU/fjbgz4GsvfSUDTemtCCtVCqb4ZcEFlyPNTh9bBTLw== + dependencies: + randombytes "^2.0.5" + safe-buffer "^5.1.0" + +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + 
integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.3, readable-stream@~2.3.6: + version "2.3.7" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^3.1.1, readable-stream@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198" + integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + +readdirp@^2.2.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" + integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== + dependencies: + graceful-fs "^4.1.11" + micromatch "^3.1.10" + readable-stream "^2.0.2" + +regenerate-unicode-properties@^8.2.0: + version "8.2.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz#e5de7111d655e7ba60c057dbe9ff37c87e65cdec" + integrity sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA== + dependencies: + regenerate "^1.4.0" + +regenerate@^1.4.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.1.tgz#cad92ad8e6b591773485fbe05a485caf4f457e6f" + integrity 
sha512-j2+C8+NtXQgEKWk49MMP5P/u2GhnahTtVkRIHr5R5lVRlbKvmQ+oS+A5aLKWp2ma5VkT8sh6v+v4hbH0YHR66A== + +regenerator-runtime@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9" + integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== + +regenerator-runtime@^0.13.4: + version "0.13.7" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" + integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== + +regenerator-transform@^0.14.2: + version "0.14.5" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4" + integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== + dependencies: + "@babel/runtime" "^7.8.4" + +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + +regexpu-core@^4.7.0: + version "4.7.1" + resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.7.1.tgz#2dea5a9a07233298fbf0db91fa9abc4c6e0f8ad6" + integrity sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ== + dependencies: + regenerate "^1.4.0" + regenerate-unicode-properties "^8.2.0" + regjsgen "^0.5.1" + regjsparser "^0.6.4" + unicode-match-property-ecmascript "^1.0.4" + unicode-match-property-value-ecmascript "^1.2.0" + +regjsgen@^0.5.1: + version "0.5.2" + resolved 
"https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.2.tgz#92ff295fb1deecbf6ecdab2543d207e91aa33733" + integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== + +regjsparser@^0.6.4: + version "0.6.4" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.4.tgz#a769f8684308401a66e9b529d2436ff4d0666272" + integrity sha512-64O87/dPDgfk8/RQqC4gkZoGyyWFIEUTTh80CU6CWuK5vkCGyekIx+oKcEIYtP/RAxSQltCZHCNu/mdd7fqlJw== + dependencies: + jsesc "~0.5.0" + +remove-trailing-separator@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" + integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= + +repeat-element@^1.1.2: + version "1.1.3" + resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" + integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== + +repeat-string@^1.6.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" + integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= + +request-promise-core@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f" + integrity sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw== + dependencies: + lodash "^4.17.19" + +request-promise-native@^1.0.5: + version "1.0.9" + resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28" + integrity sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g== + dependencies: + request-promise-core "1.1.4" + stealthy-require "^1.1.1" + tough-cookie "^2.3.3" + +request@^2.88.0: + 
version "2.88.2" + resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" + integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== + dependencies: + aws-sign2 "~0.7.0" + aws4 "^1.8.0" + caseless "~0.12.0" + combined-stream "~1.0.6" + extend "~3.0.2" + forever-agent "~0.6.1" + form-data "~2.3.2" + har-validator "~5.1.3" + http-signature "~1.2.0" + is-typedarray "~1.0.0" + isstream "~0.1.2" + json-stringify-safe "~5.0.1" + mime-types "~2.1.19" + oauth-sign "~0.9.0" + performance-now "^2.1.0" + qs "~6.5.2" + safe-buffer "^5.1.2" + tough-cookie "~2.5.0" + tunnel-agent "^0.6.0" + uuid "^3.3.2" + +resolve-from@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + integrity sha1-six699nWiBvItuZTM17rywoYh0g= + +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= + +resolve@^1.1.5, resolve@^1.3.2, resolve@^1.4.0: + version "1.17.0" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" + integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== + dependencies: + path-parse "^1.0.6" + +restore-cursor@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" + integrity sha1-n37ih/gv0ybU/RYpI9YhKe7g368= + dependencies: + onetime "^2.0.0" + signal-exit "^3.0.2" + +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== + +rgb-regex@^1.0.1: + version 
"1.0.1" + resolved "https://registry.yarnpkg.com/rgb-regex/-/rgb-regex-1.0.1.tgz#c0e0d6882df0e23be254a475e8edd41915feaeb1" + integrity sha1-wODWiC3w4jviVKR16O3UGRX+rrE= + +rgba-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/rgba-regex/-/rgba-regex-1.0.0.tgz#43374e2e2ca0968b0ef1523460b7d730ff22eeb3" + integrity sha1-QzdOLiyglosO8VI0YLfXMP8i7rM= + +rimraf@^2.6.2: + version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + +ripemd160@^2.0.0, ripemd160@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" + integrity sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA== + dependencies: + hash-base "^3.0.0" + inherits "^2.0.1" + +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0: + version "5.2.1" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: + version "5.1.2" + resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== + +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= + dependencies: + ret "~0.1.10" + +"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: + version "2.1.2" + 
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== + +sax@~1.2.4: + version "1.2.4" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" + integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== + +saxes@^3.1.9: + version "3.1.11" + resolved "https://registry.yarnpkg.com/saxes/-/saxes-3.1.11.tgz#d59d1fd332ec92ad98a2e0b2ee644702384b1c5b" + integrity sha512-Ydydq3zC+WYDJK1+gRxRapLIED9PWeSuuS41wqyoRmzvhhh9nc+QQrVMKJYzJFULazeGhzSV0QleN2wD3boh2g== + dependencies: + xmlchars "^2.1.1" + +semver@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" + integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== + +semver@^5.4.1, semver@^5.5.0: + version "5.7.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" + integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== + +send@0.17.1: + version "0.17.1" + resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" + integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== + dependencies: + debug "2.6.9" + depd "~1.1.2" + destroy "~1.0.4" + encodeurl "~1.0.2" + escape-html "~1.0.3" + etag "~1.8.1" + fresh "0.5.2" + http-errors "~1.7.2" + mime "1.6.0" + ms "2.1.1" + on-finished "~2.3.0" + range-parser "~1.2.1" + statuses "~1.5.0" + +serialize-to-js@^3.0.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/serialize-to-js/-/serialize-to-js-3.1.1.tgz#b3e77d0568ee4a60bfe66287f991e104d3a1a4ac" + integrity 
sha512-F+NGU0UHMBO4Q965tjw7rvieNVjlH6Lqi2emq/Lc9LUURYJbiCzmpi4Cy1OOjjVPtxu0c+NE85LU6968Wko5ZA== + +serve-static@^1.12.4: + version "1.14.1" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" + integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== + dependencies: + encodeurl "~1.0.2" + escape-html "~1.0.3" + parseurl "~1.3.3" + send "0.17.1" + +set-value@^2.0.0, set-value@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" + integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + +setimmediate@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + integrity sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU= + +setprototypeof@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" + integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== + +sha.js@^2.4.0, sha.js@^2.4.8: + version "2.4.11" + resolved "https://registry.yarnpkg.com/sha.js/-/sha.js-2.4.11.tgz#37a5cf0b81ecbc6943de109ba2960d1b26584ae7" + integrity sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ== + dependencies: + inherits "^2.0.1" + safe-buffer "^5.0.1" + +shallow-copy@~0.0.1: + version "0.0.1" + resolved "https://registry.yarnpkg.com/shallow-copy/-/shallow-copy-0.0.1.tgz#415f42702d73d810330292cc5ee86eae1a11a170" + integrity sha1-QV9CcC1z2BAzApLMXuhurhoRoXA= + +shebang-command@^1.2.0: + version "1.2.0" + resolved 
"https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" + integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= + dependencies: + shebang-regex "^1.0.0" + +shebang-regex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" + integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= + +signal-exit@^3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" + integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + +simple-swizzle@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= + dependencies: + is-arrayish "^0.3.1" + +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + 
map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + +source-map-resolve@^0.5.0: + version "0.5.3" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" + integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== + dependencies: + atob "^2.1.2" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + +source-map-support@~0.5.10, source-map-support@~0.5.12: + version "0.5.19" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" + integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== + dependencies: + buffer-from "^1.0.0" + source-map "^0.6.0" + +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= + +source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: + version "0.6.1" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== + +source-map@^0.5.0, source-map@^0.5.6: + version "0.5.7" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" + integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= + +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== + dependencies: + extend-shallow "^3.0.0" + +sprintf-js@~1.0.2: 
+ version "1.0.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= + +sshpk@^1.7.0: + version "1.16.1" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" + integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== + dependencies: + asn1 "~0.2.3" + assert-plus "^1.0.0" + bcrypt-pbkdf "^1.0.0" + dashdash "^1.12.0" + ecc-jsbn "~0.1.1" + getpass "^0.1.1" + jsbn "~0.1.0" + safer-buffer "^2.0.2" + tweetnacl "~0.14.0" + +stable@^0.1.8: + version "0.1.8" + resolved "https://registry.yarnpkg.com/stable/-/stable-0.1.8.tgz#836eb3c8382fe2936feaf544631017ce7d47a3cf" + integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== + +static-eval@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/static-eval/-/static-eval-2.1.0.tgz#a16dbe54522d7fa5ef1389129d813fd47b148014" + integrity sha512-agtxZ/kWSsCkI5E4QifRwsaPs0P0JmZV6dkLz6ILYfFYQGn+5plctanRN+IC8dJRiFkyXHrwEE3W9Wmx67uDbw== + dependencies: + escodegen "^1.11.1" + +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= + dependencies: + define-property "^0.2.5" + object-copy "^0.1.0" + +static-module@^2.2.0: + version "2.2.5" + resolved "https://registry.yarnpkg.com/static-module/-/static-module-2.2.5.tgz#bd40abceae33da6b7afb84a0e4329ff8852bfbbf" + integrity sha512-D8vv82E/Kpmz3TXHKG8PPsCPg+RAX6cbCOyvjM6x04qZtQ47EtJFVwRsdov3n5d6/6ynrOY9XB4JkaZwB2xoRQ== + dependencies: + concat-stream "~1.6.0" + convert-source-map "^1.5.1" + duplexer2 "~0.1.4" + escodegen "~1.9.0" + falafel "^2.1.0" + has "^1.0.1" + magic-string "^0.22.4" + merge-source-map "1.0.4" + object-inspect "~1.4.0" + quote-stream "~1.0.2" + 
readable-stream "~2.3.3" + shallow-copy "~0.0.1" + static-eval "^2.0.0" + through2 "~2.0.3" + +"statuses@>= 1.5.0 < 2", statuses@~1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" + integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= + +stealthy-require@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" + integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= + +stream-browserify@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b" + integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg== + dependencies: + inherits "~2.0.1" + readable-stream "^2.0.2" + +stream-http@^2.7.2: + version "2.8.3" + resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc" + integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw== + dependencies: + builtin-status-codes "^3.0.0" + inherits "^2.0.1" + readable-stream "^2.3.6" + to-arraybuffer "^1.0.0" + xtend "^4.0.0" + +string.prototype.trimend@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz#85812a6b847ac002270f5808146064c995fb6913" + integrity sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.5" + +string.prototype.trimstart@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz#14af6d9f34b053f7cfc89b72f8f2ee14b9039a54" + integrity sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw== + dependencies: + 
define-properties "^1.1.3" + es-abstract "^1.17.5" + +string_decoder@^1.0.0, string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +strip-ansi@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" + integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= + dependencies: + ansi-regex "^2.0.0" + +strip-ansi@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= + dependencies: + ansi-regex "^3.0.0" + +stylehacks@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-4.0.3.tgz#6718fcaf4d1e07d8a1318690881e8d96726a71d5" + integrity sha512-7GlLk9JwlElY4Y6a/rmbH2MhVlTyVmiJd1PfTCqFaIBEGMYNsrO/v3SeGTdhBThLg4Z+NbOk/qFMwCa+J+3p/g== + dependencies: + browserslist "^4.0.0" + postcss "^7.0.0" + postcss-selector-parser "^3.0.0" + +supports-color@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" + integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= + +supports-color@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6" + integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY= + dependencies: + 
has-flag "^1.0.0" + +supports-color@^5.3.0, supports-color@^5.4.0: + version "5.5.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== + dependencies: + has-flag "^3.0.0" + +supports-color@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-6.1.0.tgz#0764abc69c63d5ac842dd4867e8d025e880df8f3" + integrity sha512-qe1jfm1Mg7Nq/NSh6XE24gPXROEVsWHxC1LIx//XNlD9iw7YZQGjZNjYN7xGaEG6iKdA8EtNFW6R0gjnVXp+wQ== + dependencies: + has-flag "^3.0.0" + +svgo@^1.0.0, svgo@^1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +symbol-tree@^3.2.2: + version "3.2.4" + resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" + integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== + +terser@^3.7.3: + version "3.17.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-3.17.0.tgz#f88ffbeda0deb5637f9d24b0da66f4e15ab10cb2" + integrity sha512-/FQzzPJmCpjAH9Xvk2paiWrFq+5M6aVOf+2KRbwhByISDX/EujxsK+BAvrhb6H+2rtrLCHK9N01wO014vrIwVQ== + dependencies: + commander "^2.19.0" + source-map "~0.6.1" + source-map-support "~0.5.10" + +terser@^4.8.0: + version "4.8.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-4.8.0.tgz#63056343d7c70bb29f3af665865a46fe03a0df17" + integrity 
sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw== + dependencies: + commander "^2.20.0" + source-map "~0.6.1" + source-map-support "~0.5.12" + +through2@^2.0.0, through2@~2.0.3: + version "2.0.5" + resolved "https://registry.yarnpkg.com/through2/-/through2-2.0.5.tgz#01c1e39eb31d07cb7d03a96a70823260b23132cd" + integrity sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ== + dependencies: + readable-stream "~2.3.6" + xtend "~4.0.1" + +timers-browserify@^2.0.4: + version "2.0.11" + resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.11.tgz#800b1f3eee272e5bc53ee465a04d0e804c31211f" + integrity sha512-60aV6sgJ5YEbzUdn9c8kYGIqOubPoUdqQCul3SBAsRCZ40s6Y5cMcrW4dt3/k/EsbLVJNl9n6Vz3fTc+k2GeKQ== + dependencies: + setimmediate "^1.0.4" + +timsort@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/timsort/-/timsort-0.3.0.tgz#405411a8e7e6339fe64db9a234de11dc31e02bd4" + integrity sha1-QFQRqOfmM5/mTbmiNN4R3DHgK9Q= + +tiny-inflate@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/tiny-inflate/-/tiny-inflate-1.0.3.tgz#122715494913a1805166aaf7c93467933eea26c4" + integrity sha512-pkY1fj1cKHb2seWDy0B16HeWyczlJA9/WW3u3c4z/NiWDsO3DOU5D7nhTLE9CF0yXv/QZFY7sEJmj24dK+Rrqw== + +to-arraybuffer@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43" + integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M= + +to-fast-properties@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47" + integrity sha1-uDVx+k2MJbguIxsG46MFXeTKGkc= + +to-fast-properties@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= + 
+to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +toidentifier@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" + integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== + +tough-cookie@^2.3.3, tough-cookie@^2.5.0, tough-cookie@~2.5.0: + version "2.5.0" + resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" + integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== + dependencies: + psl "^1.1.28" + punycode "^2.1.1" + +tr46@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" + integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= + dependencies: + punycode "^2.1.0" + +tty-browserify@0.0.0: + version "0.0.0" + resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" + integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= + 
+tunnel-agent@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" + integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= + dependencies: + safe-buffer "^5.0.1" + +tweetnacl@^0.14.3, tweetnacl@~0.14.0: + version "0.14.5" + resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" + integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= + +type-check@~0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" + integrity sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= + dependencies: + prelude-ls "~1.1.2" + +typedarray@^0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= + +uncss@^0.17.3: + version "0.17.3" + resolved "https://registry.yarnpkg.com/uncss/-/uncss-0.17.3.tgz#50fc1eb4ed573ffff763458d801cd86e4d69ea11" + integrity sha512-ksdDWl81YWvF/X14fOSw4iu8tESDHFIeyKIeDrK6GEVTQvqJc1WlOEXqostNwOCi3qAj++4EaLsdAgPmUbEyog== + dependencies: + commander "^2.20.0" + glob "^7.1.4" + is-absolute-url "^3.0.1" + is-html "^1.1.0" + jsdom "^14.1.0" + lodash "^4.17.15" + postcss "^7.0.17" + postcss-selector-parser "6.0.2" + request "^2.88.0" + +unicode-canonical-property-names-ecmascript@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" + integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== + +unicode-match-property-ecmascript@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" + integrity 
sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== + dependencies: + unicode-canonical-property-names-ecmascript "^1.0.4" + unicode-property-aliases-ecmascript "^1.0.4" + +unicode-match-property-value-ecmascript@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz#0d91f600eeeb3096aa962b1d6fc88876e64ea531" + integrity sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ== + +unicode-property-aliases-ecmascript@^1.0.4: + version "1.1.0" + resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz#dd57a99f6207bedff4628abefb94c50db941c8f4" + integrity sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg== + +unicode-trie@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/unicode-trie/-/unicode-trie-0.3.1.tgz#d671dddd89101a08bac37b6a5161010602052085" + integrity sha1-1nHd3YkQGgi6w3tqUWEBBgIFIIU= + dependencies: + pako "^0.2.5" + tiny-inflate "^1.0.0" + +union-value@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" + integrity sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^2.0.1" + +uniq@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" + integrity sha1-sxxa6CVIRKOoKBVBzisEuGWnNP8= + +uniqs@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/uniqs/-/uniqs-2.0.0.tgz#ffede4b36b25290696e6e165d4a59edb998e6b02" + integrity sha1-/+3ks2slKQaW5uFl1KWe25mOawI= + +unquote@~1.1.1: + version "1.1.1" + resolved 
"https://registry.yarnpkg.com/unquote/-/unquote-1.1.1.tgz#8fded7324ec6e88a0ff8b905e7c098cdc086d544" + integrity sha1-j97XMk7G6IoP+LkF58CYzcCG1UQ= + +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + +upath@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" + integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== + +uri-js@^4.2.2: + version "4.4.0" + resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.0.tgz#aa714261de793e8a82347a7bcc9ce74e86f28602" + integrity sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g== + dependencies: + punycode "^2.1.0" + +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= + +url@^0.11.0: + version "0.11.0" + resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" + integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE= + dependencies: + punycode "1.3.2" + querystring "0.2.0" + +use@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" + integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== + +util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= + +util.promisify@~1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/util.promisify/-/util.promisify-1.0.1.tgz#6baf7774b80eeb0f7520d8b81d07982a59abbaee" + integrity sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA== + dependencies: + define-properties "^1.1.3" + es-abstract "^1.17.2" + has-symbols "^1.0.1" + object.getownpropertydescriptors "^2.1.0" + +util@0.10.3: + version "0.10.3" + resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9" + integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk= + dependencies: + inherits "2.0.1" + +util@^0.11.0: + version "0.11.1" + resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61" + integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ== + dependencies: + inherits "2.0.3" + +uuid@^3.3.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +v8-compile-cache@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745" + integrity sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ== + +vendors@^1.0.0: + version "1.0.4" + resolved "https://registry.yarnpkg.com/vendors/-/vendors-1.0.4.tgz#e2b800a53e7a29b93506c3cf41100d16c4c4ad8e" + integrity sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w== + +verror@1.10.0: + version "1.10.0" + resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" + integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= + dependencies: + assert-plus "^1.0.0" + core-util-is "1.0.2" + extsprintf "^1.2.0" + +vlq@^0.2.2: + version "0.2.3" + resolved 
"https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" + integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow== + +vm-browserify@^1.0.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0" + integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ== + +w3c-hr-time@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" + integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== + dependencies: + browser-process-hrtime "^1.0.0" + +w3c-xmlserializer@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-1.1.2.tgz#30485ca7d70a6fd052420a3d12fd90e6339ce794" + integrity sha512-p10l/ayESzrBMYWRID6xbuCKh2Fp77+sA0doRuGn4tTIMrrZVeqfpKjXHY+oDh3K4nLdPgNwMTVP6Vp4pvqbNg== + dependencies: + domexception "^1.0.1" + webidl-conversions "^4.0.2" + xml-name-validator "^3.0.0" + +wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + integrity sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g= + dependencies: + defaults "^1.0.3" + +webidl-conversions@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" + integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== + +whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" + integrity 
sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== + dependencies: + iconv-lite "0.4.24" + +whatwg-mimetype@^2.2.0, whatwg-mimetype@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" + integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== + +whatwg-url@^7.0.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" + integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== + dependencies: + lodash.sortby "^4.7.0" + tr46 "^1.0.1" + webidl-conversions "^4.0.2" + +which@^1.2.9: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +word-wrap@~1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" + integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + +wrappy@1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= + +ws@^5.1.1: + version "5.2.2" + resolved "https://registry.yarnpkg.com/ws/-/ws-5.2.2.tgz#dffef14866b8e8dc9133582514d1befaf96e980f" + integrity sha512-jaHFD6PFv6UgoIVda6qZllptQsMlDEJkTQcybzzXDYM1XO9Y8em691FGMPmM46WGyLU4z9KMgQN+qrux/nhlHA== + dependencies: + async-limiter "~1.0.0" + +ws@^6.1.2: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" + integrity 
sha512-GIyAXC2cB7LjvpgMt9EKS2ldqr0MTrORaleiOno6TweZ6r3TKtoFQWay/2PceJ3RuBasOHzXNn5Lrw1X0bEjqA== + dependencies: + async-limiter "~1.0.0" + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" + integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== + +xmlchars@^2.1.1: + version "2.2.0" + resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" + integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== + +xtend@^4.0.0, xtend@~4.0.1: + version "4.0.2" + resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==