diff --git a/.claude/skills/type-checker-tests/SKILL.md b/.claude/skills/type-checker-tests/SKILL.md index cacaeb82a..0a42f19ac 100644 --- a/.claude/skills/type-checker-tests/SKILL.md +++ b/.claude/skills/type-checker-tests/SKILL.md @@ -6,31 +6,21 @@ allowed-tools: Bash(mkdir:*) # Type Checker Integration Tests -Use this skill when adding new type checker functions or expanding behavior. +Use the command reference at `reference/compiler-scripts.md` for test runner syntax, snapshot workflows, filters, and trace debugging. The category is `checking`. -**Language:** Test fixtures use PureScript syntax, not Haskell. - -## Quick Reference - -| Action | Command | -|--------|---------| -| Find next test number | `ls tests-integration/fixtures/checking/ \| tail -5` | -| Run a test or multiple tests | `just tc NNN` or `just tc 101 102` | -| Run with tracing enabled | `just tc --debug NNN` | -| Run all checking tests | `just tc` | -| Accept all pending snapshots | `cargo insta accept` | - -Use `just tc --help` for all options. +**Language:** Fixtures use PureScript syntax, not Haskell. ## Creating a Test ### 1. Create fixture directory ```bash -mkdir tests-integration/fixtures/checking/{NNN_descriptive_name} +just t checking --create "descriptive name" ``` -Tests are auto-discovered by `build.rs` - no manual registration needed. +The CLI picks the next fixture number and creates the folder. + +Tests are auto-discovered by `build.rs`. ### 2. Write Main.purs @@ -49,28 +39,22 @@ test' [x] = x **Guidelines:** - Test ONE specific behavior per fixture -- Name tests descriptively: `test`, `test'`, `test2`, `test2'`, etc. -- Include edge cases relevant to the behavior being tested +- Name tests: `test`, `test'`, `test2`, `test2'`, etc. +- Include edge cases relevant to the behavior -### 3. Generate and review snapshot +### 3. 
Run and review ```bash -just tc NNN +just t checking NNN MMM ``` -This outputs: -- `CREATED path` (green) with numbered lines showing full content -- `UPDATED path` (yellow) with chunked diff (2 lines context, line numbers) - ## Multi-File Tests -For testing imports, re-exports, or cross-module behavior, add multiple `.purs` files -to the same fixture directory. The type checker loads all `.purs` files in the folder. +For imports, re-exports, or cross-module behavior: -**Example structure:** ``` tests-integration/fixtures/checking/NNN_import_test/ -├── Main.purs # The test file (snapshot generated for Main) +├── Main.purs # Test file (snapshot generated) ├── Lib.purs # Supporting module └── Main.snap # Generated snapshot ``` @@ -95,14 +79,10 @@ test :: Maybe Int test = Just life ``` -**Key points:** -- Module name must match filename (`Lib.purs` -> `module Lib where`) -- Only `Main.purs` generates a snapshot (the test runs against `Main`) -- Use standard PureScript import syntax +- Module name must match filename +- Only `Main.purs` generates a snapshot -## Reviewing Snapshots - -Snapshots have this structure: +## Snapshot Structure ``` Terms @@ -117,101 +97,31 @@ Errors ErrorKind { details } at [location] ``` -### Acceptance Criteria +## Acceptance Criteria -**Before accepting, verify:** +Before accepting, verify: -1. **Types are correct** - Check that inferred types match expectations - - `test :: Array Int -> Int` - explicit signature preserved - - `test' :: forall t. Array t -> t` - polymorphism inferred correctly +1. **Types are correct** + - `test :: Array Int -> Int` - signature preserved + - `test' :: forall t. Array t -> t` - polymorphism inferred -2. **No unexpected `???`** - This indicates inference failure - - `test :: ???` - STOP: the term failed to type check - - `CannotUnify { ??? -> ???, Int }` - OK in error tests, shows unresolved unification variables +2. **No unexpected `???`** + - `test :: ???` - STOP: inference failure + - `CannotUnify { ??? 
-> ???, Int }` - OK in error tests -3. **Errors appear where expected** - For tests validating error behavior - - Confirm error kind matches expectations (e.g., `NoInstanceFound`, `CannotUnify`) - - Verify error location points to the correct declaration +3. **Errors appear where expected** + - Confirm error kind matches (`NoInstanceFound`, `CannotUnify`) + - Verify location points to correct declaration 4. **Polymorphism is appropriate** - - Check type variable names (`t6`, `a`, etc.) are scoped correctly - - Verify constraints propagate as expected + - Type variables scoped correctly + - Constraints propagate as expected -### Common Issues +## Common Issues | Symptom | Likely Cause | |---------|--------------| -| `test :: ???` | Test code has syntax error or uses undefined names | -| Unexpected monomorphism | Missing polymorphic context or over-constrained signature | -| Wrong error location | Check binder/expression placement in source | +| `test :: ???` | Syntax error or undefined names | +| Unexpected monomorphism | Missing polymorphic context | +| Wrong error location | Check binder/expression placement | | Missing types in snapshot | Module header or imports incorrect | - -## Accept and Verify - -```bash -# Accept only after thorough review -cargo insta accept - -# Verify all checking tests pass -just tc -``` - -## Debugging - -When investigating a potential compiler bug: - -```bash -# Focus on single test to reduce noise -just tc NNN - -# Enable tracing to see type checker behaviour -just tc --debug NNN -``` - -### Trace Files - -The `--debug` flag emits detailed type checker traces to `target/compiler-tracing/`. 
- -**Trace file naming:** `{test_id}_{module_name}.jsonl` -- Example: `200_int_compare_transitive_Main.jsonl` - -**Output format:** JSON Lines (one JSON object per line), containing: -- `timestamp` - when the event occurred -- `level` - DEBUG, INFO, or TRACE -- `fields` - trace data (e.g., types being unified) -- `target` - the module emitting the trace (e.g., `checking::algorithm::unification`) -- `span`/`spans` - current span and span stack - -**Example trace line:** -```json -{"timestamp":"...","level":"DEBUG","fields":{"t1":"?0","t2":"Int"},"target":"checking::algorithm::unification","span":{"name":"unify"}} -``` - -When `--debug` is used, the trace file path is shown alongside pending snapshots: -``` -UPDATED tests-integration/fixtures/checking/200_int_compare_transitive/Main.snap - TRACE target/compiler-tracing/200_int_compare_transitive_Main.jsonl -``` - -### Analysing Traces - -Trace files can be large for complex tests. Use sampling and filtering: - -```bash -# Check file size and line count -wc -l target/compiler-tracing/NNN_*.jsonl - -# Sample random lines to get an overview -shuf -n 20 target/compiler-tracing/NNN_*.jsonl | jq . - -# Filter by level -jq 'select(.level == "DEBUG")' target/compiler-tracing/NNN_*.jsonl - -# Filter by target module -jq 'select(.target | contains("unification"))' target/compiler-tracing/NNN_*.jsonl - -# Extract specific fields -jq '{level, target, fields}' target/compiler-tracing/NNN_*.jsonl -``` - -You should run `just tc` to check for regressions. diff --git a/.claude/skills/type-checker-tests/reference/compiler-scripts.md b/.claude/skills/type-checker-tests/reference/compiler-scripts.md new file mode 100644 index 000000000..aac183a6b --- /dev/null +++ b/.claude/skills/type-checker-tests/reference/compiler-scripts.md @@ -0,0 +1,66 @@ +# Compiler Scripts Command Reference + +CLI tools in `compiler-scripts/` for running integration tests. + +## Test Runner Commands + +### Run tests + +```bash +just t [filters...] 
# Run tests (summary output) +just t --diff [filters...] # Run with full inline diffs +just t --count 10 [filters...] # Show more snapshots (default: 3) +just t --debug [filters...] # Enable tracing +just t --verbose [filters...] # Show test progress +just t --create "name" # Scaffold a new fixture +just t --delete "name" # Dry-run fixture deletion (use --confirm) +``` + +### Categories + +| Category | Alias | Description | +|----------|-------|-------------| +| checking | c | Type checker tests | +| lowering | l | Lowering tests | +| resolving | r | Resolver tests | +| lsp | - | LSP tests | + +### Snapshot commands + +```bash +just t accept [--all] [filters...] # Accept pending snapshots +just t reject [--all] [filters...] # Reject pending snapshots +``` + +Requires `--all` flag when no filters provided (safety guardrail). + +### Exclusion filters + +Hide snapshots ephemerally during a session: + +```bash +just t --exclude "pattern" # Single exclusion +just t --exclude "foo" --exclude "bar" # Multiple exclusions +EXCLUDE_SNAPSHOTS="foo,bar" just t # Via environment variable +``` + +### Filters + +Space-delimited, passed through to nextest. Mix numbers and patterns: + +```bash +just t c 101 102 # Run tests 101 and 102 +just t c pattern # Filter by name pattern +just t c 101 102 pattern # Numbers + pattern together +``` + +## Debugging Traces + +When `--debug` is used, traces are written to `target/compiler-tracing/{test_id}_{module}.jsonl`. + +```bash +wc -l target/compiler-tracing/*.jsonl # Check sizes +shuf -n 20 target/compiler-tracing/NNN_*.jsonl | jq . # Sample +jq 'select(.level == "DEBUG")' file.jsonl # Filter by level +jq 'select(.target | contains("unification"))' file.jsonl +``` diff --git a/AGENTS.md b/AGENTS.md index 797ea6ac0..325212f87 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,71 +1,33 @@ -## Project Overview - -purescript-analyzer is a compiler frontend for the PureScript programming language written -in Rust. 
It provides additional LSP server functionality for the language. The compiler is -organised as a Cargo workspace with functionality located in separate folders. - -### Compiler Core - -The compiler core is split into different core components in `./compiler-core`, with each -components having well-defined responsibilities. These components are designed to produce -information as transparent as possible, such that editor integrations can be built around -introspection. Likewise, these components are also designed to be compatible with a -query-based and incremental build system. - -- checking: type checking and elaboration -- indexing: high-level relationships between module items -- lexing: tokenization and the layout algorithm -- lowering: core semantic representation, name resolution -- parsing: parsing into a rowan-based CST -- resolving: name-indexed interface for module items -- stabilizing: assigns stable IDs to source ranges -- sugar: syntax desugaring such as operator bracketing -- syntax: types for the rowan-based CST +## Commands + +```bash +cargo check -p --tests # Type check a crate (always specify -p) +just t checking [filters...] # Type checker integration tests +just t lowering [filters...] # Lowering integration tests +just t resolving [filters...] # Resolver integration tests +just t lsp [filters...] # LSP integration tests +just fix # Apply clippy fixes and format +``` -Additionally, the following crates are related to the build system implementation. 
+For unit tests in compiler-core (not tests-integration which requires the test runner shim): +```bash +cargo nextest run -p # Run all tests in a crate +cargo nextest run -p # Run single test +``` -- building: query-based parallel build system -- building-types: shared type definitions -- files: virtual file system -- interner: generic interner implementation +## Architecture -### LSP and Binary +PureScript compiler frontend in Rust using rowan (lossless syntax trees) and query-based incremental builds. -- `./compiler-bin`: implements the `purescript-analyzer` executable -- `./compiler-lsp`: LSP server functionality used in `./compiler-bin` +**compiler-core/**: checking (types), indexing, lexing, lowering, parsing, resolving, stabilizing, sugar, syntax, building, files, interner +**compiler-bin/**: CLI executable | **compiler-lsp/**: LSP server -## Key Concepts +## Code Style -Additional concepts that you should be mindful of, the compiler: -- uses rust-analyzer/rowan, a lossless syntax tree library inspired by Swift's libsyntax -- uses a query-based incremental build system rather than a traditional phase-based setup -- uses techniques like interning and arena allocation to enable better caching patterns - - for instance, whitespace does not immediately invalidate type checking results +- Use `cargo fmt` (rustfmt with `use_small_heuristics = "Max"`) +- Use `just format-imports` for module-granularity imports (requires nightly) +- Leverage interning/arena allocation for caching; avoid unnecessary allocations ## Skills -Agent skills are specialized instruction sets for common tasks. They're stored in `.claude/skills/`. - -- **type-checker-tests**: Use this when asked to implement tests for type checker inference and checking - -## Quick Commands - -Always provide the crate name of the code that you're working on for efficiency. 
- -``` -# Bread and butter, check for errors -cargo check -p --tests - -# Run tests (always use nextest) -cargo nextest run -p -``` - -### Domain Specific - -``` -# Run type checker integration tests -just tc - -# You can also provide a test filter -just tc 101 -``` +Load `.claude/skills/type-checker-tests` when implementing type checker tests. diff --git a/Cargo.lock b/Cargo.lock index d3543b09d..8850f0095 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,12 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "aho-corasick" version = "1.1.4" @@ -47,6 +53,15 @@ dependencies = [ "url", ] +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anes" version = "0.1.6" @@ -103,6 +118,12 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + [[package]] name = "arrayvec" version = "0.5.2" @@ -140,12 +161,24 @@ dependencies = [ "syn", ] +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "autocfg" version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "bitflags" version = "1.3.2" @@ -252,6 +285,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a0aeaff4ff1a90589618835a598e545176939b97874f7abc7851caa0618f203" dependencies = [ "find-msvc-tools", + "jobserver", + "libc", "shlex", ] @@ -286,9 +321,24 @@ dependencies = [ "smol_str", "stabilizing", "sugar", + "syntax", "tracing", ] +[[package]] +name = "chrono" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + [[package]] name = "ciborium" version = "0.2.2" @@ -362,12 +412,47 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "compiler-compatibility" +version = "0.1.0" +dependencies = [ + "analyzer", + "base64", + "building-types", + "chrono", + "clap", + "diagnostics", + "files", + "flate2", + "git2", + "glob", + "hex", + "line-index", + "petgraph", + "rayon", + "registry", + "reqwest", + "rowan", + "semver", + "serde", + "serde_json", + "sha2", + "tar", + "thiserror", + "tracing", + "tracing-subscriber", + "tracing-tree", + "url", +] + [[package]] name = "compiler-scripts" version = "0.1.0" dependencies = [ + "anyhow", "clap", "console", + "heck", "md-5", "serde", "serde_json", @@ -399,20 +484,45 @@ dependencies = [ ] [[package]] -name = "convert_case" -version = "0.8.0" +name = "core-foundation" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ - "unicode-segmentation", + 
"core-foundation-sys", + "libc", ] +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + [[package]] name = "countme" version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "criterion" version = "0.6.0" @@ -487,6 +597,23 @@ dependencies = [ "typenum", ] +[[package]] +name = "diagnostics" +version = "0.1.0" +dependencies = [ + "checking", + "files", + "indexing", + "itertools 0.14.0", + "line-index", + "lowering", + "lsp-types", + "resolving", + "rowan", + "stabilizing", + "syntax", +] + [[package]] name = "digest" version = "0.10.7" @@ -552,6 +679,15 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "endian-type" version = "0.2.0" @@ -589,6 +725,17 @@ dependencies = [ "rustc-hash 2.1.1", ] +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + [[package]] name = "find-msvc-tools" version = "0.1.6" @@ -601,6 +748,16 @@ version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" +[[package]] +name = "flate2" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -619,6 +776,21 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.2" @@ -715,6 +887,17 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + [[package]] name = "getrandom" version = "0.3.4" @@ -727,6 +910,21 @@ dependencies = [ "wasip2", ] +[[package]] +name = "git2" +version = "0.20.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e2b37e2f62729cdada11f0e6b3b6fe383c69c29fc619e391223e12856af308c" +dependencies = [ + "bitflags 2.10.0", + 
"libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + [[package]] name = "glob" version = "0.3.3" @@ -746,6 +944,25 @@ dependencies = [ "regex-syntax", ] +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + [[package]] name = "half" version = "2.7.1" @@ -791,6 +1008,155 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "hyper" +version = "1.8.1" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "icu_collections" version = "2.1.1" @@ -941,6 +1307,22 @@ dependencies = [ "rustc-hash 2.1.1", ] +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -980,6 +1362,16 @@ version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + [[package]] name = "js-sys" version = "0.3.83" @@ -1018,12 +1410,63 @@ version = "0.2.178" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" +[[package]] +name = "libgit2-sys" +version = "0.18.3+1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + [[package]] name = "libm" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +[[package]] +name = "libredox" +version = 
"0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags 2.10.0", + "libc", + "redox_syscall 0.7.0", +] + +[[package]] +name = "libssh2-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "220e4f05ad4a218192533b300327f5150e809b54c4ec83b5a1d91833601811b9" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "line-index" version = "0.1.2" @@ -1122,6 +1565,12 @@ version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + [[package]] name = "minicov" version = "0.3.8" @@ -1132,6 +1581,16 @@ dependencies = [ "walkdir", ] +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "mio" version = "1.1.1" @@ -1143,6 +1602,23 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + 
"security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "nibble_vec" version = "0.1.0" @@ -1195,6 +1671,50 @@ version = "11.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags 2.10.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "papergrid" version = "0.17.0" @@ -1224,7 +1744,7 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.5.18", "smallvec", "windows-link", ] @@ -1296,6 +1816,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + [[package]] name = "plotters" version = "0.3.7" @@ -1387,9 +1913,11 @@ dependencies = [ "async-lsp", "checking", "clap", + "diagnostics", "files", "globset", "indexing", + "itertools 0.14.0", "lowering", "parking_lot", "path-absolutize", @@ -1457,9 +1985,18 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.18" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "redox_syscall" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" dependencies = [ "bitflags 2.10.0", ] @@ -1493,6 +2030,58 @@ version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +[[package]] +name = "registry" +version = "0.1.0" +dependencies = [ + "semver", + "serde", + "serde_json", + "thiserror", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + 
[[package]] name = "resolving" version = "0.1.0" @@ -1510,6 +2099,20 @@ dependencies = [ "smol_str", ] +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + [[package]] name = "rowan" version = "0.16.1" @@ -1547,12 +2150,51 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "rustls" +version = "0.23.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" +[[package]] +name = "ryu" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" + [[package]] name = "same-file" version = "1.0.6" @@ -1562,12 +2204,54 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + [[package]] name = "scopeguard" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.10.0", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + [[package]] name = "serde" version = "1.0.228" @@ -1633,6 +2317,29 @@ dependencies = [ "syn", ] +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sharded-slab" version = "0.1.7" @@ -1658,6 +2365,12 @@ dependencies = [ "libc", ] +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + [[package]] name = "similar" version = "2.7.0" @@ -1678,9 +2391,9 @@ checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "smol_str" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3498b0a27f93ef1402f20eefacfaa1691272ac4eca1cdc8c596cb0a245d6cbf5" +checksum = "0f7a918bd2a9951d18ee6e48f076843e8e73a9a5d22cf05bcd4b7a81bdd04e17" dependencies = [ "borsh", "serde_core", @@ -1746,6 +2459,12 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + [[package]] name = "sugar" version = "0.1.0" @@ -1769,6 +2488,15 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + [[package]] name = "synstructure" version = "0.13.2" @@ -1791,6 +2519,27 @@ dependencies = [ "rustc-hash 2.1.1", ] +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.10.0", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + 
"core-foundation-sys", + "libc", +] + [[package]] name = "tabled" version = "0.20.0" @@ -1815,6 +2564,17 @@ dependencies = [ "syn", ] +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "tempfile" version = "3.24.0" @@ -1822,7 +2582,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand", - "getrandom", + "getrandom 0.3.4", "once_cell", "rustix", "windows-sys 0.61.2", @@ -1856,9 +2616,10 @@ dependencies = [ "analyzer", "async-lsp", "checking", - "convert_case", + "diagnostics", "files", "glob", + "heck", "indexing", "insta", "interner", @@ -1975,6 +2736,26 @@ dependencies = [ "syn", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.17" @@ -1995,6 +2776,29 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "bitflags 2.10.0", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", "tower-layer", "tower-service", ] @@ -2085,6 +2889,24 @@ dependencies = [ "tracing-serde", ] +[[package]] +name = "tracing-tree" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac87aa03b6a4d5a7e4810d1a80c19601dbe0f8a837e9177f23af721c7ba7beec" +dependencies = [ + "nu-ansi-term", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + [[package]] name = "typed-arena" version = "2.0.2" @@ -2103,12 +2925,6 @@ version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - [[package]] name = "unicode-width" version = "0.2.2" @@ -2121,6 +2937,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.5.7" @@ -2151,6 +2973,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "vcpkg" +version = "0.2.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.5" @@ -2177,6 +3005,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + [[package]] name = "wasi" version = "0.11.1+wasi-snapshot-preview1" @@ -2301,12 +3138,85 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = 
"0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" version = "0.59.0" @@ -2475,6 +3385,16 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix", +] + [[package]] name = "yoke" version = "0.8.1" @@ -2539,6 +3459,12 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" + [[package]] name = "zerotrie" version = "0.2.3" diff --git a/Cargo.toml b/Cargo.toml index f9eafa8cc..ebf1d6615 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] members = [ "compiler-bin", + "compiler-compatibility/*", "compiler-core/*", "compiler-lsp/*", "compiler-scripts", @@ -9,7 +10,7 @@ members = [ "tests-package-set", ] default-members = [ - "compiler-core/*", + "compiler-core/*", "compiler-lsp/*", ] resolver = "3" diff --git a/compiler-bin/Cargo.toml b/compiler-bin/Cargo.toml index 9e32f00a9..9dae62826 
100644 --- a/compiler-bin/Cargo.toml +++ b/compiler-bin/Cargo.toml @@ -17,12 +17,14 @@ no-tracing = ["checking/no-tracing"] [dependencies] checking = { version = "0.1.0", path = "../compiler-core/checking" } +diagnostics = { version = "0.1.0", path = "../compiler-core/diagnostics" } analyzer = { version = "0.1.0", path = "../compiler-lsp/analyzer" } async-lsp = "0.2.2" clap = { version = "4.5.53", features = ["derive"] } files = { version = "0.1.0", path = "../compiler-core/files" } globset = "0.4.18" indexing = { version = "0.1.0", path = "../compiler-core/indexing" } +itertools = "0.14.0" lowering = { version = "0.1.0", path = "../compiler-core/lowering" } parking_lot = "0.12.5" path-absolutize = "3.1.1" diff --git a/compiler-bin/src/lsp/event.rs b/compiler-bin/src/lsp/event.rs index 0cec27855..34ab44a0a 100644 --- a/compiler-bin/src/lsp/event.rs +++ b/compiler-bin/src/lsp/event.rs @@ -1,27 +1,14 @@ use analyzer::{common, locate}; use async_lsp::LanguageClient; use async_lsp::lsp_types::*; +use diagnostics::{DiagnosticsContext, ToDiagnostics}; use files::FileId; -use indexing::{IndexedModule, TypeItemKind}; -use lowering::{LoweringError, RecursiveGroup}; -use resolving::{ResolvedModule, ResolvingError}; -use rowan::ast::AstNode; -use stabilizing::StabilizedModule; -use syntax::SyntaxNode; +use itertools::Itertools; +use rowan::TextSize; use crate::lsp::error::LspError; use crate::lsp::{State, StateSnapshot}; -struct DiagnosticsContext<'a> { - uri: &'a Url, - content: &'a str, - root: &'a SyntaxNode, - stabilized: &'a StabilizedModule, - indexed: &'a IndexedModule, - resolved: &'a ResolvedModule, - lowered: &'a lowering::LoweredModule, -} - pub fn emit_collect_diagnostics(state: &mut State, uri: Url) -> Result<(), LspError> { let files = state.files.read(); let uri = uri.as_str(); @@ -56,231 +43,78 @@ fn collect_diagnostics_core( let indexed = snapshot.engine.indexed(id)?; let resolved = snapshot.engine.resolved(id)?; let lowered = snapshot.engine.lowered(id)?; + 
let checked = snapshot.engine.checked(id)?; let uri = { let files = snapshot.files.read(); common::file_uri(&snapshot.engine, &files, id)? }; - let context = DiagnosticsContext { - uri: &uri, - content: &content, - root: &root, - stabilized: &stabilized, - indexed: &indexed, - resolved: &resolved, - lowered: &lowered, - }; - - let mut diagnostics = vec![]; - diagnostics.extend(resolved_diagnostics(&context)); - diagnostics.extend(lowered_diagnostics(&context)); - - snapshot.client.publish_diagnostics(PublishDiagnosticsParams { - uri, - diagnostics, - version: None, - })?; - - Ok(()) -} - -fn lowered_diagnostics<'a>( - context: &'a DiagnosticsContext<'a>, -) -> impl Iterator + 'a { - context.lowered.errors.iter().filter_map(|error| lowered_error(context, error)) -} - -fn lowered_error(context: &DiagnosticsContext<'_>, error: &LoweringError) -> Option { - match error { - LoweringError::NotInScope(not_in_scope) => { - let (ptr, name) = match not_in_scope { - lowering::NotInScope::ExprConstructor { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::ExprVariable { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::ExprOperatorName { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::TypeConstructor { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::TypeVariable { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::TypeOperatorName { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::NegateFn { id } => { - (context.stabilized.syntax_ptr(*id)?, Some("negate")) - } - lowering::NotInScope::DoFn { kind, id } => ( - context.stabilized.syntax_ptr(*id)?, - match kind { - lowering::DoFn::Bind => Some("bind"), - lowering::DoFn::Discard => Some("discard"), - }, - ), - lowering::NotInScope::AdoFn { kind, id } => ( - context.stabilized.syntax_ptr(*id)?, - match kind { - 
lowering::AdoFn::Map => Some("map"), - lowering::AdoFn::Apply => Some("apply"), - lowering::AdoFn::Pure => Some("pure"), - }, - ), - lowering::NotInScope::TermOperator { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - lowering::NotInScope::TypeOperator { id } => { - (context.stabilized.syntax_ptr(*id)?, None) - } - }; - - let message = if let Some(name) = name { - format!("'{name}' is not in scope") - } else { - let range = ptr.to_node(context.root).text_range(); - let name = context.content[range].trim(); - format!("'{name}' is not in scope") - }; - - let range = locate::syntax_range(context.content, context.root, &ptr)?; - - Some(Diagnostic { - range, - severity: Some(DiagnosticSeverity::ERROR), - code: Some(NumberOrString::String("NotInScope".to_string())), - code_description: None, - source: Some("analyzer/lowering".to_string()), - message: message.to_string(), - related_information: None, - tags: None, - data: None, - }) - } - - LoweringError::RecursiveSynonym(RecursiveGroup { group }) => { - let equations = group.iter().filter_map(|id| { - if let TypeItemKind::Synonym { equation, .. } = context.indexed.items[*id].kind { - equation - } else { - None - } - }); - - let locations = equations.filter_map(|equation| { - let syntax_ptr = context.stabilized.syntax_ptr(equation)?; - locate::syntax_range(context.content, context.root, &syntax_ptr) - }); - - let locations: Vec<_> = locations.collect(); - let [range, associated @ ..] = &locations[..] 
else { return None }; - - let related_information = associated.iter().map(|&range| { - let uri = context.uri.clone(); - let location = Location { uri, range }; - DiagnosticRelatedInformation { - location, - message: "Includes this type synonym".to_string(), - } - }); - - let related_information = related_information.collect(); + let context = + DiagnosticsContext::new(&content, &root, &stabilized, &indexed, &lowered, &checked); - Some(Diagnostic { - range: *range, - severity: Some(DiagnosticSeverity::ERROR), - code: Some(NumberOrString::String("RecursiveSynonym".to_string())), - code_description: None, - source: Some("analyzer/lowering".to_string()), - message: "Invalid type synonym cycle".to_string(), - related_information: Some(related_information), - tags: None, - data: None, - }) - } + let mut all_diagnostics = vec![]; - _ => None, + for error in &lowered.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); } -} - -fn resolved_diagnostics<'a>( - context: &'a DiagnosticsContext<'a>, -) -> impl Iterator + 'a { - context.resolved.errors.iter().filter_map(|error| resolved_error(context, error)) -} -fn resolved_error(context: &DiagnosticsContext<'_>, error: &ResolvingError) -> Option { - let source = Some("analyzer/resolving".to_string()); - match error { - ResolvingError::TermImportConflict { .. } => None, - - ResolvingError::TypeImportConflict { .. } => None, - - ResolvingError::TermExportConflict { .. } => None, - - ResolvingError::TypeExportConflict { .. } => None, - - ResolvingError::ExistingTerm { .. } => None, - - ResolvingError::ExistingType { .. 
} => None, + for error in &resolved.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } - ResolvingError::InvalidImportStatement { id } => { - let ptr = context.stabilized.ast_ptr(*id)?; + for error in &checked.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } - let message = { - let cst = ptr.to_node(context.root); + let to_position = |offset: u32| locate::offset_to_position(&content, TextSize::from(offset)); - let name = cst.module_name().map(|cst| { - let range = cst.syntax().text_range(); - context.content[range].trim() - }); + let diagnostics = all_diagnostics + .iter() + .filter_map(|diagnostic| { + let start = to_position(diagnostic.primary.start)?; + let end = to_position(diagnostic.primary.end)?; + let range = Range { start, end }; - let name = name.unwrap_or(""); - format!("Cannot import module '{name}'") + let severity = match diagnostic.severity { + diagnostics::Severity::Error => DiagnosticSeverity::ERROR, + diagnostics::Severity::Warning => DiagnosticSeverity::WARNING, }; - let ptr = ptr.syntax_node_ptr(); - let range = locate::syntax_range(context.content, context.root, &ptr)?; + let related_information = diagnostic.related.iter().filter_map(|related| { + let start = to_position(related.span.start)?; + let end = to_position(related.span.end)?; + Some(DiagnosticRelatedInformation { + location: Location { uri: uri.clone(), range: Range { start, end } }, + message: related.message.clone(), + }) + }); + + let related_information = related_information.collect_vec(); Some(Diagnostic { range, - severity: Some(DiagnosticSeverity::ERROR), - code: Some(NumberOrString::String("InvalidImportStatement".to_string())), + severity: Some(severity), + code: Some(NumberOrString::String(diagnostic.code.to_string())), code_description: None, - source, - message, - related_information: None, + source: Some(format!("analyzer/{}", diagnostic.source)), + message: diagnostic.message.clone(), + related_information: if 
related_information.is_empty() { + None + } else { + Some(related_information) + }, tags: None, data: None, }) - } - - ResolvingError::InvalidImportItem { id } => { - let ptr = context.stabilized.syntax_ptr(*id)?; - - let message = { - let range = ptr.to_node(context.root).text_range(); - let name = context.content[range].trim(); - format!("Cannot import item '{name}'") - }; + }) + .collect(); - let range = locate::syntax_range(context.content, context.root, &ptr)?; + snapshot.client.publish_diagnostics(PublishDiagnosticsParams { + uri, + diagnostics, + version: None, + })?; - Some(Diagnostic { - range, - severity: Some(DiagnosticSeverity::ERROR), - code: Some(NumberOrString::String("InvalidImportItem".to_string())), - code_description: None, - source, - message, - related_information: None, - tags: None, - data: None, - }) - } - } + Ok(()) } diff --git a/compiler-compatibility/command/Cargo.toml b/compiler-compatibility/command/Cargo.toml new file mode 100644 index 000000000..3a0357111 --- /dev/null +++ b/compiler-compatibility/command/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "compiler-compatibility" +version = "0.1.0" +edition = "2024" + +[dependencies] +registry = { path = "../registry" } +analyzer = { path = "../../compiler-lsp/analyzer" } +building-types = { path = "../../compiler-core/building-types" } +files = { path = "../../compiler-core/files" } +diagnostics = { path = "../../compiler-core/diagnostics" } +clap = { version = "4", features = ["derive"] } +git2 = "0.20" +reqwest = { version = "0.12", features = ["blocking"] } +semver = "1" +flate2 = "1" +tar = "0.4" +sha2 = "0.10" +hex = "0.4" +thiserror = "2" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } +base64 = "0.22.1" +glob = "0.3" +url = "2" +line-index = "0.1" +petgraph = "0.8" +rayon = "1" +rowan = "0.16" +tracing-tree = "0.4.1" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +chrono = { version = "0.4", features = ["serde"] } 
diff --git a/compiler-compatibility/command/src/compat.rs b/compiler-compatibility/command/src/compat.rs new file mode 100644 index 000000000..06975d094 --- /dev/null +++ b/compiler-compatibility/command/src/compat.rs @@ -0,0 +1,524 @@ +//! Package compatibility checking with QueryEngine. + +use std::collections::{BTreeMap, HashSet}; +use std::path::Path; + +use analyzer::QueryEngine; +use building_types::QueryResult; +use diagnostics::{DiagnosticsContext, Severity, ToDiagnostics, format_rustc_with_path}; +use files::{FileId, Files}; +use petgraph::graphmap::DiGraphMap; +use rayon::prelude::*; +use url::Url; + +use crate::loader; +use crate::resolver; +use crate::types::ResolvedSet; + +/// Result of checking a single file. +pub struct FileResult { + pub error_count: usize, + pub warning_count: usize, + pub output: String, +} + +/// Result of checking a package. +pub struct CheckResult { + pub files: Vec, + pub total_errors: usize, + pub total_warnings: usize, +} + +/// Outcome for a single package in `check_all` mode. +pub struct PackageOutcome { + pub version: String, + pub total_errors: usize, + pub total_warnings: usize, + pub topo_layer: usize, + pub root_cause: bool, + pub cascaded_from: Vec, + pub cascaded_from_root_causes: Vec, +} + +/// Result of checking all packages. +pub struct AllCheckResult { + pub outcomes: BTreeMap, +} + +/// Primes dependency caches by processing files through the query pipeline in +/// topological order. Each layer is processed in parallel using rayon, so that +/// packages within the same layer are primed concurrently. 
+fn prime_dependencies( + engine: &QueryEngine, + files: &Files, + file_ids: &[FileId], + target_package: &str, + resolved: &ResolvedSet, + packages_dir: &Path, +) { + let layers = resolver::topological_order(resolved); + + for (layer_index, layer) in layers.iter().enumerate() { + let dependency_packages: Vec<&String> = + layer.iter().filter(|name| *name != target_package).collect(); + + if dependency_packages.is_empty() { + continue; + } + + let layer_files = dependency_packages.iter().flat_map(|package_name| { + let package_directory = find_package_dir(packages_dir, package_name); + if let Some(directory) = package_directory { + loader::filter_package_files(files, file_ids, &directory) + } else { + vec![] + } + }); + + let layer_files: Vec = layer_files.collect(); + let package_names: Vec<&str> = dependency_packages.iter().map(|s| s.as_str()).collect(); + + tracing::debug!( + target: "compiler_compatibility", + layer = layer_index, + file_count = layer_files.len(), + packages = ?package_names, + "Priming" + ); + + // Prime each file in parallel through the full query pipeline. + layer_files.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.lowered(id); + let _ = snapshot.resolved(id); + let _ = snapshot.checked(id); + }); + } +} + +/// Checks all packages in the packages directory and returns diagnostics. +/// +/// `packages` should point to the directory containing unpacked packages. +/// `target_package` is the specific package to report diagnostics for. +/// `resolved` provides the dependency graph for cache priming. 
+pub fn check_package(packages: &Path, target_package: &str, resolved: &ResolvedSet) -> CheckResult { + let _span = + tracing::info_span!(target: "compiler_compatibility", "check_package", target_package) + .entered(); + + let (engine, files, file_ids) = loader::load_packages(packages); + + prime_dependencies(&engine, &files, &file_ids, target_package, resolved, packages); + + let target_directory = find_package_dir(packages, target_package); + let target_files = if let Some(directory) = &target_directory { + loader::filter_package_files(&files, &file_ids, directory) + } else { + vec![] + }; + + target_files.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.indexed(id); + }); + + let layers = module_topological_layers(&engine, &target_files); + + tracing::debug!( + target: "compiler_compatibility", + layer_count = layers.len(), + layers = ?layers.iter().map(|layer| layer.len()).collect::>(), + "Layers" + ); + + { + for (layer_index, layer) in layers.iter().enumerate() { + tracing::debug!( + target: "compiler_compatibility", + layer = layer_index, + file_count = layer.len(), + "Priming" + ); + layer.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.lowered(id); + let _ = snapshot.resolved(id); + }); + } + } + + let mut results = vec![]; + let mut total_errors = 0; + let mut total_warnings = 0; + + for layer in &layers { + for &id in layer { + let relative_path = compute_relative_path(&files, id, packages); + let file_result = collect_diagnostics(&engine, id, &relative_path); + + total_errors += file_result.error_count; + total_warnings += file_result.warning_count; + results.push(file_result); + } + } + + CheckResult { files: results, total_errors, total_warnings } +} + +/// Checks a single package using an already-loaded engine. +/// +/// Files for the package must already be loaded in the engine. 
The caller +/// is responsible for ensuring dependencies are already cached via +/// topological ordering. +fn check_loaded_package( + engine: &QueryEngine, + files: &Files, + package_files: &[FileId], + packages_dir: &Path, + _quiet: bool, +) -> CheckResult { + // Index target files + package_files.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.indexed(id); + }); + + // Compute module-level topological layers within the package + let layers = module_topological_layers(engine, package_files); + + tracing::debug!( + target: "compiler_compatibility", + layer_count = layers.len(), + layers = ?layers.iter().map(|layer| layer.len()).collect::>(), + "Module layers" + ); + + // Prime in module-layer order + for layer in &layers { + layer.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.lowered(id); + let _ = snapshot.resolved(id); + }); + } + + // Collect diagnostics + let mut results = vec![]; + let mut total_errors = 0; + let mut total_warnings = 0; + + for layer in &layers { + for &id in layer { + let relative_path = compute_relative_path(files, id, packages_dir); + let file_result = collect_diagnostics(engine, id, &relative_path); + + total_errors += file_result.error_count; + total_warnings += file_result.warning_count; + results.push(file_result); + } + } + + CheckResult { files: results, total_errors, total_warnings } +} + +/// Checks all packages using a single shared engine, processing in +/// topological order to maximize cache reuse. 
+pub fn check_all(packages_dir: &Path, resolved: &ResolvedSet, quiet: bool) -> AllCheckResult { + let _span = tracing::info_span!(target: "compiler_compatibility", "check_all").entered(); + + // Load all files once + let (engine, files, file_ids) = loader::load_packages(packages_dir); + + // Build package -> files map + let mut package_files: BTreeMap> = BTreeMap::new(); + for name in resolved.packages.keys() { + let dir = find_package_dir(packages_dir, name); + let pkg_files = if let Some(directory) = dir { + loader::filter_package_files(&files, &file_ids, &directory) + } else { + tracing::warn!(target: "compiler_compatibility", package = name, "Package directory not found"); + vec![] + }; + package_files.insert(name.clone(), pkg_files); + } + + // Index all files upfront + tracing::info!(target: "compiler_compatibility", file_count = file_ids.len(), "Indexing all files"); + file_ids.par_iter().for_each(|&id| { + let snapshot = engine.snapshot(); + let _ = snapshot.indexed(id); + }); + + // Process in topological layers + let layers = resolver::topological_order(resolved); + let mut outcomes: BTreeMap = BTreeMap::new(); + + for (layer_index, layer) in layers.iter().enumerate() { + tracing::info!( + target: "compiler_compatibility", + layer = layer_index, + package_count = layer.len(), + "Processing layer" + ); + + // Check each package in the layer + // (packages within a layer have no mutual dependencies) + for package_name in layer { + let _pkg_span = tracing::info_span!( + target: "compiler_compatibility", + "check_package", + package = package_name + ) + .entered(); + + let pkg_files = package_files.get(package_name).map(|v| v.as_slice()).unwrap_or(&[]); + let version = resolved.packages.get(package_name).cloned().unwrap_or_default(); + + let result = if pkg_files.is_empty() { + // Synthetic error for missing package directory + CheckResult { + files: vec![FileResult { + error_count: 1, + warning_count: 0, + output: format!("{}: error: package directory not 
found\n", package_name), + }], + total_errors: 1, + total_warnings: 0, + } + } else { + check_loaded_package(&engine, &files, pkg_files, packages_dir, quiet) + }; + + // Print diagnostics if not quiet + if !quiet { + for file_result in &result.files { + if !file_result.output.is_empty() { + print!("{}", file_result.output); + } + } + } + + // Log summary + let summary = format!( + "{}: {} errors, {} warnings", + package_name, result.total_errors, result.total_warnings + ); + if result.total_errors > 0 { + tracing::error!(target: "compiler_compatibility", "{}", summary); + } else if result.total_warnings > 0 { + tracing::warn!(target: "compiler_compatibility", "{}", summary); + } else { + tracing::info!(target: "compiler_compatibility", "{}", summary); + } + + // Classify: root cause vs cascaded + let deps = resolved.dependencies.get(package_name).cloned().unwrap_or_default(); + let failed_deps: Vec = deps + .iter() + .filter(|d| outcomes.get(*d).is_some_and(|o| o.total_errors > 0)) + .cloned() + .collect(); + + let failed = result.total_errors > 0; + let root_cause = failed && failed_deps.is_empty(); + let cascaded_from = if failed { failed_deps } else { vec![] }; + + // Transitive root-cause attribution + let cascaded_from_root_causes = if !cascaded_from.is_empty() { + collect_root_causes(&cascaded_from, &outcomes, resolved) + } else { + vec![] + }; + + outcomes.insert( + package_name.clone(), + PackageOutcome { + version, + total_errors: result.total_errors, + total_warnings: result.total_warnings, + topo_layer: layer_index, + root_cause, + cascaded_from, + cascaded_from_root_causes, + }, + ); + } + } + + AllCheckResult { outcomes } +} + +/// Walks transitive dependencies to find all root-cause packages. 
fn collect_root_causes(
    failed_deps: &[String],
    outcomes: &BTreeMap<String, PackageOutcome>,
    resolved: &ResolvedSet,
) -> Vec<String> {
    // Depth-first walk over failed dependencies; a BTreeSet keeps the
    // result sorted and de-duplicated (matching the original sort+dedup).
    let mut root_causes = std::collections::BTreeSet::new();
    let mut visited = HashSet::new();
    let mut stack: Vec<String> = failed_deps.to_vec();

    while let Some(dep) = stack.pop() {
        if !visited.insert(dep.clone()) {
            continue;
        }
        if let Some(outcome) = outcomes.get(&dep) {
            if outcome.root_cause {
                root_causes.insert(dep.clone());
            }
            // Keep walking through transitive dependencies that also failed.
            if let Some(transitive) = resolved.dependencies.get(&dep) {
                for td in transitive {
                    if outcomes.get(td).is_some_and(|o| o.total_errors > 0) {
                        stack.push(td.clone());
                    }
                }
            }
        }
    }

    root_causes.into_iter().collect()
}

/// Builds a module-level dependency graph from indexed imports and returns
/// files grouped into topological layers.
///
/// Only tracks edges between files in the provided set. Files with no
/// intra-package dependencies go in layer 0.
fn module_topological_layers(engine: &QueryEngine, file_ids: &[FileId]) -> Vec<Vec<FileId>> {
    let in_package: HashSet<FileId> = file_ids.iter().copied().collect();

    let mut graph = DiGraphMap::new();
    for &id in file_ids {
        graph.add_node(id);
    }

    // Edge direction: importer -> imported module (within the package only).
    for &id in file_ids {
        let Ok(indexed) = engine.indexed(id) else { continue };
        for import in indexed.imports.values() {
            let Some(module_name) = &import.name else { continue };
            let Some(target) = engine.module_file(module_name) else { continue };
            if in_package.contains(&target) {
                graph.add_edge(id, target, ());
            }
        }
    }

    resolver::topological_layers(&graph)
}

/// Locates the unpacked directory for `package_name` under `packages_dir`.
///
/// Directories are named `{name}-{version}`; the suffix must parse as a
/// version so that e.g. `foo-bar` does not match package `foo`.
fn find_package_dir(packages_dir: &Path, package_name: &str) -> Option<PathBuf> {
    let prefix = format!("{}-", package_name);
    for entry in std::fs::read_dir(packages_dir).ok()?.filter_map(Result::ok) {
        let file_name = entry.file_name();
        let file_name = file_name.to_string_lossy();
        let Some(suffix) = file_name.strip_prefix(&prefix) else { continue };
        // NOTE(review): the turbofish target was garbled in extraction;
        // semver::Version matches the crate's other version parsing — confirm.
        if suffix.parse::<semver::Version>().is_ok() && entry.file_type().ok()?.is_dir() {
            return entry.path().canonicalize().ok();
        }
    }
    None
}

/// Renders a file's path relative to `base_dir`, falling back to the raw URI.
fn compute_relative_path(files: &Files, id: FileId, base_dir: &Path) -> String {
    let uri = files.path(id);
    Url::parse(&uri)
        .ok()
        .and_then(|url| url.to_file_path().ok())
        .and_then(|path| path.strip_prefix(base_dir).ok().map(Path::to_path_buf))
        .map(|relative| relative.display().to_string())
        .unwrap_or_else(|| uri.to_string())
}

/// Converts a query failure into a synthetic one-error `FileResult`.
// NOTE(review): generic parameters were stripped by extraction; <T> over the
// query's success type matches the visible `?` call sites — confirm.
trait QueryResultExt<T> {
    fn or_file_error(self, path: &str, code: &str, message: &str) -> Result<T, FileResult>;
}

impl<T> QueryResultExt<T> for QueryResult<T> {
    fn or_file_error(self, path: &str, code: &str, message: &str) -> Result<T, FileResult> {
        self.map_err(|_| FileResult {
            error_count: 1,
            warning_count: 0,
            output: format!("{path}:1:1: error[{code}]: {message}\n"),
        })
    }
}

/// Runs the full pipeline for one file and returns its diagnostics summary.
fn collect_diagnostics(engine: &QueryEngine, id: FileId, relative_path: &str) -> FileResult {
    // Success and failure both carry a FileResult; flatten either way.
    match collect_diagnostics_inner(engine, id, relative_path) {
        Ok(result) | Err(result) => result,
    }
}

fn collect_diagnostics_inner(
    engine: &QueryEngine,
    id: FileId,
    relative_path: &str,
) -> Result<FileResult, FileResult> {
    let content = engine.content(id);

    let (parsed, _) =
        engine.parsed(id).or_file_error(relative_path, "ParseError", "Failed to parse file")?;

    if let Some(module_name) = parsed.module_name() {
        tracing::info!(target: "compiler_compatibility", module_name = %module_name);
    } else {
        tracing::warn!(target: "compiler_compatibility", path = ?relative_path, "Invalid module name");
    };

    let root = parsed.syntax_node();

    let stabilized = engine.stabilized(id).or_file_error(
        relative_path,
        "StabilizeError",
        "Failed to stabilize",
    )?;

    let indexed =
        engine.indexed(id).or_file_error(relative_path, "IndexError", "Failed to index")?;

    let lowered =
        engine.lowered(id).or_file_error(relative_path, "LowerError", "Failed to lower")?;

    let resolved = engine.resolved(id);

    let checked =
engine.checked(id).or_file_error(relative_path, "CheckError", "Failed to check")?; + + let context = + DiagnosticsContext::new(&content, &root, &stabilized, &indexed, &lowered, &checked); + + let mut all_diagnostics = vec![]; + + for error in &lowered.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } + + if let Ok(ref resolved) = resolved { + for error in &resolved.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } + } + + for error in &checked.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } + + let mut error_count = 0; + let mut warning_count = 0; + + for diagnostic in &all_diagnostics { + match diagnostic.severity { + Severity::Error => error_count += 1, + Severity::Warning => warning_count += 1, + } + } + + let output = format_rustc_with_path(&all_diagnostics, &content, relative_path); + + Ok(FileResult { error_count, warning_count, output }) +} diff --git a/compiler-compatibility/command/src/error.rs b/compiler-compatibility/command/src/error.rs new file mode 100644 index 000000000..b4bb50c29 --- /dev/null +++ b/compiler-compatibility/command/src/error.rs @@ -0,0 +1,33 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum CompatError { + #[error("io: {0}")] + Io(#[from] std::io::Error), + + #[error("git: {0}")] + Git(#[from] git2::Error), + + #[error("http: {0}")] + Http(#[from] reqwest::Error), + + #[error("semver: {0}")] + Semver(#[from] semver::Error), + + #[error("registry: {0}")] + Registry(#[from] registry::RegistryError), + + #[error("package set missing package: {0}")] + MissingFromPackageSet(String), + + #[error("hash mismatch for {name}@{version}")] + HashMismatch { name: String, version: String }, + + #[error("manifest not found for {name}@{version}")] + ManifestNotFound { name: String, version: String }, + + #[error("{0}")] + Other(String), +} + +pub type Result = std::result::Result; diff --git a/compiler-compatibility/command/src/layout.rs 
b/compiler-compatibility/command/src/layout.rs
new file mode 100644
index 000000000..72141662e
--- /dev/null
+++ b/compiler-compatibility/command/src/layout.rs
use std::path::{Path, PathBuf};

use registry::RegistryLayout;

/// Layout for compiler-compatibility tool directories.
///
/// Includes paths for repository checkouts, tarball cache, and unpacked packages.
#[derive(Debug, Clone)]
pub struct Layout {
    pub registry: RegistryLayout,
    pub cache_tarballs: PathBuf,
    pub packages: PathBuf,
}

impl Layout {
    /// Derives every tool path from a single root directory:
    /// `{root}/repos/{registry,registry-index}`, `{root}/cache/tarballs`,
    /// and `{root}/packages`.
    pub fn new(root: impl AsRef<Path>) -> Layout {
        let root = root.as_ref().to_path_buf();
        let repos_dir = root.join("repos");
        let registry_dir = repos_dir.join("registry");
        let index_dir = repos_dir.join("registry-index");
        let cache_tarballs = root.join("cache").join("tarballs");
        let packages = root.join("packages");

        Layout {
            // Fixed: `&registry_dir` was corrupted to an `&reg;` HTML entity
            // in the extracted text.
            registry: RegistryLayout::new(&registry_dir, &index_dir),
            cache_tarballs,
            packages,
        }
    }

    /// Cache location for one package tarball: `{name}-{version}.tar.gz`.
    pub fn tarball_cache_path(&self, name: &str, version: &str) -> PathBuf {
        self.cache_tarballs.join(format!("{}-{}.tar.gz", name, version))
    }
}
diff --git a/compiler-compatibility/command/src/loader.rs b/compiler-compatibility/command/src/loader.rs
new file mode 100644
index 000000000..c59440b10
--- /dev/null
+++ b/compiler-compatibility/command/src/loader.rs
//! Loads PureScript packages into QueryEngine + Files.

use std::fs;
use std::path::Path;

use analyzer::{QueryEngine, prim};
use files::{FileId, Files};
use glob::glob;
use url::Url;

/// Loads a single `.purs` file, registering its module name with the engine.
+fn load_file(engine: &mut QueryEngine, files: &mut Files, path: &Path) -> Option { + let path = path.canonicalize().ok()?; + let url = Url::from_file_path(&path).ok()?; + let content = fs::read_to_string(&path).ok()?; + let content = content.replace("\r\n", "\n"); + + let uri = url.to_string(); + let id = files.insert(uri, content); + let content = files.content(id); + + engine.set_content(id, content); + if let Ok((parsed, _)) = engine.parsed(id) + && let Some(name) = parsed.module_name() + { + engine.set_module_file(&name, id); + } + + Some(id) +} + +/// Loads all packages from a packages directory. +/// +/// Returns (engine, files, file_ids) where file_ids contains all loaded .purs files. +pub fn load_packages(packages_dir: &Path) -> (QueryEngine, Files, Vec) { + let mut engine = QueryEngine::default(); + let mut files = Files::default(); + prim::configure(&mut engine, &mut files); + + let pattern = format!("{}/**/*.purs", packages_dir.display()); + let mut paths: Vec<_> = glob(&pattern).into_iter().flatten().filter_map(Result::ok).collect(); + paths.sort(); + + let mut file_ids = Vec::new(); + for path in paths { + if let Some(id) = load_file(&mut engine, &mut files, &path) { + file_ids.push(id); + } + } + + (engine, files, file_ids) +} + +/// Filters file IDs to only those belonging to a specific package directory. 
+pub fn filter_package_files(files: &Files, file_ids: &[FileId], package_dir: &Path) -> Vec { + file_ids + .iter() + .copied() + .filter(|&id| { + let uri = files.path(id); + Url::parse(&uri) + .ok() + .and_then(|u| u.to_file_path().ok()) + .is_some_and(|p| p.starts_with(package_dir)) + }) + .collect() +} diff --git a/compiler-compatibility/command/src/main.rs b/compiler-compatibility/command/src/main.rs new file mode 100644 index 000000000..d3921c05c --- /dev/null +++ b/compiler-compatibility/command/src/main.rs @@ -0,0 +1,274 @@ +mod compat; +mod error; +mod layout; +mod loader; +mod report; +mod repositories; +mod resolver; +mod storage; +mod trace; +mod types; +mod unpacker; + +use std::collections::BTreeMap; +use std::path::PathBuf; + +use clap::Parser; +use registry::{FsRegistry, RegistryReader}; +use tracing::level_filters::LevelFilter; + +use crate::report::{Classification, CompatReport, PackageReport, ReportSummary}; + +#[derive(Parser, Debug)] +#[command(name = "compiler-compatibility")] +#[command(about = "Fetch PureScript packages for compatibility testing")] +struct Cli { + #[arg(help = "Package names to fetch and unpack")] + packages: Vec, + + #[arg(long, help = "Check all packages in the package set")] + all: bool, + + #[arg(long, help = "Use specific package set version (default: latest)")] + package_set: Option, + + #[arg(long, help = "List available package set versions")] + list_sets: bool, + + #[arg(long, help = "Update local registry repos (git pull)")] + update: bool, + + #[arg(long, default_value = "target/compiler-compatibility", help = "Output directory")] + output: PathBuf, + + #[arg(long, default_value = "target/compiler-tracing", help = "Trace output directory")] + trace_output: PathBuf, + + #[arg(long, help = "Disable tarball caching")] + no_cache: bool, + + #[arg(short, long, help = "Verbose output")] + verbose: bool, + + #[arg(long, help = "Suppress per-file diagnostic output")] + quiet: bool, + + #[arg(long, value_name = "PATH", help 
= "Write JSON report to path")] + report_json: Option, + + #[arg( + long, + value_name = "LevelFilter", + default_value = "off", + help = "Log level for checking crate traces" + )] + log_level: LevelFilter, +} + +fn main() -> error::Result<()> { + let cli = Cli::parse(); + + if cli.all && !cli.packages.is_empty() { + return Err(error::CompatError::Other( + "Cannot specify both --all and package names".to_string(), + )); + } + + let stdout_level = if cli.verbose { LevelFilter::DEBUG } else { LevelFilter::INFO }; + let trace_output = cli.trace_output.clone(); + let tracing_handle = trace::init_tracing(stdout_level, cli.log_level, trace_output); + + let layout = layout::Layout::new(&cli.output); + + repositories::ensure_repositories(&layout.registry, cli.update)?; + + let reader = FsRegistry::new(layout.registry.clone()); + + if cli.list_sets { + let sets = reader.list_package_sets()?; + for set in sets { + println!("{}", set); + } + return Ok(()); + } + + if !cli.all && cli.packages.is_empty() { + println!( + "No packages specified. Use --all or provide package names. Use --help for usage." 
+ ); + return Ok(()); + } + + let package_set = reader.read_package_set(cli.package_set.as_deref())?; + tracing::info!(target: "compiler_compatibility", version = %package_set.version, "Using package set"); + + if cli.all { + run_all_mode(&cli, &package_set, &reader, &layout)?; + } else { + run_packages_mode(&cli, &package_set, &reader, &layout, &tracing_handle)?; + } + + Ok(()) +} + +fn run_all_mode( + cli: &Cli, + package_set: ®istry::PackageSet, + reader: &impl RegistryReader, + layout: &layout::Layout, +) -> error::Result<()> { + let resolved = resolver::resolve_all(package_set, reader)?; + tracing::info!(target: "compiler_compatibility", count = resolved.packages.len(), "Resolved all packages"); + + // Fetch and unpack all packages + for (name, version) in &resolved.packages { + let metadata = reader.read_metadata(name)?; + let published = metadata.published.get(version).ok_or_else(|| { + error::CompatError::ManifestNotFound { name: name.clone(), version: version.clone() } + })?; + + let tarball = storage::fetch_tarball(name, version, layout, cli.no_cache)?; + storage::verify_tarball(&tarball, &published.hash, name, version)?; + unpacker::unpack_tarball(&tarball, &layout.packages)?; + + tracing::debug!(target: "compiler_compatibility", name, version, "Unpacked"); + } + + tracing::info!(target: "compiler_compatibility", directory = %layout.packages.display(), "Finished unpacking"); + + let all_result = compat::check_all(&layout.packages, &resolved, cli.quiet); + + if let Some(ref report_path) = cli.report_json { + let report = build_report(&package_set.version, &all_result); + let json = serde_json::to_string_pretty(&report) + .map_err(|e| error::CompatError::Other(format!("Failed to serialize report: {}", e)))?; + std::fs::write(report_path, json)?; + tracing::info!(target: "compiler_compatibility", path = %report_path.display(), "Report written"); + } + + Ok(()) +} + +fn run_packages_mode( + cli: &Cli, + package_set: ®istry::PackageSet, + reader: &impl 
RegistryReader, + layout: &layout::Layout, + tracing_handle: &trace::TracingHandle, +) -> error::Result<()> { + let resolved = resolver::resolve(&cli.packages, package_set, reader)?; + tracing::info!(target: "compiler_compatibility", count = resolved.packages.len(), "Resolved packages"); + + for (name, version) in &resolved.packages { + let metadata = reader.read_metadata(name)?; + let published = metadata.published.get(version).ok_or_else(|| { + error::CompatError::ManifestNotFound { name: name.clone(), version: version.clone() } + })?; + + let tarball = storage::fetch_tarball(name, version, layout, cli.no_cache)?; + storage::verify_tarball(&tarball, &published.hash, name, version)?; + unpacker::unpack_tarball(&tarball, &layout.packages)?; + + tracing::debug!(target: "compiler_compatibility", name, version, "Unpacked"); + } + + tracing::info!(target: "compiler_compatibility", directory = %layout.packages.display(), "Finished unpacking"); + + for package in &cli.packages { + let _span = + tracing::info_span!(target: "compiler_compatibility", "for_each_package", package) + .entered(); + + let guard = + tracing_handle.begin_package(package).expect("failed to start package trace capture"); + let log_file = guard.path().to_path_buf(); + + let result = compat::check_package(&layout.packages, package, &resolved); + + drop(guard); + + if !cli.quiet { + for file_result in &result.files { + if !file_result.output.is_empty() { + print!("{}", file_result.output); + } + } + } + + let summary = format!( + "{}: {} errors, {} warnings", + package, result.total_errors, result.total_warnings + ); + + if result.total_errors > 0 { + tracing::error!(target: "compiler_compatibility", "{}", summary); + } else if result.total_warnings > 0 { + tracing::warn!(target: "compiler_compatibility", "{}", summary); + } else { + tracing::info!(target: "compiler_compatibility", "{}", summary); + } + + tracing::debug!(target: "compiler_compatibility", path = %log_file.display(), "Trace written"); + 
} + + Ok(()) +} + +fn build_report(package_set_version: &str, all_result: &compat::AllCheckResult) -> CompatReport { + let mut packages = BTreeMap::new(); + let mut ok = 0; + let mut warnings_only = 0; + let mut failed = 0; + let mut failed_root_cause = 0; + let mut failed_cascaded = 0; + let mut root_causes = Vec::new(); + + for (name, outcome) in &all_result.outcomes { + if outcome.total_errors > 0 { + failed += 1; + if outcome.root_cause { + failed_root_cause += 1; + root_causes.push(name.clone()); + } else { + failed_cascaded += 1; + } + } else if outcome.total_warnings > 0 { + warnings_only += 1; + } else { + ok += 1; + } + + packages.insert( + name.clone(), + PackageReport { + version: outcome.version.clone(), + topo_layer: outcome.topo_layer, + errors: outcome.total_errors, + warnings: outcome.total_warnings, + classification: Classification { + root_cause: outcome.root_cause, + cascaded_from: outcome.cascaded_from.clone(), + cascaded_from_root_causes: outcome.cascaded_from_root_causes.clone(), + }, + }, + ); + } + + let total = all_result.outcomes.len(); + + CompatReport { + timestamp: chrono::Utc::now().to_rfc3339(), + git_sha: std::env::var("GITHUB_SHA").unwrap_or_else(|_| "unknown".to_string()), + package_set: package_set_version.to_string(), + summary: ReportSummary { + total, + ok, + warnings_only, + failed, + failed_root_cause, + failed_cascaded, + }, + root_causes, + packages, + } +} diff --git a/compiler-compatibility/command/src/report.rs b/compiler-compatibility/command/src/report.rs new file mode 100644 index 000000000..665507e2e --- /dev/null +++ b/compiler-compatibility/command/src/report.rs @@ -0,0 +1,39 @@ +use std::collections::BTreeMap; + +use serde::Serialize; + +#[derive(Debug, Serialize)] +pub struct CompatReport { + pub timestamp: String, + pub git_sha: String, + pub package_set: String, + pub summary: ReportSummary, + pub root_causes: Vec, + pub packages: BTreeMap, +} + +#[derive(Debug, Serialize)] +pub struct ReportSummary { + pub 
total: usize, + pub ok: usize, + pub warnings_only: usize, + pub failed: usize, + pub failed_root_cause: usize, + pub failed_cascaded: usize, +} + +#[derive(Debug, Serialize)] +pub struct PackageReport { + pub version: String, + pub topo_layer: usize, + pub errors: usize, + pub warnings: usize, + pub classification: Classification, +} + +#[derive(Debug, Serialize)] +pub struct Classification { + pub root_cause: bool, + pub cascaded_from: Vec, + pub cascaded_from_root_causes: Vec, +} diff --git a/compiler-compatibility/command/src/repositories.rs b/compiler-compatibility/command/src/repositories.rs new file mode 100644 index 000000000..c05ca1603 --- /dev/null +++ b/compiler-compatibility/command/src/repositories.rs @@ -0,0 +1,50 @@ +use std::fs; + +use git2::build::RepoBuilder; +use git2::{FetchOptions, Repository}; +use registry::RegistryLayout; + +use crate::error::Result; + +const REGISTRY_URL: &str = "https://github.com/purescript/registry"; +const REGISTRY_INDEX_URL: &str = "https://github.com/purescript/registry-index"; + +pub fn ensure_repositories(layout: &RegistryLayout, update: bool) -> Result<()> { + if let Some(parent) = layout.registry_dir.parent() { + fs::create_dir_all(parent)?; + } + + ensure_repository(REGISTRY_URL, &layout.registry_dir, update)?; + ensure_repository(REGISTRY_INDEX_URL, &layout.index_dir, update)?; + + Ok(()) +} + +fn ensure_repository(url: &str, path: &std::path::Path, update: bool) -> Result<()> { + if path.exists() { + if update { + let repo = Repository::open(path)?; + let mut remote = repo.find_remote("origin")?; + remote.fetch(&["master"], None, None)?; + + let fetch_head = repo.find_reference("FETCH_HEAD")?; + let commit = repo.reference_to_annotated_commit(&fetch_head)?; + let (analysis, _) = repo.merge_analysis(&[&commit])?; + + if analysis.is_fast_forward() || analysis.is_normal() { + let refname = "refs/heads/master"; + let mut reference = repo.find_reference(refname)?; + reference.set_target(commit.id(), "pull: 
fast-forward")?; + repo.set_head(refname)?; + repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?; + } + } + } else { + let mut fetch_opts = FetchOptions::new(); + fetch_opts.depth(1); + + RepoBuilder::new().fetch_options(fetch_opts).clone(url, path)?; + } + + Ok(()) +} diff --git a/compiler-compatibility/command/src/resolver.rs b/compiler-compatibility/command/src/resolver.rs new file mode 100644 index 000000000..eee700c97 --- /dev/null +++ b/compiler-compatibility/command/src/resolver.rs @@ -0,0 +1,201 @@ +use std::collections::{BTreeMap, HashSet, VecDeque}; + +use petgraph::graphmap::DiGraphMap; +use registry::{PackageSet, RegistryReader}; +use semver::Version; + +use crate::error::{CompatError, Result}; +use crate::types::ResolvedSet; + +pub fn resolve( + root_packages: &[String], + package_set: &PackageSet, + registry: &impl RegistryReader, +) -> Result { + let mut resolved = BTreeMap::new(); + let mut dependencies = BTreeMap::new(); + let mut visited = HashSet::new(); + + for name in root_packages { + let version = package_set + .packages + .packages + .get(name) + .ok_or_else(|| CompatError::MissingFromPackageSet(name.clone()))?; + resolve_recursive( + name, + version, + package_set, + registry, + &mut resolved, + &mut dependencies, + &mut visited, + )?; + } + + Ok(ResolvedSet { packages: resolved, dependencies }) +} + +/// Resolves all packages in the package set. +/// +/// Unlike `resolve()`, this does not recurse—it iterates all pinned packages +/// and reads their manifests to extract dependency edges. 
+pub fn resolve_all( + package_set: &PackageSet, + registry: &impl RegistryReader, +) -> Result { + let mut packages = BTreeMap::new(); + let mut dependencies = BTreeMap::new(); + + for (name, version) in &package_set.packages.packages { + packages.insert(name.clone(), version.clone()); + + let manifests = registry.read_manifest_versions(name)?; + let parsed_version: Version = version.parse()?; + let manifest = manifests.iter().find(|m| m.version == parsed_version).ok_or_else(|| { + CompatError::ManifestNotFound { name: name.clone(), version: version.clone() } + })?; + + let dep_names: Vec = manifest + .dependencies + .keys() + .filter(|d| package_set.packages.packages.contains_key(*d)) + .cloned() + .collect(); + + dependencies.insert(name.clone(), dep_names); + } + + Ok(ResolvedSet { packages, dependencies }) +} + +fn resolve_recursive( + name: &str, + version: &str, + package_set: &PackageSet, + registry: &impl RegistryReader, + resolved: &mut BTreeMap, + dependencies: &mut BTreeMap>, + visited: &mut HashSet, +) -> Result<()> { + if !visited.insert(name.to_string()) { + return Ok(()); + } + + resolved.insert(name.to_string(), version.to_string()); + + let manifests = registry.read_manifest_versions(name)?; + let parsed_version: Version = version.parse()?; + let manifest = manifests.iter().find(|m| m.version == parsed_version).ok_or_else(|| { + CompatError::ManifestNotFound { name: name.to_string(), version: version.to_string() } + })?; + + let mut dependency_names = Vec::new(); + + for dependency in manifest.dependencies.keys() { + let version = package_set + .packages + .packages + .get(dependency) + .ok_or_else(|| CompatError::MissingFromPackageSet(dependency.clone()))?; + + dependency_names.push(dependency.clone()); + resolve_recursive( + dependency, + version, + package_set, + registry, + resolved, + dependencies, + visited, + )?; + } + + dependencies.insert(name.to_string(), dependency_names); + + Ok(()) +} + +/// Returns packages grouped into topological 
layers. +/// +/// Layer 0 contains packages with no dependencies, layer 1 contains packages +/// whose dependencies are all in layer 0, and so on. +pub fn topological_order(resolved: &ResolvedSet) -> Vec> { + let all_packages: HashSet<&String> = resolved.packages.keys().collect(); + + let mut graph = DiGraphMap::new(); + + for name in &all_packages { + graph.add_node(name.as_str()); + } + + // Edge direction: dependent -> dependency (name depends on dep). + for (name, deps) in &resolved.dependencies { + if all_packages.contains(name) { + for dep in deps { + if all_packages.contains(dep) { + graph.add_edge(name.as_str(), dep.as_str(), ()); + } + } + } + } + + topological_layers(&graph) + .into_iter() + .map(|layer| layer.into_iter().map(String::from).collect()) + .collect() +} + +/// Computes topological layers from a dependency graph using Kahn's algorithm. +/// +/// Edges point from dependent to dependency. Layer 0 contains nodes with no +/// outgoing edges (no dependencies), layer 1 contains nodes whose dependencies +/// are all in layer 0, and so on. +pub fn topological_layers( + graph: &DiGraphMap, +) -> Vec> { + // Count of unprocessed dependencies per node. + let mut dependency_count: BTreeMap = BTreeMap::new(); + for node in graph.nodes() { + dependency_count.insert(node, graph.neighbors(node).count()); + } + + // Reverse edges: dependency -> list of dependents. 
+ let mut dependents: BTreeMap> = BTreeMap::new(); + for node in graph.nodes() { + for dependency in graph.neighbors(node) { + dependents.entry(dependency).or_default().push(node); + } + } + + let mut layers = Vec::new(); + let mut queue: VecDeque = VecDeque::new(); + + for (&node, &count) in &dependency_count { + if count == 0 { + queue.push_back(node); + } + } + + while !queue.is_empty() { + let layer: Vec = queue.drain(..).collect(); + let mut next_queue = VecDeque::new(); + + for &node in &layer { + if let Some(dependent_nodes) = dependents.get(&node) { + for &dependent in dependent_nodes { + let count = dependency_count.get_mut(&dependent).unwrap(); + *count -= 1; + if *count == 0 { + next_queue.push_back(dependent); + } + } + } + } + + layers.push(layer); + queue = next_queue; + } + + layers +} diff --git a/compiler-compatibility/command/src/storage.rs b/compiler-compatibility/command/src/storage.rs new file mode 100644 index 000000000..b92bd2985 --- /dev/null +++ b/compiler-compatibility/command/src/storage.rs @@ -0,0 +1,66 @@ +use std::fs; +use std::io::Read; +use std::path::PathBuf; + +use sha2::{Digest, Sha256}; + +use crate::error::{CompatError, Result}; +use crate::layout::Layout; + +pub fn tarball_url(name: &str, version: &str) -> String { + format!("https://packages.registry.purescript.org/{}/{}.tar.gz", name, version) +} + +pub fn fetch_tarball( + name: &str, + version: &str, + layout: &Layout, + no_cache: bool, +) -> Result { + let tarball_path = layout.tarball_cache_path(name, version); + + if !no_cache && tarball_path.exists() { + return Ok(tarball_path); + } + + fs::create_dir_all(&layout.cache_tarballs)?; + + let url = tarball_url(name, version); + let response = reqwest::blocking::get(&url)?; + let bytes = response.bytes()?; + + let part_path = tarball_path.with_extension("tar.gz.part"); + fs::write(&part_path, &bytes)?; + fs::rename(&part_path, &tarball_path)?; + + Ok(tarball_path) +} + +pub fn verify_tarball( + path: &PathBuf, + 
expected_sha256: &str, + name: &str, + version: &str, +) -> Result<()> { + use base64::Engine; + use base64::engine::general_purpose::STANDARD; + + let mut file = fs::File::open(path)?; + let mut hasher = Sha256::new(); + let mut buffer = Vec::new(); + file.read_to_end(&mut buffer)?; + hasher.update(&buffer); + let hash = hasher.finalize(); + let hash_b64 = STANDARD.encode(hash); + + let expected_b64 = expected_sha256.strip_prefix("sha256-").unwrap_or(expected_sha256); + + if hash_b64 != expected_b64 { + return Err(CompatError::HashMismatch { + name: name.to_string(), + version: version.to_string(), + }); + } + + Ok(()) +} diff --git a/compiler-compatibility/command/src/trace.rs b/compiler-compatibility/command/src/trace.rs new file mode 100644 index 000000000..8bb01efe8 --- /dev/null +++ b/compiler-compatibility/command/src/trace.rs @@ -0,0 +1,162 @@ +//! Tracing setup with dynamically switchable file output for per-package logs. + +use std::fs::{self, File}; +use std::io::{self, BufWriter, Write}; +use std::path::{Path, PathBuf}; +use std::sync::{Arc, Mutex}; +use std::time::{SystemTime, UNIX_EPOCH}; + +use tracing_subscriber::Layer; +use tracing_subscriber::filter::{LevelFilter, Targets}; +use tracing_subscriber::fmt::MakeWriter; +use tracing_subscriber::fmt::format::FmtSpan; +use tracing_subscriber::layer::SubscriberExt; +use tracing_tree::HierarchicalLayer; + +struct RouterState { + writer: Option>, + current_path: Option, +} + +#[derive(Clone)] +struct CheckingLogsRouter { + state: Arc>, +} + +struct CheckingLogsWriter { + state: Arc>, +} + +impl CheckingLogsRouter { + fn new() -> CheckingLogsRouter { + CheckingLogsRouter { + state: Arc::new(Mutex::new(RouterState { writer: None, current_path: None })), + } + } + + fn set_writer(&self, writer: BufWriter, path: PathBuf) -> io::Result<()> { + let mut state = self.state.lock().unwrap(); + if let Some(w) = state.writer.as_mut() { + w.flush()?; + } + state.writer = Some(writer); + state.current_path = Some(path); 
+ Ok(()) + } + + fn clear(&self) -> io::Result<()> { + let mut state = self.state.lock().unwrap(); + if let Some(w) = state.writer.as_mut() { + w.flush()?; + } + state.writer = None; + state.current_path = None; + Ok(()) + } +} + +impl Write for CheckingLogsWriter { + fn write(&mut self, buf: &[u8]) -> io::Result { + let mut state = self.state.lock().unwrap(); + if let Some(w) = state.writer.as_mut() { w.write(buf) } else { Ok(buf.len()) } + } + + fn flush(&mut self) -> io::Result<()> { + let mut state = self.state.lock().unwrap(); + if let Some(w) = state.writer.as_mut() { w.flush() } else { Ok(()) } + } +} + +impl<'a> MakeWriter<'a> for CheckingLogsRouter { + type Writer = CheckingLogsWriter; + + fn make_writer(&'a self) -> CheckingLogsWriter { + CheckingLogsWriter { state: self.state.clone() } + } +} + +/// Handle for controlling per-package trace capture. +pub struct TracingHandle { + router: CheckingLogsRouter, + trace_dir: PathBuf, +} + +impl TracingHandle { + /// Begin capturing `checking` crate logs to a new file for the given package. + /// + /// Returns a guard that flushes and closes the file when dropped. + pub fn begin_package(&self, package: &str) -> io::Result { + fs::create_dir_all(&self.trace_dir)?; + + let timestamp = + SystemTime::now().duration_since(UNIX_EPOCH).expect("time before epoch").as_millis(); + + let sanitized: String = package + .chars() + .map(|c| if c.is_ascii_alphanumeric() || c == '-' { c } else { '_' }) + .collect(); + + let path = self.trace_dir.join(format!("{}_{}.jsonl", timestamp, sanitized)); + let file = File::create(&path)?; + let writer = BufWriter::new(file); + + self.router.set_writer(writer, path.clone())?; + + Ok(PackageTraceGuard { router: self.router.clone(), path }) + } +} + +/// Guard that flushes and closes the trace file on drop. 
+pub struct PackageTraceGuard { + router: CheckingLogsRouter, + path: PathBuf, +} + +impl PackageTraceGuard { + pub fn path(&self) -> &Path { + &self.path + } +} + +impl Drop for PackageTraceGuard { + fn drop(&mut self) { + let _ = self.router.clear(); + } +} + +/// Initialize global tracing with dual outputs: +/// - `compiler_compatibility` logs → stdout at `stdout_level` +/// - `checking` logs → per-package JSONL files at `checking_level` +/// +/// Returns a handle for switching the file output between packages. +pub fn init_tracing( + stdout_level: LevelFilter, + checking_level: LevelFilter, + trace_dir: PathBuf, +) -> TracingHandle { + let router = CheckingLogsRouter::new(); + + let stderr_filter = Targets::new() + .with_target("compiler_compatibility", stdout_level) + .with_default(LevelFilter::OFF); + let stderr_layer = HierarchicalLayer::new(2) + .with_writer(io::stderr) + .with_targets(true) + .with_indent_lines(true) + .with_filter(stderr_filter); + + let file_filter = + Targets::new().with_target("checking", checking_level).with_default(LevelFilter::OFF); + let file_layer = tracing_subscriber::fmt::layer() + .with_writer(router.clone()) + .json() + .with_span_events(FmtSpan::CLOSE) + .with_filter(file_filter); + + let subscriber = tracing_subscriber::registry().with(stderr_layer).with(file_layer); + + tracing::subscriber::set_global_default(subscriber) + .expect("failed to set global tracing subscriber"); + + TracingHandle { router, trace_dir } +} diff --git a/compiler-compatibility/command/src/types.rs b/compiler-compatibility/command/src/types.rs new file mode 100644 index 000000000..6b93f77ce --- /dev/null +++ b/compiler-compatibility/command/src/types.rs @@ -0,0 +1,6 @@ +#[derive(Debug, Clone, Default)] +pub struct ResolvedSet { + pub packages: std::collections::BTreeMap, + /// Maps each package name to its direct dependency package names. 
+ pub dependencies: std::collections::BTreeMap>, +} diff --git a/compiler-compatibility/command/src/unpacker.rs b/compiler-compatibility/command/src/unpacker.rs new file mode 100644 index 000000000..025b0bc20 --- /dev/null +++ b/compiler-compatibility/command/src/unpacker.rs @@ -0,0 +1,35 @@ +use std::fs::{self, File}; +use std::path::{Path, PathBuf}; + +use flate2::read::GzDecoder; +use tar::Archive; + +use crate::error::{CompatError, Result}; + +pub fn unpack_tarball(tarball: &Path, dest_dir: &Path) -> Result { + fs::create_dir_all(dest_dir)?; + + let file = File::open(tarball)?; + let decoder = GzDecoder::new(file); + let mut archive = Archive::new(decoder); + + for entry in archive.entries()? { + let mut entry = entry?; + let path = entry.path()?; + + if path.components().any(|c| c == std::path::Component::ParentDir) { + return Err(CompatError::Other(format!("path traversal detected: {}", path.display()))); + } + + if path.is_absolute() { + return Err(CompatError::Other(format!( + "absolute path in archive: {}", + path.display() + ))); + } + + entry.unpack_in(dest_dir)?; + } + + Ok(dest_dir.to_path_buf()) +} diff --git a/compiler-compatibility/registry/Cargo.toml b/compiler-compatibility/registry/Cargo.toml new file mode 100644 index 000000000..efa0d68f7 --- /dev/null +++ b/compiler-compatibility/registry/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "registry" +version = "0.1.0" +edition = "2024" + +[dependencies] +serde = { version = "1", features = ["derive"] } +serde_json = "1" +semver = { version = "1", features = ["serde"] } +thiserror = "2" diff --git a/compiler-compatibility/registry/src/error.rs b/compiler-compatibility/registry/src/error.rs new file mode 100644 index 000000000..b9c5bc8d5 --- /dev/null +++ b/compiler-compatibility/registry/src/error.rs @@ -0,0 +1,21 @@ +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum RegistryError { + #[error("io: {0}")] + Io(#[from] std::io::Error), + + #[error("json: {0}")] + Json(#[from] 
serde_json::Error), + + #[error("semver: {0}")] + Semver(#[from] semver::Error), + + #[error("no package sets found")] + NoPackageSets, + + #[error("package set not found: {0}")] + PackageSetNotFound(String), +} + +pub type Result = std::result::Result; diff --git a/compiler-compatibility/registry/src/layout.rs b/compiler-compatibility/registry/src/layout.rs new file mode 100644 index 000000000..cd1203b51 --- /dev/null +++ b/compiler-compatibility/registry/src/layout.rs @@ -0,0 +1,52 @@ +use std::path::{Path, PathBuf}; + +/// Layout for PureScript registry and registry-index directories. +/// +/// This represents the on-disk structure of the registry repositories, +/// independent of where they are located or how they are managed. +#[derive(Debug, Clone)] +pub struct RegistryLayout { + pub registry_dir: PathBuf, + pub index_dir: PathBuf, +} + +impl RegistryLayout { + pub fn new(registry_dir: impl AsRef, index_dir: impl AsRef) -> RegistryLayout { + RegistryLayout { + registry_dir: registry_dir.as_ref().to_path_buf(), + index_dir: index_dir.as_ref().to_path_buf(), + } + } + + pub fn package_sets_dir(&self) -> PathBuf { + self.registry_dir.join("package-sets") + } + + pub fn metadata_dir(&self) -> PathBuf { + self.registry_dir.join("metadata") + } + + /// Returns the path to a package's manifest in the registry index. 
+ /// + /// The index uses a sharding scheme based on package name length: + /// - 1-char names: `1/{name}` + /// - 2-char names: `2/{name}` + /// - 3-char names: `3/{first-char}/{name}` + /// - 4+ char names: `{first-two}/{chars-3-4}/{name}` + pub fn index_path(&self, name: &str) -> PathBuf { + let chars: Vec = name.chars().collect(); + match chars.len() { + 1 => self.index_dir.join("1").join(name), + 2 => self.index_dir.join("2").join(name), + 3 => { + let first_char: String = chars[..1].iter().collect(); + self.index_dir.join("3").join(first_char).join(name) + } + _ => { + let first_two: String = chars[..2].iter().collect(); + let second_two: String = chars[2..4].iter().collect(); + self.index_dir.join(first_two).join(second_two).join(name) + } + } + } +} diff --git a/compiler-compatibility/registry/src/lib.rs b/compiler-compatibility/registry/src/lib.rs new file mode 100644 index 000000000..3248f8163 --- /dev/null +++ b/compiler-compatibility/registry/src/lib.rs @@ -0,0 +1,12 @@ +mod error; +mod layout; +mod reader; +mod types; + +pub use error::{RegistryError, Result}; +pub use layout::RegistryLayout; +pub use reader::{FsRegistry, RegistryReader}; +pub use types::{ + Location, Manifest, Metadata, PackageSet, PackageSetPackages, PublishedVersion, + UnpublishedVersion, +}; diff --git a/compiler-compatibility/registry/src/reader.rs b/compiler-compatibility/registry/src/reader.rs new file mode 100644 index 000000000..7b1ca7d8a --- /dev/null +++ b/compiler-compatibility/registry/src/reader.rs @@ -0,0 +1,99 @@ +use std::fs; +use std::io::BufRead; + +use crate::error::{RegistryError, Result}; +use crate::layout::RegistryLayout; +use crate::types::{Manifest, Metadata, PackageSet}; + +pub trait RegistryReader { + fn list_package_sets(&self) -> Result>; + fn read_package_set(&self, version: Option<&str>) -> Result; + fn read_manifest_versions(&self, name: &str) -> Result>; + fn read_metadata(&self, name: &str) -> Result; +} + +pub struct FsRegistry { + layout: 
RegistryLayout, +} + +impl FsRegistry { + pub fn new(layout: RegistryLayout) -> FsRegistry { + FsRegistry { layout } + } + + pub fn layout(&self) -> &RegistryLayout { + &self.layout + } +} + +impl RegistryReader for FsRegistry { + fn list_package_sets(&self) -> Result> { + let package_sets_dir = self.layout.package_sets_dir(); + let mut versions: Vec = Vec::new(); + + for entry in fs::read_dir(&package_sets_dir)? { + let entry = entry?; + let path = entry.path(); + + if path.extension().is_some_and(|ext| ext == "json") + && let Some(stem) = path.file_stem().and_then(|s| s.to_str()) + && let Ok(version) = stem.parse::() + { + versions.push(version); + } + } + + if versions.is_empty() { + return Err(RegistryError::NoPackageSets); + } + + versions.sort_by(|a, b| b.cmp(a)); + + Ok(versions.into_iter().map(|v| v.to_string()).collect()) + } + + fn read_package_set(&self, version: Option<&str>) -> Result { + let version = match version { + Some(v) => v.to_string(), + None => { + self.list_package_sets()?.into_iter().next().ok_or(RegistryError::NoPackageSets)? 
+ } + }; + + let path = self.layout.package_sets_dir().join(format!("{}.json", version)); + + if !path.exists() { + return Err(RegistryError::PackageSetNotFound(version)); + } + + let content = fs::read_to_string(&path)?; + let package_set: PackageSet = serde_json::from_str(&content)?; + + Ok(package_set) + } + + fn read_manifest_versions(&self, name: &str) -> Result> { + let path = self.layout.index_path(name); + let file = fs::File::open(&path)?; + let reader = std::io::BufReader::new(file); + + let mut manifests = Vec::new(); + for line in reader.lines() { + let line = line?; + if !line.is_empty() { + let manifest: Manifest = serde_json::from_str(&line)?; + manifests.push(manifest); + } + } + + Ok(manifests) + } + + fn read_metadata(&self, name: &str) -> Result { + let path = self.layout.metadata_dir().join(format!("{}.json", name)); + let content = fs::read_to_string(&path)?; + let metadata: Metadata = serde_json::from_str(&content)?; + + Ok(metadata) + } +} diff --git a/compiler-compatibility/registry/src/types.rs b/compiler-compatibility/registry/src/types.rs new file mode 100644 index 000000000..48b382417 --- /dev/null +++ b/compiler-compatibility/registry/src/types.rs @@ -0,0 +1,80 @@ +//! PureScript Registry schema types. +//! +//! These types are schema-complete to match the registry format. +//! Some fields may be unused by specific consumers. 
+#![allow(dead_code)] + +use serde::Deserialize; +use std::collections::HashMap; + +#[derive(Debug, Clone, Deserialize)] +pub struct PackageSet { + pub version: String, + pub compiler: String, + pub published: String, + #[serde(flatten)] + pub packages: PackageSetPackages, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct PackageSetPackages { + pub packages: HashMap, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct Manifest { + pub name: String, + pub version: semver::Version, + pub license: String, + #[serde(default)] + pub description: Option, + pub location: Location, + #[serde(default)] + pub dependencies: HashMap, +} + +#[derive(Debug, Clone, Deserialize)] +#[serde(untagged)] +pub enum Location { + GitHub { + #[serde(rename = "githubOwner")] + owner: String, + #[serde(rename = "githubRepo")] + repo: String, + subdir: Option, + }, + Git { + #[serde(rename = "gitUrl")] + url: String, + subdir: Option, + }, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct Metadata { + pub location: Location, + #[serde(default)] + pub owners: serde_json::Value, + pub published: HashMap, + #[serde(default)] + pub unpublished: HashMap, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct PublishedVersion { + pub hash: String, + pub ref_: Option, + pub bytes: Option, + #[serde(rename = "publishedTime")] + pub published_time: String, +} + +#[derive(Debug, Clone, Deserialize)] +pub struct UnpublishedVersion { + pub ref_: Option, + pub reason: String, + #[serde(rename = "publishedTime")] + pub published_time: String, + #[serde(rename = "unpublishedTime")] + pub unpublished_time: String, +} diff --git a/compiler-core/AGENTS.md b/compiler-core/AGENTS.md new file mode 100644 index 000000000..d27bc88ed --- /dev/null +++ b/compiler-core/AGENTS.md @@ -0,0 +1,33 @@ +## Architecture + +The compiler core is split into components with well-defined responsibilities, designed for +transparency (editor introspection) and compatibility with query-based incremental builds. 
+ +### Pipeline Components + +The component names listed below are crate names in this workspace. + +- **lexing**: tokenization and the layout algorithm +- **parsing**: parsing into a rowan-based CST +- **syntax**: types for the rowan-based CST +- **sugar**: syntax desugaring (e.g., operator bracketing) +- **lowering**: core semantic representation, name resolution +- **indexing**: high-level relationships between module items +- **resolving**: name-indexed interface for module items +- **stabilizing**: assigns stable IDs to source ranges +- **checking**: type checking and elaboration +- **diagnostics**: error collection and rendering for LSP and tests + +### Infrastructure + +- **building**: query-based parallel build system +- **building-types**: shared type definitions +- **files**: virtual file system +- **interner**: generic interner implementation +- **prim-constants**: primitive type constants + +## Key Concepts + +- Uses rust-analyzer/rowan, a lossless syntax tree library inspired by Swift's libsyntax +- Query-based incremental builds (not traditional phase-based) +- Interning and arena allocation enable better caching (e.g., whitespace changes don't invalidate type checking) diff --git a/compiler-core/CLAUDE.md b/compiler-core/CLAUDE.md new file mode 120000 index 000000000..47dc3e3d8 --- /dev/null +++ b/compiler-core/CLAUDE.md @@ -0,0 +1 @@ +AGENTS.md \ No newline at end of file diff --git a/compiler-core/checking/Cargo.toml b/compiler-core/checking/Cargo.toml index e38cfe9e2..e3cf5c0d9 100644 --- a/compiler-core/checking/Cargo.toml +++ b/compiler-core/checking/Cargo.toml @@ -24,3 +24,4 @@ rustc-hash = "2.1.1" smol_str = "0.3.4" stabilizing = { version = "0.1.0", path = "../stabilizing" } sugar = { version = "0.1.0", path = "../sugar" } +syntax = { version = "0.1.0", path = "../syntax" } diff --git a/compiler-core/checking/src/algorithm.rs b/compiler-core/checking/src/algorithm.rs index 351784b4a..ff25ed9d4 100644 --- 
a/compiler-core/checking/src/algorithm.rs +++ b/compiler-core/checking/src/algorithm.rs @@ -15,6 +15,8 @@ pub mod constraint; /// Implements type class deriving. pub mod derive; +pub mod exhaustiveness; + /// Implements type folding for traversals that modify. pub mod fold; @@ -45,6 +47,9 @@ pub mod substitute; /// Implements type inference and checking for [`lowering::ExpressionKind`]. pub mod term; +/// Implements equation checking and inference shared by value and let bindings. +pub mod equation; + /// Shared utilities for common type manipulation patterns. pub mod toolkit; @@ -73,65 +78,31 @@ use crate::core::{Role, Type, TypeId}; use crate::{CheckedModule, ExternalQueries}; pub fn check_source(queries: &impl ExternalQueries, file_id: FileId) -> QueryResult { - let mut state = state::CheckState::default(); + let mut state = state::CheckState::new(file_id); let context = state::CheckContext::new(queries, &mut state, file_id)?; - check_type_signatures(&mut state, &context)?; - check_type_definitions(&mut state, &context)?; + check_types(&mut state, &context)?; + type_item::commit_pending_types(&mut state, &context); check_term_signatures(&mut state, &context)?; check_instance_heads(&mut state, &context)?; - check_derive_heads(&mut state, &context)?; + let derive_results = check_derive_heads(&mut state, &context)?; check_value_groups(&mut state, &context)?; check_instance_members(&mut state, &context)?; + check_derive_members(&mut state, &context, &derive_results)?; + + term_item::commit_pending_terms(&mut state, &context); Ok(state.checked) } -/// See [`type_item::check_type_signature`] -/// -/// Kind signatures are acyclic, and can be checked separately from the -/// type definitions. Checking these early adds better information for -/// inference, especially for mutually recursive type declarations. -/// -/// Consider the following example: +/// Checks all type declarations in topological order. 
/// -/// ```purescript -/// data F a = MkF (G a) -/// -/// data G :: Int -> Type -/// data G a = MkG (F a) -/// ``` -/// -/// By checking the kind signature of `G` first, we can avoid allocating -/// a unification variable for `G` when checking the mutually recursive -/// declarations of `{F, G}` -fn check_type_signatures( - state: &mut state::CheckState, - context: &state::CheckContext, -) -> QueryResult<()> -where - Q: ExternalQueries, -{ - for scc in &context.grouped.type_scc { - let items = match scc { - Scc::Base(id) | Scc::Recursive(id) => slice::from_ref(id), - Scc::Mutual(items) => items, - }; - for id in items { - type_item::check_type_signature(state, context, *id)?; - } - } - Ok(()) -} - -/// See [`type_item::check_type_item`] +/// Within [`Scc::Mutual`], kind signatures are checked first so that items with +/// signatures provide better information when inferring items with no signatures. /// -/// This function calls [`state::CheckState::with_type_group`] to insert -/// placeholder unification variables for recursive binding groups. After -/// checking a binding group, it calls [`type_item::commit_type_item`] to -/// generalise the types and add them to [`state::CheckState::checked`]. -fn check_type_definitions( +/// See [`type_item::check_type_signature`] and [`type_item::check_type_item`]. +fn check_types( state: &mut state::CheckState, context: &state::CheckContext, ) -> QueryResult<()> @@ -141,11 +112,13 @@ where for scc in &context.grouped.type_scc { match scc { Scc::Base(id) => { + type_item::check_type_signature(state, context, *id)?; if let Some(item) = type_item::check_type_item(state, context, *id)? { type_item::commit_type_item(state, context, *id, item)?; } } Scc::Recursive(id) => { + type_item::check_type_signature(state, context, *id)?; state.with_type_group(context, [*id], |state| { if let Some(item) = type_item::check_type_item(state, context, *id)? 
{ type_item::commit_type_item(state, context, *id, item)?; @@ -154,6 +127,9 @@ where })?; } Scc::Mutual(mutual) => { + for id in mutual { + type_item::check_type_signature(state, context, *id)?; + } state.with_type_group(context, mutual, |state| { let mut items = vec![]; for &id in mutual { @@ -277,7 +253,7 @@ where fn check_derive_heads( state: &mut state::CheckState, context: &state::CheckContext, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -286,6 +262,8 @@ where Scc::Mutual(items) => items.as_slice(), }); + let mut results = vec![]; + for &item_id in items { let Some(TermItemIr::Derive { newtype, constraints, arguments, resolution }) = context.lowered.info.get_term_item(item_id) @@ -311,9 +289,25 @@ where is_newtype: *newtype, }; - derive::check_derive(state, context, check_derive)?; + if let Some(result) = derive::check_derive_head(state, context, check_derive)? { + results.push(result); + } } + Ok(results) +} + +fn check_derive_members( + state: &mut state::CheckState, + context: &state::CheckContext, + derive_results: &[derive::DeriveHeadResult], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + for result in derive_results { + derive::check_derive_member(state, context, result)?; + } Ok(()) } @@ -341,10 +335,11 @@ where }; state.with_term_group(context, [*id], |state| { if let Some(item) = term_item::check_value_group(state, context, value_group)? 
{ - term_item::commit_value_group(state, context, *id, item)?; + term_item::commit_inferred_value_group(state, context, *id, item)?; } Ok(()) })?; + term_item::commit_checked_value_group(state, context, *id)?; } Scc::Mutual(mutual) => { let value_groups = @@ -373,13 +368,17 @@ where } } for (item_id, group) in groups { - term_item::commit_value_group(state, context, item_id, group)?; + term_item::commit_inferred_value_group(state, context, item_id, group)?; } Ok(()) })?; - for value_group in with_signature { - term_item::check_value_group(state, context, value_group)?; + for value_group in &with_signature { + term_item::check_value_group(state, context, *value_group)?; + } + + for value_group in &with_signature { + term_item::commit_checked_value_group(state, context, value_group.item_id)?; } } } @@ -396,6 +395,11 @@ pub fn check_prim(queries: &impl ExternalQueries, file_id: FileId) -> QueryResul prim_type.unwrap_or_else(|| unreachable!("invariant violated: {name} not in Prim")) }; + let lookup_class = |name: &str| { + let prim_class = resolved.exports.lookup_class(name); + prim_class.unwrap_or_else(|| unreachable!("invariant violated: {name} not in Prim")) + }; + let type_core = { let (file_id, item_id) = lookup_type("Type"); queries.intern_type(Type::Constructor(file_id, item_id)) @@ -431,11 +435,13 @@ pub fn check_prim(queries: &impl ExternalQueries, file_id: FileId) -> QueryResul insert_type("String", type_core); insert_type("Char", type_core); insert_type("Boolean", type_core); - insert_type("Partial", constraint_core); insert_type("Constraint", type_core); insert_type("Symbol", type_core); insert_type("Row", type_to_type_core); + let (_, partial_id) = lookup_class("Partial"); + checked_module.types.insert(partial_id, constraint_core); + let mut insert_roles = |name: &str, roles: &[Role]| { let (_, item_id) = lookup_type(name); checked_module.roles.insert(item_id, Arc::from(roles)); diff --git a/compiler-core/checking/src/algorithm/binder.rs 
b/compiler-core/checking/src/algorithm/binder.rs index abc8bdc21..81cecc846 100644 --- a/compiler-core/checking/src/algorithm/binder.rs +++ b/compiler-core/checking/src/algorithm/binder.rs @@ -1,16 +1,20 @@ +use std::sync::Arc; + use building_types::QueryResult; +use itertools::{EitherOrBoth, Itertools}; use smol_str::SmolStr; use crate::ExternalQueries; use crate::algorithm::state::{CheckContext, CheckState}; +use crate::algorithm::unification::ElaborationMode; use crate::algorithm::{binder, kind, operator, term, toolkit, unification}; use crate::core::{RowField, RowType, Type, TypeId}; -use crate::error::ErrorStep; +use crate::error::{ErrorKind, ErrorStep}; #[derive(Copy, Clone, Debug)] enum BinderMode { Infer, - Check { expected_type: TypeId }, + Check { expected_type: TypeId, elaboration: ElaborationMode }, } pub fn infer_binder( @@ -35,11 +39,49 @@ pub fn check_binder( where Q: ExternalQueries, { + let elaboration = ElaborationMode::Yes; state.with_error_step(ErrorStep::CheckingBinder(binder_id), |state| { - binder_core(state, context, binder_id, BinderMode::Check { expected_type }) + binder_core(state, context, binder_id, BinderMode::Check { expected_type, elaboration }) }) } +pub fn check_argument_binder( + state: &mut CheckState, + context: &CheckContext, + binder_id: lowering::BinderId, + expected_type: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let elaboration = ElaborationMode::No; + state.with_error_step(ErrorStep::CheckingBinder(binder_id), |state| { + binder_core(state, context, binder_id, BinderMode::Check { expected_type, elaboration }) + }) +} + +pub fn requires_instantiation(context: &CheckContext, binder_id: lowering::BinderId) -> bool +where + Q: ExternalQueries, +{ + let Some(kind) = context.lowered.info.get_binder_kind(binder_id) else { + return false; + }; + match kind { + lowering::BinderKind::Variable { .. } | lowering::BinderKind::Wildcard => false, + lowering::BinderKind::Named { binder, .. 
} => { + binder.is_some_and(|id| requires_instantiation(context, id)) + } + lowering::BinderKind::Parenthesized { parenthesized } => { + parenthesized.is_some_and(|id| requires_instantiation(context, id)) + } + lowering::BinderKind::Typed { binder, .. } => { + binder.is_some_and(|id| requires_instantiation(context, id)) + } + _ => true, + } +} + fn binder_core( state: &mut CheckState, context: &CheckContext, @@ -61,10 +103,17 @@ where let Some(t) = type_ else { return Ok(unknown) }; let (t, _) = kind::infer_surface_kind(state, context, *t)?; - check_binder(state, context, *b, t)?; + match mode { + BinderMode::Check { elaboration: ElaborationMode::No, .. } => { + check_argument_binder(state, context, *b, t)?; + } + _ => { + check_binder(state, context, *b, t)?; + } + } - if let BinderMode::Check { expected_type } = mode { - unification::subtype(state, context, t, expected_type)?; + if let BinderMode::Check { expected_type, elaboration } = mode { + unification::subtype_with_mode(state, context, t, expected_type, elaboration)?; } Ok(t) @@ -73,27 +122,33 @@ where lowering::BinderKind::OperatorChain { .. } => { let (_, inferred_type) = operator::infer_operator_chain(state, context, binder_id)?; - if let BinderMode::Check { expected_type } = mode { - unification::subtype(state, context, inferred_type, expected_type)?; + if let BinderMode::Check { expected_type, elaboration } = mode { + unification::subtype_with_mode( + state, + context, + inferred_type, + expected_type, + elaboration, + )?; } Ok(inferred_type) } - lowering::BinderKind::Integer => { + lowering::BinderKind::Integer { .. } => { let inferred_type = context.prim.int; - if let BinderMode::Check { expected_type } = mode { + if let BinderMode::Check { expected_type, .. } = mode { unification::unify(state, context, inferred_type, expected_type)?; } Ok(inferred_type) } - lowering::BinderKind::Number => { + lowering::BinderKind::Number { .. 
} => { let inferred_type = context.prim.number; - if let BinderMode::Check { expected_type } = mode { + if let BinderMode::Check { expected_type, .. } = mode { unification::unify(state, context, inferred_type, expected_type)?; } @@ -109,7 +164,7 @@ where // Non-nullary constructors are instantiated during application. let inferred_type = if arguments.is_empty() { constructor_t = toolkit::instantiate_forall(state, constructor_t); - toolkit::collect_constraints(state, constructor_t) + toolkit::collect_wanteds(state, constructor_t) } else { for &argument in arguments.iter() { constructor_t = binder::check_constructor_binder_application( @@ -122,8 +177,14 @@ where constructor_t }; - if let BinderMode::Check { expected_type } = mode { - unification::subtype(state, context, inferred_type, expected_type)?; + if let BinderMode::Check { expected_type, elaboration } = mode { + unification::subtype_with_mode( + state, + context, + inferred_type, + expected_type, + elaboration, + )?; Ok(expected_type) } else { Ok(inferred_type) @@ -133,7 +194,7 @@ where lowering::BinderKind::Variable { .. } => { let type_id = match mode { BinderMode::Infer => state.fresh_unification_type(context), - BinderMode::Check { expected_type } => expected_type, + BinderMode::Check { expected_type, .. } => expected_type, }; state.term_scope.bind_binder(binder_id, type_id); Ok(type_id) @@ -144,9 +205,12 @@ where let type_id = match mode { BinderMode::Infer => infer_binder(state, context, *binder)?, - BinderMode::Check { expected_type } => { - check_binder(state, context, *binder, expected_type)? - } + BinderMode::Check { expected_type, elaboration } => match elaboration { + ElaborationMode::Yes => check_binder(state, context, *binder, expected_type)?, + ElaborationMode::No => { + check_argument_binder(state, context, *binder, expected_type)? 
+ } + }, }; state.term_scope.bind_binder(binder_id, type_id); @@ -155,23 +219,23 @@ where lowering::BinderKind::Wildcard => match mode { BinderMode::Infer => Ok(state.fresh_unification_type(context)), - BinderMode::Check { expected_type } => Ok(expected_type), + BinderMode::Check { expected_type, .. } => Ok(expected_type), }, - lowering::BinderKind::String => { + lowering::BinderKind::String { .. } => { let inferred_type = context.prim.string; - if let BinderMode::Check { expected_type } = mode { + if let BinderMode::Check { expected_type, .. } = mode { unification::unify(state, context, inferred_type, expected_type)?; } Ok(inferred_type) } - lowering::BinderKind::Char => { + lowering::BinderKind::Char { .. } => { let inferred_type = context.prim.char; - if let BinderMode::Check { expected_type } = mode { + if let BinderMode::Check { expected_type, .. } = mode { unification::unify(state, context, inferred_type, expected_type)?; } @@ -181,7 +245,7 @@ where lowering::BinderKind::Boolean { .. } => { let inferred_type = context.prim.boolean; - if let BinderMode::Check { expected_type } = mode { + if let BinderMode::Check { expected_type, .. 
} = mode { unification::unify(state, context, inferred_type, expected_type)?; } @@ -193,73 +257,242 @@ where for binder in array.iter() { let binder_type = infer_binder(state, context, *binder)?; - unification::subtype(state, context, binder_type, element_type)?; + unification::subtype_with_mode( + state, + context, + binder_type, + element_type, + ElaborationMode::No, + )?; } let array_type = state.storage.intern(Type::Application(context.prim.array, element_type)); - if let BinderMode::Check { expected_type } = mode { - unification::subtype(state, context, array_type, expected_type)?; + if let BinderMode::Check { expected_type, elaboration } = mode { + unification::subtype_with_mode( + state, + context, + array_type, + expected_type, + elaboration, + )?; } Ok(array_type) } lowering::BinderKind::Record { record } => { - let mut fields = vec![]; + if let BinderMode::Check { expected_type, elaboration } = mode { + check_record_binder(state, context, binder_id, record, expected_type, elaboration) + } else { + infer_record_binder(state, context, binder_id, record) + } + } - for field in record.iter() { - match field { - lowering::BinderRecordItem::RecordField { name, value } => { - let Some(name) = name else { continue }; - let Some(value) = value else { continue }; + lowering::BinderKind::Parenthesized { parenthesized } => { + let Some(parenthesized) = parenthesized else { return Ok(unknown) }; + binder_core(state, context, *parenthesized, mode) + } + } +} - let label = SmolStr::clone(name); - let id = infer_binder(state, context, *value)?; +fn check_constructor_binder_application( + state: &mut CheckState, + context: &CheckContext, + constructor_t: TypeId, + binder_id: lowering::BinderId, +) -> QueryResult +where + Q: ExternalQueries, +{ + term::check_function_application_core(state, context, constructor_t, binder_id, check_binder) +} - fields.push(RowField { label, id }); - } - lowering::BinderRecordItem::RecordPun { id, name } => { - let Some(name) = name else { 
continue }; +enum PatternItem { + Field(lowering::BinderId), + Pun(lowering::RecordPunId), +} - let label = SmolStr::clone(name); - let field_type = state.fresh_unification_type(context); - state.term_scope.bind_pun(*id, field_type); +fn collect_pattern_items(record: &[lowering::BinderRecordItem]) -> Vec<(SmolStr, PatternItem)> { + let mut items = vec![]; + for field in record { + match field { + lowering::BinderRecordItem::RecordField { name, value } => { + let Some(name) = name else { continue }; + let Some(value) = value else { continue }; + let name = SmolStr::clone(name); + items.push((name, PatternItem::Field(*value))); + } + lowering::BinderRecordItem::RecordPun { id, name } => { + let Some(name) = name else { continue }; + let name = SmolStr::clone(name); + items.push((name, PatternItem::Pun(*id))); + } + } + } + items.sort_by(|a, b| a.0.cmp(&b.0)); + items +} - fields.push(RowField { label, id: field_type }); - } - } +fn check_pattern_item( + state: &mut CheckState, + context: &CheckContext, + item: &PatternItem, + expected_type: TypeId, + elaboration: ElaborationMode, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + match *item { + PatternItem::Field(binder_id) => match elaboration { + ElaborationMode::Yes => { + check_binder(state, context, binder_id, expected_type)?; } + ElaborationMode::No => { + check_argument_binder(state, context, binder_id, expected_type)?; + } + }, + PatternItem::Pun(pun_id) => { + state.term_scope.bind_pun(pun_id, expected_type); + } + } + Ok(()) +} - let row_type = RowType::from_unsorted(fields, None); - let row_type = state.storage.intern(Type::Row(row_type)); +fn infer_record_binder( + state: &mut CheckState, + context: &CheckContext, + binder_id: lowering::BinderId, + record: &[lowering::BinderRecordItem], +) -> QueryResult +where + Q: ExternalQueries, +{ + let mut fields = vec![]; - let record_type = - state.storage.intern(Type::Application(context.prim.record, row_type)); + for field in record.iter() { + match field 
{ + lowering::BinderRecordItem::RecordField { name, value } => { + let Some(name) = name else { continue }; + let Some(value) = value else { continue }; - if let BinderMode::Check { expected_type } = mode { - unification::subtype(state, context, record_type, expected_type)?; + let label = SmolStr::clone(name); + let id = infer_binder(state, context, *value)?; + fields.push(RowField { label, id }); } + lowering::BinderRecordItem::RecordPun { id, name } => { + let Some(name) = name else { continue }; - Ok(record_type) - } + let label = SmolStr::clone(name); + let field_type = state.fresh_unification_type(context); - lowering::BinderKind::Parenthesized { parenthesized } => { - let Some(parenthesized) = parenthesized else { return Ok(unknown) }; - binder_core(state, context, *parenthesized, mode) + state.term_scope.bind_pun(*id, field_type); + fields.push(RowField { label, id: field_type }); + } } } + + let row_tail = state.fresh_unification_kinded(context.prim.row_type); + let row_type = RowType::from_unsorted(fields, Some(row_tail)); + let row_type = state.storage.intern(Type::Row(row_type)); + let record_type = state.storage.intern(Type::Application(context.prim.record, row_type)); + + state.term_scope.bind_binder(binder_id, record_type); + Ok(record_type) } -pub fn check_constructor_binder_application( +fn extract_expected_row( + state: &mut CheckState, + context: &CheckContext, + expected_type: TypeId, +) -> QueryResult> +where + Q: ExternalQueries, +{ + let expected_type = toolkit::normalise_expand_type(state, context, expected_type)?; + let &Type::Application(function, argument) = &state.storage[expected_type] else { + return Ok(None); + }; + if function != context.prim.record { + return Ok(None); + } + let row = toolkit::normalise_expand_type(state, context, argument)?; + let Type::Row(row) = &state.storage[row] else { + return Ok(None); + }; + Ok(Some(RowType::clone(row))) +} + +fn check_record_binder( state: &mut CheckState, context: &CheckContext, - 
constructor_t: TypeId, binder_id: lowering::BinderId, + record: &[lowering::BinderRecordItem], + expected_type: TypeId, + elaboration: ElaborationMode, ) -> QueryResult where Q: ExternalQueries, { - term::check_function_application_core(state, context, constructor_t, binder_id, check_binder) + let pattern_items = collect_pattern_items(record); + + let expected_type = toolkit::normalise_expand_type(state, context, expected_type)?; + + let expected_row = if let &Type::Application(function, _) = &state.storage[expected_type] + && function == context.prim.record + { + extract_expected_row(state, context, expected_type)? + } else { + None + }; + + let Some(expected_row) = expected_row else { + let result = infer_record_binder(state, context, binder_id, record)?; + unification::unify(state, context, result, expected_type)?; + return Ok(expected_type); + }; + + let mut extra_fields = vec![]; + + let patterns = pattern_items.iter(); + let expected = expected_row.fields.iter(); + + for pair in patterns.merge_join_by(expected, |pattern, expected| pattern.0.cmp(&expected.label)) + { + match pair { + // If a label exists in both, perform checking + EitherOrBoth::Both((_, item), expected) => { + check_pattern_item(state, context, item, expected.id, elaboration)?; + } + // If a label only exists in the pattern, track it + EitherOrBoth::Left((label, item)) => { + let id = state.fresh_unification_type(context); + check_pattern_item(state, context, item, id, elaboration)?; + + let label = SmolStr::clone(label); + extra_fields.push(RowField { label, id }); + } + // If a label only exists in the type, do nothing + EitherOrBoth::Right(_) => (), + } + } + + if !extra_fields.is_empty() { + if let Some(tail) = expected_row.tail { + let row_tail = state.fresh_unification_kinded(context.prim.row_type); + + let row_type = RowType::from_unsorted(extra_fields, Some(row_tail)); + let row_type = state.storage.intern(Type::Row(row_type)); + + unification::unify(state, context, tail, row_type)?; 
+ } else { + let labels = extra_fields.into_iter().map(|field| field.label); + state.insert_error(ErrorKind::AdditionalProperty { labels: Arc::from_iter(labels) }); + } + } + + state.term_scope.bind_binder(binder_id, expected_type); + Ok(expected_type) } diff --git a/compiler-core/checking/src/algorithm/constraint.rs b/compiler-core/checking/src/algorithm/constraint.rs index 671fb0254..21f5f89d9 100644 --- a/compiler-core/checking/src/algorithm/constraint.rs +++ b/compiler-core/checking/src/algorithm/constraint.rs @@ -5,11 +5,11 @@ mod compiler_solved; mod functional_dependency; use compiler_solved::*; -use functional_dependency::Fd; +use functional_dependency::{Fd, get_all_determined}; use std::collections::{HashSet, VecDeque}; -use std::iter; use std::sync::Arc; +use std::{iter, mem}; use building_types::QueryResult; use files::FileId; @@ -18,10 +18,13 @@ use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use crate::algorithm::fold::{FoldAction, TypeFold, fold_type}; +use crate::algorithm::state::implication::ImplicationId; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::visit::{CollectFileReferences, TypeVisitor, VisitAction, visit_type}; -use crate::algorithm::{toolkit, transfer, unification}; -use crate::core::{Class, Instance, InstanceKind, Variable, debruijn}; +use crate::algorithm::visit::{ + CollectFileReferences, HasLabeledRole, TypeVisitor, VisitAction, visit_type, +}; +use crate::algorithm::{substitute, toolkit, transfer, unification}; +use crate::core::{self, Class, Instance, InstanceKind, Name, Variable}; use crate::{CheckedModule, ExternalQueries, Type, TypeId}; #[tracing::instrument(skip_all, name = "solve_constraints")] @@ -29,7 +32,7 @@ pub fn solve_constraints( state: &mut CheckState, context: &CheckContext, wanted: VecDeque, - given: Vec, + given: &[TypeId], ) -> QueryResult> where Q: ExternalQueries, @@ -119,6 +122,72 @@ where Ok(residual) } +#[tracing::instrument(skip_all, name = 
"solve_implication")] +pub fn solve_implication( + state: &mut CheckState, + context: &CheckContext, +) -> QueryResult> +where + Q: ExternalQueries, +{ + let implication = state.implications.current(); + solve_implication_id(state, context, implication, &[]) +} + +/// Recursively solves an implication and its children. +fn solve_implication_id( + state: &mut CheckState, + context: &CheckContext, + implication: ImplicationId, + inherited: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ + let (wanted, given, children) = { + let node = &mut state.implications[implication]; + (mem::take(&mut node.wanted), mem::take(&mut node.given), node.children.clone()) + }; + + let all_given = { + let inherited = inherited.iter().copied(); + let given = given.iter().copied(); + inherited.chain(given).collect_vec() + }; + + crate::debug_fields!(state, context, { + ?implication = implication, + ?wanted = wanted.len(), + ?given = given.len(), + ?inherited = inherited.len(), + ?children = children.len(), + }); + + // Solve this implication's children with all_given. + for child in &children { + let residual = solve_implication_id(state, context, *child, &all_given)?; + + crate::debug_fields!(state, context, { + ?child = child, + ?residual = residual.len(), + }); + + // TODO: partition_by_skolem_escape once skolems are introduced. + state.implications[implication].wanted.extend(residual); + } + + // Solve this implication's wanted constraints with all_given. 
+ let remaining = mem::take(&mut state.implications[implication].wanted); + let wanted: VecDeque = wanted.into_iter().chain(remaining).collect(); + let residuals = solve_constraints(state, context, wanted, &all_given)?; + + let implication = &mut state.implications[implication]; + implication.given = given; + implication.wanted = Vec::clone(&residuals).into(); + + Ok(residuals) +} + #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub(crate) struct ConstraintApplication { pub(crate) file_id: FileId, @@ -142,19 +211,41 @@ pub(crate) fn constraint_application( fn elaborate_given( state: &mut CheckState, context: &CheckContext, - given: Vec, + given: &[TypeId], ) -> QueryResult> where Q: ExternalQueries, { let mut elaborated = vec![]; - for constraint in given { + for &constraint in given { elaborated.push(constraint); elaborate_superclasses(state, context, constraint, &mut elaborated)?; } - Ok(elaborated.into_iter().filter_map(|given| constraint_application(state, given)).collect()) + let applications = + elaborated.into_iter().filter_map(|given| constraint_application(state, given)); + let mut applications = applications.collect_vec(); + + let is_coercible = |file_id, item_id| { + context.prim_coerce.file_id == file_id && context.prim_coerce.coercible == item_id + }; + + // For coercible applications, also elaborate into symmetric versions. + let symmetric = applications.iter().filter_map(|application| { + let is_coercible = is_coercible(application.file_id, application.item_id); + let &[left, right] = application.arguments.as_slice() else { return None }; + is_coercible.then(|| ConstraintApplication { + file_id: application.file_id, + item_id: application.item_id, + arguments: vec![right, left], + }) + }); + + let reversed = symmetric.collect_vec(); + applications.extend(reversed); + + Ok(applications) } /// Discovers superclass constraints for a given constraint. 
@@ -195,11 +286,9 @@ where return Ok(()); } - let initial_level = class_info.quantified_variables.0 + class_info.kind_variables.0; - let mut bindings = FxHashMap::default(); - for (index, &argument) in arguments.iter().enumerate() { - let level = debruijn::Level(initial_level + index as u32); - bindings.insert(level, argument); + let mut bindings: substitute::NameToType = FxHashMap::default(); + for (name, &argument) in class_info.type_variable_names.iter().zip(arguments.iter()) { + bindings.insert(name.clone(), argument); } for &(superclass, _) in class_info.superclasses.iter() { @@ -298,6 +387,7 @@ where Class { superclasses, type_variable_kinds, + type_variable_names: class.type_variable_names.clone(), quantified_variables: class.quantified_variables, kind_variables: class.kind_variables, } @@ -461,13 +551,17 @@ enum MatchInstance { /// We use the [`can_unify`] function to speculate if these two types can be /// unified, or if unifying them solves unification variables, encoded by the /// [`CanUnify::Unify`] variant. 
-fn match_type( +fn match_type( state: &mut CheckState, - bindings: &mut FxHashMap, + context: &CheckContext, + bindings: &mut FxHashMap, equalities: &mut Vec<(TypeId, TypeId)>, wanted: TypeId, given: TypeId, -) -> MatchType { +) -> MatchType +where + Q: ExternalQueries, +{ let wanted = state.normalize_type(wanted); let given = state.normalize_type(given); @@ -479,8 +573,8 @@ fn match_type( let given_core = &state.storage[given]; match (wanted_core, given_core) { - (_, Type::Variable(Variable::Bound(level, _))) => { - if let Some(&bound) = bindings.get(level) { + (_, Type::Variable(Variable::Bound(name, _))) => { + if let Some(&bound) = bindings.get(name) { match can_unify(state, wanted, bound) { CanUnify::Equal => MatchType::Match, CanUnify::Apart => MatchType::Apart, @@ -490,37 +584,55 @@ fn match_type( } } } else { - bindings.insert(*level, wanted); + bindings.insert(name.clone(), wanted); MatchType::Match } } (Type::Unification(_), _) => MatchType::Stuck, + (Type::Row(wanted_row), Type::Row(given_row)) => { + let wanted_row = wanted_row.clone(); + let given_row = given_row.clone(); + match_row_type(state, context, bindings, equalities, wanted_row, given_row) + } + ( &Type::Application(w_function, w_argument), &Type::Application(g_function, g_argument), - ) => match_type(state, bindings, equalities, w_function, g_function) - .and_also(|| match_type(state, bindings, equalities, w_argument, g_argument)), + ) => match_type(state, context, bindings, equalities, w_function, g_function) + .and_also(|| match_type(state, context, bindings, equalities, w_argument, g_argument)), (&Type::Function(w_argument, w_result), &Type::Function(g_argument, g_result)) => { - match_type(state, bindings, equalities, w_argument, g_argument) - .and_also(|| match_type(state, bindings, equalities, w_result, g_result)) + match_type(state, context, bindings, equalities, w_argument, g_argument) + .and_also(|| match_type(state, context, bindings, equalities, w_result, g_result)) + } + + 
(&Type::Function(w_argument, w_result), &Type::Application(_, _)) => { + let wanted = state.storage.intern(Type::Application(context.prim.function, w_argument)); + let wanted = state.storage.intern(Type::Application(wanted, w_result)); + match_type(state, context, bindings, equalities, wanted, given) + } + + (&Type::Application(_, _), &Type::Function(g_argument, g_result)) => { + let given = state.storage.intern(Type::Application(context.prim.function, g_argument)); + let given = state.storage.intern(Type::Application(given, g_result)); + match_type(state, context, bindings, equalities, wanted, given) } ( &Type::KindApplication(w_function, w_argument), &Type::KindApplication(g_function, g_argument), - ) => match_type(state, bindings, equalities, w_function, g_function) - .and_also(|| match_type(state, bindings, equalities, w_argument, g_argument)), + ) => match_type(state, context, bindings, equalities, w_function, g_function) + .and_also(|| match_type(state, context, bindings, equalities, w_argument, g_argument)), ( &Type::OperatorApplication(f1, t1, l1, r1), &Type::OperatorApplication(f2, t2, l2, r2), ) => { if f1 == f2 && t1 == t2 { - match_type(state, bindings, equalities, l1, l2) - .and_also(|| match_type(state, bindings, equalities, r1, r2)) + match_type(state, context, bindings, equalities, l1, l2) + .and_also(|| match_type(state, context, bindings, equalities, r1, r2)) } else { MatchType::Apart } @@ -531,7 +643,7 @@ fn match_type( let a1 = Arc::clone(a1); let a2 = Arc::clone(a2); iter::zip(a1.iter(), a2.iter()).fold(MatchType::Match, |result, (&a1, &a2)| { - result.and_also(|| match_type(state, bindings, equalities, a1, a2)) + result.and_also(|| match_type(state, context, bindings, equalities, a1, a2)) }) } else { MatchType::Apart @@ -542,13 +654,122 @@ fn match_type( } } +/// Matches row types in instance heads. 
+/// +/// This function handles structural row matching for both the tail variable +/// form `( | r )` in determiner positions and labeled rows in determined +/// positions `( x :: T | r )`. This function partitions the two row types, +/// matches the shared fields, and handles the row tail. +fn match_row_type( + state: &mut CheckState, + context: &CheckContext, + bindings: &mut FxHashMap, + equalities: &mut Vec<(TypeId, TypeId)>, + wanted_row: core::RowType, + given_row: core::RowType, +) -> MatchType +where + Q: ExternalQueries, +{ + let mut wanted_only = vec![]; + let mut given_only = vec![]; + let mut result = MatchType::Match; + + let wanted_fields = wanted_row.fields.iter(); + let given_fields = given_row.fields.iter(); + + for field in itertools::merge_join_by(wanted_fields, given_fields, |wanted, given| { + wanted.label.cmp(&given.label) + }) { + match field { + itertools::EitherOrBoth::Both(wanted, given) => { + result = result.and_also(|| { + match_type(state, context, bindings, equalities, wanted.id, given.id) + }); + // Given an open wanted row, additional fields from the + // given row can be absorbed into the wanted row's tail. 
+ if matches!(result, MatchType::Apart) && wanted_row.tail.is_none() { + return MatchType::Apart; + } + } + itertools::EitherOrBoth::Left(wanted) => wanted_only.push(wanted), + itertools::EitherOrBoth::Right(given) => given_only.push(given), + } + } + + enum RowRest { + /// `( a :: Int )` and `( a :: Int | r )` + Additional, + /// `( | r )` + Open(TypeId), + /// `( )` + Closed, + } + + impl RowRest { + fn new(only: &[&core::RowField], tail: Option) -> RowRest { + if !only.is_empty() { + RowRest::Additional + } else if let Some(tail) = tail { + RowRest::Open(tail) + } else { + RowRest::Closed + } + } + } + + let given_rest = RowRest::new(&given_only, given_row.tail); + let wanted_rest = RowRest::new(&wanted_only, wanted_row.tail); + + use RowRest::*; + + match given_rest { + // If there are additional given fields + Additional => match wanted_rest { + // we cannot match it against a tail-less wanted, + // nor against the additional wanted fields. + Closed | Additional => MatchType::Apart, + // we could potentially make progress by having the + // wanted tail absorb the additional given fields + Open(_) => MatchType::Stuck, + }, + + // If the given row has a tail, match it against the + // additional fields and tail from the wanted row + Open(given_tail) => { + let fields = Arc::from_iter(wanted_only.into_iter().cloned()); + let row = core::RowType { fields, tail: wanted_row.tail }; + let row_id = state.storage.intern(Type::Row(row)); + result.and_also(|| match_type(state, context, bindings, equalities, row_id, given_tail)) + } + + // If we have a closed given row + Closed => match wanted_rest { + // we cannot match it against fields in the wanted row + Additional => MatchType::Apart, + // we could make progress with an open wanted row + Open(_) => MatchType::Stuck, + // we can match it directly with a closed wanted row + Closed => result, + }, + } +} + /// Matches an argument from a wanted constraint to one from a given constraint. 
/// /// This function is specialised for matching given constraints, like those /// found in value signatures rather than top-level instance declarations; /// unlike [`match_type`], this function does not build bindings or equalities /// for [`Variable::Bound`] or [`Variable::Implicit`] variables. -fn match_given_type(state: &mut CheckState, wanted: TypeId, given: TypeId) -> MatchType { +fn match_given_type( + state: &mut CheckState, + context: &CheckContext, + wanted: TypeId, + given: TypeId, +) -> MatchType +where + Q: ExternalQueries, +{ let wanted = state.normalize_type(wanted); let given = state.normalize_type(given); @@ -563,22 +784,22 @@ fn match_given_type(state: &mut CheckState, wanted: TypeId, given: TypeId) -> Ma (Type::Unification(_), _) => MatchType::Stuck, ( - Type::Variable(Variable::Bound(w_level, w_kind)), - Type::Variable(Variable::Bound(g_level, g_kind)), + Type::Variable(Variable::Bound(w_name, w_kind)), + Type::Variable(Variable::Bound(g_name, g_kind)), ) => { - if w_level == g_level { - match_given_type(state, *w_kind, *g_kind) + if w_name == g_name { + match_given_type(state, context, *w_kind, *g_kind) } else { MatchType::Apart } } ( - Type::Variable(Variable::Skolem(w_level, w_kind)), - Type::Variable(Variable::Skolem(g_level, g_kind)), + Type::Variable(Variable::Skolem(w_name, w_kind)), + Type::Variable(Variable::Skolem(g_name, g_kind)), ) => { - if w_level == g_level { - match_given_type(state, *w_kind, *g_kind) + if w_name == g_name { + match_given_type(state, context, *w_kind, *g_kind) } else { MatchType::Apart } @@ -587,26 +808,84 @@ fn match_given_type(state: &mut CheckState, wanted: TypeId, given: TypeId) -> Ma ( &Type::Application(w_function, w_argument), &Type::Application(g_function, g_argument), - ) => match_given_type(state, w_function, g_function) - .and_also(|| match_given_type(state, w_argument, g_argument)), + ) => match_given_type(state, context, w_function, g_function) + .and_also(|| match_given_type(state, context, 
w_argument, g_argument)), (&Type::Function(w_argument, w_result), &Type::Function(g_argument, g_result)) => { - match_given_type(state, w_argument, g_argument) - .and_also(|| match_given_type(state, w_result, g_result)) + match_given_type(state, context, w_argument, g_argument) + .and_also(|| match_given_type(state, context, w_result, g_result)) + } + + (&Type::Function(w_argument, w_result), &Type::Application(_, _)) => { + let wanted = state.storage.intern(Type::Application(context.prim.function, w_argument)); + let wanted = state.storage.intern(Type::Application(wanted, w_result)); + match_given_type(state, context, wanted, given) + } + + (&Type::Application(_, _), &Type::Function(g_argument, g_result)) => { + let given = state.storage.intern(Type::Application(context.prim.function, g_argument)); + let given = state.storage.intern(Type::Application(given, g_result)); + match_given_type(state, context, wanted, given) + } + + (Type::Row(wanted_row), Type::Row(given_row)) => { + if wanted_row.fields.len() != given_row.fields.len() { + return MatchType::Apart; + } + + let wanted_fields = Arc::clone(&wanted_row.fields); + let given_fields = Arc::clone(&given_row.fields); + + let wanted_tail = wanted_row.tail; + let given_tail = given_row.tail; + + let mut result = MatchType::Match; + for (wanted_field, given_field) in iter::zip(wanted_fields.iter(), given_fields.iter()) + { + if wanted_field.label != given_field.label { + return MatchType::Apart; + } + result = result + .and_also(|| match_given_type(state, context, wanted_field.id, given_field.id)); + } + + match (wanted_tail, given_tail) { + (Some(wanted_tail), Some(given_tail)) => { + result.and_also(|| match_given_type(state, context, wanted_tail, given_tail)) + } + (Some(wanted_tail), None) => { + let wanted_tail = state.normalize_type(wanted_tail); + if matches!(state.storage[wanted_tail], Type::Unification(_)) { + result.and_also(|| MatchType::Stuck) + } else { + MatchType::Apart + } + } + (None, 
Some(given_tail)) => { + let given_tail = state.normalize_type(given_tail); + if matches!(state.storage[given_tail], Type::Unification(_)) { + result.and_also(|| MatchType::Stuck) + } else { + MatchType::Apart + } + } + (None, None) => result, + } } ( &Type::KindApplication(w_function, w_argument), &Type::KindApplication(g_function, g_argument), - ) => match_given_type(state, w_function, g_function) - .and_also(|| match_given_type(state, w_argument, g_argument)), + ) => match_given_type(state, context, w_function, g_function) + .and_also(|| match_given_type(state, context, w_argument, g_argument)), ( &Type::OperatorApplication(f1, t1, l1, r1), &Type::OperatorApplication(f2, t2, l2, r2), ) => { if f1 == f2 && t1 == t2 { - match_given_type(state, l1, l2).and_also(|| match_given_type(state, r1, r2)) + match_given_type(state, context, l1, l2) + .and_also(|| match_given_type(state, context, r1, r2)) } else { MatchType::Apart } @@ -617,7 +896,7 @@ fn match_given_type(state: &mut CheckState, wanted: TypeId, given: TypeId) -> Ma let a1 = Arc::clone(a1); let a2 = Arc::clone(a2); iter::zip(a1.iter(), a2.iter()).fold(MatchType::Match, |result, (&a1, &a2)| { - result.and_also(|| match_given_type(state, a1, a2)) + result.and_also(|| match_given_type(state, context, a1, a2)) }) } else { MatchType::Apart @@ -667,6 +946,11 @@ fn can_unify(state: &mut CheckState, t1: TypeId, t2: TypeId) -> CanUnify { .and_also(|| can_unify(state, t1_result, t2_result)) } + // Function(a, b) and Application(Application(f, a), b) can + // unify when `f` resolves to the Function constructor. 
+ (&Type::Function(..), &Type::Application(..)) + | (&Type::Application(..), &Type::Function(..)) => Unify, + ( &Type::KindApplication(t1_function, t1_argument), &Type::KindApplication(t2_function, t2_argument), @@ -683,8 +967,9 @@ fn can_unify(state: &mut CheckState, t1: TypeId, t2: TypeId) -> CanUnify { ) => can_unify(state, t1_constraint, t2_constraint) .and_also(|| can_unify(state, t1_body, t2_body)), - (&Type::Forall(_, t1_body), &Type::Forall(_, t2_body)) => { - can_unify(state, t1_body, t2_body) + (&Type::Forall(ref t1_binder, t1_body), &Type::Forall(ref t2_binder, t2_body)) => { + can_unify(state, t1_binder.kind, t2_binder.kind) + .and_also(|| can_unify(state, t1_body, t2_body)) } ( @@ -718,10 +1003,10 @@ fn can_unify(state: &mut CheckState, t1: TypeId, t2: TypeId) -> CanUnify { } ( - &Type::Variable(Variable::Bound(t1_level, t1_kind)), - &Type::Variable(Variable::Bound(t2_level, t2_kind)), + &Type::Variable(Variable::Bound(ref t1_name, t1_kind)), + &Type::Variable(Variable::Bound(ref t2_name, t2_kind)), ) => { - if t1_level == t2_level { + if t1_name == t2_name { can_unify(state, t1_kind, t2_kind) } else { Apart @@ -729,10 +1014,10 @@ fn can_unify(state: &mut CheckState, t1: TypeId, t2: TypeId) -> CanUnify { } ( - &Type::Variable(Variable::Skolem(t1_level, t1_kind)), - &Type::Variable(Variable::Skolem(t2_level, t2_kind)), + &Type::Variable(Variable::Skolem(ref t1_name, t1_kind)), + &Type::Variable(Variable::Skolem(ref t2_name, t2_kind)), ) => { - if t1_level == t2_level { + if t1_name == t2_name { can_unify(state, t1_kind, t2_kind) } else { Apart @@ -758,13 +1043,13 @@ where let mut bindings = FxHashMap::default(); let mut equalities = vec![]; - let mut match_results = vec![]; let mut stuck_positions = vec![]; for (index, (wanted, (given, _))) in arguments.iter().zip(&instance.arguments).enumerate() { let given = transfer::localize(state, context, *given); - let match_result = match_type(state, &mut bindings, &mut equalities, *wanted, given); + let 
match_result = + match_type(state, context, &mut bindings, &mut equalities, *wanted, given); if matches!(match_result, MatchType::Apart) { crate::trace_fields!(state, context, { ?wanted = wanted, ?given = given }, "apart"); @@ -792,10 +1077,10 @@ where } } - let mut argument_levels = FxHashSet::default(); + let mut argument_names = FxHashSet::default(); for &(argument, _) in &instance.arguments { let localized = transfer::localize(state, context, argument); - CollectBoundLevels::on(state, localized, &mut argument_levels); + CollectBoundNames::on(state, localized, &mut argument_names); } let mut constraint_variables = FxHashMap::default(); @@ -804,10 +1089,10 @@ where CollectBoundVariables::on(state, localized, &mut constraint_variables); } - for (level, kind) in constraint_variables { - if !argument_levels.contains(&level) && !bindings.contains_key(&level) { + for (name, kind) in constraint_variables { + if !argument_names.contains(&name) && !bindings.contains_key(&name) { let unification = state.fresh_unification_kinded(kind); - bindings.insert(level, unification); + bindings.insert(name, unification); } } @@ -847,13 +1132,12 @@ where continue; } - let mut match_results = Vec::with_capacity(wanted.arguments.len()); let mut stuck_positions = vec![]; for (index, (&wanted_argument, &given_argument)) in wanted.arguments.iter().zip(&given.arguments).enumerate() { - let match_result = match_given_type(state, wanted_argument, given_argument); + let match_result = match_given_type(state, context, wanted_argument, given_argument); if matches!(match_result, MatchType::Apart) { continue 'given; @@ -862,24 +1146,11 @@ where if matches!(match_result, MatchType::Stuck) { stuck_positions.push(index); } - - match_results.push(match_result); - } - - if stuck_positions.is_empty() { - return Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![] })); - } - - if !can_determine_stuck( - context, - wanted.file_id, - wanted.item_id, - &match_results, - &stuck_positions, 
- )? { - continue 'given; } + // Given constraints are valid by construction. When a unification + // variable makes a position stuck, it's safe to emit an equality + // rather than require functional dependencies to cover it. let equalities = stuck_positions.iter().map(|&index| { let wanted = wanted.arguments[index]; let given = given.arguments[index]; @@ -938,10 +1209,10 @@ where cons => prim_symbol_cons(state, arguments), }, context.prim_row => { - union => prim_row_union(state, arguments), - cons => prim_row_cons(state, arguments), - lacks => prim_row_lacks(state, arguments), - nub => prim_row_nub(state, arguments), + union => prim_row_union(state, context, arguments)?, + cons => prim_row_cons(state, context, arguments)?, + lacks => prim_row_lacks(state, context, arguments)?, + nub => prim_row_nub(state, context, arguments)?, }, context.prim_row_list => { row_to_list => prim_rowlist_row_to_list(state, context, arguments), @@ -962,13 +1233,13 @@ where } struct ApplyBindings<'a> { - bindings: &'a FxHashMap, + bindings: &'a FxHashMap, } impl<'a> ApplyBindings<'a> { fn on( state: &mut CheckState, - bindings: &'a FxHashMap, + bindings: &'a FxHashMap, type_id: TypeId, ) -> TypeId { fold_type(state, type_id, &mut ApplyBindings { bindings }) @@ -978,8 +1249,8 @@ impl<'a> ApplyBindings<'a> { impl TypeFold for ApplyBindings<'_> { fn transform(&mut self, _state: &mut CheckState, id: TypeId, t: &Type) -> FoldAction { match t { - Type::Variable(Variable::Bound(level, _)) => { - let id = self.bindings.get(level).copied().unwrap_or(id); + Type::Variable(Variable::Bound(name, _)) => { + let id = self.bindings.get(name).copied().unwrap_or(id); FoldAction::Replace(id) } _ => FoldAction::Continue, @@ -987,21 +1258,21 @@ impl TypeFold for ApplyBindings<'_> { } } -/// Collects all bound variable levels from a type. -struct CollectBoundLevels<'a> { - levels: &'a mut FxHashSet, +/// Collects all bound variable names from a type. 
+struct CollectBoundNames<'a> { + names: &'a mut FxHashSet, } -impl<'a> CollectBoundLevels<'a> { - fn on(state: &mut CheckState, type_id: TypeId, levels: &'a mut FxHashSet) { - visit_type(state, type_id, &mut CollectBoundLevels { levels }); +impl<'a> CollectBoundNames<'a> { + fn on(state: &mut CheckState, type_id: TypeId, names: &'a mut FxHashSet) { + visit_type(state, type_id, &mut CollectBoundNames { names }); } } -impl TypeVisitor for CollectBoundLevels<'_> { +impl TypeVisitor for CollectBoundNames<'_> { fn visit(&mut self, _state: &mut CheckState, _id: TypeId, t: &Type) -> VisitAction { - if let Type::Variable(Variable::Bound(level, _)) = t { - self.levels.insert(*level); + if let Type::Variable(Variable::Bound(name, _)) = t { + self.names.insert(name.clone()); } VisitAction::Continue } @@ -1009,24 +1280,57 @@ impl TypeVisitor for CollectBoundLevels<'_> { /// Collects all bound variables with their kinds from a type. struct CollectBoundVariables<'a> { - variables: &'a mut FxHashMap, + variables: &'a mut FxHashMap, } impl<'a> CollectBoundVariables<'a> { - fn on( - state: &mut CheckState, - type_id: TypeId, - variables: &'a mut FxHashMap, - ) { + fn on(state: &mut CheckState, type_id: TypeId, variables: &'a mut FxHashMap) { visit_type(state, type_id, &mut CollectBoundVariables { variables }); } } impl TypeVisitor for CollectBoundVariables<'_> { fn visit(&mut self, _state: &mut CheckState, _id: TypeId, t: &Type) -> VisitAction { - if let Type::Variable(Variable::Bound(level, kind)) = t { - self.variables.insert(*level, *kind); + if let Type::Variable(Variable::Bound(name, kind)) = t { + self.variables.insert(name.clone(), *kind); } VisitAction::Continue } } + +/// Validates that all rows in instance declaration arguments +/// do not have labels in non-determined positions. +/// +/// In PureScript, instance declarations can only contain rows with labels +/// in positions that are determined by functional dependencies. 
In the +/// determiner position, only row variables such as `( | r )` are valid. +pub fn validate_instance_rows( + state: &mut CheckState, + context: &CheckContext, + class_file: FileId, + class_item: TypeItemId, + arguments: &[(TypeId, TypeId)], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let functional_dependencies = get_functional_dependencies(context, class_file, class_item)?; + let all_determined = get_all_determined(&functional_dependencies); + + for (position, &(argument_type, _)) in arguments.iter().enumerate() { + if all_determined.contains(&position) { + continue; + } + if HasLabeledRole::on(state, argument_type) { + let type_message = state.render_local_type(context, argument_type); + state.insert_error(crate::error::ErrorKind::InstanceHeadLabeledRow { + class_file, + class_item, + position, + type_message, + }); + } + } + + Ok(()) +} diff --git a/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_coerce.rs b/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_coerce.rs index cf4ccf299..f7eec195b 100644 --- a/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_coerce.rs +++ b/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_coerce.rs @@ -9,7 +9,7 @@ use crate::algorithm::constraint::{self, MatchInstance}; use crate::algorithm::safety::safe_loop; use crate::algorithm::state::{CheckContext, CheckState}; use crate::algorithm::{derive, kind, substitute, toolkit}; -use crate::core::Role; +use crate::core::{Role, Variable}; use crate::{ExternalQueries, Type, TypeId}; enum NewtypeCoercionResult { @@ -30,8 +30,8 @@ where return Ok(None); }; - let left = state.normalize_type(left); - let right = state.normalize_type(right); + let left = toolkit::normalise_expand_type(state, context, left)?; + let right = toolkit::normalise_expand_type(state, context, right)?; if left == right { return Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![] })); @@ -41,6 +41,10 @@ 
where return Ok(Some(MatchInstance::Stuck)); } + if try_refl(state, context, left, right)? { + return Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![] })); + } + let newtype_result = try_newtype_coercion(state, context, left, right)?; if let NewtypeCoercionResult::Success(result) = newtype_result { return Ok(Some(result)); @@ -50,6 +54,10 @@ where return Ok(Some(result)); } + if let Some(result) = try_function_coercion(state, context, left, right)? { + return Ok(Some(result)); + } + if let Some(result) = try_higher_kinded_coercion(state, context, left, right)? { return Ok(Some(result)); } @@ -97,7 +105,7 @@ where && is_newtype(context, file_id, type_id)? { if is_constructor_in_scope(context, file_id, type_id)? { - let inner = derive::get_newtype_inner(state, context, file_id, type_id, left)?; + let (inner, _) = derive::get_newtype_inner(state, context, file_id, type_id, left)?; let constraint = make_coercible_constraint(state, context, inner, right); return Ok(NewtypeCoercionResult::Success(MatchInstance::Match { constraints: vec![constraint], @@ -113,7 +121,7 @@ where && is_newtype(context, file_id, type_id)? { if is_constructor_in_scope(context, file_id, type_id)? { - let inner = derive::get_newtype_inner(state, context, file_id, type_id, right)?; + let (inner, _) = derive::get_newtype_inner(state, context, file_id, type_id, right)?; let constraint = make_coercible_constraint(state, context, left, inner); return Ok(NewtypeCoercionResult::Success(MatchInstance::Match { constraints: vec![constraint], @@ -258,6 +266,32 @@ where } } +/// Decomposes `Coercible (a -> b) (c -> d)` into `Coercible a c` and `Coercible b d`. +fn try_function_coercion( + state: &mut CheckState, + context: &CheckContext, + left: TypeId, + right: TypeId, +) -> QueryResult> +where + Q: ExternalQueries, +{ + let strict = toolkit::SynthesiseFunction::No; + let Some((left_argument, left_result)) = + toolkit::decompose_function(state, context, left, strict)? 
+ else { + return Ok(None); + }; + let Some((right_argument, right_result)) = + toolkit::decompose_function(state, context, right, strict)? + else { + return Ok(None); + }; + let c1 = make_coercible_constraint(state, context, left_argument, right_argument); + let c2 = make_coercible_constraint(state, context, left_result, right_result); + Ok(Some(MatchInstance::Match { constraints: vec![c1, c2], equalities: vec![] })) +} + fn try_row_coercion( state: &mut CheckState, context: &CheckContext, @@ -334,7 +368,7 @@ where // decompose_kind_for_coercion instantiates the variables into // skolem variables, then returns the first argument, which in - // this case is the already-skolemized `~k` + // this case is the already skolemised `~k` // // left_kind_applied := Maybe @~k // left_domain := ~k @@ -382,19 +416,132 @@ fn decompose_kind_for_coercion( kind_id = state.normalize_type(kind_id); let forall = match &state.storage[kind_id] { - Type::Forall(binder, inner) => Some((binder.kind, binder.level, *inner)), + Type::Forall(binder, inner) => Some((binder.kind, binder.variable.clone(), *inner)), Type::Function(domain, _) => return Some((type_id, *domain)), _ => return None, }; - if let Some((binder_kind, binder_level, inner_kind)) = forall { + if let Some((binder_kind, binder_variable, inner_kind)) = forall { let fresh_kind = state.fresh_skolem_kinded(binder_kind); type_id = state.storage.intern(Type::KindApplication(type_id, fresh_kind)); - kind_id = substitute::SubstituteBound::on(state, binder_level, fresh_kind, inner_kind); + kind_id = substitute::SubstituteBound::on(state, binder_variable, fresh_kind, inner_kind); } } } +fn try_refl( + state: &mut CheckState, + context: &CheckContext, + t1: TypeId, + t2: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let t1 = toolkit::normalise_expand_type(state, context, t1)?; + let t2 = toolkit::normalise_expand_type(state, context, t2)?; + + if t1 == t2 { + return Ok(true); + } + + match (&state.storage[t1], 
&state.storage[t2]) { + ( + &Type::Application(t1_function, t1_argument), + &Type::Application(t2_function, t2_argument), + ) + | ( + &Type::Constrained(t1_function, t1_argument), + &Type::Constrained(t2_function, t2_argument), + ) + | ( + &Type::KindApplication(t1_function, t1_argument), + &Type::KindApplication(t2_function, t2_argument), + ) + | (&Type::Kinded(t1_function, t1_argument), &Type::Kinded(t2_function, t2_argument)) => { + Ok(try_refl(state, context, t1_function, t2_function)? + && try_refl(state, context, t1_argument, t2_argument)?) + } + + (&Type::Function(t1_argument, t1_result), &Type::Function(t2_argument, t2_result)) => { + Ok(try_refl(state, context, t1_argument, t2_argument)? + && try_refl(state, context, t1_result, t2_result)?) + } + + (&Type::Forall(ref t1_binder, t1_inner), &Type::Forall(ref t2_binder, t2_inner)) => { + Ok(try_refl(state, context, t1_binder.kind, t2_binder.kind)? + && try_refl(state, context, t1_inner, t2_inner)?) + } + + ( + &Type::OperatorApplication(t1_file, t1_item, t1_left, t1_right), + &Type::OperatorApplication(t2_file, t2_item, t2_left, t2_right), + ) => Ok((t1_file, t1_item) == (t2_file, t2_item) + && try_refl(state, context, t1_left, t2_left)? + && try_refl(state, context, t1_right, t2_right)?), + + ( + Type::SynonymApplication(_, t1_file, t1_item, t1_arguments), + Type::SynonymApplication(_, t2_file, t2_item, t2_arguments), + ) => { + let equal = (t1_file, t1_item) == (t2_file, t2_item) + && t1_arguments.len() == t2_arguments.len(); + + if !equal { + return Ok(false); + } + + let t1_arguments = Arc::clone(t1_arguments); + let t2_arguments = Arc::clone(t2_arguments); + + for (&t1_argument, &t2_argument) in + std::iter::zip(t1_arguments.iter(), t2_arguments.iter()) + { + if !try_refl(state, context, t1_argument, t2_argument)? 
{ + return Ok(false); + } + } + + Ok(true) + } + + (Type::Row(t1_row), Type::Row(t2_row)) => { + if t1_row.fields.len() != t2_row.fields.len() { + return Ok(false); + } + + let t1_row = t1_row.clone(); + let t2_row = t2_row.clone(); + + for (t1_field, t2_field) in std::iter::zip(t1_row.fields.iter(), t2_row.fields.iter()) { + if t1_field.label != t2_field.label + || !try_refl(state, context, t1_field.id, t2_field.id)? + { + return Ok(false); + } + } + + match (t1_row.tail, t2_row.tail) { + (Some(t1_tail), Some(t2_tail)) => try_refl(state, context, t1_tail, t2_tail), + (None, None) => Ok(true), + _ => Ok(false), + } + } + + ( + &Type::Variable(Variable::Bound(ref t1_name, t1_kind)), + &Type::Variable(Variable::Bound(ref t2_name, t2_kind)), + ) => Ok(t1_name == t2_name && try_refl(state, context, t1_kind, t2_kind)?), + + ( + &Type::Variable(Variable::Skolem(ref t1_name, t1_kind)), + &Type::Variable(Variable::Skolem(ref t2_name, t2_kind)), + ) => Ok(t1_name == t2_name && try_refl(state, context, t1_kind, t2_kind)?), + + _ => Ok(false), + } +} + fn make_coercible_constraint( state: &mut CheckState, context: &CheckContext, diff --git a/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_row.rs b/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_row.rs index a281aa928..a6c1b310c 100644 --- a/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_row.rs +++ b/compiler-core/checking/src/algorithm/constraint/compiler_solved/prim_row.rs @@ -1,10 +1,13 @@ use std::cmp::Ordering; use std::iter; +use building_types::QueryResult; use rustc_hash::FxHashSet; +use crate::ExternalQueries; use crate::algorithm::constraint::{self, MatchInstance}; -use crate::algorithm::state::CheckState; +use crate::algorithm::state::{CheckContext, CheckState}; +use crate::algorithm::toolkit; use crate::core::{RowField, RowType}; use crate::{Type, TypeId}; @@ -65,78 +68,133 @@ fn subtract_row_fields( Some((result, equalities)) } -pub fn 
prim_row_union(state: &mut CheckState, arguments: &[TypeId]) -> Option { +pub fn prim_row_union( + state: &mut CheckState, + context: &CheckContext, + arguments: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ let &[left, right, union] = arguments else { - return None; + return Ok(None); }; - let left = state.normalize_type(left); - let right = state.normalize_type(right); - let union = state.normalize_type(union); + let left = toolkit::normalise_expand_type(state, context, left)?; + let right = toolkit::normalise_expand_type(state, context, right)?; + let union = toolkit::normalise_expand_type(state, context, union)?; - let left_row = extract_closed_row(state, left); - let right_row = extract_closed_row(state, right); - let union_row = extract_closed_row(state, union); + let left_row = extract_row(state, left); + let right_row = extract_row(state, right); + let union_row = extract_row(state, union); match (left_row, right_row, union_row) { (Some(left_row), Some(right_row), _) => { - let left_fields = left_row.fields.iter(); - let right_fields = right_row.fields.iter(); + if let Some(rest) = left_row.tail { + if left_row.fields.is_empty() { + return Ok(Some(MatchInstance::Stuck)); + } + + let fresh_tail = state.fresh_unification_kinded(context.prim.row_type); + + let result = state.storage.intern(Type::Row(RowType::from_unsorted( + left_row.fields.to_vec(), + Some(fresh_tail), + ))); + + let prim_row = &context.prim_row; - let union_fields = iter::chain(left_fields, right_fields).cloned().collect(); - let result = state.storage.intern(Type::Row(RowType::closed(union_fields))); + let constraint = + state.storage.intern(Type::Constructor(prim_row.file_id, prim_row.union)); - Some(MatchInstance::Match { constraints: vec![], equalities: vec![(union, result)] }) + let constraint = state.storage.intern(Type::Application(constraint, rest)); + let constraint = state.storage.intern(Type::Application(constraint, right)); + let constraint = 
state.storage.intern(Type::Application(constraint, fresh_tail)); + + return Ok(Some(MatchInstance::Match { + constraints: vec![constraint], + equalities: vec![(union, result)], + })); + } + + let union_fields = { + let left = left_row.fields.iter(); + let right = right_row.fields.iter(); + iter::chain(left, right).cloned().collect() + }; + + let result = state + .storage + .intern(Type::Row(RowType::from_unsorted(union_fields, right_row.tail))); + + Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![(union, result)] })) } (_, Some(right_row), Some(union_row)) => { + if right_row.tail.is_some() { + return Ok(Some(MatchInstance::Stuck)); + } if let Some((remaining, mut equalities)) = subtract_row_fields(state, &union_row.fields, &right_row.fields) { - let result = state.storage.intern(Type::Row(RowType::closed(remaining))); + let result = state + .storage + .intern(Type::Row(RowType::from_unsorted(remaining, union_row.tail))); equalities.push((left, result)); - Some(MatchInstance::Match { constraints: vec![], equalities }) + Ok(Some(MatchInstance::Match { constraints: vec![], equalities })) } else { - Some(MatchInstance::Apart) + Ok(Some(MatchInstance::Apart)) } } (Some(left_row), _, Some(union_row)) => { + if left_row.tail.is_some() { + return Ok(Some(MatchInstance::Stuck)); + } if let Some((remaining, mut equalities)) = subtract_row_fields(state, &union_row.fields, &left_row.fields) { - let result = state.storage.intern(Type::Row(RowType::closed(remaining))); + let result = state + .storage + .intern(Type::Row(RowType::from_unsorted(remaining, union_row.tail))); equalities.push((right, result)); - Some(MatchInstance::Match { constraints: vec![], equalities }) + Ok(Some(MatchInstance::Match { constraints: vec![], equalities })) } else { - Some(MatchInstance::Apart) + Ok(Some(MatchInstance::Apart)) } } - _ => Some(MatchInstance::Stuck), + _ => Ok(Some(MatchInstance::Stuck)), } } -pub fn prim_row_cons(state: &mut CheckState, arguments: &[TypeId]) -> 
Option { +pub fn prim_row_cons( + state: &mut CheckState, + context: &CheckContext, + arguments: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ let &[label, a, tail, row] = arguments else { - return None; + return Ok(None); }; - let label = state.normalize_type(label); - let a = state.normalize_type(a); - let tail = state.normalize_type(tail); - let row = state.normalize_type(row); + let label = toolkit::normalise_expand_type(state, context, label)?; + let a = toolkit::normalise_expand_type(state, context, a)?; + let tail = toolkit::normalise_expand_type(state, context, tail)?; + let row = toolkit::normalise_expand_type(state, context, row)?; let label_symbol = extract_symbol(state, label); - let tail_row = extract_closed_row(state, tail); - let row_row = extract_closed_row(state, row); + let tail_row = extract_row(state, tail); + let row_row = extract_row(state, row); match (label_symbol, tail_row, row_row) { (Some(label_value), Some(tail_row), _) => { let mut fields = vec![RowField { label: label_value, id: a }]; fields.extend(tail_row.fields.iter().cloned()); - let result_row = RowType::from_unsorted(fields, None); + let result_row = RowType::from_unsorted(fields, tail_row.tail); let result = state.storage.intern(Type::Row(result_row)); - Some(MatchInstance::Match { constraints: vec![], equalities: vec![(row, result)] }) + Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![(row, result)] })) } (Some(label_value), _, Some(row_row)) => { @@ -152,56 +210,82 @@ pub fn prim_row_cons(state: &mut CheckState, arguments: &[TypeId]) -> Option Some(MatchInstance::Stuck), + _ => Ok(Some(MatchInstance::Stuck)), } } -pub fn prim_row_lacks(state: &mut CheckState, arguments: &[TypeId]) -> Option { +pub fn prim_row_lacks( + state: &mut CheckState, + context: &CheckContext, + arguments: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ let &[label, row] = arguments else { - return None; + return Ok(None); }; - let label = 
state.normalize_type(label); - let row = state.normalize_type(row); + let label = toolkit::normalise_expand_type(state, context, label)?; + let row = toolkit::normalise_expand_type(state, context, row)?; let Some(label_value) = extract_symbol(state, label) else { - return Some(MatchInstance::Stuck); + return Ok(Some(MatchInstance::Stuck)); }; let Some(row_row) = extract_row(state, row) else { - return Some(MatchInstance::Stuck); + return Ok(Some(MatchInstance::Stuck)); }; let has_label = row_row.fields.iter().any(|field| field.label == label_value); if has_label { - Some(MatchInstance::Apart) - } else if row_row.tail.is_some() { - Some(MatchInstance::Stuck) + Ok(Some(MatchInstance::Apart)) + } else if let Some(tail) = row_row.tail { + if row_row.fields.is_empty() { + return Ok(Some(MatchInstance::Stuck)); + } + + let prim_row = &context.prim_row; + + let constraint = state.storage.intern(Type::Constructor(prim_row.file_id, prim_row.lacks)); + let constraint = state.storage.intern(Type::Application(constraint, label)); + let constraint = state.storage.intern(Type::Application(constraint, tail)); + + Ok(Some(MatchInstance::Match { constraints: vec![constraint], equalities: vec![] })) } else { - Some(MatchInstance::Match { constraints: vec![], equalities: vec![] }) + Ok(Some(MatchInstance::Match { constraints: vec![], equalities: vec![] })) } } -pub fn prim_row_nub(state: &mut CheckState, arguments: &[TypeId]) -> Option { +pub fn prim_row_nub( + state: &mut CheckState, + context: &CheckContext, + arguments: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ let &[original, nubbed] = arguments else { - return None; + return Ok(None); }; - let original = state.normalize_type(original); - let nubbed = state.normalize_type(nubbed); + let original = toolkit::normalise_expand_type(state, context, original)?; + let nubbed = toolkit::normalise_expand_type(state, context, nubbed)?; let Some(original_row) = extract_closed_row(state, original) else { - return 
Some(MatchInstance::Stuck); + return Ok(Some(MatchInstance::Stuck)); }; let mut seen = FxHashSet::default(); @@ -214,5 +298,5 @@ pub fn prim_row_nub(state: &mut CheckState, arguments: &[TypeId]) -> Option HashSet { + functional_dependencies.iter().flat_map(|fd| fd.determined.iter().copied()).collect() +} + /// Compute the closure of positions determined by functional dependencies. /// /// Starting from `initial` positions, iteratively applies fundeps: @@ -123,4 +128,42 @@ mod tests { let result = compute_closure(&fundeps, &initial); assert_eq!(result, [0].into_iter().collect()); } + + #[test] + fn test_all_determined_no_fundeps() { + let result = get_all_determined(&[]); + assert_eq!(result, HashSet::new()); + } + + #[test] + fn test_all_determined_single_fundep() { + // a -> b (position 0 determines position 1) + let fundeps = vec![Fd::new([0], [1])]; + let result = get_all_determined(&fundeps); + assert_eq!(result, [1].into_iter().collect()); + } + + #[test] + fn test_all_determined_multiple_fundeps() { + // a -> b, b -> c + let fundeps = vec![Fd::new([0], [1]), Fd::new([1], [2])]; + let result = get_all_determined(&fundeps); + assert_eq!(result, [1, 2].into_iter().collect()); + } + + #[test] + fn test_all_determined_overlapping() { + // a -> b c, d -> b + let fundeps = vec![Fd::new([0], [1, 2]), Fd::new([3], [1])]; + let result = get_all_determined(&fundeps); + assert_eq!(result, [1, 2].into_iter().collect()); + } + + #[test] + fn test_all_determined_empty_determiners() { + // -> a (empty determiners, determines position 0) + let fundeps = vec![Fd::new([], [0])]; + let result = get_all_determined(&fundeps); + assert_eq!(result, [0].into_iter().collect()); + } } diff --git a/compiler-core/checking/src/algorithm/derive.rs b/compiler-core/checking/src/algorithm/derive.rs index 261963e55..9ba608d11 100644 --- a/compiler-core/checking/src/algorithm/derive.rs +++ b/compiler-core/checking/src/algorithm/derive.rs @@ -14,15 +14,17 @@ mod variance; use 
building_types::QueryResult; use files::FileId; use indexing::{DeriveId, TermItemId, TypeItemId}; +use itertools::Itertools; use crate::ExternalQueries; +use crate::algorithm::derive::variance::VarianceConfig; use crate::algorithm::safety::safe_loop; use crate::algorithm::state::{CheckContext, CheckState}; use crate::algorithm::{kind, term_item, toolkit, transfer}; -use crate::core::{Type, TypeId, debruijn}; +use crate::core::{Type, TypeId, Variable, debruijn}; use crate::error::{ErrorKind, ErrorStep}; -/// Input fields for [`check_derive`]. +/// Input fields for [`check_derive_head`]. pub struct CheckDerive<'a> { pub item_id: TermItemId, pub derive_id: DeriveId, @@ -33,12 +35,74 @@ pub struct CheckDerive<'a> { pub is_newtype: bool, } -/// Checks a derived instance. -pub fn check_derive( +/// Determines how [`check_derive_member`] generates constraints. +enum DeriveStrategy { + /// [`generate_field_constraints`] strategy. + /// + /// * Eq + /// * Ord + FieldConstraints { + data_file: FileId, + data_id: TypeItemId, + derived_type: TypeId, + class: (FileId, TypeItemId), + }, + /// [`generate_variance_constraints`] strategy. + /// + /// * Functor, Bifunctor + /// * Contravariant, Profunctor + /// * Foldable, Bifoldable + /// * Traversable, Bitraversable + /// + /// [`generate_variance_constraints`]: variance::generate_variance_constraints + VarianceConstraints { + data_file: FileId, + data_id: TypeItemId, + derived_type: TypeId, + config: VarianceConfig, + }, + /// [`generate_delegate_constraint`] strategy. + /// + /// * Eq1 + /// * Ord1 + DelegateConstraint { derived_type: TypeId, class: (FileId, TypeItemId) }, + /// `derive newtype instance` + NewtypeDeriveConstraint { delegate_constraint: TypeId }, + /// The instance head was sufficient. + /// + /// * Generic + /// * Newtype + HeadOnly, +} + +/// Carries state from [`check_derive_head`] to [`check_derive_member`]. 
+pub struct DeriveHeadResult { + item_id: TermItemId, + constraints: Vec, + class_file: FileId, + class_id: TypeItemId, + arguments: Vec<(TypeId, TypeId)>, + strategy: DeriveStrategy, +} + +pub fn check_derive_head( state: &mut CheckState, context: &CheckContext, input: CheckDerive<'_>, -) -> QueryResult<()> +) -> QueryResult> +where + Q: ExternalQueries, +{ + state.with_error_step(ErrorStep::TermDeclaration(input.item_id), |state| { + state.with_implication(|state| check_derive_head_core(state, context, input)) + }) +} + +fn check_derive_head_core( + state: &mut CheckState, + context: &CheckContext, + input: CheckDerive<'_>, +) -> QueryResult> where Q: ExternalQueries, { @@ -52,89 +116,95 @@ where is_newtype, } = input; - state.with_error_step(ErrorStep::TermDeclaration(item_id), |state| { - // Save the current size of the environment for unbinding. - let size = state.type_scope.size(); + let size = state.type_scope.size(); - let class_kind = kind::lookup_file_type(state, context, class_file, class_id)?; - let expected_kinds = term_item::instantiate_class_kind(state, context, class_kind)?; + let class_kind = kind::lookup_file_type(state, context, class_file, class_id)?; + let expected_kinds = term_item::instantiate_class_kind(state, context, class_kind)?; - if expected_kinds.len() != arguments.len() { - state.insert_error(ErrorKind::InstanceHeadMismatch { - class_file, - class_item: class_id, - expected: expected_kinds.len(), - actual: arguments.len(), - }); - } + if expected_kinds.len() != arguments.len() { + state.insert_error(ErrorKind::InstanceHeadMismatch { + class_file, + class_item: class_id, + expected: expected_kinds.len(), + actual: arguments.len(), + }); + } - let mut core_arguments = vec![]; - for (argument, expected_kind) in arguments.iter().zip(expected_kinds) { - let (inferred_type, inferred_kind) = - kind::check_surface_kind(state, context, *argument, expected_kind)?; - core_arguments.push((inferred_type, inferred_kind)); - } + let mut 
core_arguments = vec![]; + for (argument, expected_kind) in arguments.iter().zip(expected_kinds) { + let (inferred_type, inferred_kind) = + kind::check_surface_kind(state, context, *argument, expected_kind)?; + core_arguments.push((inferred_type, inferred_kind)); + } - let mut core_constraints = vec![]; - for constraint in constraints.iter() { - let (inferred_type, inferred_kind) = - kind::infer_surface_kind(state, context, *constraint)?; - core_constraints.push((inferred_type, inferred_kind)); - } + let mut core_constraints = vec![]; + for constraint in constraints.iter() { + let (inferred_type, inferred_kind) = kind::infer_surface_kind(state, context, *constraint)?; + core_constraints.push((inferred_type, inferred_kind)); + } - let elaborated = tools::ElaboratedDerive { - derive_id, - constraints: core_constraints, - arguments: core_arguments, - class_file, - class_id, - }; + let constraints = core_constraints.iter().map(|&(t, _)| t).collect_vec(); + let arguments = core_arguments.clone(); - if is_newtype { - check_newtype_derive(state, context, elaborated)?; - } else { - let class_is = |known| Some((class_file, class_id)) == known; - let known_types = &context.known_types; - - macro_rules! dispatch { - ($($($known:ident)|+ => $handler:path),+ $(,)?) => { - $(if $(class_is(known_types.$known))||+ { - $handler(state, context, elaborated)?; - } else)+ { - state.insert_error(ErrorKind::CannotDeriveClass { class_file, class_id }); - } - }; - } + let elaborated = tools::ElaboratedDerive { + derive_id, + constraints: core_constraints, + arguments: core_arguments, + class_file, + class_id, + }; - dispatch! 
{ - eq | ord => check_derive_class, - functor => functor::check_derive_functor, - bifunctor => functor::check_derive_bifunctor, - contravariant => contravariant::check_derive_contravariant, - profunctor => contravariant::check_derive_profunctor, - foldable => foldable::check_derive_foldable, - bifoldable => foldable::check_derive_bifoldable, - traversable => traversable::check_derive_traversable, - bitraversable => traversable::check_derive_bitraversable, - eq1 => eq1::check_derive_eq1, - ord1 => eq1::check_derive_ord1, - newtype => newtype::check_derive_newtype, - generic => generic::check_derive_generic, - } + let strategy = if is_newtype { + check_newtype_derive(state, context, elaborated)? + } else { + let class_is = |known| Some((class_file, class_id)) == known; + let known_types = &context.known_types; + + macro_rules! dispatch { + ($($($known:ident)|+ => $handler:path),+ $(,)?) => { + $(if $(class_is(known_types.$known))||+ { + $handler(state, context, elaborated)? + } else)+ { + state.insert_error(ErrorKind::CannotDeriveClass { class_file, class_id }); + None + } + }; } - // Unbind type variables bound during elaboration. - state.type_scope.unbind(debruijn::Level(size.0)); + dispatch! 
{ + eq | ord => check_derive_class, + functor => functor::check_derive_functor, + bifunctor => functor::check_derive_bifunctor, + contravariant => contravariant::check_derive_contravariant, + profunctor => contravariant::check_derive_profunctor, + foldable => foldable::check_derive_foldable, + bifoldable => foldable::check_derive_bifoldable, + traversable => traversable::check_derive_traversable, + bitraversable => traversable::check_derive_bitraversable, + eq1 => eq1::check_derive_eq1, + ord1 => eq1::check_derive_ord1, + newtype => newtype::check_derive_newtype, + generic => generic::check_derive_generic, + } + }; - Ok(()) - }) + state.type_scope.unbind(debruijn::Level(size.0)); + + Ok(strategy.map(|strategy| DeriveHeadResult { + item_id, + constraints, + class_file, + class_id, + arguments, + strategy, + })) } fn check_derive_class( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -145,61 +215,68 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let class = (input.class_file, input.class_id); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); - - generate_field_constraints(state, context, data_file, data_id, derived_type, class)?; + tools::register_derived_instance(state, context, input)?; - tools::solve_and_report_constraints(state, context) + 
Ok(Some(DeriveStrategy::FieldConstraints { data_file, data_id, derived_type, class })) } fn check_newtype_derive( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { let [ref preceding_arguments @ .., (newtype_type, _)] = input.arguments[..] else { - return Ok(()); + return Ok(None); }; - let insert_error = - |state: &mut CheckState, context: &CheckContext, kind: fn(TypeId) -> ErrorKind| { - let global = transfer::globalize(state, context, newtype_type); - state.insert_error(kind(global)); - }; - let Some((newtype_file, newtype_id)) = extract_type_constructor(state, newtype_type) else { - insert_error(state, context, |type_id| ErrorKind::CannotDeriveForType { type_id }); - return Ok(()); + let type_message = state.render_local_type(context, newtype_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; if newtype_file != context.id { - insert_error(state, context, |type_id| ErrorKind::CannotDeriveForType { type_id }); - return Ok(()); + let type_message = state.render_local_type(context, newtype_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); } if !is_newtype(context, newtype_file, newtype_id)? 
{ - insert_error(state, context, |type_id| ErrorKind::ExpectedNewtype { type_id }); - return Ok(()); + let type_message = state.render_local_type(context, newtype_type); + state.insert_error(ErrorKind::ExpectedNewtype { type_message }); + return Ok(None); } - let inner_type = get_newtype_inner(state, context, newtype_file, newtype_id, newtype_type)?; + let (inner_type, skolem_count) = + get_newtype_inner(state, context, newtype_file, newtype_id, newtype_type)?; + + let inner_type = if skolem_count == 0 { + inner_type + } else if let Some(inner_type) = + try_peel_trailing_skolems(state, context, inner_type, skolem_count) + { + inner_type + } else { + state.insert_error(ErrorKind::InvalidNewtypeDeriveSkolemArguments); + return Ok(None); + }; + + // Make sure that the constraint solver sees the synonym expansion. + let inner_type = toolkit::normalise_expand_type(state, context, inner_type)?; - // Build `Class t1 t2 Inner` given the constraint `Class t1 t2 Newtype` let delegate_constraint = { let class_type = state.storage.intern(Type::Constructor(input.class_file, input.class_id)); @@ -211,14 +288,156 @@ where state.storage.intern(Type::Application(preceding_arguments, inner_type)) }; - tools::push_given_constraints(state, &input.constraints); - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; - state.constraints.push_wanted(delegate_constraint); + Ok(Some(DeriveStrategy::NewtypeDeriveConstraint { delegate_constraint })) +} + +pub fn check_derive_member( + state: &mut CheckState, + context: &CheckContext, + result: &DeriveHeadResult, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + state.with_error_step(ErrorStep::TermDeclaration(result.item_id), |state| { + state.with_implication(|state| check_derive_member_core(state, context, result)) + }) +} + +fn check_derive_member_core( + state: &mut CheckState, + context: &CheckContext, + result: &DeriveHeadResult, +) -> QueryResult<()> +where + Q: 
ExternalQueries, +{ + for &constraint in &result.constraints { + state.push_given(constraint); + } + + match &result.strategy { + DeriveStrategy::FieldConstraints { data_file, data_id, derived_type, class } => { + tools::emit_superclass_constraints( + state, + context, + result.class_file, + result.class_id, + &result.arguments, + )?; + generate_field_constraints( + state, + context, + *data_file, + *data_id, + *derived_type, + *class, + )?; + } + DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config } => { + tools::emit_superclass_constraints( + state, + context, + result.class_file, + result.class_id, + &result.arguments, + )?; + variance::generate_variance_constraints( + state, + context, + *data_file, + *data_id, + *derived_type, + *config, + )?; + } + DeriveStrategy::DelegateConstraint { derived_type, class } => { + tools::emit_superclass_constraints( + state, + context, + result.class_file, + result.class_id, + &result.arguments, + )?; + generate_delegate_constraint(state, context.prim.t, *derived_type, *class); + } + DeriveStrategy::NewtypeDeriveConstraint { delegate_constraint } => { + state.push_wanted(*delegate_constraint); + } + DeriveStrategy::HeadOnly => { + tools::emit_superclass_constraints( + state, + context, + result.class_file, + result.class_id, + &result.arguments, + )?; + } + } tools::solve_and_report_constraints(state, context) } +fn generate_delegate_constraint( + state: &mut CheckState, + prim_type: TypeId, + derived_type: TypeId, + class: (FileId, TypeItemId), +) { + // Introduce a fresh skolem `~a` for the last type parameter. + let skolem_type = state.fresh_skolem_kinded(prim_type); + + // Given `Eq ~a`, prove `Eq (Identity ~a)`. 
+ let applied_type = state.storage.intern(Type::Application(derived_type, skolem_type)); + + let class_type = state.storage.intern(Type::Constructor(class.0, class.1)); + let given_constraint = state.storage.intern(Type::Application(class_type, skolem_type)); + state.push_given(given_constraint); + + let wanted_constraint = state.storage.intern(Type::Application(class_type, applied_type)); + state.push_wanted(wanted_constraint); +} + +fn try_peel_trailing_skolems( + state: &mut CheckState, + context: &CheckContext, + mut type_id: TypeId, + mut count: usize, +) -> Option +where + Q: ExternalQueries, +{ + safe_loop! { + if count == 0 { + break Some(type_id); + } + type_id = state.normalize_type(type_id); + if let Type::Application(function, argument) | Type::KindApplication(function, argument) = + state.storage[type_id] + { + let argument = state.normalize_type(argument); + if matches!(state.storage[argument], Type::Variable(Variable::Skolem(_, _))) { + count -= 1; + type_id = function; + } else { + break None; + } + } else if let Type::Function(argument, result) = state.storage[type_id] { + let result = state.normalize_type(result); + if matches!(state.storage[result], Type::Variable(Variable::Skolem(_, _))) { + count -= 1; + type_id = state.storage.intern(Type::Application(context.prim.function, argument)); + } else { + break None; + } + } else { + break None; + } + } +} + pub fn extract_type_constructor( state: &mut CheckState, mut type_id: TypeId, @@ -270,34 +489,37 @@ where Ok(global_type.map(|global_type| transfer::localize(state, context, global_type))) } -/// Gets the inner type for a newtype, specialized with type arguments. +/// Gets the inner type for a newtype, specialised with type arguments. /// /// Newtypes have exactly one constructor with exactly one field. /// This function extracts that field type, substituting any type parameters. +/// If not enough type arguments are supplied, it skolemises the remaining +/// binders and returns the skolem count. 
pub fn get_newtype_inner( state: &mut CheckState, context: &CheckContext, newtype_file: FileId, newtype_id: TypeItemId, newtype_type: TypeId, -) -> QueryResult +) -> QueryResult<(TypeId, usize)> where Q: ExternalQueries, { let constructors = tools::lookup_data_constructors(context, newtype_file, newtype_id)?; let [constructor_id] = constructors[..] else { - return Ok(context.prim.unknown); + return Ok((context.prim.unknown, 0)); }; let constructor_type = lookup_local_term_type(state, context, newtype_file, constructor_id)?; let Some(constructor_type) = constructor_type else { - return Ok(context.prim.unknown); + return Ok((context.prim.unknown, 0)); }; let arguments = toolkit::extract_all_applications(state, newtype_type); - let fields = instantiate_constructor_fields(state, constructor_type, &arguments); - Ok(fields.into_iter().next().unwrap_or(context.prim.unknown)) + let (fields, skolem_count) = + instantiate_constructor_fields(state, constructor_type, &arguments); + Ok((fields.into_iter().next().unwrap_or(context.prim.unknown), skolem_count)) } /// Generates constraints for all fields of across all constructors. @@ -332,7 +554,7 @@ where let constructor_type = lookup_local_term_type(state, context, data_file, constructor_id)?; let Some(constructor_type) = constructor_type else { continue }; - let field_types = instantiate_constructor_fields(state, constructor_type, &arguments); + let (field_types, _) = instantiate_constructor_fields(state, constructor_type, &arguments); for field_type in field_types { higher_kinded::generate_constraint(state, context, field_type, class, class1); } @@ -345,7 +567,8 @@ where /// /// This function uses [`toolkit::instantiate_with_arguments`] to specialise /// the constructor type with the given type arguments, then extracts the -/// function arguments. Consider the ff: +/// function arguments, returning the fields and the number of skolems that +/// were introduced for the remaining arguments. 
Consider the ff: /// /// ```purescript /// data Either a b = Left a | Right b @@ -358,6 +581,10 @@ where /// /// derive instance Eq (Proxy @Type Int) /// -- Proxy :: Proxy @Type Int +/// +/// derive instance Eq1 (Vector n) +/// -- Vector :: Vector n ~a +/// -- skolem_count := 1 /// ``` /// /// The `arguments` parameter should be obtained by calling @@ -367,8 +594,9 @@ fn instantiate_constructor_fields( state: &mut CheckState, constructor_type: TypeId, arguments: &[TypeId], -) -> Vec { - let constructor = toolkit::instantiate_with_arguments(state, constructor_type, arguments); +) -> (Vec, usize) { + let (constructor, skolem_count) = + toolkit::instantiate_with_arguments(state, constructor_type, arguments); let (fields, _) = toolkit::extract_function_arguments(state, constructor); - fields + (fields, skolem_count) } diff --git a/compiler-core/checking/src/algorithm/derive/contravariant.rs b/compiler-core/checking/src/algorithm/derive/contravariant.rs index 9e21cf17b..18dc3c5bd 100644 --- a/compiler-core/checking/src/algorithm/derive/contravariant.rs +++ b/compiler-core/checking/src/algorithm/derive/contravariant.rs @@ -3,18 +3,17 @@ use building_types::QueryResult; use crate::ExternalQueries; -use crate::algorithm::derive::variance::{Variance, VarianceConfig, generate_variance_constraints}; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::variance::{Variance, VarianceConfig}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::transfer; use crate::error::ErrorKind; -/// Checks a derive instance for Contravariant. +/// Checks a derive instance head for Contravariant. 
pub fn check_derive_contravariant( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -25,32 +24,28 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let contravariant = Some((input.class_file, input.class_id)); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Single((Variance::Contravariant, contravariant)); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } -/// Checks a derive instance for Profunctor. +/// Checks a derive instance head for Profunctor. 
pub fn check_derive_profunctor( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -61,28 +56,24 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; // Profunctor: first param is contravariant, second is covariant. let contravariant = context.known_types.contravariant; let functor = context.known_types.functor; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Pair( (Variance::Contravariant, contravariant), (Variance::Covariant, functor), ); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } diff --git a/compiler-core/checking/src/algorithm/derive/eq1.rs b/compiler-core/checking/src/algorithm/derive/eq1.rs index 9915e73a7..13255eb37 100644 --- a/compiler-core/checking/src/algorithm/derive/eq1.rs +++ b/compiler-core/checking/src/algorithm/derive/eq1.rs @@ -11,22 +11,18 @@ //! 3. 
Solves the constraints to determine if it's satisfiable use building_types::QueryResult; -use files::FileId; -use indexing::TypeItemId; use crate::ExternalQueries; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::transfer; -use crate::core::{Type, Variable, debruijn}; use crate::error::ErrorKind; -/// Checks a derive instance for Eq1. +/// Checks a derive instance head for Eq1. pub fn check_derive_eq1( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -35,18 +31,18 @@ where class_file: input.class_file, class_id: input.class_id, }); - return Ok(()); + return Ok(None); }; check_derive_class1(state, context, input, eq) } -/// Checks a derive instance for Ord1. +/// Checks a derive instance head for Ord1. pub fn check_derive_ord1( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -55,19 +51,19 @@ where class_file: input.class_file, class_id: input.class_id, }); - return Ok(()); + return Ok(None); }; check_derive_class1(state, context, input, ord) } -/// Shared implementation for Eq1 and Ord1 derivation. +/// Shared implementation for Eq1 and Ord1 head phase. 
fn check_derive_class1( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, - class: (FileId, TypeItemId), -) -> QueryResult<()> + class: (files::FileId, indexing::TypeItemId), +) -> QueryResult> where Q: ExternalQueries, { @@ -78,37 +74,16 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; if derive::extract_type_constructor(state, derived_type).is_none() { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; - // Create a fresh skolem for the last type parameter. - let skolem_level = state.type_scope.size().0; - let skolem_level = debruijn::Level(skolem_level); - - let skolem_type = Variable::Skolem(skolem_level, context.prim.t); - let skolem_type = state.storage.intern(Type::Variable(skolem_type)); - - // Build the fully-applied type e.g. 
`Identity` -> `Identity a` - let applied_type = state.storage.intern(Type::Application(derived_type, skolem_type)); - - // Insert the given constraint `Eq a` - let class_type = state.storage.intern(Type::Constructor(class.0, class.1)); - let given_constraint = state.storage.intern(Type::Application(class_type, skolem_type)); - state.constraints.push_given(given_constraint); - - // Emit the wanted constraint `Eq (Identity a)` - let wanted_constraint = state.storage.intern(Type::Application(class_type, applied_type)); - state.constraints.push_wanted(wanted_constraint); - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::DelegateConstraint { derived_type, class })) } diff --git a/compiler-core/checking/src/algorithm/derive/foldable.rs b/compiler-core/checking/src/algorithm/derive/foldable.rs index 96b061b53..df2d8a73e 100644 --- a/compiler-core/checking/src/algorithm/derive/foldable.rs +++ b/compiler-core/checking/src/algorithm/derive/foldable.rs @@ -3,18 +3,17 @@ use building_types::QueryResult; use crate::ExternalQueries; -use crate::algorithm::derive::variance::{Variance, VarianceConfig, generate_variance_constraints}; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::variance::{Variance, VarianceConfig}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::transfer; use crate::error::ErrorKind; -/// Checks a derive instance for Foldable. +/// Checks a derive instance head for Foldable. 
pub fn check_derive_foldable( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -25,32 +24,28 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let foldable = Some((input.class_file, input.class_id)); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Single((Variance::Covariant, foldable)); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } -/// Checks a derive instance for Bifoldable. +/// Checks a derive instance head for Bifoldable. 
pub fn check_derive_bifoldable( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -61,25 +56,21 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; // Bifoldable derivation emits Foldable constraints for wrapped parameters. let foldable = context.known_types.foldable; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Pair((Variance::Covariant, foldable), (Variance::Covariant, foldable)); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } diff --git a/compiler-core/checking/src/algorithm/derive/functor.rs b/compiler-core/checking/src/algorithm/derive/functor.rs index e7466672a..6ffa8df64 100644 --- a/compiler-core/checking/src/algorithm/derive/functor.rs +++ b/compiler-core/checking/src/algorithm/derive/functor.rs @@ -3,18 +3,17 @@ use building_types::QueryResult; use crate::ExternalQueries; -use crate::algorithm::derive::variance::{Variance, VarianceConfig, generate_variance_constraints}; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::variance::{Variance, VarianceConfig}; 
+use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::transfer; use crate::error::ErrorKind; -/// Checks a derive instance for Functor. +/// Checks a derive instance head for Functor. pub fn check_derive_functor( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -25,32 +24,28 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let functor = Some((input.class_file, input.class_id)); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Single((Variance::Covariant, functor)); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } -/// Checks a derive instance for Bifunctor. +/// Checks a derive instance head for Bifunctor. 
pub fn check_derive_bifunctor( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -61,25 +56,21 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; // Bifunctor derivation emits Functor constraints for wrapped parameters. let functor = context.known_types.functor; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Pair((Variance::Covariant, functor), (Variance::Covariant, functor)); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } diff --git a/compiler-core/checking/src/algorithm/derive/generic.rs b/compiler-core/checking/src/algorithm/derive/generic.rs index d5ce8a761..9a1a096e6 100644 --- a/compiler-core/checking/src/algorithm/derive/generic.rs +++ b/compiler-core/checking/src/algorithm/derive/generic.rs @@ -15,9 +15,9 @@ use lowering::StringKind; use smol_str::SmolStr; use crate::ExternalQueries; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState, KnownGeneric}; -use 
crate::algorithm::{toolkit, transfer, unification}; +use crate::algorithm::{toolkit, unification}; use crate::core::{Type, TypeId}; use crate::error::ErrorKind; @@ -25,7 +25,7 @@ pub fn check_derive_generic( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -36,13 +36,13 @@ where expected: 2, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let Some(ref known_generic) = context.known_generic else { @@ -50,7 +50,7 @@ where class_file: input.class_file, class_id: input.class_id, }); - return Ok(()); + return Ok(None); }; let constructors = tools::lookup_data_constructors(context, data_file, data_id)?; @@ -60,11 +60,9 @@ where let _ = unification::unify(state, context, wildcard_type, generic_rep)?; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; - Ok(()) + Ok(Some(DeriveStrategy::HeadOnly)) } fn build_generic_rep( @@ -131,7 +129,9 @@ where derive::lookup_local_term_type(state, context, data_file, constructor_id)?; let field_types = if let Some(constructor_type) = constructor_type { - derive::instantiate_constructor_fields(state, constructor_type, arguments) + let (constructor_type, _) = + derive::instantiate_constructor_fields(state, constructor_type, arguments); + constructor_type } else { vec![] }; diff --git 
a/compiler-core/checking/src/algorithm/derive/newtype.rs b/compiler-core/checking/src/algorithm/derive/newtype.rs index 3c74bb3a0..55a50e652 100644 --- a/compiler-core/checking/src/algorithm/derive/newtype.rs +++ b/compiler-core/checking/src/algorithm/derive/newtype.rs @@ -3,16 +3,16 @@ use building_types::QueryResult; use crate::ExternalQueries; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::{transfer, unification}; +use crate::algorithm::unification; use crate::error::ErrorKind; pub fn check_derive_newtype( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -23,30 +23,28 @@ where expected: 2, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((newtype_file, newtype_id)) = derive::extract_type_constructor(state, newtype_type) else { - let global_type = transfer::globalize(state, context, newtype_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, newtype_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; if !derive::is_newtype(context, newtype_file, newtype_id)? 
{ - let global_type = transfer::globalize(state, context, newtype_type); - state.insert_error(ErrorKind::ExpectedNewtype { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, newtype_type); + state.insert_error(ErrorKind::ExpectedNewtype { type_message }); + return Ok(None); } - let inner_type = + let (inner_type, _) = derive::get_newtype_inner(state, context, newtype_file, newtype_id, newtype_type)?; let _ = unification::unify(state, context, wildcard_type, inner_type); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::HeadOnly)) } diff --git a/compiler-core/checking/src/algorithm/derive/tools.rs b/compiler-core/checking/src/algorithm/derive/tools.rs index b2ad20c11..b524f7057 100644 --- a/compiler-core/checking/src/algorithm/derive/tools.rs +++ b/compiler-core/checking/src/algorithm/derive/tools.rs @@ -9,10 +9,14 @@ use rustc_hash::FxHashMap; use crate::ExternalQueries; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::{constraint, quantify, substitute, transfer}; -use crate::core::{Instance, InstanceKind, Type, TypeId, debruijn}; +use crate::algorithm::{constraint, quantify, transfer}; +use crate::core::{Instance, InstanceKind, Type, TypeId}; use crate::error::ErrorKind; +mod substitute { + pub use crate::algorithm::substitute::{NameToType, SubstituteBindings}; +} + /// Elaborated derive instance after kind inference. 
pub struct ElaboratedDerive { pub derive_id: DeriveId, @@ -30,14 +34,7 @@ pub fn emit_constraint( ) { let class_type = state.storage.intern(Type::Constructor(class_file, class_id)); let constraint = state.storage.intern(Type::Application(class_type, type_id)); - state.constraints.push_wanted(constraint); -} - -/// Pushes given constraints from the instance head onto the constraint stack. -pub fn push_given_constraints(state: &mut CheckState, constraints: &[(TypeId, TypeId)]) { - for (constraint_type, _) in constraints { - state.constraints.push_given(*constraint_type); - } + state.push_wanted(constraint); } /// Emits wanted constraints for the superclasses of the class being derived. @@ -51,13 +48,14 @@ pub fn push_given_constraints(state: &mut CheckState, constraints: &[(TypeId, Ty pub fn emit_superclass_constraints( state: &mut CheckState, context: &CheckContext, - input: &ElaboratedDerive, + class_file: FileId, + class_id: TypeItemId, + arguments: &[(TypeId, TypeId)], ) -> QueryResult<()> where Q: ExternalQueries, { - let Some(class_info) = - constraint::lookup_file_class(state, context, input.class_file, input.class_id)? + let Some(class_info) = constraint::lookup_file_class(state, context, class_file, class_id)? 
else { return Ok(()); }; @@ -66,16 +64,14 @@ where return Ok(()); } - let initial_level = class_info.quantified_variables.0 + class_info.kind_variables.0; - let mut bindings = FxHashMap::default(); - for (index, &(argument_type, _)) in input.arguments.iter().enumerate() { - let level = debruijn::Level(initial_level + index as u32); - bindings.insert(level, argument_type); + let mut bindings: substitute::NameToType = FxHashMap::default(); + for (name, &(argument_type, _)) in class_info.type_variable_names.iter().zip(arguments) { + bindings.insert(name.clone(), argument_type); } for &(superclass, _) in class_info.superclasses.iter() { - let specialized = substitute::SubstituteBindings::on(state, &bindings, superclass); - state.constraints.push_wanted(specialized); + let specialised = substitute::SubstituteBindings::on(state, &bindings, superclass); + state.push_wanted(specialised); } Ok(()) @@ -91,7 +87,7 @@ where { let residual = state.solve_constraints(context)?; for constraint in residual { - let constraint = transfer::globalize(state, context, constraint); + let constraint = state.render_local_type(context, constraint); state.insert_error(ErrorKind::NoInstanceFound { constraint }); } Ok(()) @@ -105,7 +101,8 @@ pub fn register_derived_instance( state: &mut CheckState, context: &CheckContext, input: ElaboratedDerive, -) where +) -> QueryResult<()> +where Q: ExternalQueries, { let ElaboratedDerive { derive_id, constraints, arguments, class_file, class_id } = input; @@ -120,6 +117,8 @@ pub fn register_derived_instance( quantify::quantify_instance(state, &mut instance); + constraint::validate_instance_rows(state, context, class_file, class_id, &instance.arguments)?; + for (t, k) in instance.arguments.iter_mut() { *t = transfer::globalize(state, context, *t); *k = transfer::globalize(state, context, *k); @@ -131,6 +130,8 @@ pub fn register_derived_instance( } state.checked.derived.insert(derive_id, instance); + + Ok(()) } /// Looks up data constructors for a type, 
handling cross-file lookups. diff --git a/compiler-core/checking/src/algorithm/derive/traversable.rs b/compiler-core/checking/src/algorithm/derive/traversable.rs index e32c4d2ca..795da22a4 100644 --- a/compiler-core/checking/src/algorithm/derive/traversable.rs +++ b/compiler-core/checking/src/algorithm/derive/traversable.rs @@ -3,18 +3,17 @@ use building_types::QueryResult; use crate::ExternalQueries; -use crate::algorithm::derive::variance::{Variance, VarianceConfig, generate_variance_constraints}; -use crate::algorithm::derive::{self, tools}; +use crate::algorithm::derive::variance::{Variance, VarianceConfig}; +use crate::algorithm::derive::{self, DeriveStrategy, tools}; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::transfer; use crate::error::ErrorKind; -/// Checks a derive instance for Traversable. +/// Checks a derive instance head for Traversable. pub fn check_derive_traversable( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -25,32 +24,28 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; let traversable = Some((input.class_file, input.class_id)); - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Single((Variance::Covariant, traversable)); - 
generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } -/// Checks a derive instance for Bitraversable. +/// Checks a derive instance head for Bitraversable. pub fn check_derive_bitraversable( state: &mut CheckState, context: &CheckContext, input: tools::ElaboratedDerive, -) -> QueryResult<()> +) -> QueryResult> where Q: ExternalQueries, { @@ -61,27 +56,23 @@ where expected: 1, actual: input.arguments.len(), }); - return Ok(()); + return Ok(None); }; let Some((data_file, data_id)) = derive::extract_type_constructor(state, derived_type) else { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); - return Ok(()); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); + return Ok(None); }; // Bitraversable derivation emits Traversable constraints for wrapped parameters. 
let traversable = context.known_types.traversable; - tools::push_given_constraints(state, &input.constraints); - tools::emit_superclass_constraints(state, context, &input)?; - tools::register_derived_instance(state, context, input); + tools::register_derived_instance(state, context, input)?; let config = VarianceConfig::Pair( (Variance::Covariant, traversable), (Variance::Covariant, traversable), ); - generate_variance_constraints(state, context, data_file, data_id, derived_type, config)?; - - tools::solve_and_report_constraints(state, context) + Ok(Some(DeriveStrategy::VarianceConstraints { data_file, data_id, derived_type, config })) } diff --git a/compiler-core/checking/src/algorithm/derive/variance.rs b/compiler-core/checking/src/algorithm/derive/variance.rs index f1a733573..44211b38c 100644 --- a/compiler-core/checking/src/algorithm/derive/variance.rs +++ b/compiler-core/checking/src/algorithm/derive/variance.rs @@ -11,8 +11,8 @@ use crate::ExternalQueries; use crate::algorithm::derive::{self, tools}; use crate::algorithm::safety::safe_loop; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::{substitute, toolkit, transfer}; -use crate::core::{RowType, Type, TypeId, Variable, debruijn}; +use crate::algorithm::{substitute, toolkit}; +use crate::core::{Name, RowType, Type, TypeId, Variable}; use crate::error::ErrorKind; /// Variance of a type position. @@ -32,9 +32,9 @@ impl Variance { } /// A derived type parameter with its expected variance and wrapper class. -#[derive(Clone, Copy)] +#[derive(Clone)] struct DerivedParameter { - level: debruijn::Level, + name: Name, /// Expected variance for this parameter. expected: Variance, /// The class to emit when this parameter appears wrapped in a type application. 
@@ -42,8 +42,8 @@ struct DerivedParameter { } impl DerivedParameter { - fn new(level: debruijn::Level, (expected, class): ParameterConfig) -> DerivedParameter { - DerivedParameter { level, expected, class } + fn new(name: Name, (expected, class): ParameterConfig) -> DerivedParameter { + DerivedParameter { name, expected, class } } } @@ -59,8 +59,8 @@ enum DerivedSkolems { } impl DerivedSkolems { - fn get(&self, level: debruijn::Level) -> Option<&DerivedParameter> { - self.iter().find(|p| p.level == level) + fn get(&self, name: &Name) -> Option<&DerivedParameter> { + self.iter().find(|p| p.name == *name) } fn iter(&self) -> impl Iterator { @@ -77,6 +77,7 @@ impl DerivedSkolems { pub type ParameterConfig = (Variance, Option<(FileId, TypeItemId)>); /// Configuration for variance-aware derivation. +#[derive(Clone, Copy)] pub enum VarianceConfig { Single(ParameterConfig), Pair(ParameterConfig, ParameterConfig), @@ -132,40 +133,40 @@ where let type_arguments = toolkit::extract_all_applications(state, derived_type); let mut arguments_iter = type_arguments.into_iter(); let mut current_id = constructor_type; - let mut levels = vec![]; + let mut names = vec![]; safe_loop! 
{ current_id = state.normalize_type(current_id); match &state.storage[current_id] { Type::Forall(binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let inner = *inner; let argument_type = arguments_iter.next().unwrap_or_else(|| { - levels.push(binder_level); - let skolem = Variable::Skolem(binder_level, binder_kind); + names.push(binder_variable.clone()); + let skolem = Variable::Skolem(binder_variable.clone(), binder_kind); state.storage.intern(Type::Variable(skolem)) }); - current_id = substitute::SubstituteBound::on(state, binder_level, argument_type, inner); + current_id = substitute::SubstituteBound::on(state, binder_variable, argument_type, inner); } _ => break, } } - // The last N levels correspond to the N derived parameters. - let skolems = match (config, &levels[..]) { + // The last N names correspond to the N derived parameters. + let skolems = match (config, &names[..]) { (VarianceConfig::Single(config), [.., a]) => { - DerivedSkolems::Single(DerivedParameter::new(*a, *config)) + DerivedSkolems::Single(DerivedParameter::new(a.clone(), *config)) } (VarianceConfig::Pair(a_config, b_config), [.., a, b]) => DerivedSkolems::Pair( - DerivedParameter::new(*a, *a_config), - DerivedParameter::new(*b, *b_config), + DerivedParameter::new(a.clone(), *a_config), + DerivedParameter::new(b.clone(), *b_config), ), _ => { - let global_type = transfer::globalize(state, context, derived_type); - state.insert_error(ErrorKind::CannotDeriveForType { type_id: global_type }); + let type_message = state.render_local_type(context, derived_type); + state.insert_error(ErrorKind::CannotDeriveForType { type_message }); DerivedSkolems::Invalid } }; @@ -198,15 +199,15 @@ fn check_variance_field( let type_id = state.normalize_type(type_id); match state.storage[type_id].clone() { - Type::Variable(Variable::Skolem(level, _)) => { - if let Some(parameter) = skolems.get(level) + 
Type::Variable(Variable::Skolem(name, _)) => { + if let Some(parameter) = skolems.get(&name) && variance != parameter.expected { - let global = transfer::globalize(state, context, type_id); + let type_message = state.render_local_type(context, type_id); if variance == Variance::Covariant { - state.insert_error(ErrorKind::CovariantOccurrence { type_id: global }); + state.insert_error(ErrorKind::CovariantOccurrence { type_message }); } else { - state.insert_error(ErrorKind::ContravariantOccurrence { type_id: global }); + state.insert_error(ErrorKind::ContravariantOccurrence { type_message }); } } } @@ -223,16 +224,14 @@ fn check_variance_field( check_variance_field(state, context, argument, variance, skolems); } else { for parameter in skolems.iter() { - if contains_skolem_level(state, argument, parameter.level) { + if contains_skolem_name(state, argument, ¶meter.name) { if variance != parameter.expected { - let global = transfer::globalize(state, context, type_id); + let type_message = state.render_local_type(context, type_id); if variance == Variance::Covariant { - state.insert_error(ErrorKind::CovariantOccurrence { - type_id: global, - }); + state.insert_error(ErrorKind::CovariantOccurrence { type_message }); } else { state.insert_error(ErrorKind::ContravariantOccurrence { - type_id: global, + type_message, }); } } else if let Some(class) = parameter.class { @@ -260,30 +259,30 @@ fn check_variance_field( } } -/// Checks if a type contains a specific Skolem level. -fn contains_skolem_level(state: &mut CheckState, type_id: TypeId, target: debruijn::Level) -> bool { +/// Checks if a type contains a specific Skolem name. 
+fn contains_skolem_name(state: &mut CheckState, type_id: TypeId, target: &Name) -> bool { let type_id = state.normalize_type(type_id); match state.storage[type_id].clone() { - Type::Variable(Variable::Skolem(level, _)) => level == target, + Type::Variable(Variable::Skolem(name, _)) => name == *target, Type::Application(function, argument) | Type::KindApplication(function, argument) => { - contains_skolem_level(state, function, target) - || contains_skolem_level(state, argument, target) + contains_skolem_name(state, function, target) + || contains_skolem_name(state, argument, target) } Type::Function(argument, result) => { - contains_skolem_level(state, argument, target) - || contains_skolem_level(state, result, target) + contains_skolem_name(state, argument, target) + || contains_skolem_name(state, result, target) } Type::Row(RowType { ref fields, tail }) => { - fields.iter().any(|f| contains_skolem_level(state, f.id, target)) - || tail.is_some_and(|t| contains_skolem_level(state, t, target)) + fields.iter().any(|f| contains_skolem_name(state, f.id, target)) + || tail.is_some_and(|t| contains_skolem_name(state, t, target)) } Type::Forall(_, inner) | Type::Constrained(_, inner) | Type::Kinded(inner, _) => { - contains_skolem_level(state, inner, target) + contains_skolem_name(state, inner, target) } _ => false, diff --git a/compiler-core/checking/src/algorithm/equation.rs b/compiler-core/checking/src/algorithm/equation.rs new file mode 100644 index 000000000..96059abec --- /dev/null +++ b/compiler-core/checking/src/algorithm/equation.rs @@ -0,0 +1,278 @@ +use building_types::QueryResult; +use indexing::TermItemId; + +use crate::ExternalQueries; +use crate::algorithm::state::{CheckContext, CheckState}; +use crate::algorithm::{binder, exhaustiveness, inspect, term, toolkit, unification}; +use crate::core::{Type, TypeId}; +use crate::error::ErrorKind; + +/// Type origin for the [`patterns`] function. 
+pub enum ExhaustivenessOrigin<'a> { + /// The types of equation patterns comes from checking. + FromSignature(&'a [TypeId]), + /// The types of equation patterns comes from inference. + FromType(TypeId), +} + +/// Checks and reports exhaustiveness for an equation group. +pub fn patterns( + state: &mut CheckState, + context: &CheckContext, + origin: ExhaustivenessOrigin<'_>, + equations: &[lowering::Equation], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + match origin { + ExhaustivenessOrigin::FromSignature(signature) => { + let exhaustiveness = + exhaustiveness::check_equation_patterns(state, context, signature, equations)?; + state.report_exhaustiveness(exhaustiveness); + } + ExhaustivenessOrigin::FromType(t) => { + let (arguments, _) = toolkit::extract_function_arguments(state, t); + let exhaustiveness = + exhaustiveness::check_equation_patterns(state, context, &arguments, equations)?; + state.report_exhaustiveness(exhaustiveness); + } + }; + Ok(()) +} + +/// Constraints policy for the [`constraints`] function. +pub enum ConstraintsPolicy { + /// Residual constraints are returned to the caller. + Return, + /// Residual constraints are eagerly reported. + Report, +} + +/// Solves constraints for an equation group. +pub fn constraints( + state: &mut CheckState, + context: &CheckContext, + policy: ConstraintsPolicy, +) -> QueryResult> +where + Q: ExternalQueries, +{ + let residual = state.solve_constraints(context)?; + match policy { + ConstraintsPolicy::Return => Ok(residual), + ConstraintsPolicy::Report => { + for constraint in residual { + let constraint = state.render_local_type(context, constraint); + state.insert_error(ErrorKind::NoInstanceFound { constraint }); + } + Ok(vec![]) + } + } +} + +/// Infers the type of top-level value group equations. +/// +/// This function depends on the unification variable created for the current +/// binding group by [`CheckState::with_term_group`]. 
This function returns +/// the inferred type and residual constraints for later generalisation via +/// [`term_item::commit_value_group`]. +/// +/// [`term_item::commit_value_group`]: crate::algorithm::term_item::commit_value_group +pub fn infer_equations( + state: &mut CheckState, + context: &CheckContext, + item_id: TermItemId, + equations: &[lowering::Equation], +) -> QueryResult<(TypeId, Vec)> +where + Q: ExternalQueries, +{ + let group_type = state + .binding_group + .lookup_term(item_id) + .expect("invariant violated: invalid binding_group in type inference"); + + infer_equations_core(state, context, group_type, equations)?; + + let origin = ExhaustivenessOrigin::FromType(group_type); + patterns(state, context, origin, equations)?; + + let residual = constraints(state, context, ConstraintsPolicy::Return)?; + Ok((group_type, residual)) +} + +/// Infers the type of value group equations. +/// +/// This function infers the type of each value equation, and then checks +/// that it's a subtype of the provided `group_type`. The `group_type` is +/// usually a unification variable. +/// +/// This function is used to implement inference for the following: +/// - [`lowering::TermItemIr::ValueGroup`] +/// - [`lowering::LetBindingNameGroup`] +/// - [`lowering::InstanceMemberGroup`] +pub(crate) fn infer_equations_core( + state: &mut CheckState, + context: &CheckContext, + group_type: TypeId, + equations: &[lowering::Equation], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let minimum_equation_arity = + equations.iter().map(|equation| equation.binders.len()).min().unwrap_or(0); + + for equation in equations { + let mut argument_types = vec![]; + for &binder_id in equation.binders.iter() { + let argument_type = binder::infer_binder(state, context, binder_id)?; + argument_types.push(argument_type); + } + + let result_type = state.fresh_unification_type(context); + + // Only use the minimum number of binders across equations. 
+ let argument_types = &argument_types[..minimum_equation_arity]; + let equation_type = state.make_function(argument_types, result_type); + let _ = unification::subtype(state, context, equation_type, group_type)?; + + if let Some(guarded) = &equation.guarded { + let inferred_type = term::infer_guarded_expression(state, context, guarded)?; + let _ = unification::subtype(state, context, inferred_type, result_type)?; + } + } + + Ok(()) +} + +/// Checks the type of value group equations. +/// +/// This function checks each value equation against the signature previously +/// checked by the [`check_term_signature`] and [`inspect_signature_core`] +/// functions. +/// +/// This function depends on a couple of side-effects produced by the +/// [`inspect_signature_core`] function. Type variables that appear in the +/// signature are made visible through rebinding, and given constraints +/// are pushed onto the environment. See the implementation for more details. +/// +/// This function solves all constraints during checking using the +/// [`CheckState::solve_constraints`] function, and reports residual +/// constraints as [`ErrorKind::NoInstanceFound`] errors. 
+/// +/// [`check_term_signature`]: crate::algorithm::term_item::check_term_signature +/// [`inspect_signature_core`]: crate::algorithm::inspect::inspect_signature_core +pub fn check_equations( + state: &mut CheckState, + context: &CheckContext, + signature_id: lowering::TypeId, + signature: inspect::InspectSignature, + equations: &[lowering::Equation], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + check_equations_core(state, context, signature_id, &signature, equations)?; + + let origin = ExhaustivenessOrigin::FromSignature(&signature.arguments); + patterns(state, context, origin, equations)?; + + let _ = constraints(state, context, ConstraintsPolicy::Report)?; + + if let Some(variable) = signature.variables.first() { + state.type_scope.unbind_name(&variable.variable); + } + + Ok(()) +} + +pub(crate) fn check_equations_core( + state: &mut CheckState, + context: &CheckContext, + signature_id: lowering::TypeId, + signature: &inspect::InspectSignature, + equations: &[lowering::Equation], +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let expected_arity = signature.arguments.len(); + + for equation in equations { + let equation_arity = equation.binders.len(); + + if equation_arity > expected_arity { + let expected = expected_arity as u32; + let actual = equation_arity as u32; + state.insert_error(ErrorKind::TooManyBinders { + signature: signature_id, + expected, + actual, + }); + } + + for (&binder_id, &argument_type) in equation.binders.iter().zip(&signature.arguments) { + let _ = binder::check_argument_binder(state, context, binder_id, argument_type)?; + } + + if equation_arity > expected_arity { + let extra_binders = &equation.binders[expected_arity..]; + for &binder_id in extra_binders { + let _ = binder::infer_binder(state, context, binder_id)?; + } + } + + // Compute expected result type based on how many binders there + // are on each equation, wrapping remaining arguments if partial. + // + // foo :: forall a. 
a -> a -> Int + // foo = \a b -> a + b + // foo a = \b -> a + b + // foo a b = a + b + // + // signature.arguments := [a, a] + // signature.result := Int + // + // expected_type := + // 0 binders := forall a. a -> a -> Int + // 1 binder := a -> Int + // 2 binders := Int + // + // This matters for type synonyms that expand to functions. The + // return type synonym introduces hidden function arrows that + // increase the expected arity after expansion. + // + // type ReturnsInt a = a -> Int + // + // bar :: forall a. ReturnsInt a -> ReturnsInt a + // bar = \f -> f + // bar f = f + // bar f a = f a + // + // signature.arguments := [ReturnsInt a, a] + // signature.result := Int + // + // expected_type := + // 0 binders := forall a. ReturnsInt a -> ReturnsInt a + // 1 binder := ReturnsInt a + // 2 binders := Int + let expected_type = if equation_arity == 0 { + signature.function + } else if equation_arity >= expected_arity { + signature.result + } else { + let remaining_arguments = &signature.arguments[equation_arity..]; + remaining_arguments.iter().rfold(signature.result, |result, &argument| { + state.storage.intern(Type::Function(argument, result)) + }) + }; + + if let Some(guarded) = &equation.guarded { + term::check_guarded_expression(state, context, guarded, expected_type)?; + } + } + + Ok(()) +} diff --git a/compiler-core/checking/src/algorithm/exhaustiveness.rs b/compiler-core/checking/src/algorithm/exhaustiveness.rs new file mode 100644 index 000000000..7bddc556f --- /dev/null +++ b/compiler-core/checking/src/algorithm/exhaustiveness.rs @@ -0,0 +1,1186 @@ +mod convert; +mod pretty; + +use std::iter; + +use building_types::QueryResult; +use files::FileId; +use indexing::TermItemId; +use itertools::Itertools; +use rustc_hash::FxHashSet; +use smol_str::SmolStr; + +use crate::algorithm::state::{CheckContext, CheckState}; +use crate::algorithm::{derive, toolkit}; +use crate::{ExternalQueries, TypeId}; + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct 
Pattern { + pub kind: PatternKind, + pub t: TypeId, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum PatternKind { + Wildcard, + Constructor { constructor: PatternConstructor }, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub enum PatternConstructor { + DataConstructor { file_id: FileId, item_id: TermItemId, fields: Vec }, + Record { labels: Vec, fields: Vec }, + Array { fields: Vec }, + Boolean(bool), + Integer(i32), + Number(bool, SmolStr), + String(SmolStr), + Char(char), +} + +impl PatternConstructor { + /// Returns the arity of this pattern constructor. + /// + /// [`PatternConstructor::DataConstructor`], [`PatternConstructor::Record`], + /// and [`PatternConstructor::Array`] have non-zero arity based on their fields. + pub fn arity(&self) -> usize { + match self { + PatternConstructor::DataConstructor { fields, .. } => fields.len(), + PatternConstructor::Record { fields, .. } => fields.len(), + PatternConstructor::Array { fields } => fields.len(), + _ => 0, + } + } + + /// Returns the fields of this pattern constructor. + /// + /// [`PatternConstructor::DataConstructor`], [`PatternConstructor::Record`], + /// and [`PatternConstructor::Array`] have fields corresponding to their arguments. + pub fn fields(&self) -> &[PatternId] { + match self { + PatternConstructor::DataConstructor { fields, .. } => fields, + PatternConstructor::Record { fields, .. } => fields, + PatternConstructor::Array { fields } => fields, + _ => &[], + } + } + + /// Checks if a pattern constructor matches another. + /// + /// This is used during the specialisation algorithm to determine if a + /// pattern row should be included in the specialised pattern matrix. + pub fn matches(&self, other: &PatternConstructor) -> bool { + match (self, other) { + ( + PatternConstructor::DataConstructor { file_id: f1, item_id: i1, .. }, + PatternConstructor::DataConstructor { file_id: f2, item_id: i2, .. 
}, + ) => f1 == f2 && i1 == i2, + // Any record constructor matches any other record constructor + (PatternConstructor::Record { .. }, PatternConstructor::Record { .. }) => true, + // Array constructors match only when their lengths match + ( + PatternConstructor::Array { fields: f1 }, + PatternConstructor::Array { fields: f2 }, + ) => f1.len() == f2.len(), + (PatternConstructor::Boolean(b1), PatternConstructor::Boolean(b2)) => b1 == b2, + (PatternConstructor::Integer(i1), PatternConstructor::Integer(i2)) => i1 == i2, + (PatternConstructor::Number(n1, v1), PatternConstructor::Number(n2, v2)) => { + n1 == n2 && v1 == v2 + } + (PatternConstructor::String(s1), PatternConstructor::String(s2)) => s1 == s2, + (PatternConstructor::Char(c1), PatternConstructor::Char(c2)) => c1 == c2, + _ => false, + } + } + + /// Reconstructs a [`PatternConstructor`] with the given fields. + /// + /// For [`PatternConstructor::DataConstructor`], [`PatternConstructor::Record`], + /// and [`PatternConstructor::Array`], this function overrides the fields. + /// Otherwise, the fields must be empty as enforced by an assertion. + /// + /// This algorithm is used in [`algorithm_m`] to replace the fields of the + /// pattern constructor we're specialising on with the fields generated by + /// the witnesses. + pub fn reconstruct(&self, fields: &[PatternId]) -> PatternConstructor { + match *self { + PatternConstructor::DataConstructor { file_id, item_id, .. } => { + let fields = fields.to_vec(); + PatternConstructor::DataConstructor { file_id, item_id, fields } + } + PatternConstructor::Record { ref labels, .. } => { + let fields = fields.to_vec(); + PatternConstructor::Record { labels: labels.clone(), fields } + } + PatternConstructor::Array { .. 
} => { + let fields = fields.to_vec(); + PatternConstructor::Array { fields } + } + PatternConstructor::Boolean(b) => { + assert!(fields.is_empty(), "Boolean constructor has arity 0"); + PatternConstructor::Boolean(b) + } + PatternConstructor::Integer(i) => { + assert!(fields.is_empty(), "Integer constructor has arity 0"); + PatternConstructor::Integer(i) + } + PatternConstructor::Number(negative, ref n) => { + assert!(fields.is_empty(), "Number constructor has arity 0"); + PatternConstructor::Number(negative, SmolStr::clone(n)) + } + PatternConstructor::String(ref s) => { + assert!(fields.is_empty(), "String constructor has arity 0"); + PatternConstructor::String(SmolStr::clone(s)) + } + PatternConstructor::Char(c) => { + assert!(fields.is_empty(), "Char constructor has arity 0"); + PatternConstructor::Char(c) + } + } + } +} + +impl MissingConstructor { + /// Constructs a witness pattern for this missing constructor. + pub fn construct_missing_witness(&self, state: &mut CheckState, t: TypeId) -> PatternId { + match *self { + MissingConstructor::DataConstructor { file_id, item_id, ref fields } => { + let fields = fields + .iter() + .map(|&field_type| state.allocate_wildcard(field_type)) + .collect_vec(); + + let constructor = PatternConstructor::DataConstructor { file_id, item_id, fields }; + let pattern = PatternKind::Constructor { constructor }; + + state.allocate_pattern(pattern, t) + } + MissingConstructor::Boolean(b) => { + let constructor = PatternConstructor::Boolean(b); + let pattern = PatternKind::Constructor { constructor }; + state.allocate_pattern(pattern, t) + } + } + } +} + +pub type PatternId = interner::Id; +pub type PatternStorage = interner::Interner; + +type PatternVector = Vec; +type PatternMatrix = Vec; +pub type WitnessVector = Vec; + +/// Determines if a [`PatternVector`] is useful with respect to a [`PatternMatrix`]. +/// +/// A pattern vector is useful if it matches at least one value not matched by +/// any pattern vector in the matrix. 
This is one of the core algorithms from +/// Maranget's "Warnings for pattern matching" paper. +/// +/// See [`algorithm_u_constructor`] and [`algorithm_u_wildcard`] for reference. +fn algorithm_u( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, +) -> QueryResult +where + Q: ExternalQueries, +{ + // Base case: any pattern is useful against an empty matrix + if matrix.is_empty() { + return Ok(true); + } + + // Base case: an empty pattern vector against non-empty matrix is useless + let [first_pattern, ..] = vector[..] else { + return Ok(false); + }; + + let first_pattern = state.patterns[first_pattern].clone(); + + match first_pattern.kind { + PatternKind::Constructor { constructor } => { + algorithm_u_constructor(state, context, matrix, vector, constructor) + } + PatternKind::Wildcard => { + algorithm_u_wildcard(state, context, matrix, vector, first_pattern.t) + } + } +} + +/// Induction 1 +/// +/// This function uses specialisation to spread the provided [`PatternConstructor`] +/// over both the [`PatternMatrix`] and the [`PatternVector`], before calling +/// [`algorithm_u`] recursively with the specialised structures. +fn algorithm_u_constructor( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + constructor: PatternConstructor, +) -> QueryResult +where + Q: ExternalQueries, +{ + let constructor = canonicalise_record_constructor(state, constructor, matrix); + let specialised_matrix = specialise_matrix(state, &constructor, matrix); + + let Some(specialised_vector) = specialise_vector(state, &constructor, vector) else { + unreachable!("invariant violated: vector contains constructor"); + }; + + algorithm_u(state, context, &specialised_matrix, &specialised_vector) +} + +/// Induction 2 +/// +/// This function collects all constructor references from the first column of +/// the matrix into a collection called the sigma. 
+/// +/// If the sigma is complete, for each constructor in the sigma, we specialise +/// the pattern matrix and pattern vector against it. Then, we recursively call +/// [`algorithm_u`] against the specialised structures. The pattern vector is +/// useful if any specialised pattern vector is useful against its specialised +/// pattern matrix. +/// +/// If the sigma is incomplete, we recursively call [`algorithm_u`] against the +/// [`default_matrix`] of the pattern matrix and the tail of the pattern vector. +fn algorithm_u_wildcard( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let sigma = collect_sigma(state, context, matrix, t)?; + let complete = sigma_is_complete(context, &sigma)?; + + if complete { + algorithm_u_wildcard_complete(state, context, matrix, vector, sigma) + } else { + algorithm_u_wildcard_incomplete(state, context, matrix, vector) + } +} + +fn algorithm_u_wildcard_complete( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + sigma: Sigma, +) -> QueryResult +where + Q: ExternalQueries, +{ + for constructor in sigma.constructors { + let specialised_matrix = specialise_matrix(state, &constructor, matrix); + + let Some(specialised_vector) = specialise_vector(state, &constructor, vector) else { + unreachable!("invariant violated: vector contains constructor"); + }; + + if algorithm_u(state, context, &specialised_matrix, &specialised_vector)? 
{ + return Ok(true); + } + } + Ok(false) +} + +fn algorithm_u_wildcard_incomplete( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, +) -> QueryResult +where + Q: ExternalQueries, +{ + let default = default_matrix(state, matrix); + let tail_columns = vector[1..].to_vec(); + algorithm_u(state, context, &default, &tail_columns) +} + +/// Determines the matching [`WitnessVector`] given a [`PatternMatrix`] +/// and some [`PatternVector`]. +/// +/// If the pattern vector is useful against the provided matrix, that is, +/// there are cases yet to be covered, this function will return a non-empty +/// list of witnesses. Inversely, if the pattern vector is useless against +/// the provided matrix, that is, the cases are exhaustive, this function +/// will return [`None`]. +/// +/// So... what exactly are witnesses? In the paper, these are defined as +/// 'value vectors' that are known not to be matched against the pattern +/// matrix but are instantiations of the pattern vector. In our implementation, +/// these witnesses are pattern vectors that denote values not yet covered by +/// the matrix. +/// +/// The [`algorithm_m_wildcard`] induction is prolific for producing these +/// these witnesses as it compares the constructors that appear in the +/// matrix against the constructors available in the checking environment. +fn algorithm_m( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, +) -> QueryResult>> +where + Q: ExternalQueries, +{ + // Base case: any pattern is its own witness against an empty matrix + if matrix.is_empty() { + let vector = vector.clone(); + return Ok(Some(vec![vector])); + } + + // Base case: an empty pattern vector against non-empty matrix has no witnesses + let [first_pattern, ..] = vector[..] 
else { + return Ok(None); + }; + + let first_pattern = state.patterns[first_pattern].clone(); + + match first_pattern.kind { + PatternKind::Constructor { constructor } => { + algorithm_m_constructor(state, context, matrix, vector, constructor, first_pattern.t) + } + PatternKind::Wildcard => { + algorithm_m_wildcard(state, context, matrix, vector, first_pattern.t) + } + } +} + +/// Induction 1 +/// +/// This function uses specialisation to spread the provided [`PatternConstructor`] +/// over both the [`PatternMatrix`] and the [`PatternVector`], before calling +/// [`algorithm_m`] recursively with the specialised structures. +/// +/// The final set of witnesses returned by this induction includes a +/// reconstruction of the original constructor passed to this function. +/// +/// See documentation for [`specialise_matrix`] and [`specialise_vector`] for +/// more information on what specialisation entails given a constructor. +fn algorithm_m_constructor( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + constructor: PatternConstructor, + t: TypeId, +) -> QueryResult>> +where + Q: ExternalQueries, +{ + let constructor = canonicalise_record_constructor(state, constructor, matrix); + let arity = constructor.arity(); + + let specialised_matrix = specialise_matrix(state, &constructor, matrix); + + let Some(specialised_vector) = specialise_vector(state, &constructor, vector) else { + unreachable!("invariant violated: vector contains constructor"); + }; + + let witnesses = algorithm_m(state, context, &specialised_matrix, &specialised_vector)?; + + let Some(witnesses) = witnesses else { + return Ok(None); + }; + + let witnesses = witnesses.into_iter().map(|witness| { + let (argument_columns, tail_columns) = witness.split_at(arity); + + let constructor = constructor.reconstruct(argument_columns); + let constructor_id = state.allocate_constructor(constructor, t); + let tail_columns = tail_columns.iter().copied(); + + 
iter::once(constructor_id).chain(tail_columns).collect() + }); + + let witnesses = witnesses.collect(); + Ok(Some(witnesses)) +} + +/// Induction 2 +/// +/// If the first column in the [`PatternVector`] is a wildcard, this function +/// produces witnesses that correspond to patterns not yet covered by the +/// [`PatternMatrix`]. This is where pattern suggestion warnings are built +/// for the compiler! +/// +/// This function collects all constructor references from the first column +/// of all rows in the matrix into a collection called the sigma. We handle +/// the structure in different ways: +/// +/// If the sigma is complete, for each constructor in the sigma, we apply +/// a rule similar to [`algorithm_m_constructor`] to collect witnesses +/// across all constructors. +/// +/// If the sigma is incomplete, we recursively apply [`algorithm_m`] to the +/// [`default_matrix`] of the pattern matrix and the tail columns of the +/// pattern vector. The induction ends if the recursive call is exhaustive. +/// +/// If the recursive call returns witnesses, and the sigma is non-empty, +/// we move our attention to generating [`Constructor`] patterns for +/// constructors not present in the sigma. This is what we use for +/// reporting pattern warnings. Otherwise, if the sigma is empty, we +/// simply produce a wildcard pattern. 
+fn algorithm_m_wildcard( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + t: TypeId, +) -> QueryResult>> +where + Q: ExternalQueries, +{ + let sigma = collect_sigma(state, context, matrix, t)?; + let complete = sigma_is_complete(context, &sigma)?; + if complete { + algorithm_m_wildcard_complete(state, context, matrix, vector, t, &sigma) + } else { + algorithm_m_wildcard_incomplete(state, context, matrix, vector, t, &sigma) + } +} + +fn algorithm_m_wildcard_complete( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + t: TypeId, + sigma: &Sigma, +) -> QueryResult>> +where + Q: ExternalQueries, +{ + let mut all_witnesses = vec![]; + + for constructor in &sigma.constructors { + let arity = constructor.arity(); + + let specialised_matrix = specialise_matrix(state, constructor, matrix); + + let Some(specialised_vector) = specialise_vector(state, constructor, vector) else { + unreachable!("invariant violated: vector contains constructor"); + }; + + if let Some(witnesses) = + algorithm_m(state, context, &specialised_matrix, &specialised_vector)? 
+ { + for witness in witnesses { + let (argument_columns, tail_columns) = witness.split_at(arity); + + let constructor = constructor.reconstruct(argument_columns); + let constructor_id = state.allocate_constructor(constructor, t); + let tail_columns = tail_columns.iter().copied(); + + let witnesses = iter::once(constructor_id).chain(tail_columns).collect(); + all_witnesses.push(witnesses); + } + } + } + + if all_witnesses.is_empty() { Ok(None) } else { Ok(Some(all_witnesses)) } +} + +fn algorithm_m_wildcard_incomplete( + state: &mut CheckState, + context: &CheckContext, + matrix: &PatternMatrix, + vector: &PatternVector, + t: TypeId, + sigma: &Sigma, +) -> QueryResult>> +where + Q: ExternalQueries, +{ + let default = default_matrix(state, matrix); + let tail_columns = vector[1..].to_vec(); + + let witnesses = algorithm_m(state, context, &default, &tail_columns)?; + + let Some(witnesses) = witnesses else { + return Ok(None); + }; + + let first_column = if let Some(constructor) = sigma.missing.first() { + constructor.construct_missing_witness(state, t) + } else { + state.allocate_wildcard(t) + }; + + let witness = witnesses + .into_iter() + .map(|witness| iter::once(first_column).chain(witness).collect()) + .collect(); + + Ok(Some(witness)) +} + +/// Computes a canonical [`PatternConstructor::Record`] that includes the union of all +/// record labels appearing in the first column of the matrix and the given constructor. +/// +/// Record patterns in PureScript can mention different subsets of the full row type. +/// For example, `{ a }` and `{ a, b }` both match `{ a :: Int, b :: Int }`, but they +/// produce record constructors with different arities. The specialisation algorithm +/// requires all rows to have the same width, so we normalise to a canonical label set +/// before specialising. +/// +/// For non-record constructors this function returns the constructor unchanged. 
+fn canonicalise_record_constructor(
+    state: &CheckState,
+    constructor: PatternConstructor,
+    matrix: &PatternMatrix,
+) -> PatternConstructor {
+    let PatternConstructor::Record { labels, fields } = &constructor else {
+        return constructor;
+    };
+
+    let mut canonical = {
+        let labels = labels.iter().cloned();
+        let fields = fields.iter().copied();
+        labels.zip(fields).collect_vec()
+    };
+
+    let initial_length = canonical.len();
+
+    for row in matrix {
+        let Some(&first) = row.first() else {
+            continue;
+        };
+
+        let pattern = &state.patterns[first];
+        if let PatternKind::Constructor {
+            constructor: PatternConstructor::Record { labels, fields },
+        } = &pattern.kind
+        {
+            for (label, &field) in iter::zip(labels, fields) {
+                if !canonical.iter().any(|(existing, _)| existing == label) {
+                    let label = SmolStr::clone(label);
+                    canonical.push((label, field));
+                }
+            }
+        }
+    }
+
+    if canonical.len() == initial_length {
+        return constructor;
+    }
+
+    // Sort by label so the canonical constructor has a deterministic field order.
+    canonical.sort_by(|(a, _), (b, _)| a.cmp(b));
+
+    let (labels, fields) = canonical.into_iter().unzip();
+    PatternConstructor::Record { labels, fields }
+}
+
+/// Specialises a [`PatternMatrix`] given a [`PatternConstructor`].
+///
+/// See documentation below for [`specialise_vector`].
+fn specialise_matrix(
+    state: &mut CheckState,
+    expected: &PatternConstructor,
+    matrix: &PatternMatrix,
+) -> PatternMatrix {
+    matrix.iter().filter_map(|row| specialise_vector(state, expected, row)).collect()
+}
+
+/// Specialises a [`PatternVector`] given a [`PatternConstructor`].
+///
+/// Specialisation takes a pattern vector and applies the following rules:
+/// 1. If the first column is a wildcard, it expands it to `n` wildcards
+/// where `n` is the arity of the expected [`PatternConstructor`].
+/// For non-ADT constructors, arity is 0 (no expansion needed).
+/// 2. 
It returns `None` for constructors that are not the expected +/// [`PatternConstructor`], which excludes them from the specialised matrix. +/// For example, a pattern vector specialised on `Just` removes `Nothing`. +/// 3. For matching constructors, it 'splats' the fields, effectively turning +/// a pattern vector like `[Just _]` into `[_]` or `[Nothing]` into `[]`. +/// For non-ADT constructors, arity is 0 so nothing is splatted. +fn specialise_vector( + state: &mut CheckState, + expected: &PatternConstructor, + vector: &PatternVector, +) -> Option { + let [first_column_id, ref tail_columns @ ..] = vector[..] else { + unreachable!("invariant violated: specialise_vector processed empty row"); + }; + + // Clone to release any borrow on state.patterns, allowing mutable + // access later when allocating wildcard patterns for record padding. + let first_pattern = state.patterns[first_column_id].clone(); + + if let PatternKind::Wildcard = first_pattern.kind { + // Expand wildcard to the expected constructor's arity + match expected { + PatternConstructor::DataConstructor { fields, .. } + | PatternConstructor::Record { fields, .. 
} => { + let wildcards = fields.iter().map(|&pattern_id| { + let t = state.patterns[pattern_id].t; + state.allocate_wildcard(t) + }); + let tail_columns = tail_columns.iter().copied(); + return Some(iter::chain(wildcards, tail_columns).collect()); + } + PatternConstructor::Array { fields } => { + let wildcards = fields.iter().map(|&pattern_id| { + let t = state.patterns[pattern_id].t; + state.allocate_wildcard(t) + }); + let tail_columns = tail_columns.iter().copied(); + return Some(iter::chain(wildcards, tail_columns).collect()); + } + _ => { + return Some(tail_columns.to_vec()); + } + } + } + + let PatternKind::Constructor { constructor } = &first_pattern.kind else { + return Some(tail_columns.to_vec()); + }; + + // Check if constructors match + if !constructor.matches(expected) { + return None; + } + + // Splat fields for constructors with arity + match constructor { + PatternConstructor::DataConstructor { fields, .. } => { + Some(iter::chain(fields, tail_columns).copied().collect()) + } + PatternConstructor::Record { labels: actual_labels, fields: actual_fields } => { + specialise_record_fields(state, expected, actual_labels, actual_fields, tail_columns) + } + PatternConstructor::Array { fields } => { + Some(iter::chain(fields, tail_columns).copied().collect()) + } + _ => Some(tail_columns.to_vec()), + } +} + +/// Maps the fields of an actual record pattern to the expected (canonical) label set. +/// +/// When record patterns in different branches mention different subsets of labels, +/// the actual pattern may have fewer labels than the expected canonical constructor. +/// This function aligns the actual fields to the expected label positions, inserting +/// wildcard patterns for any labels present in the expected set but absent from the +/// actual pattern. 
+fn specialise_record_fields( + state: &mut CheckState, + expected: &PatternConstructor, + actual_labels: &[SmolStr], + actual_fields: &[PatternId], + tail_columns: &[PatternId], +) -> Option { + let PatternConstructor::Record { labels: expected_labels, fields: expected_fields } = expected + else { + return Some( + iter::chain(actual_fields.iter().copied(), tail_columns.iter().copied()).collect(), + ); + }; + + // Fast path: labels match exactly. + if actual_labels == expected_labels.as_slice() { + return Some( + iter::chain(actual_fields.iter().copied(), tail_columns.iter().copied()).collect(), + ); + } + + let mut mapped_fields = Vec::with_capacity(expected_labels.len()); + for (expected_label, &expected_field) in expected_labels.iter().zip(expected_fields.iter()) { + if let Some(position) = actual_labels.iter().position(|label| label == expected_label) { + mapped_fields.push(actual_fields[position]); + } else { + let t = state.patterns[expected_field].t; + mapped_fields.push(state.allocate_wildcard(t)); + } + } + + Some(mapped_fields.into_iter().chain(tail_columns.iter().copied()).collect()) +} + +fn default_matrix(state: &CheckState, matrix: &PatternMatrix) -> PatternMatrix { + let filter_map = matrix.iter().filter_map(|row| { + let [first_column, ref default_columns @ ..] = row[..] else { + unreachable!("invariant violated: default_matrix processed empty row"); + }; + if let PatternKind::Wildcard = state.patterns[first_column].kind { + Some(default_columns.to_vec()) + } else { + None + } + }); + filter_map.collect() +} + +/// Key for identifying a unique constructor. +#[derive(Clone, PartialEq, Eq, Hash)] +enum ConstructorKey { + Data(FileId, TermItemId), + Record, + Array(usize), + Boolean(bool), + Integer(i32), + Number(bool, SmolStr), + String(SmolStr), + Char(char), +} + +impl ConstructorKey { + fn from_pattern_constructor(pc: &PatternConstructor) -> Self { + match pc { + PatternConstructor::DataConstructor { file_id, item_id, .. 
} => {
+                ConstructorKey::Data(*file_id, *item_id)
+            }
+            // All record constructors share the same key (single constructor semantics)
+            PatternConstructor::Record { .. } => ConstructorKey::Record,
+            // Array constructors are keyed by their length
+            PatternConstructor::Array { fields } => ConstructorKey::Array(fields.len()),
+            PatternConstructor::Boolean(b) => ConstructorKey::Boolean(*b),
+            PatternConstructor::Integer(i) => ConstructorKey::Integer(*i),
+            PatternConstructor::Number(negative, n) => {
+                let n = SmolStr::clone(n);
+                ConstructorKey::Number(*negative, n)
+            }
+            PatternConstructor::String(s) => {
+                let s = SmolStr::clone(s);
+                ConstructorKey::String(s)
+            }
+            PatternConstructor::Char(c) => ConstructorKey::Char(*c),
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+struct Sigma {
+    constructors: Vec,
+    missing: Vec,
+}
+
+#[derive(Clone, Debug)]
+enum MissingConstructor {
+    DataConstructor { file_id: FileId, item_id: TermItemId, fields: Vec },
+    Boolean(bool),
+}
+
+/// Extracts the sigma, a set of constructors from the first column of the matrix.
+///
+/// Returns a list of unique constructors seen in the first column, keeping one
+/// representative [`PatternConstructor`] per distinct constructor. Wildcards are
+/// ignored; record constructors are canonicalised below to a common label set.
+fn collect_sigma(
+    state: &mut CheckState,
+    context: &CheckContext,
+    matrix: &PatternMatrix,
+    scrutinee_type: TypeId,
+) -> QueryResult
+where
+    Q: ExternalQueries,
+{
+    let mut seen = FxHashSet::default();
+    let mut constructors = vec![];
+
+    for row in matrix {
+        let [first_column, ..] = row[..] else {
+            continue;
+        };
+        let pattern = &state.patterns[first_column];
+        if let PatternKind::Constructor { constructor } = &pattern.kind {
+            let key = ConstructorKey::from_pattern_constructor(constructor);
+            if seen.insert(key) {
+                constructors.push(constructor.clone());
+            }
+        }
+    }
+
+    // Canonicalise record constructors to include all labels from the matrix. 
+ // Different record patterns may mention different subsets of the row type, + // and the specialisation algorithm requires a consistent arity. + if let Some(index) = + constructors.iter().position(|c| matches!(c, PatternConstructor::Record { .. })) + { + let record = constructors.remove(index); + let canonical = canonicalise_record_constructor(state, record, matrix); + constructors.insert(index, canonical); + } + + let missing = collect_missing_constructors(state, context, scrutinee_type, &constructors)?; + Ok(Sigma { constructors, missing }) +} + +/// Checks whether the set of constructors (sigma) is complete for the scrutinee type. +/// +/// A sigma is complete if it contains all constructors of the data type. +/// For Boolean, both true and false must be present. +/// For records, there is exactly one constructor, so any record pattern makes sigma complete. +/// For other literal constructors (Integer, Number, String, Char), sigma is never complete +/// (infinite domains). +/// If we can't determine the type or its constructors, we conservatively return false. +fn sigma_is_complete(context: &CheckContext, sigma: &Sigma) -> QueryResult +where + Q: ExternalQueries, +{ + // Empty sigma is never complete + let Some(first) = sigma.constructors.first() else { + return Ok(false); + }; + + match first { + PatternConstructor::DataConstructor { file_id, item_id, .. } => { + // Get the indexed module for the constructor's file + let indexed = context.queries.indexed(*file_id)?; + + // Find the type this constructor belongs to + let Some(type_item_id) = indexed.pairs.constructor_type(*item_id) else { + return Ok(false); + }; + + // Get all constructors for this type + let all_constructors: FxHashSet = + indexed.pairs.data_constructors(type_item_id).collect(); + + // Check if sigma covers all constructors + let sigma_terms: FxHashSet = sigma + .constructors + .iter() + .filter_map(|c| match c { + PatternConstructor::DataConstructor { item_id, .. 
} => Some(*item_id), + _ => None, + }) + .collect(); + + Ok(all_constructors.iter().all(|term_id| sigma_terms.contains(term_id))) + } + // Records have exactly one constructor, so sigma is always complete for records + PatternConstructor::Record { .. } => Ok(true), + // Arrays have infinite possible lengths, so sigma is never complete + PatternConstructor::Array { .. } => Ok(false), + PatternConstructor::Boolean(_) => { + // Boolean is complete when both true and false are present + let has_true = + sigma.constructors.iter().any(|c| matches!(c, PatternConstructor::Boolean(true))); + let has_false = + sigma.constructors.iter().any(|c| matches!(c, PatternConstructor::Boolean(false))); + Ok(has_true && has_false) + } + // Other literal constructors have infinite domains, so they're never complete + PatternConstructor::Integer(_) + | PatternConstructor::Number(_, _) + | PatternConstructor::String(_) + | PatternConstructor::Char(_) => Ok(false), + } +} + +fn collect_missing_constructors( + state: &mut CheckState, + context: &CheckContext, + scrutinee_type: TypeId, + constructors: &[PatternConstructor], +) -> QueryResult> +where + Q: ExternalQueries, +{ + let Some(first_constructor) = constructors.first() else { + return Ok(vec![]); + }; + + match first_constructor { + PatternConstructor::DataConstructor { file_id, item_id, .. } => { + let indexed = context.queries.indexed(*file_id)?; + + let Some(type_item_id) = indexed.pairs.constructor_type(*item_id) else { + return Ok(vec![]); + }; + + let sigma: FxHashSet = constructors + .iter() + .filter_map(|c| match c { + PatternConstructor::DataConstructor { item_id, .. 
} => Some(*item_id), + _ => None, + }) + .collect(); + let arguments = toolkit::extract_all_applications(state, scrutinee_type); + + let mut missing = vec![]; + for missing_item_id in indexed.pairs.data_constructors(type_item_id) { + if !sigma.contains(&missing_item_id) { + let fields = constructor_field_types( + state, + context, + *file_id, + missing_item_id, + &arguments, + )?; + missing.push(MissingConstructor::DataConstructor { + file_id: *file_id, + item_id: missing_item_id, + fields, + }); + } + } + + Ok(missing) + } + // Arrays have infinite possible lengths, so we don't report specific missing values. + // The algorithm will fall back to wildcard suggestion. + PatternConstructor::Array { .. } => Ok(vec![]), + PatternConstructor::Boolean(_) => { + // Check which boolean values are missing + let has_true = + constructors.iter().any(|c| matches!(c, PatternConstructor::Boolean(true))); + let has_false = + constructors.iter().any(|c| matches!(c, PatternConstructor::Boolean(false))); + let mut missing = vec![]; + if !has_true { + missing.push(MissingConstructor::Boolean(true)); + } + if !has_false { + missing.push(MissingConstructor::Boolean(false)); + } + Ok(missing) + } + // Other literal constructors have infinite domains, so we don't report specific missing values + _ => Ok(vec![]), + } +} + +fn constructor_field_types( + state: &mut CheckState, + context: &CheckContext, + file_id: FileId, + term_id: TermItemId, + arguments: &[TypeId], +) -> QueryResult> +where + Q: ExternalQueries, +{ + let constructor_type = derive::lookup_local_term_type(state, context, file_id, term_id)?; + if let Some(constructor_type) = constructor_type { + let (constructor, _) = + toolkit::instantiate_with_arguments(state, constructor_type, arguments); + let (fields, _) = toolkit::extract_function_arguments(state, constructor); + Ok(fields) + } else { + let arity = get_constructor_arity(context, file_id, term_id)?; + Ok(iter::repeat_n(context.prim.unknown, arity).collect()) + } +} + 
+/// Gets the arity (number of fields) of a constructor. +fn get_constructor_arity( + context: &CheckContext, + file_id: FileId, + term_id: TermItemId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let on_lowered = |lowered: &lowering::LoweredModule| { + if let Some(lowering::TermItemIr::Constructor { arguments }) = + lowered.info.get_term_item(term_id) + { + arguments.len() + } else { + 0 + } + }; + if file_id == context.id { + let lowered = &context.lowered; + Ok(on_lowered(lowered)) + } else { + let lowered = context.queries.lowered(file_id)?; + Ok(on_lowered(&lowered)) + } +} + +pub struct ExhaustivenessReport { + pub missing: Option>, + pub redundant: Vec, +} + +pub fn check_lambda_patterns( + state: &mut CheckState, + context: &CheckContext, + pattern_types: &[TypeId], + binders: &[lowering::BinderId], +) -> QueryResult +where + Q: ExternalQueries, +{ + if pattern_types.is_empty() { + return Ok(ExhaustivenessReport { missing: None, redundant: vec![] }); + } + + let unconditional = + collect_unconditional_rows(state, context, &[binders], pattern_types, |binders| { + (binders, &None) + })?; + + check_exhaustiveness_core(state, context, pattern_types, unconditional) +} + +pub fn check_case_patterns( + state: &mut CheckState, + context: &CheckContext, + pattern_types: &[TypeId], + branches: &[lowering::CaseBranch], +) -> QueryResult +where + Q: ExternalQueries, +{ + if pattern_types.is_empty() { + return Ok(ExhaustivenessReport { missing: None, redundant: vec![] }); + } + + let unconditional = collect_unconditional_rows( + state, + context, + branches, + pattern_types, + |branch: &lowering::CaseBranch| (&branch.binders, &branch.guarded_expression), + )?; + + check_exhaustiveness_core(state, context, pattern_types, unconditional) +} + +pub fn check_equation_patterns( + state: &mut CheckState, + context: &CheckContext, + pattern_types: &[TypeId], + equations: &[lowering::Equation], +) -> QueryResult +where + Q: ExternalQueries, +{ + if 
pattern_types.is_empty() { + return Ok(ExhaustivenessReport { missing: None, redundant: vec![] }); + } + + let unconditional = collect_unconditional_rows( + state, + context, + equations, + pattern_types, + |equation: &lowering::Equation| (&equation.binders, &equation.guarded), + )?; + + check_exhaustiveness_core(state, context, pattern_types, unconditional) +} + +/// Returns `true` if any alternative in a conditional guard set is trivially true. +fn has_trivially_true_alternative( + context: &CheckContext, + pattern_guarded: &[lowering::PatternGuarded], +) -> bool +where + Q: ExternalQueries, +{ + pattern_guarded.iter().any(|pg| { + !pg.pattern_guards.is_empty() + && pg.pattern_guards.iter().all(|g| is_trivially_true_guard(context, g)) + }) +} + +/// Returns `true` if the guard is `true` or `otherwise` from `Data.Boolean`. +fn is_trivially_true_guard(context: &CheckContext, guard: &lowering::PatternGuard) -> bool +where + Q: ExternalQueries, +{ + if guard.binder.is_some() { + return false; + } + let Some(expr_id) = guard.expression else { + return false; + }; + let Some(kind) = context.lowered.info.get_expression_kind(expr_id) else { + return false; + }; + match kind { + lowering::ExpressionKind::Boolean { boolean: true } => true, + lowering::ExpressionKind::Variable { + resolution: Some(lowering::TermVariableResolution::Reference(file_id, term_id)), + } => context.known_terms.otherwise == Some((*file_id, *term_id)), + _ => false, + } +} + +fn collect_unconditional_rows( + state: &mut CheckState, + context: &CheckContext, + items: &[T], + pattern_types: &[TypeId], + to_binders: F, +) -> QueryResult> +where + Q: ExternalQueries, + F: Fn(&T) -> (&[lowering::BinderId], &Option), +{ + let mut pattern_rows = vec![]; + for item in items { + let (binders, guarded) = to_binders(item); + + match guarded { + Some(lowering::GuardedExpression::Unconditional { .. 
}) | None => {} + Some(lowering::GuardedExpression::Conditionals { pattern_guarded }) => { + if !has_trivially_true_alternative(context, pattern_guarded) { + continue; + } + } + } + + let mut pattern_row = vec![]; + for &binder_id in binders { + pattern_row.push(convert::convert_binder(state, context, binder_id)?); + } + + let additional = pattern_types.iter().skip(pattern_row.len()); + pattern_row.extend(additional.map(|&t| state.allocate_wildcard(t))); + + if !pattern_row.is_empty() { + pattern_rows.push(pattern_row); + } + } + Ok(pattern_rows) +} + +fn check_exhaustiveness_core( + state: &mut CheckState, + context: &CheckContext, + pattern_types: &[TypeId], + unconditional: PatternMatrix, +) -> QueryResult +where + Q: ExternalQueries, +{ + let mut redundant = vec![]; + let mut matrix = vec![]; + for vector in &unconditional { + let useful = algorithm_u(state, context, &matrix, vector)?; + if useful { + matrix.push(PatternVector::clone(vector)); + } else { + redundant.push(pretty::pretty_witness(context, state, vector)); + } + } + + let query = pattern_types.iter().map(|&t| state.allocate_wildcard(t)).collect(); + let witnesses = algorithm_m(state, context, &unconditional, &query)?; + let missing = witnesses.map(|witnesses| { + witnesses + .iter() + .take(5) + .map(|witness| pretty::pretty_witness(context, state, witness)) + .collect() + }); + + Ok(ExhaustivenessReport { missing, redundant }) +} diff --git a/compiler-core/checking/src/algorithm/exhaustiveness/convert.rs b/compiler-core/checking/src/algorithm/exhaustiveness/convert.rs new file mode 100644 index 000000000..4c22fbe74 --- /dev/null +++ b/compiler-core/checking/src/algorithm/exhaustiveness/convert.rs @@ -0,0 +1,326 @@ +use std::sync::Arc; + +use building_types::QueryResult; +use files::FileId; +use indexing::TermItemId; +use itertools::Itertools; +use lowering::{BinderId, TermOperatorId}; +use rustc_hash::FxHashMap; +use smol_str::SmolStr; +use sugar::OperatorTree; + +use crate::ExternalQueries; +use 
crate::algorithm::exhaustiveness::{PatternConstructor, PatternId}; +use crate::algorithm::state::{CheckContext, CheckState, OperatorBranchTypes}; +use crate::algorithm::toolkit; +use crate::core::{Type, TypeId}; + +pub fn convert_binder( + state: &mut CheckState, + context: &CheckContext, + id: BinderId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let t = state.term_scope.lookup_binder(id).unwrap_or(context.prim.unknown); + + let Some(kind) = context.lowered.info.get_binder_kind(id) else { + return Ok(state.allocate_wildcard(t)); + }; + + match kind { + lowering::BinderKind::Typed { binder, .. } => match binder { + Some(id) => convert_binder(state, context, *id), + None => Ok(state.allocate_wildcard(t)), + }, + lowering::BinderKind::OperatorChain { .. } => { + convert_operator_chain_binder(state, context, id, t) + } + lowering::BinderKind::Integer { value } => match value { + Some(v) => { + let constructor = PatternConstructor::Integer(*v); + Ok(state.allocate_constructor(constructor, t)) + } + None => Ok(state.allocate_wildcard(t)), + }, + lowering::BinderKind::Number { negative, value } => { + if let Some(value) = value { + let constructor = PatternConstructor::Number(*negative, SmolStr::clone(value)); + Ok(state.allocate_constructor(constructor, t)) + } else { + Ok(state.allocate_wildcard(t)) + } + } + lowering::BinderKind::Constructor { resolution, arguments } => { + convert_constructor_binder(state, context, resolution, arguments, t) + } + lowering::BinderKind::Variable { .. } => Ok(state.allocate_wildcard(t)), + lowering::BinderKind::Named { binder, .. } => match binder { + Some(id) => convert_binder(state, context, *id), + None => Ok(state.allocate_wildcard(t)), + }, + lowering::BinderKind::Wildcard => Ok(state.allocate_wildcard(t)), + lowering::BinderKind::String { value, .. 
} => { + if let Some(value) = value { + let constructor = PatternConstructor::String(SmolStr::clone(value)); + Ok(state.allocate_constructor(constructor, t)) + } else { + Ok(state.allocate_wildcard(t)) + } + } + lowering::BinderKind::Char { value } => match value { + Some(v) => { + let constructor = PatternConstructor::Char(*v); + Ok(state.allocate_constructor(constructor, t)) + } + None => Ok(state.allocate_wildcard(t)), + }, + lowering::BinderKind::Boolean { boolean } => { + let constructor = PatternConstructor::Boolean(*boolean); + Ok(state.allocate_constructor(constructor, t)) + } + lowering::BinderKind::Array { array } => lower_array_binder(state, context, array, t), + lowering::BinderKind::Record { record } => lower_record_binder(state, context, record, t), + lowering::BinderKind::Parenthesized { parenthesized } => match parenthesized { + Some(id) => convert_binder(state, context, *id), + None => Ok(state.allocate_wildcard(t)), + }, + } +} + +fn lower_array_binder( + state: &mut CheckState, + context: &CheckContext, + array: &[BinderId], + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let mut fields = vec![]; + for &element in array { + fields.push(convert_binder(state, context, element)?); + } + let constructor = PatternConstructor::Array { fields }; + Ok(state.allocate_constructor(constructor, t)) +} + +fn lower_record_binder( + state: &mut CheckState, + context: &CheckContext, + record: &[lowering::BinderRecordItem], + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + match try_build_record_constructor(state, context, record, t)? 
{ + Some((labels, fields)) => { + let constructor = PatternConstructor::Record { labels, fields }; + Ok(state.allocate_constructor(constructor, t)) + } + None => { + // Fallback: use a wildcard when we can't build a canonical record constructor + Ok(state.allocate_wildcard(t)) + } + } +} + +fn try_build_record_constructor( + state: &mut CheckState, + context: &CheckContext, + record: &[lowering::BinderRecordItem], + t: TypeId, +) -> QueryResult, Vec)>> +where + Q: ExternalQueries, +{ + let expanded_t = toolkit::normalise_expand_type(state, context, t)?; + + let (constructor, arguments) = toolkit::extract_type_application(state, expanded_t); + + if constructor != context.prim.record { + return Ok(None); + } + + let Some(row_type_id) = arguments.first() else { + return Ok(None); + }; + + let row_type_id = state.normalize_type(*row_type_id); + let row_fields = if let Type::Row(row_type) = &state.storage[row_type_id] { + Arc::clone(&row_type.fields) + } else { + return Ok(None); + }; + + let field_type_map: FxHashMap = + row_fields.iter().map(|field| (field.label.clone(), field.id)).collect(); + + let mut provided_patterns = FxHashMap::default(); + for element in record.iter() { + match element { + lowering::BinderRecordItem::RecordField { name, value } => { + let Some(name) = name.clone() else { continue }; + let pattern = if let Some(value) = value { + convert_binder(state, context, *value)? 
+ } else { + state.allocate_wildcard(context.prim.unknown) + }; + provided_patterns.insert(name, pattern); + } + lowering::BinderRecordItem::RecordPun { id: _, name } => { + let Some(name) = name.clone() else { continue }; + let t = field_type_map.get(&name).copied().unwrap_or(context.prim.unknown); + let pattern = state.allocate_wildcard(t); + provided_patterns.insert(name, pattern); + } + } + } + + let mut sorted_labels = field_type_map.keys().cloned().collect_vec(); + sorted_labels.sort(); + + let mut labels = Vec::with_capacity(sorted_labels.len()); + let mut fields = Vec::with_capacity(sorted_labels.len()); + + for label in sorted_labels { + let pattern = provided_patterns.get(&label).copied().unwrap_or_else(|| { + let t = field_type_map[&label]; + state.allocate_wildcard(t) + }); + labels.push(label); + fields.push(pattern); + } + + Ok(Some((labels, fields))) +} + +fn convert_constructor_binder( + state: &mut CheckState, + context: &CheckContext, + resolution: &Option<(FileId, TermItemId)>, + arguments: &Arc<[BinderId]>, + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let Some((file_id, item_id)) = *resolution else { + return Ok(state.allocate_wildcard(t)); + }; + + let mut fields = vec![]; + for &argument in arguments.iter() { + fields.push(convert_binder(state, context, argument)?); + } + + let constructor = PatternConstructor::DataConstructor { file_id, item_id, fields }; + Ok(state.allocate_constructor(constructor, t)) +} + +fn convert_operator_chain_binder( + state: &mut CheckState, + context: &CheckContext, + id: BinderId, + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let Some(tree) = context.bracketed.binders.get(&id) else { + return Ok(state.allocate_wildcard(t)); + }; + + let Ok(tree) = tree else { + return Ok(state.allocate_wildcard(t)); + }; + + convert_operator_tree(state, context, tree, t) +} + +fn convert_operator_tree( + state: &mut CheckState, + context: &CheckContext, + tree: &OperatorTree, + t: TypeId, +) 
-> QueryResult +where + Q: ExternalQueries, +{ + match tree { + OperatorTree::Leaf(None) => Ok(state.allocate_wildcard(t)), + OperatorTree::Leaf(Some(binder_id)) => convert_binder(state, context, *binder_id), + OperatorTree::Branch(operator_id, children) => { + convert_operator_branch(state, context, *operator_id, children, t) + } + } +} + +fn convert_operator_branch( + state: &mut CheckState, + context: &CheckContext, + operator_id: TermOperatorId, + children: &[OperatorTree; 2], + t: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let Some((file_id, item_id)) = context.lowered.info.get_term_operator(operator_id) else { + return Ok(state.allocate_wildcard(t)); + }; + + // The operator_id points to itself, thus we need to follow the + // resolution to find the constructor that it actually points to. + let Some((constructor_file_id, constructor_item_id)) = + resolve_term_operator(context, file_id, item_id)? + else { + return Ok(state.allocate_wildcard(t)); + }; + + let Some(OperatorBranchTypes { left, right, result }) = + state.term_scope.lookup_operator_node(operator_id) + else { + return Ok(state.allocate_wildcard(t)); + }; + + let [left_tree, right_tree] = children; + + let left_pattern = convert_operator_tree(state, context, left_tree, left)?; + let right_pattern = convert_operator_tree(state, context, right_tree, right)?; + + let constructor = PatternConstructor::DataConstructor { + file_id: constructor_file_id, + item_id: constructor_item_id, + fields: vec![left_pattern, right_pattern], + }; + + Ok(state.allocate_constructor(constructor, result)) +} + +fn resolve_term_operator( + context: &CheckContext, + file_id: FileId, + item_id: TermItemId, +) -> QueryResult> +where + Q: ExternalQueries, +{ + let on_lowered = |lowered: &lowering::LoweredModule| { + if let Some(lowering::TermItemIr::Operator { resolution, .. 
}) = + lowered.info.get_term_item(item_id) + { + *resolution + } else { + None + } + }; + if file_id == context.id { + Ok(on_lowered(&context.lowered)) + } else { + let lowered = context.queries.lowered(file_id)?; + Ok(on_lowered(&lowered)) + } +} diff --git a/compiler-core/checking/src/algorithm/exhaustiveness/pretty.rs b/compiler-core/checking/src/algorithm/exhaustiveness/pretty.rs new file mode 100644 index 000000000..6feca0d3e --- /dev/null +++ b/compiler-core/checking/src/algorithm/exhaustiveness/pretty.rs @@ -0,0 +1,166 @@ +use std::sync::Arc; + +use files::FileId; +use indexing::TermItemId; +use smol_str::{SmolStr, SmolStrBuilder}; + +use crate::ExternalQueries; +use crate::algorithm::exhaustiveness::{PatternConstructor, PatternId, PatternKind, WitnessVector}; +use crate::algorithm::state::{CheckContext, CheckState}; + +pub fn pretty_witness( + context: &CheckContext, + state: &CheckState, + witness: &WitnessVector, +) -> SmolStr +where + Q: ExternalQueries, +{ + join_smolstr(witness.iter().map(|&id| pretty_pattern(context, state, id)), ", ") +} + +fn pretty_pattern(context: &CheckContext, state: &CheckState, id: PatternId) -> SmolStr +where + Q: ExternalQueries, +{ + let pattern = &state.patterns[id]; + match &pattern.kind { + PatternKind::Wildcard => SmolStr::new_inline("_"), + PatternKind::Constructor { constructor } => pretty_constructor(context, state, constructor), + } +} + +fn join_smolstr(iterator: impl Iterator, separator: &str) -> SmolStr { + let mut builder = SmolStrBuilder::default(); + join_with_sep(&mut builder, iterator, separator, |builder, item| builder.push_str(&item)); + builder.finish() +} + +fn join_with_sep( + builder: &mut SmolStrBuilder, + iter: impl Iterator, + sep: &str, + mut render: impl FnMut(&mut SmolStrBuilder, T), +) { + let mut first = true; + for item in iter { + if !first { + builder.push_str(sep); + } + first = false; + render(builder, item); + } +} + +fn pretty_constructor( + context: &CheckContext, + state: &CheckState, 
+ constructor: &PatternConstructor, +) -> SmolStr +where + Q: ExternalQueries, +{ + match constructor { + PatternConstructor::DataConstructor { file_id, item_id, fields } => { + let name = lookup_constructor_name(context, *file_id, *item_id) + .unwrap_or_else(|| SmolStr::new_inline("")); + + if fields.is_empty() { + return name; + } + + let mut builder = SmolStrBuilder::default(); + builder.push_str(&name); + + for &id in fields.iter() { + builder.push(' '); + let rendered = pretty_pattern(context, state, id); + let pattern = &state.patterns[id]; + if let PatternKind::Constructor { constructor } = &pattern.kind + && !constructor.fields().is_empty() + { + builder.push('('); + builder.push_str(&rendered); + builder.push(')'); + } else { + builder.push_str(&rendered); + } + } + + builder.finish() + } + PatternConstructor::Record { labels, fields } => { + if labels.len() != fields.len() { + return SmolStr::new_inline("{ }"); + } + + let mut builder = SmolStrBuilder::default(); + builder.push_str("{ "); + join_with_sep( + &mut builder, + labels.iter().zip(fields.iter()), + ", ", + |b, (label, field_id)| { + let field = pretty_pattern(context, state, *field_id); + b.push_str(label); + b.push_str(": "); + b.push_str(&field); + }, + ); + builder.push_str(" }"); + builder.finish() + } + PatternConstructor::Array { fields } => { + let mut builder = SmolStrBuilder::default(); + builder.push('['); + join_with_sep(&mut builder, fields.iter().copied(), ", ", |b, id| { + let rendered = pretty_pattern(context, state, id); + b.push_str(&rendered); + }); + builder.push(']'); + builder.finish() + } + PatternConstructor::Boolean(b) => SmolStr::from(b.to_string()), + PatternConstructor::Char(c) => { + let mut builder = SmolStrBuilder::default(); + builder.push('\''); + builder.push(*c); + builder.push('\''); + builder.finish() + } + PatternConstructor::String(s) => { + let mut builder = SmolStrBuilder::default(); + builder.push('"'); + builder.push_str(s); + builder.push('"'); + 
builder.finish() + } + PatternConstructor::Integer(i) => SmolStr::from(i.to_string()), + PatternConstructor::Number(negative, n) => { + let mut builder = SmolStrBuilder::default(); + if *negative { + builder.push('-'); + } + builder.push_str(n.as_ref()); + builder.finish() + } + } +} + +fn lookup_constructor_name( + context: &CheckContext, + file_id: FileId, + term_id: TermItemId, +) -> Option +where + Q: ExternalQueries, +{ + let indexed = if file_id == context.id { + Arc::clone(&context.indexed) + } else { + context.queries.indexed(file_id).ok()? + }; + + let item = &indexed.items[term_id]; + item.name.clone() +} diff --git a/compiler-core/checking/src/algorithm/fold.rs b/compiler-core/checking/src/algorithm/fold.rs index 82230846f..161f0d26b 100644 --- a/compiler-core/checking/src/algorithm/fold.rs +++ b/compiler-core/checking/src/algorithm/fold.rs @@ -18,10 +18,10 @@ pub trait TypeFold { fn transform_binder(&mut self, _binder: &mut ForallBinder) {} } -/// Zonking normalizes a type by substituting solved unification variables. +/// Zonking normalises a type by substituting solved unification variables. /// /// Unlike [`CheckState::normalize_type`] which only follows unification -/// chains at the head, this recursively normalizes the entire type structure. +/// chains at the head, this recursively normalises the entire type structure. /// /// The simplicity of the [`TypeFold`] implementation is an artefact of how /// [`fold_type`] uses [`CheckState::normalize_type`] to inspect a type. 
@@ -104,13 +104,13 @@ pub fn fold_type(state: &mut CheckState, id: TypeId, folder: &mut F } Type::Unification(_) => id, Type::Variable(variable) => match variable { - Variable::Bound(level, kind) => { + Variable::Bound(name, kind) => { let kind = fold_type(state, kind, folder); - state.storage.intern(Type::Variable(Variable::Bound(level, kind))) + state.storage.intern(Type::Variable(Variable::Bound(name, kind))) } - Variable::Skolem(level, kind) => { + Variable::Skolem(name, kind) => { let kind = fold_type(state, kind, folder); - state.storage.intern(Type::Variable(Variable::Skolem(level, kind))) + state.storage.intern(Type::Variable(Variable::Skolem(name, kind))) } Variable::Free(_) => id, }, diff --git a/compiler-core/checking/src/algorithm/inspect.rs b/compiler-core/checking/src/algorithm/inspect.rs index 995d4e76f..c49fbac26 100644 --- a/compiler-core/checking/src/algorithm/inspect.rs +++ b/compiler-core/checking/src/algorithm/inspect.rs @@ -1,14 +1,13 @@ -use std::mem; use std::sync::Arc; use building_types::QueryResult; use lowering::TypeVariableBindingId; use crate::ExternalQueries; -use crate::algorithm::kind::synonym; +use crate::algorithm::safety::safe_loop; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::substitute; -use crate::core::{ForallBinder, Type, TypeId, Variable, debruijn}; +use crate::algorithm::{substitute, toolkit}; +use crate::core::{ForallBinder, Type, TypeId, Variable}; pub struct InspectSignature { pub variables: Vec, @@ -54,7 +53,7 @@ where Arc::from(variables) } -pub fn inspect_signature_core( +pub fn inspect_signature( state: &mut CheckState, context: &CheckContext, type_id: TypeId, @@ -63,59 +62,85 @@ pub fn inspect_signature_core( where Q: ExternalQueries, { - const INSPECTION_LIMIT: u32 = 1_000_000; - + // Consider a synonym that hides a quantifier: + // + // type NaturalTransformation f g = forall a. f a -> g a + // + // transform :: forall f g. 
NaturalTransformation f g + // + // Synonym expansion can reveal additional quantifiers: + // + // transform :: forall f g. forall a. f a -> g a + // + // The following algorithm rebinds each quantifier's variable + // name to a fresh name in the current scope. let mut surface_bindings = surface_bindings.iter(); let mut variables = vec![]; let mut current_id = type_id; - for _ in 0..INSPECTION_LIMIT { - let expanded_id = synonym::normalize_expand_type(state, context, current_id)?; - match state.storage[expanded_id] { - // Foralls can and will have levels that are different from - // when they were originally checked, such as when a Forall - // appears on a type synonym. We rebind each type variable - // to get the current level then substitute it within the - // quantified type to ensure correct scoping. + let mut bindings = substitute::NameToType::default(); + + safe_loop! { + current_id = toolkit::normalise_expand_type(state, context, current_id)?; + + // In the example, after the Forall case has peeled f, g, and the + // synonym-expanded a, the accumulated bindings are the following: + // + // { old_f -> f', old_g -> g', old_a -> a' } + // + // We're at a monomorphic type at this point, so we can now proceed + // with applying the substitutions and continuing. + if !matches!(state.storage[current_id], Type::Forall(..)) && !bindings.is_empty() { + current_id = substitute::SubstituteBindings::on(state, &bindings, current_id); + bindings.clear(); + continue; + } + + match state.storage[current_id] { + // Bind each ForallBinder relative to the current scope, recording + // the name substitution for later. 
Type::Forall(ref binder, inner) => { - let mut binder = binder.clone(); + let mut binder = ForallBinder::clone(binder); + + let old_name = binder.variable.clone(); + let new_name = state.fresh_name(&binder.text); - let new_level = if let Some(&binding_id) = surface_bindings.next() { - state.type_scope.bound.bind(debruijn::Variable::Forall(binding_id)) + if !binder.implicit && let Some(&binding_id) = surface_bindings.next() { + state.type_scope.bind_forall(binding_id, binder.kind, new_name.clone()); } else { - state.type_scope.bound.bind(debruijn::Variable::Core) - }; + state.type_scope.bind_core(binder.kind, new_name.clone()); + } - let old_level = mem::replace(&mut binder.level, new_level); + binder.variable = new_name.clone(); - state.type_scope.kinds.insert(new_level, binder.kind); + // Substitute the binder's kind through existing bindings so that + // references to earlier forall variables use the fresh Names. + if !bindings.is_empty() { + binder.kind = substitute::SubstituteBindings::on(state, &bindings, binder.kind); + } - let variable = - state.storage.intern(Type::Variable(Variable::Bound(new_level, binder.kind))); - let inner = substitute::SubstituteBound::on(state, old_level, variable, inner); + let variable = Type::Variable(Variable::Bound(new_name, binder.kind)); + let variable = state.storage.intern(variable); + bindings.insert(old_name, variable); variables.push(binder); - current_id = inner; } Type::Constrained(constraint, constrained) => { - state.constraints.push_given(constraint); + state.push_given(constraint); current_id = constrained; } _ => { - let function = expanded_id; - let (arguments, result) = signature_components_core(state, context, expanded_id)?; - return Ok(InspectSignature { variables, function, arguments, result }); + let (arguments, result) = signature_components(state, context, current_id)?; + return Ok(InspectSignature { variables, function: current_id, arguments, result }); } } } - - unreachable!("critical violation: limit 
reached in inspect_signature_core") } -fn signature_components_core( +fn signature_components( state: &mut CheckState, context: &CheckContext, type_id: TypeId, @@ -123,36 +148,48 @@ fn signature_components_core( where Q: ExternalQueries, { - const INSPECTION_LIMIT: u32 = 1_000_000; - let mut arguments = vec![]; let mut current_id = type_id; + let mut bindings = substitute::NameToType::default(); - for _ in 0..INSPECTION_LIMIT { - let expanded = synonym::normalize_expand_type(state, context, current_id)?; - match state.storage[expanded] { - Type::Function(argument_id, return_id) => { - arguments.push(argument_id); - current_id = return_id; - } + safe_loop! { + current_id = toolkit::normalise_expand_type(state, context, current_id)?; + if !matches!(state.storage[current_id], Type::Forall(..)) && !bindings.is_empty() { + current_id = substitute::SubstituteBindings::on(state, &bindings, current_id); + bindings.clear(); + continue; + } + + match state.storage[current_id] { Type::Forall(ref binder, inner) => { - let binder_level = binder.level; - let binder_kind = binder.kind; + let old_name = binder.variable.clone(); + let mut kind = binder.kind; - let level = state.type_scope.bound.bind(debruijn::Variable::Core); - state.type_scope.kinds.insert(level, binder_kind); + let text = binder.text.clone(); + let name = state.fresh_name(&text); - let variable = - state.storage.intern(Type::Variable(Variable::Bound(level, binder_kind))); - current_id = substitute::SubstituteBound::on(state, binder_level, variable, inner); + state.type_scope.bind_core(kind, name.clone()); + + if !bindings.is_empty() { + kind = substitute::SubstituteBindings::on(state, &bindings, kind); + } + + let variable = Type::Variable(Variable::Bound(name, kind)); + let variable = state.storage.intern(variable); + + bindings.insert(old_name, variable); + current_id = inner; + } + + Type::Function(argument_id, return_id) => { + arguments.push(argument_id); + current_id = return_id; } _ => { - return 
Ok((arguments, expanded)); + return Ok((arguments, current_id)); } } } - - unreachable!("critical violation: limit reached in signature_components_core") } diff --git a/compiler-core/checking/src/algorithm/kind.rs b/compiler-core/checking/src/algorithm/kind.rs index 044f81b2d..33b319285 100644 --- a/compiler-core/checking/src/algorithm/kind.rs +++ b/compiler-core/checking/src/algorithm/kind.rs @@ -10,10 +10,11 @@ use lowering::TypeVariableBindingId; use smol_str::SmolStr; use crate::ExternalQueries; +use crate::algorithm::safety::safe_loop; use crate::algorithm::state::{CheckContext, CheckState}; use crate::algorithm::{substitute, transfer, unification}; -use crate::core::{ForallBinder, RowField, RowType, Type, TypeId, Variable}; -use crate::error::ErrorStep; +use crate::core::{ForallBinder, RowField, RowType, Type, TypeId, Variable, debruijn}; +use crate::error::{ErrorKind, ErrorStep}; const MISSING_NAME: SmolStr = SmolStr::new_static(""); @@ -133,6 +134,8 @@ where } lowering::TypeKind::Forall { bindings, inner } => { + let unbind_level = debruijn::Level(state.type_scope.size().0); + let binders = bindings .iter() .map(|binding| check_type_variable_binding(state, context, binding)) @@ -154,8 +157,8 @@ where let k = context.prim.t; - if let Some(binder) = binders.first() { - state.type_scope.unbind(binder.level); + if !binders.is_empty() { + state.type_scope.unbind(unbind_level); } Ok((t, k)) @@ -214,7 +217,8 @@ where } Some(lowering::TypeVariableResolution::Implicit(implicit)) => { - Ok(infer_implicit_variable(state, context, implicit)) + let text = name.clone().unwrap_or(MISSING_NAME); + Ok(infer_implicit_variable(state, context, implicit, text)) } None => { @@ -234,11 +238,8 @@ where } lowering::TypeKind::Record { items, tail } => { - let expected_kind = - state.storage.intern(Type::Application(context.prim.row, context.prim.t)); - let (row_t, row_k) = infer_row_kind(state, context, items, tail)?; - let _ = unification::subtype(state, context, row_k, 
expected_kind)?; + let _ = unification::subtype(state, context, row_k, context.prim.row_type)?; let t = state.storage.intern(Type::Application(context.prim.record, row_t)); let k = context.prim.t; @@ -259,14 +260,14 @@ fn infer_forall_variable( state: &mut CheckState, forall: TypeVariableBindingId, ) -> (TypeId, TypeId) { - let level = + let name = state.type_scope.lookup_forall(forall).expect("invariant violated: TypeScope::bind_forall"); let k = state .type_scope .lookup_forall_kind(forall) .expect("invariant violated: TypeScope::bind_forall"); - let variable = Variable::Bound(level, k); + let variable = Variable::Bound(name, k); let t = state.storage.intern(Type::Variable(variable)); (t, k) @@ -276,16 +277,18 @@ fn infer_implicit_variable( state: &mut CheckState, context: &CheckContext, implicit: &lowering::ImplicitTypeVariable, + text: SmolStr, ) -> (TypeId, TypeId) { let (t, k) = if implicit.binding { let kind = state.fresh_unification(context); + let name = state.fresh_name(&text); - let level = state.type_scope.bind_implicit(implicit.node, implicit.id, kind); - let variable = Variable::Bound(level, kind); + let name = state.type_scope.bind_implicit(implicit.node, implicit.id, kind, name); + let variable = Variable::Bound(name, kind); (state.storage.intern(Type::Variable(variable)), kind) } else { - let level = state + let name = state .type_scope .lookup_implicit(implicit.node, implicit.id) .expect("invariant violated: TypeScope::bind_implicit"); @@ -294,7 +297,7 @@ fn infer_implicit_variable( .lookup_implicit_kind(implicit.node, implicit.id) .expect("invariant violated: TypeScope::bind_implicit"); - let variable = Variable::Bound(level, kind); + let variable = Variable::Bound(name, kind); (state.storage.intern(Type::Variable(variable)), kind) }; @@ -390,19 +393,36 @@ where } Type::Forall(ref binder, function_k) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let k = 
state.normalize_type(binder_kind); let t = state.fresh_unification_kinded(k); let function_t = state.storage.intern(Type::KindApplication(function_t, t)); - let function_k = substitute::SubstituteBound::on(state, binder_level, t, function_k); + let function_k = substitute::SubstituteBound::on(state, binder_variable, t, function_k); infer_surface_app_kind(state, context, (function_t, function_k), argument) } - _ => Ok((context.prim.unknown, context.prim.unknown)), + _ => { + // Even if the function type cannot be applied, the argument must + // still be inferred. For invalid applications on instance heads, + // this ensures that implicit variables are bound. + let (argument_t, _) = infer_surface_kind(state, context, argument)?; + + let function_type = state.render_local_type(context, function_t); + let function_kind = state.render_local_type(context, function_k); + let argument_type = state.render_local_type(context, argument_t); + state.insert_error(ErrorKind::InvalidTypeApplication { + function_type, + function_kind, + argument_type, + }); + + let t = state.storage.intern(Type::Application(function_t, argument_t)); + Ok((t, context.prim.unknown)) + } } } @@ -428,10 +448,10 @@ where Type::Function(_, result) => result, Type::Unification(unification_id) => { - let domain = state.unification.get(unification_id).domain; + let depth = state.unification.get(unification_id).depth; - let argument_u = state.fresh_unification_kinded_at(domain, context.prim.t); - let result_u = state.fresh_unification_kinded_at(domain, context.prim.t); + let argument_u = state.fresh_unification_kinded_at(depth, context.prim.t); + let result_u = state.fresh_unification_kinded_at(depth, context.prim.t); let function = state.storage.intern(Type::Function(argument_u, result_u)); let _ = unification::solve(state, context, unification_id, function); @@ -457,9 +477,9 @@ where let function_kind = state.normalize_type(function_kind); match state.storage[function_kind] { Type::Forall(ref binder, 
inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let argument = state.normalize_type(argument); - substitute::SubstituteBound::on(state, binder_level, argument, inner) + substitute::SubstituteBound::on(state, binder_variable, argument, inner) } _ => unknown, } @@ -525,26 +545,81 @@ where Ok(type_id) } +/// Instantiates kind-level foralls using [`Type::KindApplication`]. +/// +/// If the inferred kind is polymorphic, and the inferred kind is monomorphic, +/// this function adds the necessary kind applications to the inferred type. +/// For example, when checking `RowList.Nil` against `RowList Type`: +/// +/// ```text +/// Nil :: forall k. RowList k. +/// +/// check(Nil, RowList Type) +/// infer(Nil) -> forall k. RowList k +/// instantiate(Nil) -> (Nil @?t, RowList ?t) +/// +/// subtype(RowList ?t, RowList Type) +/// solve(?t, Type) +/// +/// t := Nil @Type +/// k := RowList Type +/// ``` +fn instantiate_kind_applications( + state: &mut CheckState, + mut t: TypeId, + mut k: TypeId, + expected_kind: TypeId, +) -> (TypeId, TypeId) { + let expected_kind = state.normalize_type(expected_kind); + + if matches!(state.storage[expected_kind], Type::Forall(_, _)) { + return (t, k); + } + + safe_loop! 
{ + k = state.normalize_type(k); + + let Type::Forall(ref binder, inner_kind) = state.storage[k] else { + break; + }; + + let binder_variable = binder.variable.clone(); + let binder_kind = state.normalize_type(binder.kind); + + let argument_type = state.fresh_unification_kinded(binder_kind); + t = state.storage.intern(Type::KindApplication(t, argument_type)); + k = substitute::SubstituteBound::on(state, binder_variable, argument_type, inner_kind); + } + + (t, k) +} + #[tracing::instrument(skip_all, name = "check_surface_kind")] pub fn check_surface_kind( state: &mut CheckState, context: &CheckContext, id: lowering::TypeId, - kind: TypeId, + expected_kind: TypeId, ) -> QueryResult<(TypeId, TypeId)> where Q: ExternalQueries, { - crate::trace_fields!(state, context, { expected_kind = kind }); + crate::trace_fields!(state, context, { expected_kind = expected_kind }); state.with_error_step(ErrorStep::CheckingKind(id), |state| { let (inferred_type, inferred_kind) = infer_surface_kind_core(state, context, id)?; - let _ = unification::subtype(state, context, inferred_kind, kind)?; + + let (inferred_type, inferred_kind) = + instantiate_kind_applications(state, inferred_type, inferred_kind, expected_kind); + + let _ = unification::subtype(state, context, inferred_kind, expected_kind)?; + crate::trace_fields!(state, context, { inferred_type = inferred_type, inferred_kind = inferred_kind, - expected_kind = kind + expected_kind = expected_kind }); + Ok((inferred_type, inferred_kind)) }) } @@ -558,7 +633,7 @@ where Q: ExternalQueries, { let visible = binding.visible; - let name = binding.name.clone().unwrap_or(MISSING_NAME); + let text = binding.name.clone().unwrap_or(MISSING_NAME); let kind = if let Some(id) = binding.kind { let (kind, _) = check_surface_kind(state, context, id, context.prim.t)?; @@ -567,8 +642,9 @@ where state.fresh_unification_type(context) }; - let level = state.type_scope.bind_forall(binding.id, kind); - Ok(ForallBinder { visible, name, level, kind }) + let 
name = state.fresh_name(&text); + state.type_scope.bind_forall(binding.id, kind, name.clone()); + Ok(ForallBinder { visible, implicit: false, text, variable: name, kind }) } pub(crate) fn lookup_file_type( @@ -583,6 +659,8 @@ where let type_id = if file_id == context.id { if let Some(k) = state.binding_group.lookup_type(type_id) { k + } else if let Some(&k) = state.pending_types.get(&type_id) { + TypeId::from(k) } else if let Some(&k) = state.checked.types.get(&type_id) { transfer::localize(state, context, k) } else { diff --git a/compiler-core/checking/src/algorithm/kind/operator.rs b/compiler-core/checking/src/algorithm/kind/operator.rs index 4b2d2e589..d5b382faa 100644 --- a/compiler-core/checking/src/algorithm/kind/operator.rs +++ b/compiler-core/checking/src/algorithm/kind/operator.rs @@ -3,6 +3,7 @@ use building_types::QueryResult; use files::FileId; use indexing::TypeItemId; +use lowering::{LoweredModule, TypeItemIr}; use crate::ExternalQueries; use crate::algorithm::state::{CheckContext, CheckState}; @@ -44,3 +45,48 @@ where Ok(result_kind) } + +pub fn resolve_type_operator_target( + lowered: &LoweredModule, + item_id: TypeItemId, +) -> Option<(FileId, TypeItemId)> { + let TypeItemIr::Operator { resolution, .. } = lowered.info.get_type_item(item_id)? else { + return None; + }; + *resolution +} + +pub fn expand_type_operator( + state: &mut CheckState, + context: &CheckContext, + type_id: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let Type::OperatorApplication(file_id, item_id, left, right) = state.storage[type_id] else { + return Ok(type_id); + }; + + let resolution = if file_id == context.id { + context.lowered.info.get_type_item(item_id).and_then(|ir| match ir { + TypeItemIr::Operator { resolution, .. } => *resolution, + _ => None, + }) + } else { + context.queries.lowered(file_id)?.info.get_type_item(item_id).and_then(|ir| match ir { + TypeItemIr::Operator { resolution, .. 
} => *resolution, + _ => None, + }) + }; + + let Some((file_id, item_id)) = resolution else { + return Ok(type_id); + }; + + let constructor = state.storage.intern(Type::Constructor(file_id, item_id)); + let left = state.storage.intern(Type::Application(constructor, left)); + let right = state.storage.intern(Type::Application(left, right)); + + Ok(right) +} diff --git a/compiler-core/checking/src/algorithm/kind/synonym.rs b/compiler-core/checking/src/algorithm/kind/synonym.rs index 9de92bbb3..6c00f95f0 100644 --- a/compiler-core/checking/src/algorithm/kind/synonym.rs +++ b/compiler-core/checking/src/algorithm/kind/synonym.rs @@ -131,6 +131,7 @@ pub fn infer_synonym_constructor( } if is_recursive_synonym(context, file_id, type_id)? { + state.insert_error(ErrorKind::RecursiveSynonymExpansion { file_id, item_id: type_id }); let synonym_type = state.storage.intern(Type::Constructor(file_id, type_id)); return Ok((synonym_type, kind)); } @@ -178,7 +179,21 @@ where let expected_arity = synonym.type_variables.0 as usize; let actual_arity = arguments.len(); - if expected_arity != actual_arity { + // A synonym's result kind can itself be a function kind, which means + // the application chain may contain more arguments than the synonym + // has parameters. For example: + // + // type C2 :: forall k. (k -> Type) -> (k -> Type) -> k -> Type + // type C2 a z = Coproduct a z + // + // in1 :: forall a z. a ~> C2 a z + // + // The lowered application chain for `C2 a z x` has 3 arguments, but + // the synonym only has 2 parameters. After expanding with the first + // 2 arguments, `C2 a z` becomes `Coproduct a z` with kind `k -> Type`. + // The third argument `x` is then applied to the expanded result as a + // regular type application, giving `Coproduct a z x` of kind `Type`. 
+ if actual_arity < expected_arity { if state.defer_synonym_expansion { let (synonym_type, synonym_kind) = infer_partial_synonym_application( state, @@ -194,13 +209,37 @@ where } } - let defer_synonym_expansion = mem::replace(&mut state.defer_synonym_expansion, true); - - let (synonym_type, synonym_kind) = - infer_synonym_application_arguments(state, context, (file_id, type_id), kind, arguments)?; + // Continuing our previous example, `C2 a z x` produces: + // + // synonym_arguments := [a, z] + // excess_arguments := [x] + let (synonym_arguments, excess_arguments) = arguments.split_at(expected_arity); + let defer_synonym_expansion = mem::replace(&mut state.defer_synonym_expansion, true); + let (mut synonym_type, mut synonym_kind) = infer_synonym_application_arguments( + state, + context, + (file_id, type_id), + kind, + synonym_arguments, + )?; state.defer_synonym_expansion = defer_synonym_expansion; + // Continuing our previous example, `C2 a z x` expands: + // + // synonym_type := Coproduct a z + // synonym_kind := (k -> Type) + // + // Finally, we append the excess arguments to get: + // + // synonym_type := Coproduct a z x + // synonym_kind := Type + // + for &argument in excess_arguments { + (synonym_type, synonym_kind) = + kind::infer_surface_app_kind(state, context, (synonym_type, synonym_kind), argument)?; + } + Ok((synonym_type, synonym_kind)) } @@ -275,13 +314,13 @@ where } Type::Forall(ref binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let k = state.normalize_type(binder_kind); let t = state.fresh_unification_kinded(k); - let function_k = substitute::SubstituteBound::on(state, binder_level, t, inner); + let function_k = substitute::SubstituteBound::on(state, binder_variable, t, inner); infer_synonym_application_argument(state, context, function_k, argument) } @@ -412,6 +451,44 @@ where } } + Type::OperatorApplication(operator_file_id, operator_item_id, left, right) => 
{ + let resolution = if operator_file_id == context.id { + kind::operator::resolve_type_operator_target(&context.lowered, operator_item_id) + } else { + let lowered = context.queries.lowered(operator_file_id)?; + kind::operator::resolve_type_operator_target(&lowered, operator_item_id) + }; + + let Some((file_id, item_id)) = resolution else { + return Ok(None); + }; + + let Some((synonym, _)) = lookup_file_synonym(state, context, file_id, item_id)? else { + return Ok(None); + }; + + if is_recursive_synonym(context, file_id, item_id)? { + state.insert_error(ErrorKind::RecursiveSynonymExpansion { file_id, item_id }); + return Ok(None); + } + + let arguments = vec![left, right]; + + if arguments.len() != synonym.type_variables.0 as usize { + return Ok(None); + } + + if additional.is_empty() { + Ok(Some(DiscoveredSynonym::Saturated { synonym, arguments })) + } else { + Ok(Some(DiscoveredSynonym::Additional { + synonym, + arguments: Arc::from(arguments), + additional, + })) + } + } + _ => Ok(None), } } @@ -422,11 +499,12 @@ fn instantiate_saturated(state: &mut CheckState, synonym: Synonym, arguments: &[ for _ in 0..count { if let Type::Forall(ref binder, inner) = state.storage[instantiated] { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let unification = state.fresh_unification_kinded(binder_kind); - instantiated = substitute::SubstituteBound::on(state, binder_level, unification, inner); + instantiated = + substitute::SubstituteBound::on(state, binder_variable, unification, inner); } else { break; } @@ -434,7 +512,8 @@ fn instantiate_saturated(state: &mut CheckState, synonym: Synonym, arguments: &[ for &argument in arguments { if let Type::Forall(ref binder, inner) = state.storage[instantiated] { - instantiated = substitute::SubstituteBound::on(state, binder.level, argument, inner); + instantiated = + substitute::SubstituteBound::on(state, binder.variable.clone(), argument, inner); } else { break; } @@ 
-467,27 +546,3 @@ where } }) } - -pub fn normalize_expand_type( - state: &mut CheckState, - context: &CheckContext, - mut type_id: TypeId, -) -> QueryResult -where - Q: ExternalQueries, -{ - const EXPANSION_LIMIT: u32 = 1_000_000; - - for _ in 0..EXPANSION_LIMIT { - let normalized_id = state.normalize_type(type_id); - let expanded_id = expand_type_synonym(state, context, normalized_id)?; - - if expanded_id == type_id { - return Ok(type_id); - } - - type_id = expanded_id; - } - - unreachable!("critical violation: limit reached in normalize_expand_type") -} diff --git a/compiler-core/checking/src/algorithm/operator.rs b/compiler-core/checking/src/algorithm/operator.rs index 5e367b60f..0d439e82e 100644 --- a/compiler-core/checking/src/algorithm/operator.rs +++ b/compiler-core/checking/src/algorithm/operator.rs @@ -6,7 +6,7 @@ use sugar::OperatorTree; use sugar::bracketing::BracketingResult; use crate::ExternalQueries; -use crate::algorithm::state::{CheckContext, CheckState}; +use crate::algorithm::state::{CheckContext, CheckState, OperatorBranchTypes}; use crate::algorithm::{binder, kind, term, toolkit, unification}; use crate::core::{Type, TypeId}; @@ -59,6 +59,10 @@ where OperatorTree::Leaf(Some(type_id)) => match mode { OperatorKindMode::Infer => E::infer_surface(state, context, *type_id), OperatorKindMode::Check { expected_type } => { + // Peel constraints from the expected type as givens, + // so operator arguments like `unsafePartial $ expr` + // can discharge constraints like Partial properly. 
+ let expected_type = toolkit::collect_givens(state, expected_type); E::check_surface(state, context, *type_id, expected_type) } }, @@ -78,6 +82,7 @@ where traverse_operator_branch( state, context, + *operator_id, (file_id, item_id), operator_type, children, @@ -90,6 +95,7 @@ where fn traverse_operator_branch( state: &mut CheckState, context: &CheckContext, + operator_id: E::OperatorId, operator: (FileId, E::ItemId), operator_type: TypeId, children: &[OperatorTree; 2], @@ -106,18 +112,28 @@ where }; let operator_type = toolkit::instantiate_forall(state, operator_type); - let operator_type = toolkit::collect_constraints(state, operator_type); + let operator_type = toolkit::collect_wanteds(state, operator_type); - let Type::Function(left_type, operator_type) = state.storage[operator_type] else { + let synthesise = toolkit::SynthesiseFunction::Yes; + + let Some((left_type, operator_type)) = + toolkit::decompose_function(state, context, operator_type, synthesise)? + else { return Ok(unknown); }; - let operator_type = state.normalize_type(operator_type); - let Type::Function(right_type, result_type) = state.storage[operator_type] else { + let Some((right_type, result_type)) = + toolkit::decompose_function(state, context, operator_type, synthesise)? + else { return Ok(unknown); }; + E::record_branch_types(state, operator_id, left_type, right_type, result_type); + if let OperatorKindMode::Check { expected_type } = mode { + // Peel constraints from the expected type as givens, + // so operator result constraints can be discharged. 
+ let expected_type = toolkit::collect_givens(state, expected_type); let _ = unification::subtype(state, context, result_type, expected_type)?; } @@ -183,6 +199,14 @@ pub trait IsOperator: IsElement { result_tree: (Self::Elaborated, Self::Elaborated), result_type: TypeId, ) -> (Self::Elaborated, TypeId); + + fn record_branch_types( + state: &mut CheckState, + operator_id: Self::OperatorId, + left: TypeId, + right: TypeId, + result: TypeId, + ); } impl IsOperator for lowering::TypeId { @@ -247,6 +271,18 @@ impl IsOperator for lowering::TypeId { (elaborated_type, result_kind) } + + fn record_branch_types( + state: &mut CheckState, + operator_id: Self::OperatorId, + left: TypeId, + right: TypeId, + result: TypeId, + ) { + state + .type_scope + .bind_operator_node(operator_id, OperatorBranchTypes { left, right, result }); + } } impl IsOperator for lowering::ExpressionId { @@ -306,6 +342,18 @@ impl IsOperator for lowering::ExpressionId { ) -> (Self::Elaborated, TypeId) { ((), result_type) } + + fn record_branch_types( + state: &mut CheckState, + operator_id: Self::OperatorId, + left: TypeId, + right: TypeId, + result: TypeId, + ) { + state + .term_scope + .bind_operator_node(operator_id, OperatorBranchTypes { left, right, result }); + } } impl IsOperator for lowering::BinderId { @@ -365,4 +413,16 @@ impl IsOperator for lowering::BinderId { ) -> (Self::Elaborated, TypeId) { ((), result_type) } + + fn record_branch_types( + state: &mut CheckState, + operator_id: Self::OperatorId, + left: TypeId, + right: TypeId, + result: TypeId, + ) { + state + .term_scope + .bind_operator_node(operator_id, OperatorBranchTypes { left, right, result }); + } } diff --git a/compiler-core/checking/src/algorithm/quantify.rs b/compiler-core/checking/src/algorithm/quantify.rs index 19fa52f52..5c2a18f4d 100644 --- a/compiler-core/checking/src/algorithm/quantify.rs +++ b/compiler-core/checking/src/algorithm/quantify.rs @@ -12,7 +12,7 @@ use smol_str::SmolStrBuilder; use 
crate::algorithm::constraint::{self, ConstraintApplication}; use crate::algorithm::fold::Zonk; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::substitute::{ShiftBound, SubstituteUnification, UniToLevel}; +use crate::algorithm::substitute::{SubstituteUnification, UniToName}; use crate::core::{Class, ForallBinder, Instance, RowType, Type, TypeId, Variable, debruijn}; use crate::{ExternalQueries, safe_loop}; @@ -30,24 +30,20 @@ pub fn quantify(state: &mut CheckState, id: TypeId) -> Option<(TypeId, debruijn: debruijn::Size(size as u32) }; - // Shift existing bound variable levels to make room for new quantifiers - let mut quantified = ShiftBound::on(state, id, size.0); - let mut substitutions = UniToLevel::default(); + let mut quantified = id; + let mut substitutions = UniToName::default(); - for (index, &id) in unsolved.iter().rev().enumerate() { + for &id in unsolved.iter().rev() { let kind = state.unification.get(id).kind; - let kind = ShiftBound::on(state, kind, size.0); - let name = generate_type_name(id); + let text = generate_type_name(id); + let mut name = state.fresh_name(&text); + name.depth = debruijn::Size(0); - let index = debruijn::Index(index as u32); - let level = index - .to_level(size) - .unwrap_or_else(|| unreachable!("invariant violated: invalid {index} for {size}")); - - let binder = ForallBinder { visible: false, name, level, kind }; + let binder = + ForallBinder { visible: false, implicit: true, text, variable: name.clone(), kind }; quantified = state.storage.intern(Type::Forall(binder, quantified)); - substitutions.insert(id, (level, kind)); + substitutions.insert(id, (name, kind)); } let quantified = SubstituteUnification::on(&substitutions, state, quantified); @@ -98,9 +94,20 @@ where let mut pending = vec![]; let mut unsatisfied = vec![]; + let mut latent = vec![]; for constraint in constraints { let constraint = Zonk::on(state, constraint); + + // Partial is a latent constraint, it has no type arguments so it 
+ // never has unification variables, but should be generalised + // rather than reported as unsatisfied. This allows inferring + // `Partial => Int` for expressions with non-exhaustive patterns. + if constraint == context.prim.partial { + latent.push(constraint); + continue; + } + let unification: FxHashSet = collect_unification(state, constraint).nodes().collect(); if unification.is_empty() { unsatisfied.push(constraint); @@ -113,7 +120,7 @@ where let (generalised, ambiguous) = classify_constraints_by_reachability(pending, in_signature); // Subtle: stable ordering for consistent output - let generalised = generalised.into_iter().sorted().collect_vec(); + let generalised = latent.into_iter().chain(generalised).sorted().collect_vec(); let minimized = minimize_by_superclasses(state, context, generalised)?; let constrained_type = minimized.into_iter().rfold(type_id, |constrained, constraint| { @@ -217,7 +224,7 @@ fn classify_constraints_by_reachability( } } -fn generate_type_name(id: u32) -> smol_str::SmolStr { +pub(crate) fn generate_type_name(id: u32) -> smol_str::SmolStr { let mut builder = SmolStrBuilder::default(); write!(builder, "t{id}").unwrap(); builder.finish() @@ -248,26 +255,24 @@ pub fn quantify_class(state: &mut CheckState, class: &mut Class) -> Option Opt } let unsolved = ordered_toposort(&graph, state)?; - let size = debruijn::Size(unsolved.len() as u32); - let mut substitutions = UniToLevel::default(); - for (index, &id) in unsolved.iter().rev().enumerate() { + let mut substitutions = UniToName::default(); + for &id in unsolved.iter().rev() { let kind = state.unification.get(id).kind; - let kind = ShiftBound::on(state, kind, size.0); - let index = debruijn::Index(index as u32); - let level = index.to_level(size)?; - substitutions.insert(id, (level, kind)); + let text = generate_type_name(id); + let mut name = state.fresh_name(&text); + name.depth = debruijn::Size(0); + substitutions.insert(id, (name, kind)); } - let kind_variables = 
substitutions.values().copied(); - let kind_variables = kind_variables.sorted_by_key(|(level, _)| *level); - let kind_variables = kind_variables.map(|(_, kind)| kind).collect_vec(); + let kind_variables = substitutions.values().cloned(); + let kind_variables = kind_variables.sorted_by_key(|(name, _)| name.unique); + let kind_variables = kind_variables.collect_vec(); let arguments = instance.arguments.iter().map(|&(t, k)| { - let t = ShiftBound::on(state, t, size.0); let t = SubstituteUnification::on(&substitutions, state, t); - let k = ShiftBound::on(state, k, size.0); let k = SubstituteUnification::on(&substitutions, state, k); (t, k) }); @@ -328,9 +330,7 @@ pub fn quantify_instance(state: &mut CheckState, instance: &mut Instance) -> Opt instance.arguments = arguments.collect(); let constraints = instance.constraints.iter().map(|&(t, k)| { - let t = ShiftBound::on(state, t, size.0); let t = SubstituteUnification::on(&substitutions, state, t); - let k = ShiftBound::on(state, k, size.0); let k = SubstituteUnification::on(&substitutions, state, k); (t, k) }); @@ -344,14 +344,14 @@ pub fn quantify_instance(state: &mut CheckState, instance: &mut Instance) -> Opt /// Builds a topological sort of the [`UniGraph`]. /// -/// This function uses the domain-based sorting of the unification variables +/// This function uses the depth-based sorting of the unification variables /// as the base for the post-order traversal. In turn, this ensures that -/// unconnected nodes are ordered by domain while connected ones are sorted +/// unconnected nodes are ordered by depth while connected ones are sorted /// topologically. The resulting [`IndexSet`] can be iterated in reverse to /// build the `forall` binders during quantification. 
fn ordered_toposort(graph: &UniGraph, state: &CheckState) -> Option> { let mut nodes: Vec = graph.nodes().collect(); - nodes.sort_by_key(|&id| (state.unification.get(id).domain, id)); + nodes.sort_by_key(|&id| (state.unification.get(id).depth, id)); let mut dfs = DfsPostOrder::empty(graph); let mut unsolved = IndexSet::new(); @@ -477,14 +477,19 @@ fn collect_unification(state: &mut CheckState, id: TypeId) -> UniGraph { mod tests { use super::*; - fn add_unification(state: &mut CheckState, domain: u32) -> u32 { + fn test_file_id() -> files::FileId { + let mut files = files::Files::default(); + files.insert("Test.purs", "module Test where\n\n") + } + + fn add_unification(state: &mut CheckState, depth: u32) -> u32 { let kind = state.storage.intern(Type::Unknown); - state.unification.fresh(debruijn::Size(domain), kind) + state.unification.fresh(debruijn::Size(depth), kind) } #[test] fn test_toposort_dag() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -504,7 +509,7 @@ mod tests { #[test] fn test_toposort_tuple_cycle() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -521,7 +526,7 @@ mod tests { #[test] fn test_toposort_self_cycle() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -535,7 +540,7 @@ mod tests { #[test] fn test_toposort_triple_cycle() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -553,7 +558,7 @@ mod tests { #[test] fn test_toposort_domain_ordering() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut 
graph = UniGraph::default(); let id0 = add_unification(&mut state, 1); @@ -571,7 +576,7 @@ mod tests { #[test] fn test_toposort_id_ordering() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -589,7 +594,7 @@ mod tests { #[test] fn test_toposort_dependency_ordering() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 2); @@ -608,7 +613,7 @@ mod tests { #[test] fn test_toposort_diamond() { - let mut state = CheckState::default(); + let mut state = CheckState::new(test_file_id()); let mut graph = UniGraph::default(); let id0 = add_unification(&mut state, 0); @@ -626,7 +631,7 @@ mod tests { let sorted: Vec = result.unwrap().into_iter().collect(); - // All have the same domain, + // All have the same depth, assert_eq!(sorted, vec![id3, id2, id0, id1]); } } diff --git a/compiler-core/checking/src/algorithm/state.rs b/compiler-core/checking/src/algorithm/state.rs index 4009aff67..3a4fc316a 100644 --- a/compiler-core/checking/src/algorithm/state.rs +++ b/compiler-core/checking/src/algorithm/state.rs @@ -1,53 +1,84 @@ pub mod unification; -use itertools::Itertools; pub use unification::*; -use std::collections::VecDeque; -use std::mem; +pub mod implication; +pub use implication::*; + use std::sync::Arc; use building_types::QueryResult; use files::FileId; use indexing::{IndexedModule, TermItemId, TypeItemId}; +use itertools::Itertools; use lowering::{ BinderId, GraphNodeId, GroupedModule, ImplicitBindingId, LetBindingNameGroupId, LoweredModule, - RecordPunId, TypeItemIr, TypeVariableBindingId, + RecordPunId, TermOperatorId, TypeItemIr, TypeOperatorId, TypeVariableBindingId, }; use resolving::ResolvedModule; use rustc_hash::FxHashMap; +use smol_str::{SmolStr, ToSmolStr}; use stabilizing::StabilizedModule; use sugar::{Bracketed, 
Sectioned}; +use crate::algorithm::exhaustiveness::{ + ExhaustivenessReport, Pattern, PatternConstructor, PatternId, PatternKind, PatternStorage, +}; use crate::algorithm::{constraint, transfer}; -use crate::core::{Type, TypeId, TypeInterner, Variable, debruijn}; +use crate::core::{Name, Type, TypeId, TypeInterner, Variable, debruijn, pretty}; use crate::error::{CheckError, ErrorKind, ErrorStep}; -use crate::{CheckedModule, ExternalQueries}; +use crate::{CheckedModule, ExternalQueries, TypeErrorMessageId}; + +/// Produces globally unique [`Name`] values. +pub struct Names { + next: u32, + file: FileId, +} + +impl Names { + pub fn new(file: FileId) -> Names { + Names { next: 0, file } + } +} + +#[derive(Copy, Clone, Debug)] +pub struct OperatorBranchTypes { + pub left: TypeId, + pub right: TypeId, + pub result: TypeId, +} /// Manually-managed scope for type-level bindings. #[derive(Default)] pub struct TypeScope { pub bound: debruijn::Bound, pub kinds: debruijn::BoundMap, + pub names: FxHashMap, + pub operator_node: FxHashMap, } impl TypeScope { - pub fn bind_forall(&mut self, id: TypeVariableBindingId, kind: TypeId) -> debruijn::Level { + pub fn bind_forall(&mut self, id: TypeVariableBindingId, kind: TypeId, name: Name) -> Name { let variable = debruijn::Variable::Forall(id); let level = self.bound.bind(variable); self.kinds.insert(level, kind); - level + let result = name.clone(); + self.names.insert(level, name); + result } - pub fn bind_core(&mut self, kind: TypeId) -> debruijn::Level { + pub fn bind_core(&mut self, kind: TypeId, name: Name) -> Name { let variable = debruijn::Variable::Core; let level = self.bound.bind(variable); self.kinds.insert(level, kind); - level + let result = name.clone(); + self.names.insert(level, name); + result } - pub fn lookup_forall(&self, id: TypeVariableBindingId) -> Option { + pub fn lookup_forall(&self, id: TypeVariableBindingId) -> Option { let variable = debruijn::Variable::Forall(id); - self.bound.level_of(variable) + let 
level = self.bound.level_of(variable)?; + self.names.get(&level).cloned() } pub fn lookup_forall_kind(&self, id: TypeVariableBindingId) -> Option { @@ -61,20 +92,20 @@ impl TypeScope { node: GraphNodeId, id: ImplicitBindingId, kind: TypeId, - ) -> debruijn::Level { + name: Name, + ) -> Name { let variable = debruijn::Variable::Implicit { node, id }; let level = self.bound.level_of(variable).unwrap_or_else(|| self.bound.bind(variable)); self.kinds.insert(level, kind); - level + let result = name.clone(); + self.names.insert(level, name); + result } - pub fn lookup_implicit( - &self, - node: GraphNodeId, - id: ImplicitBindingId, - ) -> Option { + pub fn lookup_implicit(&self, node: GraphNodeId, id: ImplicitBindingId) -> Option { let variable = debruijn::Variable::Implicit { node, id }; - self.bound.level_of(variable) + let level = self.bound.level_of(variable)?; + self.names.get(&level).cloned() } pub fn lookup_implicit_kind(&self, node: GraphNodeId, id: ImplicitBindingId) -> Option { @@ -86,6 +117,15 @@ impl TypeScope { pub fn unbind(&mut self, level: debruijn::Level) { self.bound.unbind(level); self.kinds.unbind(level); + self.names.retain(|&l, _| l < level); + } + + /// Finds the level for a given [`Name`] and unbinds from that level. + pub fn unbind_name(&mut self, name: &Name) { + let level = self.names.iter().find_map(|(&level, n)| (n == name).then_some(level)); + if let Some(level) = level { + self.unbind(level); + } } /// Unbinds variables starting from a level and returns captured implicit bindings. 
@@ -99,8 +139,9 @@ impl TypeScope { for (level, variable) in self.bound.iter_from(level) { if let debruijn::Variable::Implicit { node, id } = variable && let Some(&kind) = self.kinds.get(level) + && let Some(name) = self.names.get(&level).cloned() { - implicits.push(InstanceHeadBinding { node, id, kind }); + implicits.push(InstanceHeadBinding { node, id, kind, name }); } } @@ -111,6 +152,14 @@ impl TypeScope { pub fn size(&self) -> debruijn::Size { self.bound.size() } + + pub fn bind_operator_node(&mut self, id: TypeOperatorId, types: OperatorBranchTypes) { + self.operator_node.insert(id, types); + } + + pub fn lookup_operator_node(&self, id: TypeOperatorId) -> Option { + self.operator_node.get(&id).copied() + } } /// Manually-managed scope for term-level bindings. @@ -120,6 +169,7 @@ pub struct TermScope { pub let_binding: FxHashMap, pub record_pun: FxHashMap, pub section: FxHashMap, + pub operator_node: FxHashMap, } impl TermScope { @@ -154,6 +204,14 @@ impl TermScope { pub fn lookup_section(&self, id: lowering::ExpressionId) -> Option { self.section.get(&id).copied() } + + pub fn bind_operator_node(&mut self, id: TermOperatorId, types: OperatorBranchTypes) { + self.operator_node.insert(id, types); + } + + pub fn lookup_operator_node(&self, id: TermOperatorId) -> Option { + self.operator_node.get(&id).copied() + } } /// A single implicit variable captured from an instance head. @@ -165,6 +223,7 @@ pub struct InstanceHeadBinding { pub node: GraphNodeId, pub id: ImplicitBindingId, pub kind: TypeId, + pub name: Name, } /// Tracks type variables declared in surface syntax. @@ -235,35 +294,24 @@ impl SurfaceBindings { } } -/// Collects wanted and given constraints. 
-#[derive(Default)] -pub struct ConstraintContext { - pub wanted: VecDeque, - pub given: Vec, +#[derive(Clone, Copy)] +pub enum PendingType { + Immediate(TypeId), + Deferred(TypeId), } -impl ConstraintContext { - pub fn push_wanted(&mut self, constraint: TypeId) { - self.wanted.push_back(constraint); - } - - pub fn extend_wanted(&mut self, constraints: &[TypeId]) { - self.wanted.extend(constraints); - } - - pub fn push_given(&mut self, constraint: TypeId) { - self.given.push(constraint); - } - - pub fn take(&mut self) -> (VecDeque, Vec) { - (mem::take(&mut self.wanted), mem::take(&mut self.given)) +impl From for TypeId { + fn from(value: PendingType) -> Self { + match value { + PendingType::Immediate(id) => id, + PendingType::Deferred(id) => id, + } } } /// The core state structure threaded through the [`algorithm`]. /// /// [`algorithm`]: crate::algorithm -#[derive(Default)] pub struct CheckState { /// Interns and stores all types created during checking. pub storage: TypeInterner, @@ -278,18 +326,54 @@ pub struct CheckState { /// Tracks surface variables for rebinding, see struct documentation. pub surface_bindings: SurfaceBindings, - /// Collects wanted/given type class constraints. - pub constraints: ConstraintContext, + /// Stores implication scopes for constraint solving. + pub implications: Implications, + /// Collects unification variables and solutions. pub unification: UnificationContext, /// The in-progress binding group; used for recursive declarations. pub binding_group: BindingGroupContext, + /// Stores terms whose signatures have been kind-checked that still need + /// additional unification before moving into [`CheckedModule::terms`]. + pub pending_terms: FxHashMap, + + /// Stores types whose signatures have been kind-checked that still need + /// additional unification before moving into [`CheckedModule::types`]. + pub pending_types: FxHashMap, + /// Error context breadcrumbs for [`CheckedModule::errors`]. 
pub check_steps: Vec, /// Flag that determines when it's appropriate to expand synonyms. pub defer_synonym_expansion: bool, + + /// Interns patterns for exhaustiveness checking. + pub patterns: PatternStorage, + + /// Produces fresh [`Name`] values for bound type variables. + pub names: Names, +} + +impl CheckState { + pub fn new(file_id: FileId) -> CheckState { + CheckState { + storage: Default::default(), + checked: Default::default(), + type_scope: Default::default(), + term_scope: Default::default(), + surface_bindings: Default::default(), + implications: Default::default(), + unification: Default::default(), + binding_group: Default::default(), + pending_terms: Default::default(), + pending_types: Default::default(), + check_steps: Default::default(), + defer_synonym_expansion: Default::default(), + patterns: Default::default(), + names: Names::new(file_id), + } + } } #[derive(Clone)] @@ -340,6 +424,7 @@ where pub prim_coerce: PrimCoerceCore, pub prim_type_error: PrimTypeErrorCore, pub known_types: KnownTypesCore, + pub known_terms: KnownTermsCore, pub known_reflectable: KnownReflectableCore, pub known_generic: Option, @@ -381,6 +466,7 @@ where let prim_coerce = PrimCoerceCore::collect(queries)?; let prim_type_error = PrimTypeErrorCore::collect(queries, state)?; let known_types = KnownTypesCore::collect(queries)?; + let known_terms = KnownTermsCore::collect(queries)?; let known_reflectable = KnownReflectableCore::collect(queries, &mut state.storage)?; let known_generic = KnownGeneric::collect(queries, &mut state.storage)?; let resolved = queries.resolved(id)?; @@ -399,6 +485,7 @@ where prim_coerce, prim_type_error, known_types, + known_terms, known_reflectable, known_generic, id, @@ -446,15 +533,33 @@ impl<'r, 's> PrimLookup<'r, 's> { self.storage.intern(Type::Constructor(file_id, type_id)) } + fn class_item(&self, name: &str) -> TypeItemId { + let (_, type_id) = + self.resolved.lookup_class(self.resolved, None, name).unwrap_or_else(|| { + 
unreachable!("invariant violated: {name} not in {}", self.module_name) + }); + type_id + } + + fn class_constructor(&mut self, name: &str) -> TypeId { + let (file_id, type_id) = + self.resolved.lookup_class(self.resolved, None, name).unwrap_or_else(|| { + unreachable!("invariant violated: {name} not in {}", self.module_name) + }); + self.storage.intern(Type::Constructor(file_id, type_id)) + } + fn intern(&mut self, ty: Type) -> TypeId { self.storage.intern(ty) } } pub struct PrimCore { + pub prim_id: FileId, pub t: TypeId, pub type_to_type: TypeId, pub function: TypeId, + pub function_item: TypeItemId, pub array: TypeId, pub record: TypeId, pub number: TypeId, @@ -466,21 +571,31 @@ pub struct PrimCore { pub constraint: TypeId, pub symbol: TypeId, pub row: TypeId, + pub row_type: TypeId, pub unknown: TypeId, } impl PrimCore { fn collect(queries: &impl ExternalQueries, state: &mut CheckState) -> QueryResult { - let resolved = queries.resolved(queries.prim_id())?; + let prim_id = queries.prim_id(); + let resolved = queries.resolved(prim_id)?; let mut lookup = PrimLookup::new(&resolved, &mut state.storage, "Prim"); let t = lookup.type_constructor("Type"); let type_to_type = lookup.intern(Type::Function(t, t)); + let row = lookup.type_constructor("Row"); + let row_type = lookup.intern(Type::Application(row, t)); + + let function = lookup.type_constructor("Function"); + let function_item = lookup.type_item("Function"); + Ok(PrimCore { + prim_id, t, type_to_type, - function: lookup.type_constructor("Function"), + function, + function_item, array: lookup.type_constructor("Array"), record: lookup.type_constructor("Record"), number: lookup.type_constructor("Number"), @@ -488,10 +603,11 @@ impl PrimCore { string: lookup.type_constructor("String"), char: lookup.type_constructor("Char"), boolean: lookup.type_constructor("Boolean"), - partial: lookup.type_constructor("Partial"), + partial: lookup.class_constructor("Partial"), constraint: lookup.type_constructor("Constraint"), 
symbol: lookup.type_constructor("Symbol"), - row: lookup.type_constructor("Row"), + row, + row_type, unknown: lookup.intern(Type::Unknown), }) } @@ -516,10 +632,10 @@ impl PrimIntCore { Ok(PrimIntCore { file_id, - add: lookup.type_item("Add"), - mul: lookup.type_item("Mul"), - compare: lookup.type_item("Compare"), - to_string: lookup.type_item("ToString"), + add: lookup.class_item("Add"), + mul: lookup.class_item("Mul"), + compare: lookup.class_item("Compare"), + to_string: lookup.class_item("ToString"), }) } } @@ -575,9 +691,9 @@ impl PrimSymbolCore { Ok(PrimSymbolCore { file_id, - append: lookup.type_item("Append"), - compare: lookup.type_item("Compare"), - cons: lookup.type_item("Cons"), + append: lookup.class_item("Append"), + compare: lookup.class_item("Compare"), + cons: lookup.class_item("Cons"), }) } } @@ -621,10 +737,10 @@ impl PrimRowCore { Ok(PrimRowCore { file_id, - union: lookup.type_item("Union"), - cons: lookup.type_item("Cons"), - lacks: lookup.type_item("Lacks"), - nub: lookup.type_item("Nub"), + union: lookup.class_item("Union"), + cons: lookup.class_item("Cons"), + lacks: lookup.class_item("Lacks"), + nub: lookup.class_item("Nub"), }) } } @@ -650,7 +766,7 @@ impl PrimRowListCore { Ok(PrimRowListCore { file_id, - row_to_list: lookup.type_item("RowToList"), + row_to_list: lookup.class_item("RowToList"), cons: lookup.type_constructor("Cons"), nil: lookup.type_constructor("Nil"), }) @@ -671,7 +787,7 @@ impl PrimCoerceCore { let resolved = queries.resolved(file_id)?; let (_, coercible) = resolved .exports - .lookup_type("Coercible") + .lookup_class("Coercible") .unwrap_or_else(|| unreachable!("invariant violated: Coercible not in Prim.Coerce")); Ok(PrimCoerceCore { file_id, coercible }) @@ -700,8 +816,8 @@ impl PrimTypeErrorCore { Ok(PrimTypeErrorCore { file_id, - warn: lookup.type_item("Warn"), - fail: lookup.type_item("Fail"), + warn: lookup.class_item("Warn"), + fail: lookup.class_item("Fail"), text: lookup.type_constructor("Text"), quote: 
lookup.type_constructor("Quote"), quote_label: lookup.type_constructor("QuoteLabel"), @@ -711,7 +827,22 @@ impl PrimTypeErrorCore { } } -fn fetch_known_type( +fn fetch_known_term( + queries: &impl ExternalQueries, + m: &str, + n: &str, +) -> QueryResult> { + let Some(file_id) = queries.module_file(m) else { + return Ok(None); + }; + let resolved = queries.resolved(file_id)?; + let Some((file_id, term_id)) = resolved.exports.lookup_term(n) else { + return Ok(None); + }; + Ok(Some((file_id, term_id))) +} + +fn fetch_known_class( queries: &impl ExternalQueries, m: &str, n: &str, @@ -720,7 +851,7 @@ fn fetch_known_type( return Ok(None); }; let resolved = queries.resolved(file_id)?; - let Some((file_id, type_id)) = resolved.exports.lookup_type(n) else { + let Some((file_id, type_id)) = resolved.exports.lookup_class(n) else { return Ok(None); }; Ok(Some((file_id, type_id))) @@ -761,21 +892,21 @@ pub struct KnownTypesCore { impl KnownTypesCore { fn collect(queries: &impl ExternalQueries) -> QueryResult { - let eq = fetch_known_type(queries, "Data.Eq", "Eq")?; - let eq1 = fetch_known_type(queries, "Data.Eq", "Eq1")?; - let ord = fetch_known_type(queries, "Data.Ord", "Ord")?; - let ord1 = fetch_known_type(queries, "Data.Ord", "Ord1")?; - let functor = fetch_known_type(queries, "Data.Functor", "Functor")?; - let bifunctor = fetch_known_type(queries, "Data.Bifunctor", "Bifunctor")?; + let eq = fetch_known_class(queries, "Data.Eq", "Eq")?; + let eq1 = fetch_known_class(queries, "Data.Eq", "Eq1")?; + let ord = fetch_known_class(queries, "Data.Ord", "Ord")?; + let ord1 = fetch_known_class(queries, "Data.Ord", "Ord1")?; + let functor = fetch_known_class(queries, "Data.Functor", "Functor")?; + let bifunctor = fetch_known_class(queries, "Data.Bifunctor", "Bifunctor")?; let contravariant = - fetch_known_type(queries, "Data.Functor.Contravariant", "Contravariant")?; - let profunctor = fetch_known_type(queries, "Data.Profunctor", "Profunctor")?; - let foldable = 
fetch_known_type(queries, "Data.Foldable", "Foldable")?; - let bifoldable = fetch_known_type(queries, "Data.Bifoldable", "Bifoldable")?; - let traversable = fetch_known_type(queries, "Data.Traversable", "Traversable")?; - let bitraversable = fetch_known_type(queries, "Data.Bitraversable", "Bitraversable")?; - let newtype = fetch_known_type(queries, "Data.Newtype", "Newtype")?; - let generic = fetch_known_type(queries, "Data.Generic.Rep", "Generic")?; + fetch_known_class(queries, "Data.Functor.Contravariant", "Contravariant")?; + let profunctor = fetch_known_class(queries, "Data.Profunctor", "Profunctor")?; + let foldable = fetch_known_class(queries, "Data.Foldable", "Foldable")?; + let bifoldable = fetch_known_class(queries, "Data.Bifoldable", "Bifoldable")?; + let traversable = fetch_known_class(queries, "Data.Traversable", "Traversable")?; + let bitraversable = fetch_known_class(queries, "Data.Bitraversable", "Bitraversable")?; + let newtype = fetch_known_class(queries, "Data.Newtype", "Newtype")?; + let generic = fetch_known_class(queries, "Data.Generic.Rep", "Generic")?; Ok(KnownTypesCore { eq, eq1, @@ -806,8 +937,8 @@ impl KnownReflectableCore { queries: &impl ExternalQueries, storage: &mut TypeInterner, ) -> QueryResult { - let is_symbol = fetch_known_type(queries, "Data.Symbol", "IsSymbol")?; - let reflectable = fetch_known_type(queries, "Data.Reflectable", "Reflectable")?; + let is_symbol = fetch_known_class(queries, "Data.Symbol", "IsSymbol")?; + let reflectable = fetch_known_class(queries, "Data.Reflectable", "Reflectable")?; let ordering = fetch_known_constructor(queries, storage, "Data.Ordering", "Ordering")?; Ok(KnownReflectableCore { is_symbol, reflectable, ordering }) } @@ -867,6 +998,17 @@ impl KnownGeneric { } } +pub struct KnownTermsCore { + pub otherwise: Option<(FileId, indexing::TermItemId)>, +} + +impl KnownTermsCore { + fn collect(queries: &impl ExternalQueries) -> QueryResult { + let otherwise = fetch_known_term(queries, "Data.Boolean", 
"otherwise")?; + Ok(KnownTermsCore { otherwise }) + } +} + impl CheckState { /// Executes the given closure with a term binding group in scope. /// @@ -883,7 +1025,7 @@ impl CheckState { F: FnOnce(&mut Self) -> T, { for item in group { - if !self.checked.terms.contains_key(&item) { + if !self.checked.terms.contains_key(&item) && !self.pending_terms.contains_key(&item) { let t = self.fresh_unification_type(context); self.binding_group.terms.insert(item, t); } @@ -915,6 +1057,9 @@ impl CheckState { if self.checked.types.contains_key(&item_id) { return false; } + if self.pending_types.contains_key(&item_id) { + return false; + } true }); @@ -940,8 +1085,10 @@ impl CheckState { ); let kind = self.binding_group.lookup_type(item_id).or_else(|| { - let kind = self.checked.types.get(&item_id)?; - Some(transfer::localize(self, context, *kind)) + self.pending_types.get(&item_id).map(|&k| TypeId::from(k)).or_else(|| { + let kind = self.checked.types.get(&item_id)?; + Some(transfer::localize(self, context, *kind)) + }) }); let kind = kind.expect("invariant violated: expected kind for operator target"); @@ -953,12 +1100,58 @@ impl CheckState { result } + pub fn with_implication(&mut self, action: impl FnOnce(&mut Self) -> T) -> T { + let child = self.push_implication(); + let result = action(self); + self.pop_implication(child); + result + } + + pub fn push_implication(&mut self) -> ImplicationId { + self.implications.push() + } + + pub fn pop_implication(&mut self, implication: ImplicationId) { + self.implications.pop(implication); + } + + pub fn current_implication(&self) -> ImplicationId { + self.implications.current() + } + + pub fn current_implication_mut(&mut self) -> &mut implication::Implication { + self.implications.current_mut() + } + + pub fn push_wanted(&mut self, constraint: TypeId) { + self.current_implication_mut().wanted.push_back(constraint); + } + + pub fn extend_wanted(&mut self, constraints: &[TypeId]) { + 
self.current_implication_mut().wanted.extend(constraints.iter().copied()); + } + + pub fn push_given(&mut self, constraint: TypeId) { + self.current_implication_mut().given.push(constraint); + } + pub fn solve_constraints(&mut self, context: &CheckContext) -> QueryResult> where Q: ExternalQueries, { - let (wanted, given) = self.constraints.take(); - constraint::solve_constraints(self, context, wanted, given) + constraint::solve_implication(self, context) + } + + pub fn report_exhaustiveness(&mut self, exhaustiveness: ExhaustivenessReport) { + if let Some(patterns) = exhaustiveness.missing { + let patterns = Arc::from(patterns); + self.insert_error(ErrorKind::MissingPatterns { patterns }); + } + + if !exhaustiveness.redundant.is_empty() { + let patterns = Arc::from(exhaustiveness.redundant); + self.insert_error(ErrorKind::RedundantPatterns { patterns }); + } } /// Executes an action with an [`ErrorStep`] in scope. @@ -978,20 +1171,74 @@ impl CheckState { let error = CheckError { kind, step }; self.checked.errors.push(error); } + + /// Interns an error message in [`CheckedModule::error_messages`]. + pub fn intern_error_message(&mut self, message: impl ToSmolStr) -> TypeErrorMessageId { + self.checked.error_messages.intern(message.to_smolstr()) + } + + /// Renders a local type and interns it in [`CheckedModule::error_messages`]. + pub fn render_local_type( + &mut self, + context: &CheckContext, + t: TypeId, + ) -> TypeErrorMessageId + where + Q: ExternalQueries, + { + let t = pretty::print_local(self, context, t); + self.intern_error_message(t) + } +} + +/// Functions for creating fresh [`Name`] values. 
+impl CheckState { + pub fn fresh_name(&mut self, text: &SmolStr) -> Name { + let unique = self.names.next; + self.names.next += 1; + let file = self.names.file; + let text = SmolStr::clone(text); + let depth = self.type_scope.size(); + Name { unique, file, text, depth } + } + + pub fn fresh_name_str(&mut self, text: &str) -> Name { + let unique = self.names.next; + self.names.next += 1; + let file = self.names.file; + let text = SmolStr::new(text); + let depth = self.type_scope.size(); + Name { unique, file, text, depth } + } + + pub fn fresh_name_unify(&mut self, left: &SmolStr, right: &SmolStr) -> (Name, Name) { + let unique = self.names.next; + self.names.next += 1; + let file = self.names.file; + let depth = self.type_scope.size(); + ( + Name { unique, file, text: SmolStr::clone(left), depth }, + Name { unique, file, text: SmolStr::clone(right), depth }, + ) + } + + pub fn name_text<'a>(&self, name: &'a Name) -> &'a str { + name.text.as_str() + } } /// Functions for creating unification variables. impl CheckState { - /// Creates a fresh unification variable with the provided domain and kind. - pub fn fresh_unification_kinded_at(&mut self, domain: debruijn::Size, kind: TypeId) -> TypeId { - let unification_id = self.unification.fresh(domain, kind); + /// Creates a fresh unification variable with the provided depth and kind. + pub fn fresh_unification_kinded_at(&mut self, depth: debruijn::Size, kind: TypeId) -> TypeId { + let unification_id = self.unification.fresh(depth, kind); self.storage.intern(Type::Unification(unification_id)) } /// Creates a fresh unification variable with the provided kind. pub fn fresh_unification_kinded(&mut self, kind: TypeId) -> TypeId { - let domain = self.type_scope.size(); - self.fresh_unification_kinded_at(domain, kind) + let depth = self.type_scope.size(); + self.fresh_unification_kinded_at(depth, kind) } /// Creates a fresh polykinded unification variable. 
@@ -1013,15 +1260,14 @@ impl CheckState { /// Creates a fresh skolem variable with the provided kind. pub fn fresh_skolem_kinded(&mut self, kind: TypeId) -> TypeId { - let domain = self.type_scope.size(); - let level = debruijn::Level(domain.0); - let skolem = Variable::Skolem(level, kind); + let name = self.fresh_name_str("_"); + let skolem = Variable::Skolem(name, kind); self.storage.intern(Type::Variable(skolem)) } } impl CheckState { - /// Normalizes unification and bound type variables. + /// Normalises unification and bound type variables. /// /// This function also applies path compression to unification variables, /// where if a unification variable `?0` solves to `?1`, which solves to @@ -1041,6 +1287,13 @@ impl CheckState { break id; } } + Type::Row(ref row) if row.fields.is_empty() => { + if let Some(tail) = row.tail { + id = tail; + } else { + break id; + } + } _ => break id, } }; @@ -1060,3 +1313,23 @@ impl CheckState { }) } } + +impl CheckState { + pub fn allocate_pattern(&mut self, kind: PatternKind, t: TypeId) -> PatternId { + let pattern = Pattern { kind, t }; + self.patterns.intern(pattern) + } + + pub fn allocate_constructor( + &mut self, + constructor: PatternConstructor, + t: TypeId, + ) -> PatternId { + let kind = PatternKind::Constructor { constructor }; + self.allocate_pattern(kind, t) + } + + pub fn allocate_wildcard(&mut self, t: TypeId) -> PatternId { + self.allocate_pattern(PatternKind::Wildcard, t) + } +} diff --git a/compiler-core/checking/src/algorithm/state/implication.rs b/compiler-core/checking/src/algorithm/state/implication.rs new file mode 100644 index 000000000..6c940ae62 --- /dev/null +++ b/compiler-core/checking/src/algorithm/state/implication.rs @@ -0,0 +1,79 @@ +use std::collections::VecDeque; +use std::ops::{Index, IndexMut}; + +use crate::core::{TypeId, debruijn}; + +/// A unique identifier for an implication scope. +pub type ImplicationId = u32; + +/// A node in the implication tree. 
+#[derive(Default)] +pub struct Implication { + pub skolems: Vec, + pub given: Vec, + pub wanted: VecDeque, + pub children: Vec, + pub parent: Option, +} + +impl Implication { + pub fn new(parent: Option) -> Implication { + Implication { parent, ..Implication::default() } + } +} + +pub struct Implications { + nodes: Vec, + current: ImplicationId, +} + +impl Implications { + pub fn new() -> Self { + Implications { nodes: vec![Implication::new(None)], current: 0 } + } + + pub fn current(&self) -> ImplicationId { + self.current + } + + pub fn current_mut(&mut self) -> &mut Implication { + let current = self.current as usize; + &mut self.nodes[current] + } + + pub fn push(&mut self) -> ImplicationId { + let parent = self.current; + let id = self.nodes.len() as ImplicationId; + self.nodes.push(Implication::new(Some(parent))); + self.nodes[parent as usize].children.push(id); + self.current = id; + id + } + + pub fn pop(&mut self, implication: ImplicationId) { + debug_assert_eq!(implication, self.current); + let parent = + self[implication].parent.expect("invariant violated: missing implication parent"); + self.current = parent; + } +} + +impl Default for Implications { + fn default() -> Implications { + Implications::new() + } +} + +impl Index for Implications { + type Output = Implication; + + fn index(&self, index: ImplicationId) -> &Self::Output { + &self.nodes[index as usize] + } +} + +impl IndexMut for Implications { + fn index_mut(&mut self, index: ImplicationId) -> &mut Self::Output { + &mut self.nodes[index as usize] + } +} diff --git a/compiler-core/checking/src/algorithm/state/unification.rs b/compiler-core/checking/src/algorithm/state/unification.rs index 501400496..0596c60b8 100644 --- a/compiler-core/checking/src/algorithm/state/unification.rs +++ b/compiler-core/checking/src/algorithm/state/unification.rs @@ -8,7 +8,7 @@ pub enum UnificationState { #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct UnificationEntry { - pub domain: 
debruijn::Size, + pub depth: debruijn::Size, pub kind: TypeId, pub state: UnificationState, } @@ -20,11 +20,11 @@ pub struct UnificationContext { } impl UnificationContext { - pub fn fresh(&mut self, domain: debruijn::Size, kind: TypeId) -> u32 { + pub fn fresh(&mut self, depth: debruijn::Size, kind: TypeId) -> u32 { let unique = self.unique; self.unique += 1; - self.entries.push(UnificationEntry { domain, kind, state: UnificationState::Unsolved }); + self.entries.push(UnificationEntry { depth, kind, state: UnificationState::Unsolved }); unique } diff --git a/compiler-core/checking/src/algorithm/substitute.rs b/compiler-core/checking/src/algorithm/substitute.rs index d64c9323f..589fe6fff 100644 --- a/compiler-core/checking/src/algorithm/substitute.rs +++ b/compiler-core/checking/src/algorithm/substitute.rs @@ -2,85 +2,53 @@ use rustc_hash::FxHashMap; use crate::algorithm::fold::{FoldAction, TypeFold, fold_type}; use crate::algorithm::state::CheckState; -use crate::core::{ForallBinder, Type, TypeId, Variable, debruijn}; +use crate::core::{Name, Type, TypeId, Variable}; pub struct SubstituteBound { - target_level: debruijn::Level, - with_type: TypeId, + target: Name, + replacement: TypeId, } impl SubstituteBound { - /// Substitutes a bound variable at a specific level with a replacement type. + /// Substitutes a bound variable with a specific name with a replacement type. /// - /// Since levels are absolute positions, no scope tracking is needed, - /// we simply match on the target level directly. + /// Since names are globally unique, no scope tracking is needed. + /// We simply match on the target name directly. 
pub fn on( state: &mut CheckState, - target_level: debruijn::Level, - with_type: TypeId, + target: Name, + replacement: TypeId, in_type: TypeId, ) -> TypeId { - fold_type(state, in_type, &mut SubstituteBound { target_level, with_type }) + fold_type(state, in_type, &mut SubstituteBound { target, replacement }) } } impl TypeFold for SubstituteBound { - fn transform(&mut self, _state: &mut CheckState, _id: TypeId, t: &Type) -> FoldAction { - if let Type::Variable(Variable::Bound(level, _)) = t - && *level == self.target_level - { - return FoldAction::Replace(self.with_type); - } - FoldAction::Continue - } -} - -pub struct ShiftBound { - offset: u32, -} - -impl ShiftBound { - /// Shifts all bound variable levels in a type by a given offset. - /// - /// This is needed when adding new forall binders at the front of a type, - /// as existing bound variables need their levels adjusted to account for - /// the new binders. - pub fn on(state: &mut CheckState, id: TypeId, offset: u32) -> TypeId { - if offset == 0 { - return id; - } - fold_type(state, id, &mut ShiftBound { offset }) - } -} - -impl TypeFold for ShiftBound { - fn transform(&mut self, state: &mut CheckState, _id: TypeId, t: &Type) -> FoldAction { - if let Type::Variable(Variable::Bound(level, kind)) = t { - let level = debruijn::Level(level.0 + self.offset); - let kind = ShiftBound::on(state, *kind, self.offset); - FoldAction::Replace(state.storage.intern(Type::Variable(Variable::Bound(level, kind)))) - } else { - FoldAction::Continue + fn transform(&mut self, _state: &mut CheckState, id: TypeId, t: &Type) -> FoldAction { + match t { + // The forall rebinds the target name, so substitution stops. 
+ Type::Forall(binder, _) if binder.variable == self.target => FoldAction::Replace(id), + Type::Variable(Variable::Bound(name, _)) if *name == self.target => { + FoldAction::Replace(self.replacement) + } + _ => FoldAction::Continue, } } - - fn transform_binder(&mut self, binder: &mut ForallBinder) { - binder.level = debruijn::Level(binder.level.0 + self.offset); - } } -pub type UniToLevel = FxHashMap; +pub type UniToName = FxHashMap; pub struct SubstituteUnification<'a> { - substitutions: &'a UniToLevel, + substitutions: &'a UniToName, } impl SubstituteUnification<'_> { - /// Level-based substitution over a [`Type`]. + /// Name-based substitution over a [`Type`]. /// - /// Replaces unification variables with bound variables using a level-based - /// mapping. Since levels are absolute positions, no scope tracking is needed. - pub fn on(substitutions: &UniToLevel, state: &mut CheckState, id: TypeId) -> TypeId { + /// Replaces unification variables with bound variables using a name-based + /// mapping. Since names are globally unique, no scope tracking is needed. 
+ pub fn on(substitutions: &UniToName, state: &mut CheckState, id: TypeId) -> TypeId { fold_type(state, id, &mut SubstituteUnification { substitutions }) } } @@ -88,10 +56,11 @@ impl SubstituteUnification<'_> { impl TypeFold for SubstituteUnification<'_> { fn transform(&mut self, state: &mut CheckState, id: TypeId, t: &Type) -> FoldAction { if let Type::Unification(unification_id) = t { - if let Some(&(level, kind)) = self.substitutions.get(unification_id) { + if let Some((name, kind)) = self.substitutions.get(unification_id) { + let (name, kind) = (name.clone(), *kind); let kind = SubstituteUnification::on(self.substitutions, state, kind); return FoldAction::Replace( - state.storage.intern(Type::Variable(Variable::Bound(level, kind))), + state.storage.intern(Type::Variable(Variable::Bound(name, kind))), ); } return FoldAction::Replace(id); @@ -100,20 +69,20 @@ impl TypeFold for SubstituteUnification<'_> { } } -pub type LevelToType = FxHashMap; +pub type NameToType = FxHashMap; pub struct SubstituteBindings<'a> { - bindings: &'a LevelToType, + bindings: &'a NameToType, } impl SubstituteBindings<'_> { - /// Substitutes bound and implicit variables using a level-based mapping. + /// Substitutes bound variables using a name-based mapping. /// - /// This is used to specialize class superclasses with instance arguments. + /// This is used to specialise class superclasses with instance arguments. /// For example, when deriving `Traversable (Compose f g)`, the superclass - /// `Functor t` becomes `Functor (Compose f g)` by binding `t`'s level to + /// `Functor t` becomes `Functor (Compose f g)` by binding `t`'s name to /// `Compose f g`. 
- pub fn on(state: &mut CheckState, bindings: &LevelToType, id: TypeId) -> TypeId { + pub fn on(state: &mut CheckState, bindings: &NameToType, id: TypeId) -> TypeId { fold_type(state, id, &mut SubstituteBindings { bindings }) } } @@ -121,8 +90,8 @@ impl SubstituteBindings<'_> { impl TypeFold for SubstituteBindings<'_> { fn transform(&mut self, _state: &mut CheckState, id: TypeId, t: &Type) -> FoldAction { match t { - Type::Variable(Variable::Bound(level, _)) => { - let id = self.bindings.get(level).copied().unwrap_or(id); + Type::Variable(Variable::Bound(name, _)) => { + let id = self.bindings.get(name).copied().unwrap_or(id); FoldAction::Replace(id) } _ => FoldAction::Continue, diff --git a/compiler-core/checking/src/algorithm/term.rs b/compiler-core/checking/src/algorithm/term.rs index 4a102c429..dbadb91bb 100644 --- a/compiler-core/checking/src/algorithm/term.rs +++ b/compiler-core/checking/src/algorithm/term.rs @@ -1,294 +1,134 @@ +use std::iter; + use building_types::QueryResult; -use indexing::TermItemId; -use itertools::Itertools; +use itertools::{Itertools, Position}; use smol_str::SmolStr; use crate::ExternalQueries; use crate::algorithm::state::{CheckContext, CheckState}; use crate::algorithm::{ - binder, inspect, kind, operator, substitute, toolkit, transfer, unification, + binder, equation, exhaustiveness, inspect, kind, operator, substitute, toolkit, transfer, + unification, }; use crate::core::{RowField, RowType, Type, TypeId}; use crate::error::{ErrorKind, ErrorStep}; -/// Infers the type of top-level value group equations. -/// -/// This function depends on the unification variable created for the current -/// binding group by [`CheckState::with_term_group`]. This function returns -/// the inferred type and residual constraints for later generalisation via -/// [`term_item::commit_value_group`]. 
-/// -/// [`term_item::commit_value_group`]: crate::algorithm::term_item::commit_value_group -pub fn infer_equations( - state: &mut CheckState, - context: &CheckContext, - item_id: TermItemId, - equations: &[lowering::Equation], -) -> QueryResult<(TypeId, Vec)> -where - Q: ExternalQueries, -{ - let group_type = state - .binding_group - .lookup_term(item_id) - .expect("invariant violated: invalid binding_group in type inference"); - - infer_equations_core(state, context, group_type, equations)?; - - let residual_constraints = state.solve_constraints(context)?; - Ok((group_type, residual_constraints)) -} - -/// Infers the type of value group equations. -/// -/// This function infers the type of each value equation, and then checks -/// that it's a subtype of the provided `group_type`. The `group_type` is -/// usually a unification variable. -/// -/// This function is used to implement inference for the following: -/// - [`lowering::TermItemIr::ValueGroup`] -/// - [`lowering::LetBindingNameGroup`] -/// - [`lowering::InstanceMemberGroup`] -pub fn infer_equations_core( +/// Checks the type of an expression. +#[tracing::instrument(skip_all, name = "check_expression")] +pub fn check_expression( state: &mut CheckState, context: &CheckContext, - group_type: TypeId, - equations: &[lowering::Equation], -) -> QueryResult<()> + expression: lowering::ExpressionId, + expected: TypeId, +) -> QueryResult where Q: ExternalQueries, { - let minimum_equation_arity = - equations.iter().map(|equation| equation.binders.len()).min().unwrap_or(0); - - for equation in equations { - let mut argument_types = vec![]; - for &binder_id in equation.binders.iter() { - let argument_type = binder::infer_binder(state, context, binder_id)?; - argument_types.push(argument_type); - } - - let result_type = state.fresh_unification_type(context); - - // Only use the minimum number of binders across equations. 
- let argument_types = &argument_types[..minimum_equation_arity]; - let equation_type = state.make_function(argument_types, result_type); - let _ = unification::subtype(state, context, equation_type, group_type)?; - - if let Some(guarded) = &equation.guarded { - let inferred_type = infer_guarded_expression(state, context, guarded)?; - let _ = unification::subtype(state, context, inferred_type, result_type)?; - } - } - - Ok(()) + state.with_error_step(ErrorStep::CheckingExpression(expression), |state| { + crate::trace_fields!(state, context, { expected = expected }); + check_expression_quiet(state, context, expression, expected) + }) } -/// Checks the type of value group equations. -/// -/// This function checks each value equation against the signature previously -/// checked by the [`check_term_signature`] and [`inspect_signature_core`] -/// functions. -/// -/// This function depends on a couple of side-effects produced by the -/// [`inspect_signature_core`] function. Type variables that appear in the -/// signature are made visible through rebinding, and given constraints -/// are pushed onto the environment. See the implementation for more details. -/// -/// This function solves all constraints during checking using the -/// [`CheckState::solve_constraints`] function, and reports residual -/// constraints as [`ErrorKind::NoInstanceFound`] errors. 
-/// -/// [`check_term_signature`]: crate::algorithm::term_item::check_term_signature -/// [`inspect_signature_core`]: crate::algorithm::inspect::inspect_signature_core -pub fn check_equations( +fn check_expression_quiet( state: &mut CheckState, context: &CheckContext, - signature_id: lowering::TypeId, - signature: inspect::InspectSignature, - equations: &[lowering::Equation], -) -> QueryResult<()> + expression: lowering::ExpressionId, + expected: TypeId, +) -> QueryResult where Q: ExternalQueries, { - let expected_arity = signature.arguments.len(); - - for equation in equations { - let equation_arity = equation.binders.len(); - - if equation_arity > expected_arity { - let expected = expected_arity as u32; - let actual = equation_arity as u32; - state.insert_error(ErrorKind::TooManyBinders { - signature: signature_id, - expected, - actual, - }); - } - - for (&binder_id, &argument_type) in equation.binders.iter().zip(&signature.arguments) { - let _ = binder::check_binder(state, context, binder_id, argument_type)?; - } - - if equation_arity > expected_arity { - let extra_binders = &equation.binders[expected_arity..]; - for &binder_id in extra_binders { - let _ = binder::infer_binder(state, context, binder_id)?; - } - } - - // Compute expected result type based on how many binders there - // are on each equation, wrapping remaining arguments if partial. - // - // foo :: forall a. a -> a -> Int - // foo = \a b -> a + b - // foo a = \b -> a + b - // foo a b = a + b - // - // signature.arguments := [a, a] - // signature.result := Int - // - // expected_type := - // 0 binders := forall a. a -> a -> Int - // 1 binder := a -> Int - // 2 binders := Int - // - // This matters for type synonyms that expand to functions. The - // return type synonym introduces hidden function arrows that - // increase the expected arity after expansion. - // - // type ReturnsInt a = a -> Int - // - // bar :: forall a. 
ReturnsInt a -> ReturnsInt a - // bar = \f -> f - // bar f = f - // bar f a = f a - // - // signature.arguments := [ReturnsInt a, a] - // signature.result := Int - // - // expected_type := - // 0 binders := forall a. ReturnsInt a -> ReturnsInt a - // 1 binder := ReturnsInt a - // 2 binders := Int - let expected_type = if equation_arity == 0 { - signature.function - } else if equation_arity >= expected_arity { - signature.result - } else { - let remaining_arguments = &signature.arguments[equation_arity..]; - remaining_arguments.iter().rfold(signature.result, |result, &argument| { - state.storage.intern(Type::Function(argument, result)) - }) - }; - - if let Some(guarded) = &equation.guarded { - let inferred_type = infer_guarded_expression(state, context, guarded)?; - let _ = unification::subtype(state, context, inferred_type, expected_type)?; - } - } - - let residual = state.solve_constraints(context)?; - for constraint in residual { - let constraint = transfer::globalize(state, context, constraint); - state.insert_error(ErrorKind::NoInstanceFound { constraint }); - } - - if let Some(variable) = signature.variables.first() { - state.type_scope.unbind(variable.level); + let expected = toolkit::normalise_expand_type(state, context, expected)?; + let expected = toolkit::skolemise_forall(state, expected); + let expected = toolkit::collect_givens(state, expected); + if let Some(section_result) = context.sectioned.expressions.get(&expression) { + check_sectioned_expression(state, context, expression, section_result, expected) + } else { + check_expression_core(state, context, expression, expected) } - - Ok(()) } -fn infer_guarded_expression( +fn check_sectioned_expression( state: &mut CheckState, context: &CheckContext, - guarded: &lowering::GuardedExpression, + expression: lowering::ExpressionId, + section_result: &sugar::SectionResult, + expected: TypeId, ) -> QueryResult where Q: ExternalQueries, { - match guarded { - lowering::GuardedExpression::Unconditional { 
where_expression } => { - let Some(w) = where_expression else { - return Ok(context.prim.unknown); - }; - infer_where_expression(state, context, w) - } - lowering::GuardedExpression::Conditionals { pattern_guarded } => { - let mut inferred_type = context.prim.unknown; - for pattern_guarded in pattern_guarded.iter() { - for pattern_guard in pattern_guarded.pattern_guards.iter() { - check_pattern_guard(state, context, pattern_guard)?; - } - if let Some(w) = &pattern_guarded.where_expression { - inferred_type = infer_where_expression(state, context, w)?; - } - } - Ok(inferred_type) + let mut current = expected; + let mut parameters = vec![]; + + for §ion_id in section_result.iter() { + let decomposed = + toolkit::decompose_function(state, context, current, toolkit::SynthesiseFunction::Yes)?; + if let Some((argument_type, result_type)) = decomposed { + state.term_scope.bind_section(section_id, argument_type); + parameters.push(argument_type); + current = result_type; + } else { + let parameter = state.fresh_unification_type(context); + state.term_scope.bind_section(section_id, parameter); + parameters.push(parameter); } } -} - -fn check_pattern_guard( - state: &mut CheckState, - context: &CheckContext, - guard: &lowering::PatternGuard, -) -> QueryResult<()> -where - Q: ExternalQueries, -{ - let Some(expression) = guard.expression else { - return Ok(()); - }; - let expression_type = infer_expression(state, context, expression)?; - - let Some(binder) = guard.binder else { - return Ok(()); - }; + let result_type = infer_expression_core(state, context, expression)?; + let result_type = toolkit::instantiate_constrained(state, result_type); - let _ = binder::check_binder(state, context, binder, expression_type)?; + let _ = unification::subtype(state, context, result_type, current)?; - Ok(()) + let function_type = state.make_function(¶meters, result_type); + Ok(function_type) } -fn infer_where_expression( +fn check_expression_core( state: &mut CheckState, context: 
&CheckContext, - where_expression: &lowering::WhereExpression, + expression: lowering::ExpressionId, + expected: TypeId, ) -> QueryResult where Q: ExternalQueries, { - check_let_chunks(state, context, &where_expression.bindings)?; - - let Some(expression) = where_expression.expression else { + let Some(kind) = context.lowered.info.get_expression_kind(expression) else { return Ok(context.prim.unknown); }; - infer_expression(state, context, expression) -} + match kind { + lowering::ExpressionKind::Lambda { binders, expression } => { + check_lambda(state, context, binders, *expression, expected) + } + lowering::ExpressionKind::IfThenElse { if_, then, else_ } => { + check_if_then_else(state, context, *if_, *then, *else_, expected) + } + lowering::ExpressionKind::CaseOf { trunk, branches } => { + check_case_of(state, context, trunk, branches, expected) + } + lowering::ExpressionKind::LetIn { bindings, expression } => { + check_let_in(state, context, bindings, *expression, expected) + } + lowering::ExpressionKind::Parenthesized { parenthesized } => { + let Some(parenthesized) = parenthesized else { return Ok(context.prim.unknown) }; + check_expression(state, context, *parenthesized, expected) + } + lowering::ExpressionKind::Array { array } => check_array(state, context, array, expected), + lowering::ExpressionKind::Record { record } => { + check_record(state, context, record, expected) + } + _ => { + let inferred = infer_expression_quiet(state, context, expression)?; + let inferred = toolkit::instantiate_constrained(state, inferred); -/// Checks the type of an expression. 
-#[tracing::instrument(skip_all, name = "check_expression")] -pub fn check_expression( - state: &mut CheckState, - context: &CheckContext, - expr_id: lowering::ExpressionId, - expected: TypeId, -) -> QueryResult -where - Q: ExternalQueries, -{ - crate::trace_fields!(state, context, { expected = expected }); - state.with_error_step(ErrorStep::CheckingExpression(expr_id), |state| { - let inferred = infer_expression_quiet(state, context, expr_id)?; - let _ = unification::subtype(state, context, inferred, expected)?; - crate::trace_fields!(state, context, { inferred = inferred, expected = expected }); - Ok(inferred) - }) + let _ = unification::subtype(state, context, inferred, expected)?; + + crate::trace_fields!(state, context, { inferred = inferred, expected = expected }); + Ok(inferred) + } + } } /// Infers the type of an expression. @@ -296,13 +136,13 @@ where pub fn infer_expression( state: &mut CheckState, context: &CheckContext, - expr_id: lowering::ExpressionId, + expression: lowering::ExpressionId, ) -> QueryResult where Q: ExternalQueries, { - state.with_error_step(ErrorStep::InferringExpression(expr_id), |state| { - let inferred = infer_expression_quiet(state, context, expr_id)?; + state.with_error_step(ErrorStep::InferringExpression(expression), |state| { + let inferred = infer_expression_quiet(state, context, expression)?; crate::trace_fields!(state, context, { inferred = inferred }); Ok(inferred) }) @@ -311,22 +151,22 @@ where fn infer_expression_quiet( state: &mut CheckState, context: &CheckContext, - expr_id: lowering::ExpressionId, + expression: lowering::ExpressionId, ) -> QueryResult where Q: ExternalQueries, { - if let Some(section_result) = context.sectioned.expressions.get(&expr_id) { - infer_sectioned_expression(state, context, expr_id, section_result) + if let Some(section_result) = context.sectioned.expressions.get(&expression) { + infer_sectioned_expression(state, context, expression, section_result) } else { - infer_expression_core(state, 
context, expr_id) + infer_expression_core(state, context, expression) } } fn infer_sectioned_expression( state: &mut CheckState, context: &CheckContext, - expr_id: lowering::ExpressionId, + expression: lowering::ExpressionId, section_result: &sugar::SectionResult, ) -> QueryResult where @@ -339,7 +179,9 @@ where }); let parameter_types = parameter_types.collect_vec(); - let result_type = infer_expression_core(state, context, expr_id)?; + + let result_type = infer_expression_core(state, context, expression)?; + let result_type = toolkit::instantiate_constrained(state, result_type); Ok(state.make_function(¶meter_types, result_type)) } @@ -347,14 +189,14 @@ where fn infer_expression_core( state: &mut CheckState, context: &CheckContext, - expr_id: lowering::ExpressionId, + expression: lowering::ExpressionId, ) -> QueryResult where Q: ExternalQueries, { let unknown = context.prim.unknown; - let Some(kind) = context.lowered.info.get_expression_kind(expr_id) else { + let Some(kind) = context.lowered.info.get_expression_kind(expression) else { return Ok(unknown); }; @@ -370,7 +212,7 @@ where } lowering::ExpressionKind::OperatorChain { .. 
} => { - let (_, inferred_type) = operator::infer_operator_chain(state, context, expr_id)?; + let (_, inferred_type) = operator::infer_operator_chain(state, context, expression)?; Ok(inferred_type) } @@ -420,15 +262,10 @@ where } lowering::ExpressionKind::Do { bind, discard, statements } => { - let Some(bind) = bind else { return Ok(unknown) }; - let Some(discard) = discard else { return Ok(unknown) }; infer_do(state, context, *bind, *discard, statements) } lowering::ExpressionKind::Ado { map, apply, pure, statements, expression } => { - let Some(map) = map else { return Ok(unknown) }; - let Some(apply) = apply else { return Ok(unknown) }; - let Some(pure) = pure else { return Ok(unknown) }; infer_ado(state, context, *map, *apply, *pure, statements, *expression) } @@ -448,7 +285,7 @@ where } lowering::ExpressionKind::Section => { - if let Some(type_id) = state.term_scope.lookup_section(expr_id) { + if let Some(type_id) = state.term_scope.lookup_section(expression) { Ok(type_id) } else { Ok(unknown) @@ -568,12 +405,290 @@ where } let result_type = if let Some(body) = expression { - infer_expression(state, context, body)? 
+ let body_type = infer_expression(state, context, body)?; + toolkit::instantiate_constrained(state, body_type) } else { state.fresh_unification_type(context) }; - Ok(state.make_function(&argument_types, result_type)) + let function_type = state.make_function(&argument_types, result_type); + + let exhaustiveness = + exhaustiveness::check_lambda_patterns(state, context, &argument_types, binders)?; + + let has_missing = exhaustiveness.missing.is_some(); + state.report_exhaustiveness(exhaustiveness); + + if has_missing { + let constrained_type = Type::Constrained(context.prim.partial, function_type); + Ok(state.storage.intern(constrained_type)) + } else { + Ok(function_type) + } +} + +fn check_lambda( + state: &mut CheckState, + context: &CheckContext, + binders: &[lowering::BinderId], + expression: Option, + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let mut arguments = vec![]; + let mut remaining = expected; + + for &binder_id in binders.iter() { + let decomposed = toolkit::decompose_function( + state, + context, + remaining, + toolkit::SynthesiseFunction::Yes, + )?; + if let Some((argument, result)) = decomposed { + let _ = binder::check_binder(state, context, binder_id, argument)?; + arguments.push(argument); + remaining = result; + } else { + let argument_type = state.fresh_unification_type(context); + let _ = binder::check_binder(state, context, binder_id, argument_type)?; + arguments.push(argument_type); + } + } + + let result_type = if let Some(body) = expression { + check_expression(state, context, body, remaining)? 
+ } else { + state.fresh_unification_type(context) + }; + + let function_type = state.make_function(&arguments, result_type); + + let exhaustiveness = + exhaustiveness::check_lambda_patterns(state, context, &arguments, binders)?; + + let has_missing = exhaustiveness.missing.is_some(); + state.report_exhaustiveness(exhaustiveness); + + if has_missing { + state.push_wanted(context.prim.partial); + } + + Ok(function_type) +} + +fn check_if_then_else( + state: &mut CheckState, + context: &CheckContext, + if_: Option, + then: Option, + else_: Option, + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + if let Some(if_) = if_ { + check_expression(state, context, if_, context.prim.boolean)?; + } + + if let Some(then) = then { + check_expression(state, context, then, expected)?; + } + + if let Some(else_) = else_ { + check_expression(state, context, else_, expected)?; + } + + Ok(expected) +} + +fn instantiate_trunk_types( + state: &mut CheckState, + context: &CheckContext, + trunk_types: &mut [TypeId], + branches: &[lowering::CaseBranch], +) where + Q: ExternalQueries, +{ + for (position, trunk_type) in trunk_types.iter_mut().enumerate() { + let should_instantiate = branches.iter().any(|branch| { + let binder = branch.binders.get(position); + binder.is_some_and(|&binder_id| binder::requires_instantiation(context, binder_id)) + }); + if should_instantiate { + *trunk_type = toolkit::instantiate_forall(state, *trunk_type); + } + } +} + +fn check_case_of( + state: &mut CheckState, + context: &CheckContext, + trunk: &[lowering::ExpressionId], + branches: &[lowering::CaseBranch], + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let mut trunk_types = vec![]; + for trunk in trunk.iter() { + let trunk_type = infer_expression(state, context, *trunk)?; + trunk_types.push(trunk_type); + } + + instantiate_trunk_types(state, context, &mut trunk_types, branches); + + for branch in branches.iter() { + for (binder, trunk) in 
branch.binders.iter().zip(&trunk_types) { + let _ = binder::check_binder(state, context, *binder, *trunk)?; + } + if let Some(guarded) = &branch.guarded_expression { + check_guarded_expression(state, context, guarded, expected)?; + } + } + + let exhaustiveness = + exhaustiveness::check_case_patterns(state, context, &trunk_types, branches)?; + + let has_missing = exhaustiveness.missing.is_some(); + state.report_exhaustiveness(exhaustiveness); + + if has_missing { + state.push_wanted(context.prim.partial); + } + + Ok(expected) +} + +fn check_let_in( + state: &mut CheckState, + context: &CheckContext, + bindings: &[lowering::LetBindingChunk], + expression: Option, + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + check_let_chunks(state, context, bindings)?; + + let Some(expression) = expression else { + return Ok(context.prim.unknown); + }; + + check_expression(state, context, expression, expected) +} + +fn check_array( + state: &mut CheckState, + context: &CheckContext, + array: &[lowering::ExpressionId], + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let normalised = state.normalize_type(expected); + if let Type::Application(constructor, element_type) = state.storage[normalised] { + let constructor = state.normalize_type(constructor); + if constructor == context.prim.array { + for expression in array.iter() { + check_expression(state, context, *expression, element_type)?; + } + return Ok(expected); + } + } + + // Fallback: infer then subtype. 
+ let inferred = infer_array(state, context, array)?; + let _ = unification::subtype(state, context, inferred, expected)?; + Ok(inferred) +} + +fn check_record( + state: &mut CheckState, + context: &CheckContext, + record: &[lowering::ExpressionRecordItem], + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + let normalised = state.normalize_type(expected); + if let Type::Application(constructor, row_type) = state.storage[normalised] { + let constructor = state.normalize_type(constructor); + if constructor == context.prim.record { + let row_type = state.normalize_type(row_type); + if let Type::Row(ref row) = state.storage[row_type] { + let expected_fields = row.fields.clone(); + + let mut fields = vec![]; + + for field in record.iter() { + match field { + lowering::ExpressionRecordItem::RecordField { name, value } => { + let Some(name) = name else { continue }; + let Some(value) = value else { continue }; + + let label = SmolStr::clone(name); + + // Look up whether this field has an expected type. + let expected_field_type = + expected_fields.iter().find(|f| f.label == label).map(|f| f.id); + + let id = if let Some(expected_type) = expected_field_type { + check_expression(state, context, *value, expected_type)? 
+ } else { + let id = infer_expression(state, context, *value)?; + let id = toolkit::instantiate_forall(state, id); + toolkit::collect_wanteds(state, id) + }; + + fields.push(RowField { label, id }); + } + lowering::ExpressionRecordItem::RecordPun { name, resolution } => { + let Some(name) = name else { continue }; + let Some(resolution) = resolution else { continue }; + + let label = SmolStr::clone(name); + + let expected_field_type = + expected_fields.iter().find(|f| f.label == label).map(|f| f.id); + + let id = lookup_term_variable(state, context, *resolution)?; + + let id = if let Some(expected_type) = expected_field_type { + let _ = unification::subtype(state, context, id, expected_type)?; + id + } else { + let id = toolkit::instantiate_forall(state, id); + toolkit::collect_wanteds(state, id) + }; + + fields.push(RowField { label, id }); + } + } + } + + let row_type = RowType::from_unsorted(fields, None); + let row_type = state.storage.intern(Type::Row(row_type)); + let record_type = + state.storage.intern(Type::Application(context.prim.record, row_type)); + + let _ = unification::subtype(state, context, record_type, expected)?; + return Ok(record_type); + } + } + } + + // Fallback: infer then subtype. 
+ let inferred = infer_record(state, context, record)?; + let _ = unification::subtype(state, context, inferred, expected)?; + Ok(inferred) } fn infer_case_of( @@ -585,7 +700,7 @@ fn infer_case_of( where Q: ExternalQueries, { - let inferred_type = state.fresh_unification_type(context); + let result_type = state.fresh_unification_type(context); let mut trunk_types = vec![]; for trunk in trunk.iter() { @@ -593,37 +708,151 @@ where trunk_types.push(trunk_type); } + instantiate_trunk_types(state, context, &mut trunk_types, branches); + for branch in branches.iter() { for (binder, trunk) in branch.binders.iter().zip(&trunk_types) { let _ = binder::check_binder(state, context, *binder, *trunk)?; } if let Some(guarded) = &branch.guarded_expression { let guarded_type = infer_guarded_expression(state, context, guarded)?; - let _ = unification::subtype(state, context, inferred_type, guarded_type)?; + let _ = unification::subtype(state, context, guarded_type, result_type)?; } } - Ok(inferred_type) + let exhaustiveness = + exhaustiveness::check_case_patterns(state, context, &trunk_types, branches)?; + + let has_missing = exhaustiveness.missing.is_some(); + state.report_exhaustiveness(exhaustiveness); + + if has_missing { + let constrained_type = Type::Constrained(context.prim.partial, result_type); + Ok(state.storage.intern(constrained_type)) + } else { + Ok(result_type) + } +} + +/// Lookup `bind` from resolution, or synthesize `?m ?a -> (?a -> ?m ?b) -> ?m ?b`. 
+fn lookup_or_synthesise_bind( + state: &mut CheckState, + context: &CheckContext, + resolution: Option, +) -> QueryResult +where + Q: ExternalQueries, +{ + if let Some(resolution) = resolution { + lookup_term_variable(state, context, resolution) + } else { + let m = state.fresh_unification_kinded(context.prim.type_to_type); + let a = state.fresh_unification_type(context); + let b = state.fresh_unification_type(context); + let m_a = state.storage.intern(Type::Application(m, a)); + let m_b = state.storage.intern(Type::Application(m, b)); + let a_to_m_b = state.storage.intern(Type::Function(a, m_b)); + Ok(state.make_function(&[m_a, a_to_m_b], m_b)) + } +} + +/// Lookup `discard` from resolution, or synthesize `?m ?a -> (?a -> ?m ?b) -> ?m ?b`. +fn lookup_or_synthesise_discard( + state: &mut CheckState, + context: &CheckContext, + resolution: Option, +) -> QueryResult +where + Q: ExternalQueries, +{ + // Same shape as bind + lookup_or_synthesise_bind(state, context, resolution) +} + +/// Lookup `map` from resolution, or synthesize `(?a -> ?b) -> ?f ?a -> ?f ?b`. +fn lookup_or_synthesise_map( + state: &mut CheckState, + context: &CheckContext, + resolution: Option, +) -> QueryResult +where + Q: ExternalQueries, +{ + if let Some(resolution) = resolution { + lookup_term_variable(state, context, resolution) + } else { + let f = state.fresh_unification_kinded(context.prim.type_to_type); + let a = state.fresh_unification_type(context); + let b = state.fresh_unification_type(context); + let f_a = state.storage.intern(Type::Application(f, a)); + let f_b = state.storage.intern(Type::Application(f, b)); + let a_to_b = state.storage.intern(Type::Function(a, b)); + Ok(state.make_function(&[a_to_b, f_a], f_b)) + } +} + +/// Lookup `apply` from resolution, or synthesize `?f (?a -> ?b) -> ?f ?a -> ?f ?b`. 
+fn lookup_or_synthesise_apply( + state: &mut CheckState, + context: &CheckContext, + resolution: Option, +) -> QueryResult +where + Q: ExternalQueries, +{ + if let Some(resolution) = resolution { + lookup_term_variable(state, context, resolution) + } else { + let f = state.fresh_unification_kinded(context.prim.type_to_type); + let a = state.fresh_unification_type(context); + let b = state.fresh_unification_type(context); + let a_to_b = state.storage.intern(Type::Function(a, b)); + let f_a_to_b = state.storage.intern(Type::Application(f, a_to_b)); + let f_a = state.storage.intern(Type::Application(f, a)); + let f_b = state.storage.intern(Type::Application(f, b)); + Ok(state.make_function(&[f_a_to_b, f_a], f_b)) + } +} + +/// Lookup `pure` from resolution, or synthesize `?a -> ?f ?a`. +fn lookup_or_synthesise_pure( + state: &mut CheckState, + context: &CheckContext, + resolution: Option, +) -> QueryResult +where + Q: ExternalQueries, +{ + if let Some(resolution) = resolution { + lookup_term_variable(state, context, resolution) + } else { + let f = state.fresh_unification_kinded(context.prim.type_to_type); + let a = state.fresh_unification_type(context); + let f_a = state.storage.intern(Type::Application(f, a)); + Ok(state.storage.intern(Type::Function(a, f_a))) + } } +#[tracing::instrument(skip_all, name = "infer_do")] fn infer_do( state: &mut CheckState, context: &CheckContext, - bind: lowering::TermVariableResolution, - discard: lowering::TermVariableResolution, - statements: &[lowering::DoStatement], + bind: Option, + discard: Option, + statement_ids: &[lowering::DoStatementId], ) -> QueryResult where Q: ExternalQueries, { - let bind_type = lookup_term_variable(state, context, bind)?; - let discard_type = lookup_term_variable(state, context, discard)?; - - // First, perform a forward pass where variable bindings are - // bound to unification variables and let bindings are checked. - // This is like inferring the lambda in a desugared representation. 
- let mut do_statements = vec![]; - for statement in statements.iter() { + // First, perform a forward pass where variable bindings are bound + // to unification variables. Let bindings are not checked here to + // avoid premature solving of unification variables. Instead, they + // are checked inline during the statement checking loop. + let mut steps = vec![]; + for &statement_id in statement_ids.iter() { + let Some(statement) = context.lowered.info.get_do_statement(statement_id) else { + continue; + }; match statement { lowering::DoStatement::Bind { binder, expression } => { let binder_type = if let Some(binder) = binder { @@ -631,94 +860,268 @@ where } else { state.fresh_unification_type(context) }; - do_statements.push((Some(binder_type), *expression)); + steps.push(DoStep::Bind { + statement: statement_id, + binder_type, + expression: *expression, + }); } lowering::DoStatement::Let { statements } => { - check_let_chunks(state, context, statements)?; + steps.push(DoStep::Let { statement: statement_id, statements }); } lowering::DoStatement::Discard { expression } => { - do_statements.push((None, *expression)); + steps.push(DoStep::Discard { statement: statement_id, expression: *expression }); } } } - let [bind_statements @ .., (_, pure_expression)] = &do_statements[..] else { - state.insert_error(ErrorKind::EmptyDoBlock); - return Ok(context.prim.unknown); + let action_count = steps + .iter() + .filter(|step| matches!(step, DoStep::Bind { .. } | DoStep::Discard { .. })) + .count(); + + let (has_bind_step, has_discard_step) = { + let mut has_bind = false; + let mut has_discard = false; + for (position, statement) in steps.iter().with_position() { + let is_final = matches!(position, Position::Last | Position::Only); + match statement { + DoStep::Bind { .. } => has_bind = true, + DoStep::Discard { .. 
} if !is_final => has_discard = true, + _ => (), + } + } + (has_bind, has_discard) + }; + + let bind_type = if has_bind_step { + lookup_or_synthesise_bind(state, context, bind)? + } else { + context.prim.unknown }; - let Some(pure_expression) = pure_expression else { + let discard_type = if has_discard_step { + lookup_or_synthesise_discard(state, context, discard)? + } else { + context.prim.unknown + }; + + let pure_expression = match steps.iter().last() { + Some(statement) => match statement { + // Technically valid, syntactically disallowed. This allows + // partially-written do expressions to infer, with a friendly + // warning to nudge the user that `bind` is prohibited. + DoStep::Bind { statement, expression, .. } => { + state.with_error_step(ErrorStep::InferringDoBind(*statement), |state| { + state.insert_error(ErrorKind::InvalidFinalBind); + }); + expression + } + DoStep::Discard { expression, .. } => expression, + DoStep::Let { statement, .. } => { + state.with_error_step(ErrorStep::CheckingDoLet(*statement), |state| { + state.insert_error(ErrorKind::InvalidFinalLet); + }); + return Ok(context.prim.unknown); + } + }, + None => { + state.insert_error(ErrorKind::EmptyDoBlock); + return Ok(context.prim.unknown); + } + }; + + // If either don't actually have expressions, it's empty! + let Some(pure_expression) = *pure_expression else { + state.insert_error(ErrorKind::EmptyDoBlock); return Ok(context.prim.unknown); }; - // With the binders and let-bound names in scope, infer - // the type of the last expression as our starting point. + // Create unification variables that each statement in the do expression + // will unify against. The next section will get into more detail how + // these are used. These unification variables are used to enable GHC-like + // left-to-right checking of do expressions while maintaining the same + // semantics as rebindable `do` in PureScript. 
+ let continuation_types = iter::repeat_with(|| state.fresh_unification_type(context)) + .take(action_count) + .collect_vec(); + + // Let's trace over the following example: // - // main = do - // pure 42 - // y <- pure "Hello!" - // pure $ Message y + // main = do + // a <- effect + // b <- aff + // pure { a, b } // - // accumulated_type := Effect Message - let mut accumulated_type = infer_expression(state, context, *pure_expression)?; - - // Then, infer do statements in reverse order to emulate - // inside-out type inference for desugared do statements. - for (binder, expression) in bind_statements.iter().rev() { - accumulated_type = if let Some(binder) = binder { - // This applies bind_type to expression_type to get - // bind_applied, which is then applied to lambda_type - // to get the accumulated_type and to solve ?y. - // - // bind_type := m a -> (a -> m b) -> m b - // expression_type := Effect String - // - // bind_applied := (String -> Effect b) -> Effect b - // lambda_type := ?y -> Effect Message - // - // accumulated_type := Effect Message - infer_do_bind(state, context, bind_type, accumulated_type, *expression, *binder)? - } else { - // This applies discard_type to expression_type to - // get discard_applied, which is then deconstructed - // to subsume against the `Effect b`. - // - // discard_type := m a -> (a -> m b) -> m b - // expression_type := Effect Int - // - // discard_applied := (Int -> Effect b) -> Effect b - // accumulated_type := Effect Message - // - // accumulated_type <: Effect b - infer_do_discard(state, context, discard_type, accumulated_type, *expression)? + // For the first statement, we know the following information. We + // instantiate the `bind` function to prepare it for application. + // The first argument is easy, it's just the expression_type; the + // second argument involves synthesising a function type using the + // `binder_type` and the `next` continuation. 
The application of + // these arguments creates important unifications, listed below. + // Additionally, we also create a unification to unify the `now` + // type with the result of the `bind` application. + // + // expression_type := Effect Int + // binder_type := ?a + // now := ?0 + // next := ?1 + // lambda_type := ?a -> ?1 + // + // bind_type := m a -> (a -> m b) -> m b + // | + // := apply(expression_type) + // := (Int -> Effect ?r1) -> Effect ?r1 + // | + // := apply(lambda_type) + // := Effect ?r1 + // | + // >> ?a := Int + // >> ?1 := Effect ?r1 + // >> ?0 := Effect ?r1 + // + // For the second statement, we know the following information. + // The `now` type was already solved by the previous statement, + // and an error should surface once we check the inferred type + // of the statement against it. + // + // expression_type := Aff Int + // binder_type := ?b + // now := ?1 := Effect ?r1 + // next := ?2 + // lambda_type := ?b -> ?2 + // + // bind_type := m a -> (a -> m b) -> m b + // | + // := apply(expression_type) + // := (Int -> Aff ?r2) -> Aff ?r2 + // | + // := apply(lambda_type) + // := Aff ?r2 + // | + // >> ?b := Int + // >> ?2 := Aff ?r2 + // | + // >> ?1 ~ Aff ?r2 + // >> Effect ?r1 ~ Aff ?r2 + // | + // >> Oh no! + // + // This unification error is expected, but this left-to-right checking + // approach significantly improves the reported error positions compared + // to the previous approach that emulated desugared checking. 
+ + let mut continuations = continuation_types.iter().tuple_windows::<(_, _)>(); + + for step in &steps { + match step { + DoStep::Let { statement, statements } => { + let error_step = ErrorStep::CheckingDoLet(*statement); + state.with_error_step(error_step, |state| { + check_let_chunks(state, context, statements) + })?; + } + DoStep::Bind { statement, binder_type, expression } => { + let Some((&now_type, &next_type)) = continuations.next() else { + continue; + }; + let Some(expression) = *expression else { + continue; + }; + let arguments = InferDoBind { + statement: *statement, + bind_type, + now_type, + next_type, + expression, + binder_type: *binder_type, + }; + infer_do_bind(state, context, arguments)?; + } + DoStep::Discard { statement, expression } => { + let Some((&now_type, &next_type)) = continuations.next() else { + continue; + }; + let Some(expression) = *expression else { + continue; + }; + let arguments = InferDoDiscard { + statement: *statement, + discard_type, + now_type, + next_type, + expression, + }; + infer_do_discard(state, context, arguments)?; + } } } - Ok(accumulated_type) + // The `first_continuation` is the overall type of the do expression, + // built iteratively and through solving unification variables. On + // the other hand, the `final_continuation` is the expected type for + // the final statement in the do expression. If there is only a single + // statement in the do expression, then these two bindings are equivalent. 
+ let first_continuation = + *continuation_types.first().expect("invariant violated: empty continuation_types"); + let final_continuation = + *continuation_types.last().expect("invariant violated: empty continuation_types"); + + check_expression(state, context, pure_expression, final_continuation)?; + + Ok(first_continuation) +} + +enum DoStep<'a> { + Bind { + statement: lowering::DoStatementId, + binder_type: TypeId, + expression: Option, + }, + Discard { + statement: lowering::DoStatementId, + expression: Option, + }, + Let { + statement: lowering::DoStatementId, + statements: &'a [lowering::LetBindingChunk], + }, } +enum AdoStep<'a> { + Action { + statement: lowering::DoStatementId, + binder_type: TypeId, + expression: lowering::ExpressionId, + }, + Let { + statement: lowering::DoStatementId, + statements: &'a [lowering::LetBindingChunk], + }, +} + +#[tracing::instrument(skip_all, name = "infer_ado")] fn infer_ado( state: &mut CheckState, context: &CheckContext, - map: lowering::TermVariableResolution, - apply: lowering::TermVariableResolution, - pure: lowering::TermVariableResolution, - statements: &[lowering::DoStatement], + map: Option, + apply: Option, + pure: Option, + statement_ids: &[lowering::DoStatementId], expression: Option, ) -> QueryResult where Q: ExternalQueries, { - let map_type = lookup_term_variable(state, context, map)?; - let apply_type = lookup_term_variable(state, context, apply)?; - let pure_type = lookup_term_variable(state, context, pure)?; - - // First, perform a forward pass where variable bindings are - // bound to unification variables and let bindings are checked. - // This is like inferring the lambda in a desugared representation. - let mut binder_types = vec![]; - let mut ado_statements = vec![]; - for statement in statements.iter() { + // First, perform a forward pass where variable bindings are bound + // to unification variables. Let bindings are not checked here to + // avoid premature solving of unification variables. 
Instead, they + // are checked inline during the statement checking loop. + let mut steps = vec![]; + for &statement_id in statement_ids.iter() { + let Some(statement) = context.lowered.info.get_do_statement(statement_id) else { + continue; + }; match statement { lowering::DoStatement::Bind { binder, expression } => { let binder_type = if let Some(binder) = binder { @@ -726,82 +1129,162 @@ where } else { state.fresh_unification_type(context) }; - binder_types.push(binder_type); - ado_statements.push(*expression); + let Some(expression) = *expression else { continue }; + steps.push(AdoStep::Action { statement: statement_id, binder_type, expression }); } lowering::DoStatement::Let { statements } => { - check_let_chunks(state, context, statements)?; + steps.push(AdoStep::Let { statement: statement_id, statements }); } lowering::DoStatement::Discard { expression } => { let binder_type = state.fresh_unification_type(context); - binder_types.push(binder_type); - ado_statements.push(*expression); + let Some(expression) = *expression else { continue }; + steps.push(AdoStep::Action { statement: statement_id, binder_type, expression }); } } } - assert_eq!(binder_types.len(), ado_statements.len()); + let binder_types = steps.iter().filter_map(|step| match step { + AdoStep::Action { binder_type, .. } => Some(*binder_type), + AdoStep::Let { .. } => None, + }); + + let binder_types = binder_types.collect_vec(); - let [head_statement, tail_statements @ ..] = &ado_statements[..] else { + // For ado blocks with no bindings, we check let statements and then + // apply pure to the expression. 
+ // + // pure_type := a -> f a + // expression := t + if binder_types.is_empty() { + for step in &steps { + if let AdoStep::Let { statement, statements } = step { + state.with_error_step(ErrorStep::CheckingAdoLet(*statement), |state| { + check_let_chunks(state, context, statements) + })?; + } + } return if let Some(expression) = expression { + let pure_type = lookup_or_synthesise_pure(state, context, pure)?; check_function_term_application(state, context, pure_type, expression) } else { state.insert_error(ErrorKind::EmptyAdoBlock); Ok(context.prim.unknown) }; - }; + } - // With the binders and let-bound names in scope, infer - // the type of the final expression as our starting point. - // - // main = ado - // pure 1 - // y <- pure "Hello!" - // in Message y + // Create a fresh unification variable for the in_expression. + // Inferring expression directly may solve the unification variables + // introduced in the first pass. This is undesirable, because the + // errors would be attributed incorrectly to the ado statements + // rather than the in-expression itself. // - // expression_type := Message - let expression_type = if let Some(expression) = expression { - infer_expression(state, context, expression)? - } else { - state.fresh_unification_type(context) - }; - - // Create a function type using the binder types collected - // from the forward pass. We made sure to allocate unification - // variables for the discard statements too. + // ado + // a <- pure "Hello!" + // _ <- pure 42 + // in Message a // - // lambda_type := ?discard -> ?y -> Message - let lambda_type = state.make_function(&binder_types, expression_type); - - // This applies map_type to the lambda_type that we just built - // and then to the inferred type of the first expression. 
+ // in_expression :: Effect Message + // in_expression_type := ?in_expression + // lambda_type := ?a -> ?b -> ?in_expression + let in_expression_type = state.fresh_unification_type(context); + let lambda_type = state.make_function(&binder_types, in_expression_type); + + // The desugared form of an ado-expression is a forward applicative + // pipeline, unlike do-notation which works inside-out. The example + // above desugars to the following expression: // - // map_type := (a -> b) -> f a -> f b - // lambda_type := ?discard -> ?y -> Message + // (\a _ -> Message a) <$> (pure "Hello!") <*> (pure 42) // - // map_applied := f ?discard -> f (?y -> Message) - // expression_type := f Int + // To emulate this, we process steps in source order. Let bindings + // are checked inline between map/apply operations. The first action + // uses infer_ado_map, and subsequent actions use infer_ado_apply. // - // accumulated_type := f (?y -> Message) - let mut accumulated_type = - infer_ado_map(state, context, map_type, lambda_type, *head_statement)?; - - // This applies apply_type to the accumulated_type, and then to the - // inferred type of the expression to update the accumulated_type. + // map_type :: (a -> b) -> f a -> f b + // lambda_type := ?a -> ?b -> ?in_expression // - // apply_type := f (a -> b) -> f a -> f b - // accumulated_type := f (?y -> Message) + // expression_type := Effect String + // map(lambda, expression) := Effect (?b -> ?in_expression) + // >> + // >> ?a := String // - // accumulated_type := f ?y -> f Message - // expression_type := f String + // continuation_type := Effect (?b -> ?in_expression) + + // Lazily compute map_type and apply_type only when needed. + // - 1 action: only map is needed + // - 2+ actions: map and apply are needed + let action_count = binder_types.len(); + + let map_type = lookup_or_synthesise_map(state, context, map)?; + + let apply_type = if action_count > 1 { + lookup_or_synthesise_apply(state, context, apply)? 
+ } else { + context.prim.unknown + }; + + let mut continuation_type = None; + + for step in &steps { + match step { + AdoStep::Let { statement, statements } => { + let error_step = ErrorStep::CheckingAdoLet(*statement); + state.with_error_step(error_step, |state| { + check_let_chunks(state, context, statements) + })?; + } + AdoStep::Action { statement, expression, .. } => { + let statement_type = if let Some(continuation_type) = continuation_type { + // Then, the infer_ado_apply rule applies `apply` to the inferred + // expression type and the continuation type that is a function + // contained within some container, like Effect. + // + // apply_type := f (x -> y) -> f x -> f y + // continuation_type := Effect (?b -> ?in_expression) + // + // expression_type := Effect Int + // apply(continuation, expression) := Effect ?in_expression + // >> + // >> ?b := Int + // + // continuation_type := Effect ?in_expression + let arguments = InferAdoApply { + statement: *statement, + apply_type, + continuation_type, + expression: *expression, + }; + infer_ado_apply(state, context, arguments)? + } else { + let arguments = InferAdoMap { + statement: *statement, + map_type, + lambda_type, + expression: *expression, + }; + infer_ado_map(state, context, arguments)? + }; + continuation_type = Some(statement_type); + } + } + } + + // Finally, check the in-expression against in_expression. + // At this point the binder unification variables have been solved + // to concrete types, so errors are attributed to the in-expression. 
// - // accumulated_type := f Message - for expression in tail_statements { - accumulated_type = - infer_ado_apply(state, context, apply_type, accumulated_type, *expression)?; + // in_expression :: Effect Message + // in_expression_type := Effect ?in_expression + // >> + // >> ?in_expression := Message + if let Some(expression) = expression { + check_expression(state, context, expression, in_expression_type)?; } - Ok(accumulated_type) + let Some(continuation_type) = continuation_type else { + unreachable!("invariant violated: impossible empty steps"); + }; + + Ok(continuation_type) } fn infer_array( @@ -845,7 +1328,7 @@ where // Instantiate to avoid polymorphic types in record fields. let id = toolkit::instantiate_forall(state, id); - let id = toolkit::collect_constraints(state, id); + let id = toolkit::collect_wanteds(state, id); fields.push(RowField { label, id }); } @@ -858,7 +1341,7 @@ where // Instantiate to avoid polymorphic types in record fields. let id = toolkit::instantiate_forall(state, id); - let id = toolkit::collect_constraints(state, id); + let id = toolkit::collect_wanteds(state, id); fields.push(RowField { label, id }); } @@ -889,10 +1372,7 @@ where let field_type = state.fresh_unification_type(context); - let row_type_kind = - state.storage.intern(Type::Application(context.prim.row, context.prim.t)); - - let tail_type = state.fresh_unification_kinded(row_type_kind); + let tail_type = state.fresh_unification_kinded(context.prim.row_type); let row_type = RowType::from_unsorted(vec![RowField { label, id: field_type }], Some(tail_type)); @@ -978,8 +1458,7 @@ where } } - let row_type_kind = state.storage.intern(Type::Application(context.prim.row, context.prim.t)); - let tail = state.fresh_unification_kinded(row_type_kind); + let tail = state.fresh_unification_kinded(context.prim.row_type); Ok((input_fields, output_fields, tail)) } @@ -1008,20 +1487,38 @@ where } } +struct InferAdoMap { + statement: lowering::DoStatementId, + map_type: TypeId, + 
lambda_type: TypeId, + expression: lowering::ExpressionId, +} + +#[tracing::instrument(skip_all, name = "infer_ado_map")] fn infer_ado_map( + state: &mut CheckState, + context: &CheckContext, + arguments: InferAdoMap, +) -> QueryResult +where + Q: ExternalQueries, +{ + let InferAdoMap { statement, map_type, lambda_type, expression } = arguments; + state.with_error_step(ErrorStep::InferringAdoMap(statement), |state| { + infer_ado_map_core(state, context, map_type, lambda_type, expression) + }) +} + +fn infer_ado_map_core( state: &mut CheckState, context: &CheckContext, map_type: TypeId, lambda_type: TypeId, - expression: Option, + expression: lowering::ExpressionId, ) -> QueryResult where Q: ExternalQueries, { - let Some(expression) = expression else { - return Ok(context.prim.unknown); - }; - // expression_type := f a let expression_type = infer_expression(state, context, expression)?; @@ -1053,33 +1550,51 @@ where ) } +struct InferAdoApply { + statement: lowering::DoStatementId, + apply_type: TypeId, + continuation_type: TypeId, + expression: lowering::ExpressionId, +} + +#[tracing::instrument(skip_all, name = "infer_ado_apply")] fn infer_ado_apply( state: &mut CheckState, context: &CheckContext, - apply_type: TypeId, - accumulated_type: TypeId, - expression: Option, + arguments: InferAdoApply, ) -> QueryResult where Q: ExternalQueries, { - let Some(expression) = expression else { - return Ok(context.prim.unknown); - }; + let InferAdoApply { statement, apply_type, continuation_type, expression } = arguments; + state.with_error_step(ErrorStep::InferringAdoApply(statement), |state| { + infer_ado_apply_core(state, context, apply_type, continuation_type, expression) + }) +} +fn infer_ado_apply_core( + state: &mut CheckState, + context: &CheckContext, + apply_type: TypeId, + continuation_type: TypeId, + expression: lowering::ExpressionId, +) -> QueryResult +where + Q: ExternalQueries, +{ // expression_type := ?f ?a let expression_type = infer_expression(state, context, 
expression)?; - // apply_type := f (a -> b) -> f a -> f b - // accumulated_type := f (a -> b) + // apply_type := f (a -> b) -> f a -> f b + // continuation_type := f (a -> b) let apply_applied = check_function_application_core( state, context, apply_type, - accumulated_type, - |state, context, accumulated_type, expected_type| { - let _ = unification::subtype(state, context, accumulated_type, expected_type)?; - Ok(accumulated_type) + continuation_type, + |state, context, continuation_type, expected_type| { + let _ = unification::subtype(state, context, continuation_type, expected_type)?; + Ok(continuation_type) }, )?; @@ -1098,25 +1613,49 @@ where ) } +struct InferDoBind { + statement: lowering::DoStatementId, + bind_type: TypeId, + now_type: TypeId, + next_type: TypeId, + expression: lowering::ExpressionId, + binder_type: TypeId, +} + +#[tracing::instrument(skip_all, name = "infer_do_bind")] fn infer_do_bind( + state: &mut CheckState, + context: &CheckContext, + arguments: InferDoBind, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let InferDoBind { statement, bind_type, now_type, next_type, expression, binder_type } = + arguments; + state.with_error_step(ErrorStep::InferringDoBind(statement), move |state| { + let statement_type = + infer_do_bind_core(state, context, bind_type, next_type, expression, binder_type)?; + let _ = unification::subtype(state, context, statement_type, now_type)?; + Ok(()) + }) +} + +fn infer_do_bind_core( state: &mut CheckState, context: &CheckContext, bind_type: TypeId, - accumulated_type: TypeId, - expression: Option, + continuation_type: TypeId, + expression: lowering::ExpressionId, binder_type: TypeId, ) -> QueryResult where Q: ExternalQueries, { - let Some(expression) = expression else { - return Ok(context.prim.unknown); - }; - // expression_type := m a let expression_type = infer_expression(state, context, expression)?; // lambda_type := a -> m b - let lambda_type = state.make_function(&[binder_type], accumulated_type); + 
let lambda_type = state.make_function(&[binder_type], continuation_type); // bind_type := m a -> (a -> m b) -> m b // expression_type := m a @@ -1146,20 +1685,42 @@ where ) } +struct InferDoDiscard { + statement: lowering::DoStatementId, + discard_type: TypeId, + now_type: TypeId, + next_type: TypeId, + expression: lowering::ExpressionId, +} + +#[tracing::instrument(skip_all, name = "infer_do_discard")] fn infer_do_discard( + state: &mut CheckState, + context: &CheckContext, + arguments: InferDoDiscard, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let InferDoDiscard { statement, discard_type, now_type, next_type, expression } = arguments; + state.with_error_step(ErrorStep::InferringDoDiscard(statement), move |state| { + let statement_type = + infer_do_discard_core(state, context, discard_type, next_type, expression)?; + let _ = unification::subtype(state, context, statement_type, now_type)?; + Ok(()) + }) +} + +fn infer_do_discard_core( state: &mut CheckState, context: &CheckContext, discard_type: TypeId, - accumulated_type: TypeId, - expression: Option, + continuation_type: TypeId, + expression: lowering::ExpressionId, ) -> QueryResult where Q: ExternalQueries, { - let Some(expression) = expression else { - return Ok(context.prim.unknown); - }; - // expression_type := m a let expression_type = infer_expression(state, context, expression)?; @@ -1189,9 +1750,9 @@ where |_, _, _, continuation_type| Ok(continuation_type), )?; - let _ = unification::subtype(state, context, accumulated_type, result_type)?; + let _ = unification::subtype(state, context, continuation_type, result_type)?; - Ok(accumulated_type) + Ok(continuation_type) } /// Looks up the type of a term. 
@@ -1207,6 +1768,8 @@ where let term_id = if file_id == context.id { if let Some(&k) = state.binding_group.terms.get(&term_id) { k + } else if let Some(&k) = state.pending_terms.get(&term_id) { + k.into() } else if let Some(&k) = state.checked.terms.get(&term_id) { transfer::localize(state, context, k) } else { @@ -1256,12 +1819,12 @@ where let function_t = state.normalize_type(function_t); match state.storage[function_t] { Type::Forall(ref binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let (argument_type, _) = kind::check_surface_kind(state, context, argument, binder_kind)?; - Ok(substitute::SubstituteBound::on(state, binder_level, argument_type, inner)) + Ok(substitute::SubstituteBound::on(state, binder_variable, argument_type, inner)) } _ => Ok(context.prim.unknown), @@ -1296,7 +1859,7 @@ where F: FnOnce(&mut CheckState, &CheckContext, A, TypeId) -> QueryResult, { crate::trace_fields!(state, context, { function = function_t }); - let function_t = state.normalize_type(function_t); + let function_t = toolkit::normalise_expand_type(state, context, function_t)?; match state.storage[function_t] { // Check that `argument_id :: argument_type` Type::Function(argument_type, result_type) => { @@ -1332,18 +1895,18 @@ where // Instantiation rule, like `toolkit::instantiate_forall` Type::Forall(ref binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let unification = state.fresh_unification_kinded(binder_kind); let function_t = - substitute::SubstituteBound::on(state, binder_level, unification, inner); + substitute::SubstituteBound::on(state, binder_variable, unification, inner); check_function_application_core(state, context, function_t, argument_id, check_argument) } // Constraint generation, like `toolkit::collect_constraints` Type::Constrained(constraint, constrained) => { - 
state.constraints.push_wanted(constraint); + state.push_wanted(constraint); check_function_application_core( state, context, @@ -1353,6 +1916,24 @@ where ) } + // Application(Application(f, a), b) as Function(a, b) + Type::Application(partial, result_type) => { + let partial = state.normalize_type(partial); + if let Type::Application(constructor, argument_type) = state.storage[partial] { + let constructor = state.normalize_type(constructor); + if constructor == context.prim.function { + check_argument(state, context, argument_id, argument_type)?; + return Ok(result_type); + } + if let Type::Unification(unification_id) = state.storage[constructor] { + state.unification.solve(unification_id, context.prim.function); + check_argument(state, context, argument_id, argument_type)?; + return Ok(result_type); + } + } + Ok(context.prim.unknown) + } + _ => Ok(context.prim.unknown), } } @@ -1369,7 +1950,7 @@ where check_function_application_core(state, context, function_t, expression_id, check_expression) } -fn check_let_chunks( +pub(crate) fn check_let_chunks( state: &mut CheckState, context: &CheckContext, chunks: &[lowering::LetBindingChunk], @@ -1405,11 +1986,27 @@ where let expression_type = infer_where_expression(state, context, where_expression)?; - let Some(binder) = binder else { + let Some(binder) = *binder else { return Ok(()); }; - let _ = binder::check_binder(state, context, *binder, expression_type)?; + let expression_type = if binder::requires_instantiation(context, binder) { + toolkit::instantiate_forall(state, expression_type) + } else { + expression_type + }; + + let binder_type = binder::check_binder(state, context, binder, expression_type)?; + + let exhaustiveness = + exhaustiveness::check_lambda_patterns(state, context, &[binder_type], &[binder])?; + + let has_missing = exhaustiveness.missing.is_some(); + state.report_exhaustiveness(exhaustiveness); + + if has_missing { + state.push_wanted(context.prim.partial); + } Ok(()) } @@ -1461,6 +2058,21 @@ fn 
check_let_name_binding( context: &CheckContext, id: lowering::LetBindingNameGroupId, ) -> QueryResult<()> +where + Q: ExternalQueries, +{ + state.with_implication(|state| { + state.with_error_step(ErrorStep::CheckingLetName(id), |state| { + check_let_name_binding_core(state, context, id) + }) + }) +} + +fn check_let_name_binding_core( + state: &mut CheckState, + context: &CheckContext, + id: lowering::LetBindingNameGroupId, +) -> QueryResult<()> where Q: ExternalQueries, { @@ -1476,18 +2088,150 @@ where let surface_bindings = state.surface_bindings.get_let(id); let surface_bindings = surface_bindings.as_deref().unwrap_or_default(); - let signature = - inspect::inspect_signature_core(state, context, name_type, surface_bindings)?; + let signature = inspect::inspect_signature(state, context, name_type, surface_bindings)?; + + equation::check_equations_core(state, context, signature_id, &signature, &name.equations)?; - check_equations(state, context, signature_id, signature, &name.equations) + let origin = equation::ExhaustivenessOrigin::FromSignature(&signature.arguments); + equation::patterns(state, context, origin, &name.equations)?; + + if let Some(variable) = signature.variables.first() { + state.type_scope.unbind_name(&variable.variable); + } } else { - infer_equations_core(state, context, name_type, &name.equations)?; + // Keep simple let bindings e.g. `bind = ibind` polymorphic. + if let [equation] = name.equations.as_ref() + && equation.binders.is_empty() + && let Some(guarded) = &equation.guarded + { + let inferred_type = infer_guarded_expression(state, context, guarded)?; + state.term_scope.bind_let(id, inferred_type); + } else { + equation::infer_equations_core(state, context, name_type, &name.equations)?; - // No let-generalization: infer equations and solve constraints; - // residuals are deferred to parent scope for later error reporting. 
- let residual = state.solve_constraints(context)?; - state.constraints.extend_wanted(&residual); + let origin = equation::ExhaustivenessOrigin::FromType(name_type); + equation::patterns(state, context, origin, &name.equations)?; + } + } - Ok(()) + Ok(()) +} + +pub fn infer_guarded_expression( + state: &mut CheckState, + context: &CheckContext, + guarded: &lowering::GuardedExpression, +) -> QueryResult +where + Q: ExternalQueries, +{ + match guarded { + lowering::GuardedExpression::Unconditional { where_expression } => { + let Some(w) = where_expression else { + return Ok(context.prim.unknown); + }; + infer_where_expression(state, context, w) + } + lowering::GuardedExpression::Conditionals { pattern_guarded } => { + let mut inferred_type = context.prim.unknown; + for pattern_guarded in pattern_guarded.iter() { + for pattern_guard in pattern_guarded.pattern_guards.iter() { + check_pattern_guard(state, context, pattern_guard)?; + } + if let Some(w) = &pattern_guarded.where_expression { + inferred_type = infer_where_expression(state, context, w)?; + } + } + Ok(inferred_type) + } + } +} + +pub fn check_guarded_expression( + state: &mut CheckState, + context: &CheckContext, + guarded: &lowering::GuardedExpression, + expected: TypeId, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + match guarded { + lowering::GuardedExpression::Unconditional { where_expression } => { + let Some(w) = where_expression else { + return Ok(()); + }; + check_where_expression(state, context, w, expected)?; + Ok(()) + } + lowering::GuardedExpression::Conditionals { pattern_guarded } => { + for pattern_guarded in pattern_guarded.iter() { + for pattern_guard in pattern_guarded.pattern_guards.iter() { + check_pattern_guard(state, context, pattern_guard)?; + } + if let Some(w) = &pattern_guarded.where_expression { + check_where_expression(state, context, w, expected)?; + } + } + Ok(()) + } } } + +fn check_pattern_guard( + state: &mut CheckState, + context: &CheckContext, + guard: 
&lowering::PatternGuard, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let Some(expression) = guard.expression else { + return Ok(()); + }; + + let expression_type = infer_expression(state, context, expression)?; + + let Some(binder) = guard.binder else { + return Ok(()); + }; + + let _ = binder::check_binder(state, context, binder, expression_type)?; + + Ok(()) +} + +pub fn infer_where_expression( + state: &mut CheckState, + context: &CheckContext, + where_expression: &lowering::WhereExpression, +) -> QueryResult +where + Q: ExternalQueries, +{ + check_let_chunks(state, context, &where_expression.bindings)?; + + let Some(expression) = where_expression.expression else { + return Ok(context.prim.unknown); + }; + + infer_expression(state, context, expression) +} + +fn check_where_expression( + state: &mut CheckState, + context: &CheckContext, + where_expression: &lowering::WhereExpression, + expected: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + check_let_chunks(state, context, &where_expression.bindings)?; + + let Some(expression) = where_expression.expression else { + return Ok(context.prim.unknown); + }; + + check_expression(state, context, expression, expected) +} diff --git a/compiler-core/checking/src/algorithm/term_item.rs b/compiler-core/checking/src/algorithm/term_item.rs index 007db08e6..edb68a643 100644 --- a/compiler-core/checking/src/algorithm/term_item.rs +++ b/compiler-core/checking/src/algorithm/term_item.rs @@ -7,12 +7,12 @@ use itertools::Itertools; use lowering::TermItemIr; use crate::ExternalQueries; -use crate::algorithm::kind::synonym; -use crate::algorithm::state::{CheckContext, CheckState, InstanceHeadBinding}; +use crate::algorithm::safety::safe_loop; +use crate::algorithm::state::{CheckContext, CheckState, InstanceHeadBinding, PendingType}; use crate::algorithm::{ - constraint, inspect, kind, quantify, substitute, term, transfer, unification, + constraint, equation, inspect, kind, quantify, substitute, term, toolkit, 
transfer, unification, }; -use crate::core::{Instance, InstanceKind, Type, TypeId, Variable, debruijn}; +use crate::core::{Instance, InstanceKind, Name, Type, TypeId, Variable, debruijn}; use crate::error::{ErrorKind, ErrorStep}; #[derive(Clone)] @@ -25,11 +25,12 @@ pub struct InferredValueGroup { /// /// This function checks the term signatures for [`TermItemIr::Foreign`], /// [`TermItemIr::ValueGroup`], and [`TermItemIr::Operator`], inserting -/// them into [`CheckState::checked`] upon completion. +/// them into [`CheckState::pending_terms`] as [`PendingTermType`] entries. /// /// For [`TermItemIr::ValueGroup`] specifically, it also invokes the /// [`inspect::collect_signature_variables`] function to collect type /// variables that need to be rebound during [`check_value_group`]. +#[tracing::instrument(skip_all, name = "check_term_signature")] pub fn check_term_signature( state: &mut CheckState, context: &CheckContext, @@ -39,14 +40,12 @@ where Q: ExternalQueries, { state.with_error_step(ErrorStep::TermDeclaration(item_id), |state| { - let _span = tracing::debug_span!("check_term_signature").entered(); - let Some(item) = context.lowered.info.get_term_item(item_id) else { return Ok(()); }; match item { - TermItemIr::Foreign { signature } | TermItemIr::ValueGroup { signature, .. } => { + TermItemIr::Foreign { signature } => { let Some(signature) = signature else { return Ok(()) }; let signature_variables = inspect::collect_signature_variables(context, *signature); @@ -60,16 +59,26 @@ where .unwrap_or(inferred_type); crate::debug_fields!(state, context, { quantified_type = quantified_type }); + state.pending_terms.insert(item_id, PendingType::Immediate(quantified_type)); + } + TermItemIr::ValueGroup { signature, .. 
} => { + let Some(signature) = signature else { return Ok(()) }; - let global_type = transfer::globalize(state, context, quantified_type); - state.checked.terms.insert(item_id, global_type); + let signature_variables = inspect::collect_signature_variables(context, *signature); + state.surface_bindings.insert_term(item_id, signature_variables); + + let (inferred_type, _) = + kind::check_surface_kind(state, context, *signature, context.prim.t)?; + + crate::debug_fields!(state, context, { inferred_type = inferred_type }); + state.pending_terms.insert(item_id, PendingType::Deferred(inferred_type)); } TermItemIr::Operator { resolution, .. } => { let Some((file_id, term_id)) = *resolution else { return Ok(()) }; - let id = term::lookup_file_term(state, context, file_id, term_id)?; + let inferred_type = term::lookup_file_term(state, context, file_id, term_id)?; - let global_type = transfer::globalize(state, context, id); - state.checked.terms.insert(item_id, global_type); + crate::debug_fields!(state, context, { inferred_type = inferred_type }); + state.pending_terms.insert(item_id, PendingType::Deferred(inferred_type)); } _ => (), } @@ -100,6 +109,7 @@ pub struct CheckInstance<'a> { /// upon completion. 
/// /// [`core::Instance`]: crate::core::Instance +#[tracing::instrument(skip_all, name = "check_instance")] pub fn check_instance( state: &mut CheckState, context: &CheckContext, @@ -110,8 +120,6 @@ where { let CheckInstance { item_id, constraints, arguments, resolution } = input; state.with_error_step(ErrorStep::TermDeclaration(item_id), |state| { - let _span = tracing::debug_span!("check_instance").entered(); - let Some((class_file, class_item)) = *resolution else { return Ok(()); }; @@ -168,6 +176,14 @@ where quantify::quantify_instance(state, &mut instance); + constraint::validate_instance_rows( + state, + context, + class_file, + class_item, + &instance.arguments, + )?; + let arguments = instance.arguments.iter().map(|&(t, k)| { let t = transfer::globalize(state, context, t); let k = transfer::globalize(state, context, k); @@ -184,6 +200,12 @@ where instance.constraints = constraints.collect(); + let kind_variables = instance.kind_variables.iter().map(|(name, k)| { + (name.clone(), transfer::globalize(state, context, *k)) + }); + + instance.kind_variables = kind_variables.collect(); + state.checked.instances.insert(instance_id, instance); // Capture implicit variables from the instance head before unbinding. 
@@ -216,14 +238,15 @@ where let mut current = class_kind; for _ in 0..FUEL { - current = synonym::normalize_expand_type(state, context, current)?; + current = toolkit::normalise_expand_type(state, context, current)?; match state.storage[current] { Type::Forall(ref binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let replacement = state.fresh_unification_kinded(binder_kind); - current = substitute::SubstituteBound::on(state, binder_level, replacement, inner); + current = + substitute::SubstituteBound::on(state, binder_variable, replacement, inner); } Type::Function(argument_kind, result_kind) => { @@ -250,40 +273,91 @@ pub struct CheckValueGroup<'a> { /// /// This function optionally returns [`InferredValueGroup`] /// for value declarations that do not have a signature. +#[tracing::instrument(skip_all, name = "check_value_group")] pub fn check_value_group( state: &mut CheckState, context: &CheckContext, input: CheckValueGroup<'_>, ) -> QueryResult> +where + Q: ExternalQueries, +{ + state.with_error_step(ErrorStep::TermDeclaration(input.item_id), |state| { + state.with_implication(|state| check_value_group_core(context, state, input)) + }) +} + +fn check_value_group_core( + context: &CheckContext, + state: &mut CheckState, + input: CheckValueGroup<'_>, +) -> QueryResult> where Q: ExternalQueries, { let CheckValueGroup { item_id, signature, equations } = input; - state.with_error_step(ErrorStep::TermDeclaration(item_id), |state| { - let _span = tracing::debug_span!("check_value_group").entered(); - if let Some(signature_id) = signature { - let group_type = term::lookup_file_term(state, context, context.id, item_id)?; + if let Some(signature_id) = signature { + let group_type = term::lookup_file_term(state, context, context.id, item_id)?; - let surface_bindings = state.surface_bindings.get_term(item_id); - let surface_bindings = surface_bindings.as_deref().unwrap_or_default(); + let 
surface_bindings = state.surface_bindings.get_term(item_id); + let surface_bindings = surface_bindings.as_deref().unwrap_or_default(); - let signature = - inspect::inspect_signature_core(state, context, group_type, surface_bindings)?; + let signature = inspect::inspect_signature(state, context, group_type, surface_bindings)?; - term::check_equations(state, context, *signature_id, signature, equations)?; - crate::debug_fields!(state, context, { group_type = group_type }, "checking"); - Ok(None) - } else { - let (inferred_type, residual_constraints) = - term::infer_equations(state, context, item_id, equations)?; - crate::debug_fields!(state, context, { inferred_type = inferred_type }, "inferring"); - Ok(Some(InferredValueGroup { inferred_type, residual_constraints })) - } - }) + equation::check_equations(state, context, *signature_id, signature, equations)?; + crate::debug_fields!(state, context, { group_type = group_type }, "checked"); + Ok(None) + } else { + let (inferred_type, residual_constraints) = + equation::infer_equations(state, context, item_id, equations)?; + crate::debug_fields!(state, context, { inferred_type = inferred_type }, "inferred"); + Ok(Some(InferredValueGroup { inferred_type, residual_constraints })) + } +} + +pub fn commit_checked_value_group( + state: &mut CheckState, + context: &CheckContext, + item_id: TermItemId, +) -> QueryResult<()> +where + Q: ExternalQueries, +{ + let Some(PendingType::Deferred(inferred_type)) = state.pending_terms.remove(&item_id) else { + return Ok(()); + }; + + let quantified_type = quantify::quantify(state, inferred_type) + .map(|(quantified_type, _)| quantified_type) + .unwrap_or(inferred_type); + + let global_type = transfer::globalize(state, context, quantified_type); + state.checked.terms.insert(item_id, global_type); + + Ok(()) +} + +/// Commits remaining pending term entries from [`CheckState::pending_terms`] +/// into [`CheckedModule::terms`]. 
+pub fn commit_pending_terms(state: &mut CheckState, context: &CheckContext) +where + Q: ExternalQueries, +{ + for (item_id, pending_type) in state.pending_terms.drain().collect_vec() { + let local_type = match pending_type { + PendingType::Immediate(id) => id, + PendingType::Deferred(id) => { + quantify::quantify(state, id).map(|(id, _)| id).unwrap_or(id) + } + }; + let global_type = transfer::globalize(state, context, local_type); + state.checked.terms.insert(item_id, global_type); + } } /// Generalises an [`InferredValueGroup`]. -pub fn commit_value_group( +#[tracing::instrument(skip_all, name = "commit_inferred_value_group")] +pub fn commit_inferred_value_group( state: &mut CheckState, context: &CheckContext, item_id: TermItemId, @@ -301,13 +375,12 @@ where }; state.with_error_step(ErrorStep::TermDeclaration(item_id), |state| { - let _span = tracing::debug_span!("commit_value_group").entered(); for constraint in result.ambiguous { - let constraint = transfer::globalize(state, context, constraint); + let constraint = state.render_local_type(context, constraint); state.insert_error(ErrorKind::AmbiguousConstraint { constraint }); } for constraint in result.unsatisfied { - let constraint = transfer::globalize(state, context, constraint); + let constraint = state.render_local_type(context, constraint); state.insert_error(ErrorKind::NoInstanceFound { constraint }); } crate::debug_fields!(state, context, { quantified = result.quantified }); @@ -327,7 +400,7 @@ pub struct CheckInstanceMembers<'a> { pub class_id: TypeItemId, pub instance_arguments: &'a [(TypeId, TypeId)], pub instance_constraints: &'a [(TypeId, TypeId)], - pub kind_variables: &'a [TypeId], + pub kind_variables: &'a [(Name, TypeId)], } /// Checks instance member declarations. 
@@ -412,10 +485,10 @@ pub struct CheckInstanceMemberGroup<'a> { class_id: TypeItemId, instance_arguments: &'a [(TypeId, TypeId)], instance_constraints: &'a [(TypeId, TypeId)], - kind_variables: &'a [TypeId], + kind_variables: &'a [(Name, TypeId)], } -/// Checks an instance member group against its specialized class member type. +/// Checks an instance member group against its specialised class member type. /// /// This rule maintains the following invariants: /// - Check mode: `inferred_type <: signature_type` and `signature_type ~ specialised_type` @@ -428,16 +501,29 @@ pub struct CheckInstanceMemberGroup<'a> { /// The signature type of a member group must unify with the specialised /// type of the class member. The signature cannot be more general than /// the specialised type. See tests 118 and 125 for a demonstration. +#[tracing::instrument(skip_all, name = "check_instance_member_group")] pub fn check_instance_member_group( state: &mut CheckState, context: &CheckContext, input: CheckInstanceMemberGroup<'_>, ) -> QueryResult<()> +where + Q: ExternalQueries, +{ + state.with_error_step(ErrorStep::TermDeclaration(input.instance_id), |state| { + state.with_implication(|state| check_instance_member_group_core(state, context, input)) + }) +} + +fn check_instance_member_group_core( + state: &mut CheckState, + context: &CheckContext, + input: CheckInstanceMemberGroup<'_>, +) -> QueryResult<()> where Q: ExternalQueries, { let CheckInstanceMemberGroup { - instance_id, instance_bindings, member, class_file, @@ -445,96 +531,98 @@ where instance_arguments, instance_constraints, kind_variables, + .. } = input; - state.with_error_step(ErrorStep::TermDeclaration(instance_id), |state| { - let _span = tracing::debug_span!("check_instance_member_group").entered(); - - // Save the current size of the environment for unbinding. - let size = state.type_scope.size(); + // Save the current size of the environment for unbinding. 
+ let size = state.type_scope.size(); - // Bind kind variables generalised after instance head checking. - for &kind_variable in kind_variables { - let kind = transfer::localize(state, context, kind_variable); - state.type_scope.bind_core(kind); - } + // Bind kind variables generalised after instance head checking. + for (name, kind_variable) in kind_variables { + let kind = transfer::localize(state, context, *kind_variable); + let name = state.fresh_name(&name.text); + state.type_scope.bind_core(kind, name); + } - for binding in instance_bindings { - state.type_scope.bind_implicit(binding.node, binding.id, binding.kind); - } + for binding in instance_bindings { + state.type_scope.bind_implicit( + binding.node, + binding.id, + binding.kind, + binding.name.clone(), + ); + } - let class_member_type = lookup_class_member(state, context, member.resolution)?; + let class_member_type = lookup_class_member(state, context, member.resolution)?; - for (constraint_type, _) in instance_constraints { - let local_constraint = transfer::localize(state, context, *constraint_type); - state.constraints.push_given(local_constraint); - } + for (constraint_type, _) in instance_constraints { + let local_constraint = transfer::localize(state, context, *constraint_type); + state.push_given(local_constraint); + } - let specialized_type = if let Some(class_member_type) = class_member_type { - specialize_class_member( - state, - context, - class_member_type, - (class_file, class_id), - instance_arguments, - )? - } else { - None - }; + let specialised_type = if let Some(class_member_type) = class_member_type { + specialise_class_member( + state, + context, + class_member_type, + (class_file, class_id), + instance_arguments, + )? + } else { + None + }; - // The specialized type may have constraints like `Show a => (a -> b) -> f a -> f b`. - // We push `Show a` as a given and use the body `(a -> b) -> f a -> f b` for checking. 
- let specialized_type = specialized_type.map(|mut t| { - while let normalized = state.normalize_type(t) - && let Type::Constrained(constraint, constrained) = &state.storage[normalized] - { - state.constraints.push_given(*constraint); - t = *constrained; - } - t - }); + // The specialised type may have constraints like `Show a => (a -> b) -> f a -> f b`. + // We push `Show a` as a given and use the body `(a -> b) -> f a -> f b` for checking. + let specialised_type = specialised_type.map(|mut t| { + safe_loop! { + let normalized = state.normalize_type(t); + let Type::Constrained(constraint, constrained) = state.storage[normalized] else { + break t; + }; + state.push_given(constraint); + t = constrained; + } + }); - if let Some(signature_id) = &member.signature { - let surface_bindings = inspect::collect_signature_variables(context, *signature_id); + if let Some(signature_id) = &member.signature { + let surface_bindings = inspect::collect_signature_variables(context, *signature_id); - let (member_type, _) = - kind::check_surface_kind(state, context, *signature_id, context.prim.t)?; + let (member_type, _) = + kind::check_surface_kind(state, context, *signature_id, context.prim.t)?; - if let Some(specialized_type) = specialized_type { - let unified = unification::unify(state, context, member_type, specialized_type)?; - if !unified { - let expected = transfer::globalize(state, context, specialized_type); - let actual = transfer::globalize(state, context, member_type); - state.insert_error(ErrorKind::InstanceMemberTypeMismatch { expected, actual }); - } + if let Some(specialised_type) = specialised_type { + let unified = unification::unify(state, context, member_type, specialised_type)?; + if !unified { + let expected = state.render_local_type(context, specialised_type); + let actual = state.render_local_type(context, member_type); + state.insert_error(ErrorKind::InstanceMemberTypeMismatch { expected, actual }); } + } - let signature = - 
inspect::inspect_signature_core(state, context, member_type, &surface_bindings)?; + let signature = inspect::inspect_signature(state, context, member_type, &surface_bindings)?; - term::check_equations(state, context, *signature_id, signature, &member.equations)?; - } else if let Some(specialized_type) = specialized_type { - let inferred_type = state.fresh_unification_type(context); - term::infer_equations_core(state, context, inferred_type, &member.equations)?; + equation::check_equations(state, context, *signature_id, signature, &member.equations)?; + } else if let Some(specialised_type) = specialised_type { + let inferred_type = state.fresh_unification_type(context); + equation::infer_equations_core(state, context, inferred_type, &member.equations)?; - let matches = unification::subtype(state, context, inferred_type, specialized_type)?; - if !matches { - let expected = transfer::globalize(state, context, specialized_type); - let actual = transfer::globalize(state, context, inferred_type); - state.insert_error(ErrorKind::InstanceMemberTypeMismatch { expected, actual }); - } + let origin = equation::ExhaustivenessOrigin::FromType(specialised_type); + equation::patterns(state, context, origin, &member.equations)?; - let residual = state.solve_constraints(context)?; - for constraint in residual { - let constraint = transfer::globalize(state, context, constraint); - state.insert_error(ErrorKind::NoInstanceFound { constraint }); - } + let matches = unification::subtype(state, context, inferred_type, specialised_type)?; + if !matches { + let expected = state.render_local_type(context, specialised_type); + let actual = state.render_local_type(context, inferred_type); + state.insert_error(ErrorKind::InstanceMemberTypeMismatch { expected, actual }); } - state.type_scope.unbind(debruijn::Level(size.0)); + let _ = equation::constraints(state, context, equation::ConstraintsPolicy::Report)?; + } - Ok(()) - }) + state.type_scope.unbind(debruijn::Level(size.0)); + + Ok(()) } 
macro_rules! debug_assert_class_constraint { @@ -552,11 +640,11 @@ macro_rules! debug_assert_class_constraint { }; } -/// Specializes a class member type for a specific instance. +/// Specialises a class member type for a specific instance. /// /// Given a class member type like `forall a. Show a => a -> String`, /// and instance arguments like `Int`, this returns `Int -> String`. -fn specialize_class_member( +fn specialise_class_member( state: &mut CheckState, context: &CheckContext, class_member_type: TypeId, @@ -571,7 +659,7 @@ where return Ok(None); }; - let mut specialized = class_member_type; + let mut specialised = class_member_type; let arguments = instance_arguments.iter().map(|(t, k)| { let t = transfer::localize(state, context, *t); @@ -582,13 +670,35 @@ where let arguments = arguments.collect_vec(); let kind_variables = class_info.quantified_variables.0 + class_info.kind_variables.0; + // Class member types are originally bound with only the class' type + // variables in scope. When specialising for an instance, we shift + // their levels up by the current scope size to avoid conflicts. + // + // First, without shifting + // + // ```purescript + // class Functor f:0 where + // map :: forall a:1 b:2. (a -> b) -> f a -> f b + // + // instance Functor (Vector s:0 n:1) where + // -- map :: forall f:0 a:1 b:2. (a -> b) -> f a -> f b + // ``` + // + // Then, with shifting + // + // ```purescript + // instance Functor (Vector s:0 n:1) where + // -- map :: forall f:2 a:3 b:4. (a -> b) -> f a -> f b + // ``` + // With globally unique Names, shifting is no longer needed. 
+ let mut kind_variables = 0..kind_variables; let mut arguments = arguments.into_iter(); - while let normalized = state.normalize_type(specialized) + while let normalized = state.normalize_type(specialised) && let Type::Forall(binder, inner) = &state.storage[normalized] { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let inner = *inner; @@ -598,18 +708,18 @@ where let _ = unification::unify(state, context, binder_kind, argument_kind); argument_type } else { - let skolem = Variable::Skolem(binder_level, binder_kind); + let skolem = Variable::Skolem(binder_variable.clone(), binder_kind); state.storage.intern(Type::Variable(skolem)) }; - specialized = substitute::SubstituteBound::on(state, binder_level, replacement, inner); + specialised = substitute::SubstituteBound::on(state, binder_variable, replacement, inner); } - specialized = state.normalize_type(specialized); - if let Type::Constrained(constraint, constrained) = state.storage[specialized] { + specialised = state.normalize_type(specialised); + if let Type::Constrained(constraint, constrained) = state.storage[specialised] { debug_assert_class_constraint!(state, constraint, class_file, class_id); - specialized = constrained; + specialised = constrained; } - Ok(Some(specialized)) + Ok(Some(specialised)) } diff --git a/compiler-core/checking/src/algorithm/toolkit.rs b/compiler-core/checking/src/algorithm/toolkit.rs index d6d9b8a01..31d110e4e 100644 --- a/compiler-core/checking/src/algorithm/toolkit.rs +++ b/compiler-core/checking/src/algorithm/toolkit.rs @@ -1,5 +1,9 @@ +use building_types::QueryResult; + +use crate::ExternalQueries; +use crate::algorithm::kind::{operator, synonym}; use crate::algorithm::safety::safe_loop; -use crate::algorithm::state::CheckState; +use crate::algorithm::state::{CheckContext, CheckState}; use crate::algorithm::substitute; use crate::core::{Type, TypeId, Variable}; @@ -98,11 +102,32 @@ pub fn 
instantiate_forall(state: &mut CheckState, mut type_id: TypeId) -> TypeId safe_loop! { type_id = state.normalize_type(type_id); if let Type::Forall(ref binder, inner) = state.storage[type_id] { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let unification = state.fresh_unification_kinded(binder_kind); - type_id = substitute::SubstituteBound::on(state, binder_level, unification, inner); + type_id = substitute::SubstituteBound::on(state, binder_variable, unification, inner); + } else { + break type_id; + } + } +} + +/// Skolemises [`Type::Forall`] by replacing bound variables with skolem constants. +/// +/// This mirrors [`instantiate_forall`] but introduces skolem variables instead +/// of unification variables. Skolem variables are rigid, they cannot be unified +/// with other types, enforcing parametricity over the quantified variable. +pub fn skolemise_forall(state: &mut CheckState, mut type_id: TypeId) -> TypeId { + safe_loop! { + type_id = state.normalize_type(type_id); + if let Type::Forall(ref binder, inner) = state.storage[type_id] { + let binder_variable = binder.variable.clone(); + let binder_kind = binder.kind; + + let v = Variable::Skolem(binder_variable.clone(), binder_kind); + let t = state.storage.intern(Type::Variable(v)); + type_id = substitute::SubstituteBound::on(state, binder_variable, t, inner); } else { break type_id; } @@ -110,11 +135,11 @@ pub fn instantiate_forall(state: &mut CheckState, mut type_id: TypeId) -> TypeId } /// Collects [`Type::Constrained`] as wanted constraints. -pub fn collect_constraints(state: &mut CheckState, mut type_id: TypeId) -> TypeId { +pub fn collect_wanteds(state: &mut CheckState, mut type_id: TypeId) -> TypeId { safe_loop! 
{ type_id = state.normalize_type(type_id); if let Type::Constrained(constraint, constrained) = state.storage[type_id] { - state.constraints.push_wanted(constraint); + state.push_wanted(constraint); type_id = constrained; } else { break type_id; @@ -122,12 +147,37 @@ pub fn collect_constraints(state: &mut CheckState, mut type_id: TypeId) -> TypeI } } +/// Collects [`Type::Constrained`] as given constraints. +/// +/// Peels constraint layers from a type, pushing each as a given rather than +/// a wanted. Used when the expected type carries constraints that should +/// discharge wanted constraints from the inferred type e.g. `unsafePartial` +/// discharging `Partial`. +pub fn collect_givens(state: &mut CheckState, mut type_id: TypeId) -> TypeId { + safe_loop! { + type_id = state.normalize_type(type_id); + if let Type::Constrained(constraint, constrained) = state.storage[type_id] { + state.push_given(constraint); + type_id = constrained; + } else { + break type_id; + } + } +} + +/// [`instantiate_forall`] then [`collect_wanteds`]. +pub fn instantiate_constrained(state: &mut CheckState, type_id: TypeId) -> TypeId { + let type_id = instantiate_forall(state, type_id); + collect_wanteds(state, type_id) +} + /// Instantiates [`Type::Forall`] with the provided arguments. /// /// This function falls back to constructing skolem variables if there's -/// not enough arguments provided. This is primarily used to specialise -/// constructor types based on the [`Type::Application`] and [`Type::KindApplication`] -/// used in an instance head. For example: +/// not enough arguments provided. The number of skolem variables produced +/// is returned alongside the instantiated type. This is primarily used to +/// specialise constructor types based on the [`Type::Application`] and +/// [`Type::KindApplication`] used in an instance head.
For example: /// /// ```purescript /// -- Proxy @Type Int @@ -140,27 +190,106 @@ pub fn instantiate_with_arguments( state: &mut CheckState, mut type_id: TypeId, arguments: impl AsRef<[TypeId]>, -) -> TypeId { +) -> (TypeId, usize) { let mut arguments_iter = arguments.as_ref().iter().copied(); + let mut skolemised = 0; safe_loop! { type_id = state.normalize_type(type_id); match &state.storage[type_id] { Type::Forall(binder, inner) => { - let binder_level = binder.level; + let binder_variable = binder.variable.clone(); let binder_kind = binder.kind; let inner = *inner; let argument_type = arguments_iter.next().unwrap_or_else(|| { - let skolem = Variable::Skolem(binder_level, binder_kind); + skolemised += 1; + let skolem = Variable::Skolem(binder_variable.clone(), binder_kind); state.storage.intern(Type::Variable(skolem)) }); - type_id = substitute::SubstituteBound::on(state, binder_level, argument_type, inner); + type_id = + substitute::SubstituteBound::on(state, binder_variable, argument_type, inner); } _ => break, } } - type_id + (type_id, skolemised) +} + +#[derive(Copy, Clone, Debug)] +pub enum SynthesiseFunction { + Yes, + No, +} + +/// Decompose a type into `(argument, result)` as if it were a function type. 
+/// +/// Handles three representations: +/// - `Type::Function(a, b)`, the standard function representation +/// - `Type::Application(Application(f, a), b)`, the application-based form +/// - `Type::Unification(u)`, which requires function type synthesis +pub fn decompose_function( + state: &mut CheckState, + context: &CheckContext, + t: TypeId, + mode: SynthesiseFunction, +) -> QueryResult> +where + Q: ExternalQueries, +{ + match state.storage[t] { + Type::Function(argument, result) => Ok(Some((argument, result))), + + Type::Unification(unification_id) if matches!(mode, SynthesiseFunction::Yes) => { + let argument = state.fresh_unification_type(context); + let result = state.fresh_unification_type(context); + + let function = state.storage.intern(Type::Function(argument, result)); + state.unification.solve(unification_id, function); + + Ok(Some((argument, result))) + } + + Type::Application(partial, result) => { + let partial = state.normalize_type(partial); + if let Type::Application(constructor, argument) = state.storage[partial] { + let constructor = state.normalize_type(constructor); + if constructor == context.prim.function { + return Ok(Some((argument, result))); + } + if matches!(mode, SynthesiseFunction::Yes) + && let Type::Unification(unification_id) = state.storage[constructor] + { + state.unification.solve(unification_id, context.prim.function); + return Ok(Some((argument, result))); + } + } + Ok(None) + } + + _ => Ok(None), + } +} + +pub fn normalise_expand_type( + state: &mut CheckState, + context: &CheckContext, + mut type_id: TypeId, +) -> QueryResult +where + Q: ExternalQueries, +{ + safe_loop! 
{ + let expanded_id = state.normalize_type(type_id); + let expanded_id = operator::expand_type_operator(state, context, expanded_id)?; + let expanded_id = synonym::expand_type_synonym(state, context, expanded_id)?; + + if expanded_id == type_id { + return Ok(type_id); + } + + type_id = expanded_id; + } } diff --git a/compiler-core/checking/src/algorithm/transfer.rs b/compiler-core/checking/src/algorithm/transfer.rs index 69894497d..7f68d6ca5 100644 --- a/compiler-core/checking/src/algorithm/transfer.rs +++ b/compiler-core/checking/src/algorithm/transfer.rs @@ -164,9 +164,9 @@ fn traverse<'a, Q: ExternalQueries>(source: &mut TraversalSource<'a, Q>, id: Typ let kind = traverse(source, kind); Type::Variable(Variable::Skolem(level, kind)) } - Variable::Bound(level, kind) => { + Variable::Bound(name, kind) => { let kind = traverse(source, kind); - Type::Variable(Variable::Bound(level, kind)) + Type::Variable(Variable::Bound(name, kind)) } free @ Variable::Free(_) => Type::Variable(free), }, diff --git a/compiler-core/checking/src/algorithm/type_item.rs b/compiler-core/checking/src/algorithm/type_item.rs index cdcfab9e8..ae997cb2d 100644 --- a/compiler-core/checking/src/algorithm/type_item.rs +++ b/compiler-core/checking/src/algorithm/type_item.rs @@ -10,7 +10,7 @@ use smol_str::SmolStr; use crate::ExternalQueries; use crate::algorithm::safety::safe_loop; -use crate::algorithm::state::{CheckContext, CheckState, CheckedConstructor}; +use crate::algorithm::state::{CheckContext, CheckState, CheckedConstructor, PendingType}; use crate::algorithm::{inspect, kind, quantify, transfer, unification}; use crate::core::{ Class, DataLike, ForallBinder, Operator, Role, Synonym, Type, TypeId, Variable, debruijn, @@ -64,6 +64,7 @@ pub enum CheckedTypeItem { /// - [`check_data_definition`] /// - [`check_synonym_definition`] /// - [`check_class_definition`] +#[tracing::instrument(skip_all, name = "check_type_item")] pub fn check_type_item( state: &mut CheckState, context: &CheckContext, 
@@ -73,8 +74,6 @@ where Q: ExternalQueries, { state.with_error_step(ErrorStep::TypeDeclaration(item_id), |state| { - let _span = tracing::debug_span!("check_type_item").entered(); - let Some(item) = context.lowered.info.get_type_item(item_id) else { return Ok(None); }; @@ -141,8 +140,13 @@ fn check_data_definition( where Q: ExternalQueries, { - let Some(SignatureLike { kind_variables, type_variables, result_kind }) = - check_signature_like(state, context, item_id, signature, variables, |_| context.prim.t)? + let Some(SignatureLike { + kind_variables, + type_variables, + result_kind, + kind_unbind_level, + type_unbind_level, + }) = check_signature_like(state, context, item_id, signature, variables, |_| context.prim.t)? else { return Ok(None); }; @@ -160,9 +164,6 @@ where let constructors = check_constructor_arguments(state, context, item_id)?; - let type_unbind_level = type_variables.first().map(|variable| variable.level); - let kind_unbind_level = kind_variables.first().map(|variable| variable.level); - if let Some(level) = type_unbind_level { state.type_scope.unbind(level); } @@ -193,10 +194,15 @@ fn check_synonym_definition( where Q: ExternalQueries, { - let Some(SignatureLike { kind_variables, type_variables, result_kind }) = - check_signature_like(state, context, item_id, signature, variables, |state| { - state.fresh_unification_type(context) - })? + let Some(SignatureLike { + kind_variables, + type_variables, + result_kind, + kind_unbind_level, + type_unbind_level, + }) = check_signature_like(state, context, item_id, signature, variables, |state| { + state.fresh_unification_type(context) + })? 
else { return Ok(None); }; @@ -213,11 +219,11 @@ where inferred_kind.replace(synonym_kind); } - if let Some(variable) = type_variables.first() { - state.type_scope.unbind(variable.level); + if let Some(level) = type_unbind_level { + state.type_scope.unbind(level); } - if let Some(variable) = kind_variables.first() { - state.type_scope.unbind(variable.level); + if let Some(level) = kind_unbind_level { + state.type_scope.unbind(level); } crate::debug_fields!(state, context, { synonym_type = synonym_type }); @@ -241,10 +247,15 @@ fn check_class_definition( where Q: ExternalQueries, { - let Some(SignatureLike { kind_variables, type_variables, result_kind }) = - check_signature_like(state, context, item_id, signature, variables, |_| { - context.prim.constraint - })? + let Some(SignatureLike { + kind_variables, + type_variables, + result_kind, + kind_unbind_level, + type_unbind_level, + }) = check_signature_like(state, context, item_id, signature, variables, |_| { + context.prim.constraint + })? 
else { return Ok(None); }; @@ -261,7 +272,7 @@ where let class_reference = { let reference_type = state.storage.intern(Type::Constructor(context.id, item_id)); type_variables.iter().cloned().fold(reference_type, |reference_type, binder| { - let variable = Variable::Bound(binder.level, binder.kind); + let variable = Variable::Bound(binder.variable, binder.kind); let variable = state.storage.intern(Type::Variable(variable)); state.storage.intern(Type::Application(reference_type, variable)) }) @@ -286,11 +297,11 @@ where class_reference, )?; - if let Some(variable) = type_variables.first() { - state.type_scope.unbind(variable.level); + if let Some(level) = type_unbind_level { + state.type_scope.unbind(level); } - if let Some(variable) = kind_variables.first() { - state.type_scope.unbind(variable.level); + if let Some(level) = kind_unbind_level { + state.type_scope.unbind(level); } crate::debug_fields!(state, context, { ?superclass_count = superclasses.len(), ?member_count = members.len() }); @@ -387,6 +398,14 @@ where { let type_id = transfer::globalize(state, context, quantified_type); state.checked.types.insert(item_id, type_id); + } else if inferred_kind.is_none() + && let Some(PendingType::Deferred(deferred_kind)) = state.pending_types.remove(&item_id) + { + let quantified_type = quantify::quantify(state, deferred_kind) + .map(|(quantified_type, _)| quantified_type) + .unwrap_or(deferred_kind); + let type_id = transfer::globalize(state, context, quantified_type); + state.checked.types.insert(item_id, type_id); } let synonym_type = type_variables.iter().rfold(synonym_type, |inner, binder| { @@ -449,6 +468,22 @@ where let data_like = DataLike { quantified_variables, kind_variables }; state.checked.data.insert(item_id, data_like); + let type_id = transfer::globalize(state, context, quantified_type); + state.checked.types.insert(item_id, type_id); + } else if let Some(PendingType::Deferred(deferred_kind)) = state.pending_types.remove(&item_id) + { + let 
(quantified_type, quantified_variables) = + if let Some(result) = quantify::quantify(state, deferred_kind) { + result + } else { + (deferred_kind, debruijn::Size(0)) + }; + + let kind_variables = debruijn::Size(kind_variable_count); + + let data_like = DataLike { quantified_variables, kind_variables }; + state.checked.data.insert(item_id, data_like); + let type_id = transfer::globalize(state, context, quantified_type); state.checked.types.insert(item_id, type_id); } else { @@ -500,7 +535,7 @@ where let reference_type = state.storage.intern(Type::Constructor(context.id, item_id)); let reference_type = kind_variables.iter().fold(reference_type, |reference, binder| { - let variable = Variable::Bound(binder.level, binder.kind); + let variable = Variable::Bound(binder.variable.clone(), binder.kind); let variable = state.storage.intern(Type::Variable(variable)); state.storage.intern(Type::KindApplication(reference, variable)) }); @@ -511,14 +546,14 @@ where }); let mut unsolved_kinds = unsolved_kinds.collect_vec(); - unsolved_kinds.sort_by_key(|&(_, id)| (state.unification.get(id).domain, id)); + unsolved_kinds.sort_by_key(|&(_, id)| (state.unification.get(id).depth, id)); let reference_type = unsolved_kinds.iter().fold(reference_type, |reference, &(kind, _)| { state.storage.intern(Type::KindApplication(reference, kind)) }); type_variables.iter().fold(reference_type, |reference, binder| { - let variable = Variable::Bound(binder.level, binder.kind); + let variable = Variable::Bound(binder.variable.clone(), binder.kind); let variable = state.storage.intern(Type::Variable(variable)); state.storage.intern(Type::Application(reference, variable)) }) @@ -537,31 +572,38 @@ where let CheckedClass { inferred_kind, kind_variables, type_variables, superclasses, members } = checked; - let mut quantified_type = None; - let mut quantified_variables = debruijn::Size(0); - if let Some(inferred_kind) = inferred_kind - && let Some((q_type, q_variables)) = quantify::quantify(state, 
inferred_kind) + && let Some((quantified_type, _)) = quantify::quantify(state, inferred_kind) { - quantified_type = Some(q_type); - quantified_variables = q_variables; - }; + let type_id = transfer::globalize(state, context, quantified_type); + state.checked.types.insert(item_id, type_id); + } else if inferred_kind.is_none() + && let Some(PendingType::Deferred(deferred_kind)) = state.pending_types.remove(&item_id) + { + let quantified_type = + quantify::quantify(state, deferred_kind).map(|(qt, _)| qt).unwrap_or(deferred_kind); + let type_id = transfer::globalize(state, context, quantified_type); + state.checked.types.insert(item_id, type_id); + } let mut class = { let kind_var_count = kind_variables.len() as u32; let kind_variables = debruijn::Size(kind_var_count); let type_variable_kinds = type_variables.iter().map(|binder| binder.kind).collect(); - Class { superclasses, type_variable_kinds, quantified_variables, kind_variables } + let type_variable_names = + type_variables.iter().map(|binder| binder.variable.clone()).collect(); + Class { + superclasses, + type_variable_kinds, + type_variable_names, + quantified_variables: debruijn::Size(0), + kind_variables, + } }; - let class_quantified_count = + let quantified_variables = quantify::quantify_class(state, &mut class).unwrap_or(debruijn::Size(0)); - debug_assert_eq!( - quantified_variables, class_quantified_count, - "critical violation: class type signature and declaration should have the same number of variables" - ); - class.quantified_variables = quantified_variables; let superclasses = class.superclasses.iter().map(|&(t, k)| { @@ -579,11 +621,6 @@ where state.checked.classes.insert(item_id, class); - if let Some(quantified_type) = quantified_type { - let type_id = transfer::globalize(state, context, quantified_type); - state.checked.types.insert(item_id, type_id); - } - for (member_id, member_type) in members { if let Some((quantified_member, _)) = quantify::quantify(state, member_type) { let member_type = 
transfer::globalize(state, context, quantified_member); @@ -608,7 +645,8 @@ where // Now that all items in the SCC are processed, the kind should be fully resolved if !is_binary_operator_type(state, kind) { - state.insert_error(ErrorKind::InvalidTypeOperator { id: kind }); + let kind_message = state.render_local_type(context, kind); + state.insert_error(ErrorKind::InvalidTypeOperator { kind_message }); } // Generalize and store the kind @@ -620,6 +658,24 @@ where Ok(()) } +/// Commits remaining pending type entries from [`CheckState::pending_types`] +/// into [`CheckedModule::types`]. +pub fn commit_pending_types(state: &mut CheckState, context: &CheckContext) +where + Q: ExternalQueries, +{ + for (item_id, pending_kind) in state.pending_types.drain().collect_vec() { + let local_type = match pending_kind { + PendingType::Immediate(id) => id, + PendingType::Deferred(id) => { + quantify::quantify(state, id).map(|(id, _)| id).unwrap_or(id) + } + }; + let global_type = transfer::globalize(state, context, local_type); + state.checked.types.insert(item_id, global_type); + } +} + /// Checks the kind signature of a type item. /// /// This function also generalises the type and inserts it directly to @@ -629,6 +685,7 @@ where /// To enable scoped type variables, this function also populates the /// [`CheckState::surface_bindings`] with the kind variables found in /// the signature. +#[tracing::instrument(skip_all, name = "check_type_signature")] pub fn check_type_signature( state: &mut CheckState, context: &CheckContext, @@ -638,18 +695,33 @@ where Q: ExternalQueries, { state.with_error_step(ErrorStep::TypeDeclaration(item_id), |state| { - let _span = tracing::debug_span!("check_type_signature").entered(); - let Some(item) = context.lowered.info.get_type_item(item_id) else { return Ok(()); }; match item { + TypeItemIr::Foreign { signature, .. 
} => { + let Some(signature) = signature else { + return Ok(()); + }; + + let signature_variables = inspect::collect_signature_variables(context, *signature); + state.surface_bindings.insert_type(item_id, signature_variables); + + let (inferred_type, _) = + kind::check_surface_kind(state, context, *signature, context.prim.t)?; + + let quantified_type = quantify::quantify(state, inferred_type) + .map(|(quantified_type, _)| quantified_type) + .unwrap_or(inferred_type); + + state.pending_types.insert(item_id, PendingType::Immediate(quantified_type)); + } + TypeItemIr::DataGroup { signature, .. } | TypeItemIr::NewtypeGroup { signature, .. } | TypeItemIr::SynonymGroup { signature, .. } - | TypeItemIr::ClassGroup { signature, .. } - | TypeItemIr::Foreign { signature, .. } => { + | TypeItemIr::ClassGroup { signature, .. } => { let Some(signature) = signature else { return Ok(()); }; @@ -660,10 +732,7 @@ where let (inferred_type, _) = kind::check_surface_kind(state, context, *signature, context.prim.t)?; - if let Some((quantified_type, _)) = quantify::quantify(state, inferred_type) { - let type_id = transfer::globalize(state, context, quantified_type); - state.checked.types.insert(item_id, type_id); - } + state.pending_types.insert(item_id, PendingType::Deferred(inferred_type)); } TypeItemIr::Operator { .. } => {} @@ -677,6 +746,12 @@ struct SignatureLike { kind_variables: Vec, type_variables: Vec, result_kind: TypeId, + /// The scope level before kind_variables were bound, + /// used for unbinding. + kind_unbind_level: Option, + /// The scope level before type_variables were bound, + /// used for unbinding. 
+ type_unbind_level: Option, } fn check_signature_like( @@ -696,28 +771,43 @@ where let surface_bindings = state.surface_bindings.get_type(item_id); let surface_bindings = surface_bindings.as_deref().unwrap_or_default(); - let signature = - inspect::inspect_signature_core(state, context, stored_kind, surface_bindings)?; + // Capture scope level before inspect_signature binds kind variables. + let kind_unbind_level = debruijn::Level(state.type_scope.size().0); + + let signature = inspect::inspect_signature(state, context, stored_kind, surface_bindings)?; - if variables.len() != signature.arguments.len() { + let kind_unbind_level = + if signature.variables.is_empty() { None } else { Some(kind_unbind_level) }; + + // The kind signature may have more function arrows than the + // definition has parameters when the result kind is itself a + // function kind. For example: + // + // type C2 :: forall k. (k -> Type) -> (k -> Type) -> k -> Type + // type C2 a z = Coproduct a z + // + // The kind decomposes into 3 arrows but the synonym only has 2 + // parameters. The excess arrows belong to the result kind. 
+ if variables.len() > signature.arguments.len() { state.insert_error(ErrorKind::TypeSignatureVariableMismatch { id: signature_id, expected: 0, actual: 0, }); - if let Some(variable) = signature.variables.first() { - state.type_scope.unbind(variable.level); + if let Some(level) = kind_unbind_level { + state.type_scope.unbind(level); } return Ok(None); }; - let variables = variables.iter(); - let arguments = signature.arguments.iter(); + let parameter_count = variables.len(); + let (matched_arguments, excess_arguments) = signature.arguments.split_at(parameter_count); let kinds = variables - .zip(arguments) + .iter() + .zip(matched_arguments.iter()) .map(|(variable, &argument)| { // Use contravariant subtyping for type variables: // @@ -746,18 +836,40 @@ where .collect::>>()?; let kind_variables = signature.variables; - let result_kind = signature.result; - let type_variables = kinds.into_iter().map(|(id, visible, name, kind)| { - let level = state.type_scope.bind_forall(id, kind); - ForallBinder { visible, name, level, kind } + + // Fold excess arguments back into the result kind. + let result_kind = excess_arguments.iter().rfold(signature.result, |result, &argument| { + state.storage.intern(Type::Function(argument, result)) + }); + + // Capture scope level before binding type variables. 
+ let type_unbind_level = debruijn::Level(state.type_scope.size().0); + + let type_variables = kinds.into_iter().map(|(id, visible, text, kind)| { + let name = state.fresh_name(&text); + state.type_scope.bind_forall(id, kind, name.clone()); + ForallBinder { visible, implicit: false, text, variable: name, kind } }); let type_variables = type_variables.collect_vec(); - SignatureLike { kind_variables, type_variables, result_kind } + let type_unbind_level = + if type_variables.is_empty() { None } else { Some(type_unbind_level) }; + + SignatureLike { + kind_variables, + type_variables, + result_kind, + kind_unbind_level, + type_unbind_level, + } } else { let kind_variables = vec![]; let result_kind = infer_result(state); + + // Capture scope level before binding type variables. + let type_unbind_level = debruijn::Level(state.type_scope.size().0); + let type_variables = variables.iter().map(|variable| { let kind = if let Some(id) = variable.kind { let (kind, _) = kind::check_surface_kind(state, context, id, context.prim.t)?; @@ -767,14 +879,24 @@ where }; let visible = variable.visible; - let name = variable.name.clone().unwrap_or(MISSING_NAME); - let level = state.type_scope.bind_forall(variable.id, kind); - Ok(ForallBinder { visible, name, level, kind }) + let text = variable.name.clone().unwrap_or(MISSING_NAME); + let name = state.fresh_name(&text); + state.type_scope.bind_forall(variable.id, kind, name.clone()); + Ok(ForallBinder { visible, implicit: false, text, variable: name, kind }) }); let type_variables = type_variables.collect::>>()?; - SignatureLike { kind_variables, type_variables, result_kind } + let type_unbind_level = + if type_variables.is_empty() { None } else { Some(type_unbind_level) }; + + SignatureLike { + kind_variables, + type_variables, + result_kind, + kind_unbind_level: None, + type_unbind_level, + } }; Ok(Some(signature)) @@ -833,6 +955,7 @@ where Ok(()) } +#[tracing::instrument(skip_all, name = "check_constructor_arguments")] fn 
check_constructor_arguments( state: &mut CheckState, context: &CheckContext, @@ -841,7 +964,6 @@ fn check_constructor_arguments( where Q: ExternalQueries, { - let _span = tracing::debug_span!("check_constructor_arguments").entered(); let mut constructors = vec![]; for item_id in context.indexed.pairs.data_constructors(item_id) { @@ -862,6 +984,7 @@ where Ok(constructors) } +#[tracing::instrument(skip_all, name = "infer_constructor_argument")] fn infer_constructor_argument( state: &mut CheckState, context: &CheckContext, @@ -871,7 +994,6 @@ where Q: ExternalQueries, { state.with_error_step(ErrorStep::ConstructorArgument(argument), |state| { - let _span = tracing::debug_span!("infer_constructor_argument").entered(); let (inferred_type, _) = kind::check_surface_kind(state, context, argument, context.prim.t)?; Ok(inferred_type) @@ -894,8 +1016,8 @@ fn infer_roles( ) { let type_id = state.normalize_type(type_id); match state.storage[type_id].clone() { - Type::Variable(Variable::Bound(level, _)) => { - if let Some(index) = variables.iter().position(|v| v.level == level) { + Type::Variable(Variable::Bound(name, _)) => { + if let Some(index) = variables.iter().position(|v| v.variable == name) { // The following cases infer to nominal roles: // // ``` @@ -1021,11 +1143,8 @@ fn check_roles( if is_foreign || declared >= inferred { *validated = declared; } else { - state.insert_error(ErrorKind::InvalidRoleDeclaration { - type_id, - parameter_index: index, - declared, - inferred, + state.with_error_step(ErrorStep::TypeDeclaration(type_id), |state| { + state.insert_error(ErrorKind::InvalidRoleDeclaration { index, declared, inferred }); }); } } diff --git a/compiler-core/checking/src/algorithm/unification.rs b/compiler-core/checking/src/algorithm/unification.rs index 8a94f56c3..f8fa9dcb5 100644 --- a/compiler-core/checking/src/algorithm/unification.rs +++ b/compiler-core/checking/src/algorithm/unification.rs @@ -4,12 +4,35 @@ use building_types::QueryResult; use 
itertools::{EitherOrBoth, Itertools}; use crate::ExternalQueries; -use crate::algorithm::kind::synonym; use crate::algorithm::state::{CheckContext, CheckState}; -use crate::algorithm::{kind, substitute, transfer}; +use crate::algorithm::{kind, substitute, toolkit}; use crate::core::{RowField, RowType, Type, TypeId, Variable, debruijn}; use crate::error::ErrorKind; +/// Determines if constraints are elaborated during [`subtype`]. +/// +/// Elaboration means pushing constraints as "wanted" and inserting dictionary +/// placeholders. This is only valid in **covariant** positions where the type +/// checker controls what value is passed. +/// +/// In **contravariant** positions (e.g., function arguments), the caller provides +/// values—we cannot insert dictionaries there. When both sides have matching +/// constraint structure, structural unification handles them correctly: +/// +/// ```text +/// (IsSymbol ?sym => Proxy ?sym -> r) <= (IsSymbol ~sym => Proxy ~sym -> r) +/// IsSymbol ?sym ~ IsSymbol ~sym → solves ?sym := ~sym +/// ``` +/// +/// [`Type::Function`] in the [`subtype`] rule disables this for the argument +/// and result positions. Syntax-driven rules like checking for binders and +/// expressions that appear in the argument position also disable elaboration. 
+#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum ElaborationMode { + Yes, + No, +} + /// Check that `t1` is a subtype of `t2` /// /// In the type system, we define that polymorphic types are subtypes of @@ -42,7 +65,6 @@ use crate::error::ErrorKind; /// subtype (?a -> ?a) (~a -> ~a) /// subtype ?a ~a /// ``` -#[tracing::instrument(skip_all, name = "subtype")] pub fn subtype( state: &mut CheckState, context: &CheckContext, @@ -52,10 +74,24 @@ pub fn subtype( where Q: ExternalQueries, { - let t1 = synonym::normalize_expand_type(state, context, t1)?; - let t2 = synonym::normalize_expand_type(state, context, t2)?; + subtype_with_mode(state, context, t1, t2, ElaborationMode::Yes) +} - crate::debug_fields!(state, context, { t1 = t1, t2 = t2 }); +#[tracing::instrument(skip_all, name = "subtype_with_mode")] +pub fn subtype_with_mode( + state: &mut CheckState, + context: &CheckContext, + t1: TypeId, + t2: TypeId, + mode: ElaborationMode, +) -> QueryResult +where + Q: ExternalQueries, +{ + let t1 = toolkit::normalise_expand_type(state, context, t1)?; + let t2 = toolkit::normalise_expand_type(state, context, t2)?; + + crate::debug_fields!(state, context, { t1 = t1, t2 = t2, ?mode = mode }); if t1 == t2 { crate::trace_fields!(state, context, { t1 = t1, t2 = t2 }, "identical"); @@ -67,29 +103,58 @@ where match (t1_core, t2_core) { (Type::Function(t1_argument, t1_result), Type::Function(t2_argument, t2_result)) => { - Ok(subtype(state, context, t2_argument, t1_argument)? - && subtype(state, context, t1_result, t2_result)?) + Ok(subtype_with_mode(state, context, t2_argument, t1_argument, ElaborationMode::No)? + && subtype_with_mode(state, context, t1_result, t2_result, ElaborationMode::No)?) 
+ } + + (Type::Application(_, _), Type::Function(t2_argument, t2_result)) => { + let t2 = state.storage.intern(Type::Application(context.prim.function, t2_argument)); + let t2 = state.storage.intern(Type::Application(t2, t2_result)); + subtype_with_mode(state, context, t1, t2, mode) + } + + (Type::Function(t1_argument, t1_result), Type::Application(_, _)) => { + let t1 = state.storage.intern(Type::Application(context.prim.function, t1_argument)); + let t1 = state.storage.intern(Type::Application(t1, t1_result)); + subtype_with_mode(state, context, t1, t2, mode) } (_, Type::Forall(ref binder, inner)) => { - let v = Variable::Skolem(binder.level, binder.kind); + let v = Variable::Skolem(binder.variable.clone(), binder.kind); let t = state.storage.intern(Type::Variable(v)); - let inner = substitute::SubstituteBound::on(state, binder.level, t, inner); - subtype(state, context, t1, inner) + let inner = substitute::SubstituteBound::on(state, binder.variable.clone(), t, inner); + subtype_with_mode(state, context, t1, inner, mode) } (Type::Forall(ref binder, inner), _) => { let k = state.normalize_type(binder.kind); let t = state.fresh_unification_kinded(k); - let inner = substitute::SubstituteBound::on(state, binder.level, t, inner); - subtype(state, context, inner, t2) + let inner = substitute::SubstituteBound::on(state, binder.variable.clone(), t, inner); + subtype_with_mode(state, context, inner, t2, mode) } - (Type::Constrained(constraint, inner), _) => { - state.constraints.push_wanted(constraint); - subtype(state, context, inner, t2) + (Type::Constrained(constraint, inner), _) if mode == ElaborationMode::Yes => { + state.push_wanted(constraint); + subtype_with_mode(state, context, inner, t2, mode) + } + + ( + Type::Application(t1_function, t1_argument), + Type::Application(t2_function, t2_argument), + ) if t1_function == context.prim.record && t2_function == context.prim.record => { + let t1_argument = toolkit::normalise_expand_type(state, context, t1_argument)?; + 
let t2_argument = toolkit::normalise_expand_type(state, context, t2_argument)?; + + let t1_core = state.storage[t1_argument].clone(); + let t2_core = state.storage[t2_argument].clone(); + + if let (Type::Row(t1_row), Type::Row(t2_row)) = (t1_core, t2_core) { + subtype_rows(state, context, &t1_row, &t2_row, mode) + } else { + unify(state, context, t1, t2) + } } (_, _) => unify(state, context, t1, t2), @@ -106,8 +171,8 @@ pub fn unify( where Q: ExternalQueries, { - let t1 = synonym::normalize_expand_type(state, context, t1)?; - let t2 = synonym::normalize_expand_type(state, context, t2)?; + let t1 = toolkit::normalise_expand_type(state, context, t1)?; + let t2 = toolkit::normalise_expand_type(state, context, t2)?; crate::debug_fields!(state, context, { t1 = t1, t2 = t2 }); @@ -143,13 +208,100 @@ where unify(state, context, t1_left, t2_left)? && unify(state, context, t1_right, t2_right)? } + (Type::Forall(t1_binder, t1_inner), Type::Forall(t2_binder, t2_inner)) => { + unify(state, context, t1_binder.kind, t2_binder.kind)?; + + let (t1_name, t2_name) = state.fresh_name_unify(&t1_binder.text, &t2_binder.text); + + let t1_skolem = { + let skolem = Variable::Skolem(t1_name, t1_binder.kind); + state.storage.intern(Type::Variable(skolem)) + }; + + let t2_skolem = { + let skolem = Variable::Skolem(t2_name, t2_binder.kind); + state.storage.intern(Type::Variable(skolem)) + }; + + let t1 = + substitute::SubstituteBound::on(state, t1_binder.variable, t1_skolem, t1_inner); + let t2 = + substitute::SubstituteBound::on(state, t2_binder.variable, t2_skolem, t2_inner); + + unify(state, context, t1, t2)? + } + + (Type::Forall(binder, inner), _) => { + let name = state.fresh_name(&binder.text); + let skolem = Variable::Skolem(name, binder.kind); + let skolem = state.storage.intern(Type::Variable(skolem)); + let inner = substitute::SubstituteBound::on(state, binder.variable, skolem, inner); + unify(state, context, inner, t2)? 
+ } + + (_, Type::Forall(binder, inner)) => { + let name = state.fresh_name(&binder.text); + let skolem = Variable::Skolem(name, binder.kind); + let skolem = state.storage.intern(Type::Variable(skolem)); + let inner = substitute::SubstituteBound::on(state, binder.variable, skolem, inner); + unify(state, context, t1, inner)? + } + (Type::Function(t1_argument, t1_result), Type::Function(t2_argument, t2_result)) => { unify(state, context, t1_argument, t2_argument)? && unify(state, context, t1_result, t2_result)? } + // Unify Application(Application(f, a), b) with Function(a', b'). + // + // This handles the case where `f` is a unification variable that should + // be solved to `Function`. For example, when checking: + // + // identity :: forall t. Category a => a t t + // + // monomorphic :: forall a. a -> a + // monomorphic = identity + // + // Unifying `?a ?t ?t` and `a -> a` solves `?a := Function`. + // + // We reconstruct the `Application`-based form for a `Function` as the + // type to unify against, allowing `Application(?f, ?x)` to unify. + (Type::Application(_, _), Type::Function(t2_argument, t2_result)) => { + let t2 = state.storage.intern(Type::Application(context.prim.function, t2_argument)); + let t2 = state.storage.intern(Type::Application(t2, t2_result)); + unify(state, context, t1, t2)? + } + + (Type::Function(t1_argument, t1_result), Type::Application(_, _)) => { + let t1 = state.storage.intern(Type::Application(context.prim.function, t1_argument)); + let t1 = state.storage.intern(Type::Application(t1, t1_result)); + unify(state, context, t1, t2)? + } + (Type::Row(t1_row), Type::Row(t2_row)) => unify_rows(state, context, t1_row, t2_row)?, + ( + Type::Variable(Variable::Bound(t1_name, t1_kind)), + Type::Variable(Variable::Bound(t2_name, t2_kind)), + ) => { + if t1_name == t2_name { + unify(state, context, t1_kind, t2_kind)? 
+ } else { + false + } + } + + ( + Type::Variable(Variable::Skolem(t1_level, t1_kind)), + Type::Variable(Variable::Skolem(t2_level, t2_kind)), + ) => { + if t1_level == t2_level { + unify(state, context, t1_kind, t2_kind)? + } else { + false + } + } + (Type::Unification(unification_id), _) => { solve(state, context, unification_id, t2)?.is_some() } @@ -172,8 +324,8 @@ where if !unifies { // at this point, it should be impossible to have // unsolved unification variables within t1 and t2 - let t1 = transfer::globalize(state, context, t1); - let t2 = transfer::globalize(state, context, t2); + let t1 = state.render_local_type(context, t1); + let t2 = state.render_local_type(context, t2); state.insert_error(ErrorKind::CannotUnify { t1, t2 }); } @@ -195,9 +347,9 @@ where solution = solution, }); - let codomain = state.type_scope.size(); + let solve_depth = state.type_scope.size(); - if !promote_type(state, codomain, unification_id, solution) { + if !promote_type(state, solve_depth, unification_id, solution) { crate::trace_fields!(state, context, { ?unification_id = unification_id, solution = solution, @@ -216,181 +368,237 @@ where pub fn promote_type( state: &mut CheckState, - codomain: debruijn::Size, + solve_depth: debruijn::Size, unification_id: u32, solution: TypeId, ) -> bool { - let solution = state.normalize_type(solution); - match state.storage[solution] { - Type::Application(function, argument) => { - promote_type(state, codomain, unification_id, function) - && promote_type(state, codomain, unification_id, argument) - } + /// Invariant context for the inner recursion of [`promote_type`]. + struct PromoteContext { + /// The type scope size when calling [`solve`]. + /// + /// Bound variables at or above this level are introduced + /// by foralls within the solution and don't escape. + solve_depth: debruijn::Size, + /// The unification variable being solved. 
+ unification_id: u32, + } - Type::Constrained(constraint, inner) => { - promote_type(state, codomain, unification_id, constraint) - && promote_type(state, codomain, unification_id, inner) - } + fn aux(s: &mut CheckState, c: &PromoteContext, depth: debruijn::Size, t: TypeId) -> bool { + let t = s.normalize_type(t); + match s.storage[t] { + Type::Application(function, argument) => { + aux(s, c, depth, function) && aux(s, c, depth, argument) + } - Type::Constructor(_, _) => true, + Type::Constrained(constraint, inner) => { + aux(s, c, depth, constraint) && aux(s, c, depth, inner) + } - Type::Forall(ref binder, inner) => { - let inner_codomain = codomain.increment(); - promote_type(state, codomain, unification_id, binder.kind) - && promote_type(state, inner_codomain, unification_id, inner) - } + Type::Constructor(_, _) => true, - Type::Function(argument, result) => { - promote_type(state, codomain, unification_id, argument) - && promote_type(state, codomain, unification_id, result) - } + Type::Forall(ref binder, inner) => { + let inner_depth = depth.increment(); + aux(s, c, depth, binder.kind) && aux(s, c, inner_depth, inner) + } + + Type::Function(argument, result) => { + aux(s, c, depth, argument) && aux(s, c, depth, result) + } - Type::Integer(_) => true, + Type::Integer(_) => true, - Type::KindApplication(function, argument) => { - promote_type(state, codomain, unification_id, function) - && promote_type(state, codomain, unification_id, argument) - } + Type::KindApplication(function, argument) => { + aux(s, c, depth, function) && aux(s, c, depth, argument) + } - Type::Kinded(inner, kind) => { - promote_type(state, codomain, unification_id, inner) - && promote_type(state, codomain, unification_id, kind) - } + Type::Kinded(inner, kind) => aux(s, c, depth, inner) && aux(s, c, depth, kind), - Type::Operator(_, _) => true, + Type::Operator(_, _) => true, - Type::OperatorApplication(_, _, left, right) => { - promote_type(state, codomain, unification_id, left) - && 
promote_type(state, codomain, unification_id, right) - } + Type::OperatorApplication(_, _, left, right) => { + aux(s, c, depth, left) && aux(s, c, depth, right) + } - Type::Row(RowType { ref fields, tail }) => { - let fields = Arc::clone(fields); + Type::Row(RowType { ref fields, tail }) => { + let fields = Arc::clone(fields); - for field in fields.iter() { - if !promote_type(state, codomain, unification_id, field.id) { + for field in fields.iter() { + if !aux(s, c, depth, field.id) { + return false; + } + } + + if let Some(tail) = tail + && !aux(s, c, depth, tail) + { return false; } - } - if let Some(tail) = tail - && !promote_type(state, codomain, unification_id, tail) - { - return false; + true } - true - } - - Type::String(_, _) => true, + Type::String(_, _) => true, - Type::SynonymApplication(_, _, _, ref arguments) => { - let arguments = Arc::clone(arguments); - for argument in arguments.iter() { - if !promote_type(state, codomain, unification_id, *argument) { - return false; + Type::SynonymApplication(_, _, _, ref arguments) => { + let arguments = Arc::clone(arguments); + for argument in arguments.iter() { + if !aux(s, c, depth, *argument) { + return false; + } } + true } - true - } - Type::Unification(solution_id) => { - let unification = state.unification.get(unification_id); - let solution = state.unification.get(solution_id); + Type::Unification(solution_id) => { + let unification = s.unification.get(c.unification_id); + let solution = s.unification.get(solution_id); - if unification_id == solution_id { - return false; - } + if c.unification_id == solution_id { + return false; + } - if unification.domain < solution.domain { - let promoted_ty = - state.fresh_unification_kinded_at(unification.domain, unification.kind); + if unification.depth < solution.depth { + let promoted_ty = + s.fresh_unification_kinded_at(unification.depth, unification.kind); - // promoted_ty is simple enough to not warrant `solve` recursion - state.unification.solve(solution_id, 
promoted_ty); - } + // promoted_ty is simple enough to not warrant `solve` recursion + s.unification.solve(solution_id, promoted_ty); + } - true - } + true + } - Type::Variable(ref variable) => { - // A bound variable escapes if its level >= the unification variable's domain. - // This means the variable was bound at or after the unification was created. - match variable { - Variable::Bound(level, kind) => { - let unification = state.unification.get(unification_id); - if level.0 >= unification.domain.0 { - return false; + Type::Variable(ref variable) => { + // Given a unification variable ?u created at depth C; and + // the solve depth S, the type scope size when solve was + // called; and a given variable bound at `level`, we define: + // + // level < C — safe, in scope when ?u was created + // C <= level < S — unsafe, introduced after ?u but before solving + // S <= level — safe, bound by a forall within the solution + // + // The third rule enables impredicative solutions. Forall types + // inside the solution introduce bound variables that are local + // to the solution type and don't escape. For example: + // + // Solving `?a[:1] := forall c. Maybe c` + // + // forall a. -- level 0, below C(1) → safe + // forall b. -- level 1, C=1, ?a created here + // solve at S=2 + // forall c. -- level 2, >= S(2) → solution-internal, safe + // Maybe c + // + // Without the third rule, `c` at level 2 >= C(1) would be + // rejected as escaping, breaking `?a := forall c. Maybe c`. 
+ match variable { + Variable::Bound(name, kind) => { + let bound_depth = name.depth; + if bound_depth.0 >= c.solve_depth.0 { + // S <= depth + return aux(s, c, depth, *kind); + } + let unification = s.unification.get(c.unification_id); + if bound_depth.0 >= unification.depth.0 { + // C <= depth < S + return false; + } + // depth < C + aux(s, c, depth, *kind) } - promote_type(state, codomain, unification_id, *kind) + Variable::Skolem(_, kind) => aux(s, c, depth, *kind), + Variable::Free(_) => true, } - Variable::Skolem(_, kind) => promote_type(state, codomain, unification_id, *kind), - Variable::Free(_) => true, } - } - Type::Unknown => true, + Type::Unknown => true, + } } + + let c = PromoteContext { solve_depth, unification_id }; + aux(state, &c, solve_depth, solution) } -fn unify_rows( +/// Checks that `t1_row` is a subtype of `t2_row`, generating errors for +/// additional or missing fields. This is used for record subtyping. +/// +/// * This algorithm partitions row fields into common, t1-only, and t2-only fields. 
+/// * If t1_row is closed and t2_row is non-empty, [`ErrorKind::PropertyIsMissing`] +/// * If t2_row is closed and t1_row is non-empty, [`ErrorKind::AdditionalProperty`] +#[tracing::instrument(skip_all, name = "subtype_rows")] +fn subtype_rows( state: &mut CheckState, context: &CheckContext, - t1_row: RowType, - t2_row: RowType, + t1_row: &RowType, + t2_row: &RowType, + mode: ElaborationMode, ) -> QueryResult where Q: ExternalQueries, { - let (extras_left, extras_right, ok) = partition_row_fields(state, context, &t1_row, &t2_row)?; + let (left_only, right_only, ok) = partition_row_fields_with( + state, + context, + t1_row, + t2_row, + |state, context, left, right| subtype_with_mode(state, context, left, right, mode), + )?; if !ok { return Ok(false); } - match (t1_row.tail, t2_row.tail) { - (None, None) => Ok(extras_left.is_empty() && extras_right.is_empty()), + let mut failed = false; - (Some(t1_tail), None) => { - if !extras_left.is_empty() { - return Ok(false); - } - let row = Type::Row(RowType { fields: Arc::from(extras_right), tail: None }); - let row_id = state.storage.intern(row); - unify(state, context, t1_tail, row_id) - } + if t1_row.tail.is_none() && !right_only.is_empty() { + let labels = right_only.iter().map(|field| field.label.clone()); + let labels = Arc::from_iter(labels); + state.insert_error(ErrorKind::PropertyIsMissing { labels }); + failed = true; + } - (None, Some(t2_tail)) => { - if !extras_right.is_empty() { - return Ok(false); - } - let row = Type::Row(RowType { fields: Arc::from(extras_left), tail: None }); - let row_id = state.storage.intern(row); - unify(state, context, t2_tail, row_id) - } + if t2_row.tail.is_none() && !left_only.is_empty() { + let labels = left_only.iter().map(|field| field.label.clone()); + let labels = Arc::from_iter(labels); + state.insert_error(ErrorKind::AdditionalProperty { labels }); + failed = true; + } - (Some(t1_tail), Some(t2_tail)) => { - if extras_left.is_empty() && extras_right.is_empty() { - return 
unify(state, context, t1_tail, t2_tail); - } + if failed { + return Ok(false); + } - let row_type_kind = - state.storage.intern(Type::Application(context.prim.row, context.prim.t)); + unify_row_tails(state, context, t1_row.tail, t2_row.tail, left_only, right_only) +} - let unification = state.fresh_unification_kinded(row_type_kind); +fn unify_rows( + state: &mut CheckState, + context: &CheckContext, + t1_row: RowType, + t2_row: RowType, +) -> QueryResult +where + Q: ExternalQueries, +{ + let (left_only, right_only, ok) = partition_row_fields(state, context, &t1_row, &t2_row)?; - let left_tail_row = - Type::Row(RowType { fields: Arc::from(extras_right), tail: Some(unification) }); - let left_tail_row_id = state.storage.intern(left_tail_row); + if !ok { + return Ok(false); + } - let right_tail_row = - Type::Row(RowType { fields: Arc::from(extras_left), tail: Some(unification) }); - let right_tail_row_id = state.storage.intern(right_tail_row); + if t1_row.tail.is_none() && t2_row.tail.is_none() { + return Ok(left_only.is_empty() && right_only.is_empty()); + } - Ok(unify(state, context, t1_tail, left_tail_row_id)? - && unify(state, context, t2_tail, right_tail_row_id)?) 
- } + if t2_row.tail.is_none() && !left_only.is_empty() { + return Ok(false); + } + + if t1_row.tail.is_none() && !right_only.is_empty() { + return Ok(false); } + + unify_row_tails(state, context, t1_row.tail, t2_row.tail, left_only, right_only) } pub fn partition_row_fields( @@ -401,6 +609,20 @@ pub fn partition_row_fields( ) -> QueryResult<(Vec, Vec, bool)> where Q: ExternalQueries, +{ + partition_row_fields_with(state, context, t1_row, t2_row, unify) +} + +fn partition_row_fields_with( + state: &mut CheckState, + context: &CheckContext, + t1_row: &RowType, + t2_row: &RowType, + mut field_check: F, +) -> QueryResult<(Vec, Vec, bool)> +where + Q: ExternalQueries, + F: FnMut(&mut CheckState, &CheckContext, TypeId, TypeId) -> QueryResult, { let mut extras_left = vec![]; let mut extras_right = vec![]; @@ -412,7 +634,7 @@ where for field in t1_fields.merge_join_by(t2_fields, |left, right| left.label.cmp(&right.label)) { match field { EitherOrBoth::Both(left, right) => { - if !unify(state, context, left.id, right.id)? { + if !field_check(state, context, left.id, right.id)? 
{ ok = false; } } @@ -429,3 +651,49 @@ where Ok((extras_left, extras_right, ok)) } + +fn unify_row_tails( + state: &mut CheckState, + context: &CheckContext, + t1_tail: Option, + t2_tail: Option, + extras_left: Vec, + extras_right: Vec, +) -> QueryResult +where + Q: ExternalQueries, +{ + match (t1_tail, t2_tail) { + (None, None) => Ok(true), + + (Some(t1_tail), None) => { + let row = Type::Row(RowType { fields: Arc::from(extras_right), tail: None }); + let row_id = state.storage.intern(row); + unify(state, context, t1_tail, row_id) + } + + (None, Some(t2_tail)) => { + let row = Type::Row(RowType { fields: Arc::from(extras_left), tail: None }); + let row_id = state.storage.intern(row); + unify(state, context, t2_tail, row_id) + } + + (Some(t1_tail), Some(t2_tail)) => { + if extras_left.is_empty() && extras_right.is_empty() { + return unify(state, context, t1_tail, t2_tail); + } + + let unification = state.fresh_unification_kinded(context.prim.row_type); + let tail = Some(unification); + + let left_tail_row = Type::Row(RowType { fields: Arc::from(extras_right), tail }); + let left_tail_row_id = state.storage.intern(left_tail_row); + + let right_tail_row = Type::Row(RowType { fields: Arc::from(extras_left), tail }); + let right_tail_row_id = state.storage.intern(right_tail_row); + + Ok(unify(state, context, t1_tail, left_tail_row_id)? + && unify(state, context, t2_tail, right_tail_row_id)?) + } + } +} diff --git a/compiler-core/checking/src/algorithm/visit.rs b/compiler-core/checking/src/algorithm/visit.rs index 86ebd0fc1..f7cc80a60 100644 --- a/compiler-core/checking/src/algorithm/visit.rs +++ b/compiler-core/checking/src/algorithm/visit.rs @@ -39,6 +39,31 @@ impl TypeVisitor for CollectFileReferences<'_> { } } +/// Checks if a type contains any rows with labels. 
+pub struct HasLabeledRole { + contains: bool, +} + +impl HasLabeledRole { + pub fn on(state: &mut CheckState, id: TypeId) -> bool { + let mut visitor = HasLabeledRole { contains: false }; + visit_type(state, id, &mut visitor); + visitor.contains + } +} + +impl TypeVisitor for HasLabeledRole { + fn visit(&mut self, _state: &mut CheckState, _id: TypeId, t: &Type) -> VisitAction { + if let Type::Row(RowType { fields, .. }) = t + && !fields.is_empty() + { + self.contains = true; + return VisitAction::Stop; + } + VisitAction::Continue + } +} + /// Recursively visit a type without transforming it. pub fn visit_type(state: &mut CheckState, id: TypeId, visitor: &mut V) { let id = state.normalize_type(id); diff --git a/compiler-core/checking/src/core.rs b/compiler-core/checking/src/core.rs index 2c52d0999..0282b574b 100644 --- a/compiler-core/checking/src/core.rs +++ b/compiler-core/checking/src/core.rs @@ -3,24 +3,63 @@ pub mod debruijn; pub mod pretty; +use std::cmp::Ordering; +use std::hash::{Hash, Hasher}; use std::sync::Arc; use files::FileId; use indexing::{InstanceChainId, TypeItemId}; use smol_str::SmolStr; +/// Globally unique identity for a bound type variable. 
+#[derive(Debug, Clone)] +pub struct Name { + pub unique: u32, + pub file: FileId, + pub text: SmolStr, + pub depth: debruijn::Size, +} + +impl PartialEq for Name { + fn eq(&self, other: &Name) -> bool { + self.file == other.file && self.unique == other.unique + } +} + +impl Eq for Name {} + +impl Hash for Name { + fn hash(&self, state: &mut H) { + self.file.hash(state); + self.unique.hash(state); + } +} + +impl PartialOrd for Name { + fn partial_cmp(&self, other: &Name) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Name { + fn cmp(&self, other: &Name) -> Ordering { + self.file.cmp(&other.file).then(self.unique.cmp(&other.unique)) + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ForallBinder { pub visible: bool, - pub name: SmolStr, - pub level: debruijn::Level, + pub implicit: bool, + pub text: SmolStr, + pub variable: Name, pub kind: TypeId, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub enum Variable { - Skolem(debruijn::Level, TypeId), - Bound(debruijn::Level, TypeId), + Skolem(Name, TypeId), + Bound(Name, TypeId), Free(SmolStr), } @@ -124,13 +163,14 @@ pub struct Instance { pub constraints: Vec<(TypeId, TypeId)>, pub resolution: (FileId, TypeItemId), pub kind: InstanceKind, - pub kind_variables: Vec, + pub kind_variables: Vec<(Name, TypeId)>, } #[derive(Debug, Clone, PartialEq, Eq)] pub struct Class { pub superclasses: Arc<[(TypeId, TypeId)]>, pub type_variable_kinds: Vec, + pub type_variable_names: Vec, pub quantified_variables: debruijn::Size, pub kind_variables: debruijn::Size, } diff --git a/compiler-core/checking/src/core/pretty.rs b/compiler-core/checking/src/core/pretty.rs index 679d8a1f5..5b435b4fa 100644 --- a/compiler-core/checking/src/core/pretty.rs +++ b/compiler-core/checking/src/core/pretty.rs @@ -295,16 +295,16 @@ where let binder_docs = binders .iter() - .map(|ForallBinder { name, kind, .. }| { + .map(|ForallBinder { text, variable, kind, .. 
}| { let kind_doc = traverse_precedence(arena, source, context, Precedence::Top, *kind); - context.names.insert(context.depth.0, name.to_string()); + context.names.insert(variable.unique, text.to_string()); context.depth = debruijn::Size(context.depth.0 + 1); // Group each binder so it stays together as an atomic unit arena .text("(") - .append(arena.text(name.to_string())) + .append(arena.text(text.to_string())) .append(arena.text(" :: ")) .append(kind_doc) .append(arena.text(")")) @@ -454,7 +454,7 @@ where Type::Unification(unification_id) => match source { TraversalSource::Local { state, .. } => { let unification = state.unification.get(unification_id); - arena.text(format!("?{}[{}]", unification_id, unification.domain)) + arena.text(format!("?{}[{}]", unification_id, unification.depth)) } TraversalSource::Global { .. } => arena.text(format!("?{}[]", unification_id)), }, @@ -482,22 +482,22 @@ where Q: ExternalQueries, { match variable { - Variable::Skolem(level, kind) => { + Variable::Skolem(name, kind) => { + let name = format!("~{}", name.text); let kind_doc = traverse_precedence(arena, source, context, Precedence::Top, *kind); arena .text("(") - .append(arena.text(format!("~{}", level))) + .append(arena.text(name)) .append(arena.text(" :: ")) .append(kind_doc) .append(arena.text(")")) } - Variable::Bound(level, kind) => { - let name = context.names.get(&level.0).cloned(); - let name_doc = arena.text(name.unwrap_or_else(|| format!("{}", level))); + Variable::Bound(name, kind) => { + let name = name.text.to_string(); let kind_doc = traverse_precedence(arena, source, context, Precedence::Top, *kind); arena .text("(") - .append(name_doc) + .append(arena.text(name)) .append(arena.text(" :: ")) .append(kind_doc) .append(arena.text(")")) diff --git a/compiler-core/checking/src/error.rs b/compiler-core/checking/src/error.rs index 6e5178cd0..02ce6a3e7 100644 --- a/compiler-core/checking/src/error.rs +++ b/compiler-core/checking/src/error.rs @@ -2,7 +2,11 @@ use 
std::sync::Arc; -use crate::TypeId; +use interner::{Id, Interner}; +use smol_str::SmolStr; + +pub type TypeErrorMessageId = Id; +pub type TypeErrorMessageInterner = Interner; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum ErrorStep { @@ -18,29 +22,39 @@ pub enum ErrorStep { InferringExpression(lowering::ExpressionId), CheckingExpression(lowering::ExpressionId), + + InferringDoBind(lowering::DoStatementId), + InferringDoDiscard(lowering::DoStatementId), + CheckingDoLet(lowering::DoStatementId), + + InferringAdoMap(lowering::DoStatementId), + InferringAdoApply(lowering::DoStatementId), + CheckingAdoLet(lowering::DoStatementId), + + CheckingLetName(lowering::LetBindingNameGroupId), } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub enum ErrorKind { AmbiguousConstraint { - constraint: TypeId, + constraint: TypeErrorMessageId, }, CannotDeriveClass { class_file: files::FileId, class_id: indexing::TypeItemId, }, CannotDeriveForType { - type_id: TypeId, + type_message: TypeErrorMessageId, }, ContravariantOccurrence { - type_id: TypeId, + type_message: TypeErrorMessageId, }, CovariantOccurrence { - type_id: TypeId, + type_message: TypeErrorMessageId, }, CannotUnify { - t1: TypeId, - t2: TypeId, + t1: TypeErrorMessageId, + t2: TypeErrorMessageId, }, DeriveInvalidArity { class_file: files::FileId, @@ -51,24 +65,38 @@ pub enum ErrorKind { DeriveMissingFunctor, EmptyAdoBlock, EmptyDoBlock, + InvalidFinalBind, + InvalidFinalLet, InstanceHeadMismatch { class_file: files::FileId, class_item: indexing::TypeItemId, expected: usize, actual: usize, }, + InstanceHeadLabeledRow { + class_file: files::FileId, + class_item: indexing::TypeItemId, + position: usize, + type_message: TypeErrorMessageId, + }, InstanceMemberTypeMismatch { - expected: TypeId, - actual: TypeId, + expected: TypeErrorMessageId, + actual: TypeErrorMessageId, + }, + InvalidTypeApplication { + function_type: TypeErrorMessageId, + function_kind: TypeErrorMessageId, + 
argument_type: TypeErrorMessageId, }, InvalidTypeOperator { - id: TypeId, + kind_message: TypeErrorMessageId, }, ExpectedNewtype { - type_id: TypeId, + type_message: TypeErrorMessageId, }, + InvalidNewtypeDeriveSkolemArguments, NoInstanceFound { - constraint: TypeId, + constraint: TypeErrorMessageId, }, PartialSynonymApplication { id: lowering::TypeId, @@ -88,8 +116,7 @@ pub enum ErrorKind { actual: u32, }, InvalidRoleDeclaration { - type_id: indexing::TypeItemId, - parameter_index: usize, + index: usize, declared: crate::core::Role, inferred: crate::core::Role, }, @@ -98,10 +125,22 @@ pub enum ErrorKind { item_id: indexing::TypeItemId, }, CustomWarning { - message_id: u32, + message_id: TypeErrorMessageId, + }, + RedundantPatterns { + patterns: Arc<[SmolStr]>, + }, + MissingPatterns { + patterns: Arc<[SmolStr]>, }, CustomFailure { - message_id: u32, + message_id: TypeErrorMessageId, + }, + PropertyIsMissing { + labels: Arc<[SmolStr]>, + }, + AdditionalProperty { + labels: Arc<[SmolStr]>, }, } diff --git a/compiler-core/checking/src/lib.rs b/compiler-core/checking/src/lib.rs index 06ec02737..5304dd22d 100644 --- a/compiler-core/checking/src/lib.rs +++ b/compiler-core/checking/src/lib.rs @@ -1,10 +1,12 @@ pub mod algorithm; -pub mod error; pub mod trace; pub mod core; pub use core::{Type, TypeId, TypeInterner}; +pub mod error; +pub use error::{TypeErrorMessageId, TypeErrorMessageInterner}; + use std::sync::Arc; use building_types::{QueryProxy, QueryResult}; @@ -49,7 +51,7 @@ pub struct CheckedModule { pub roles: FxHashMap>, pub errors: Vec, - pub custom_messages: Vec, + pub error_messages: TypeErrorMessageInterner, } impl CheckedModule { diff --git a/compiler-core/checking/src/trace.rs b/compiler-core/checking/src/trace.rs index a452e84fc..47e5b9499 100644 --- a/compiler-core/checking/src/trace.rs +++ b/compiler-core/checking/src/trace.rs @@ -5,13 +5,33 @@ //! `no-tracing` feature is enabled. 
use building_types::QueryResult; +use syntax::SyntaxNodePtr; use crate::ExternalQueries; use crate::algorithm::state::CheckContext; use crate::error::ErrorStep; -/// Extracts the byte offset range from an error step. -pub fn step_byte_range(step: &ErrorStep, context: &CheckContext) -> Option<(u32, u32)> +fn spanning_byte_range(iterator: I) -> Option<(u32, u32)> +where + I: IntoIterator, + I::IntoIter: DoubleEndedIterator, +{ + let mut iter = iterator.into_iter(); + + let start = iter.next()?; + let end = iter.next_back().unwrap_or(start); + + let start = start.text_range(); + let end = end.text_range(); + + let range = start.cover(end); + let start = range.start().into(); + let end = range.end().into(); + + Some((start, end)) +} + +fn step_byte_range(step: &ErrorStep, context: &CheckContext) -> Option<(u32, u32)> where Q: ExternalQueries, { @@ -32,6 +52,29 @@ where ErrorStep::TypeDeclaration(id) => { context.indexed.type_item_ptr(&context.stabilized, *id).next()? } + ErrorStep::InferringDoBind(id) + | ErrorStep::InferringDoDiscard(id) + | ErrorStep::CheckingDoLet(id) => context.stabilized.syntax_ptr(*id)?, + ErrorStep::InferringAdoMap(id) + | ErrorStep::InferringAdoApply(id) + | ErrorStep::CheckingAdoLet(id) => context.stabilized.syntax_ptr(*id)?, + ErrorStep::CheckingLetName(id) => { + let group = context.lowered.info.get_let_binding_group(*id); + + let signature = group + .signature + .as_slice() + .iter() + .filter_map(|signature| context.stabilized.syntax_ptr(*signature)); + + let equations = group + .equations + .as_ref() + .iter() + .filter_map(|equation| context.stabilized.syntax_ptr(*equation)); + + return spanning_byte_range(signature.chain(equations)); + } }; let range = pointer.text_range(); @@ -42,7 +85,6 @@ where Some((start, end)) } -/// Returns the byte offset range for the most specific (innermost) error step. 
pub fn current_offset(check_steps: &[ErrorStep], context: &CheckContext) -> Option<(u32, u32)> where Q: ExternalQueries, diff --git a/compiler-core/diagnostics/Cargo.toml b/compiler-core/diagnostics/Cargo.toml new file mode 100644 index 000000000..271f5a945 --- /dev/null +++ b/compiler-core/diagnostics/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "diagnostics" +version = "0.1.0" +edition = "2024" + +[dependencies] +files = { version = "0.1.0", path = "../files" } +indexing = { version = "0.1.0", path = "../indexing" } +lowering = { version = "0.1.0", path = "../lowering" } +resolving = { version = "0.1.0", path = "../resolving" } +checking = { version = "0.1.0", path = "../checking" } +stabilizing = { version = "0.1.0", path = "../stabilizing" } +syntax = { version = "0.1.0", path = "../syntax" } + +rowan = "0.16.1" +lsp-types = "0.95.1" +line-index = "0.1.2" +itertools = "0.14.0" diff --git a/compiler-core/diagnostics/src/context.rs b/compiler-core/diagnostics/src/context.rs new file mode 100644 index 000000000..d9f83923b --- /dev/null +++ b/compiler-core/diagnostics/src/context.rs @@ -0,0 +1,170 @@ +use checking::CheckedModule; +use checking::error::ErrorStep; +use indexing::IndexedModule; +use lowering::LoweredModule; +use rowan::ast::{AstNode, AstPtr}; +use rowan::{NodeOrToken, TextRange}; +use stabilizing::StabilizedModule; +use syntax::{SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr}; + +use crate::Span; + +fn is_trivia(element: &SyntaxElement) -> bool { + match element { + NodeOrToken::Node(node) => matches!(node.kind(), SyntaxKind::Annotation), + NodeOrToken::Token(token) => { + token.text_range().is_empty() + || matches!(token.kind(), SyntaxKind::LAYOUT_SEPARATOR | SyntaxKind::ELSE) + } + } +} + +fn first_significant_range(node: &SyntaxNode) -> Option { + for child in node.children_with_tokens() { + if is_trivia(&child) { + continue; + } + match child { + NodeOrToken::Token(token) => return Some(token.text_range()), + NodeOrToken::Node(node) => { + 
if let Some(range) = first_significant_range(&node) { + return Some(range); + } + } + } + } + None +} + +fn last_significant_range(node: &SyntaxNode) -> Option { + let mut significant = None; + for child in node.children_with_tokens() { + if is_trivia(&child) { + continue; + } + match child { + NodeOrToken::Token(token) => significant = Some(token.text_range()), + NodeOrToken::Node(node) => { + if let Some(range) = last_significant_range(&node) { + significant = Some(range); + } + } + } + } + significant +} + +fn significant_ranges(node: &SyntaxNode) -> Option { + let start = first_significant_range(node)?; + let end = last_significant_range(node)?; + Some(start.cover(end)) +} + +fn pointers_span(context: &DiagnosticsContext<'_>, iterator: I) -> Option +where + I: IntoIterator, + I::IntoIter: DoubleEndedIterator, +{ + let mut iter = iterator.into_iter(); + + let start_ptr = iter.next()?; + let end_ptr = iter.next_back().unwrap_or(start_ptr); + + let start_span = context.span_from_syntax_ptr(&start_ptr)?; + let end_span = context.span_from_syntax_ptr(&end_ptr)?; + + Some(Span::new(start_span.start, end_span.end)) +} + +pub struct DiagnosticsContext<'a> { + pub content: &'a str, + pub root: &'a SyntaxNode, + pub stabilized: &'a StabilizedModule, + pub indexed: &'a IndexedModule, + pub lowered: &'a LoweredModule, + pub checked: &'a CheckedModule, +} + +impl<'a> DiagnosticsContext<'a> { + pub fn new( + content: &'a str, + root: &'a SyntaxNode, + stabilized: &'a StabilizedModule, + indexed: &'a IndexedModule, + lowered: &'a LoweredModule, + checked: &'a CheckedModule, + ) -> DiagnosticsContext<'a> { + DiagnosticsContext { content, root, stabilized, indexed, lowered, checked } + } + + pub fn span_from_syntax_ptr(&self, ptr: &SyntaxNodePtr) -> Option { + let node = ptr.try_to_node(self.root)?; + self.span_from_syntax_node(&node) + } + + pub fn span_from_ast_ptr>( + &self, + ptr: &AstPtr, + ) -> Option { + let node = ptr.try_to_node(self.root)?; + 
self.span_from_syntax_node(node.syntax()) + } + + fn span_from_syntax_node(&self, node: &SyntaxNode) -> Option { + let range = significant_ranges(node)?; + Some(Span::new(range.start().into(), range.end().into())) + } + + pub fn text_of(&self, span: Span) -> &'a str { + &self.content[span.start as usize..span.end as usize] + } + + pub fn span_from_error_step(&self, step: &ErrorStep) -> Option { + let ptr = match step { + ErrorStep::ConstructorArgument(id) => self.stabilized.syntax_ptr(*id)?, + ErrorStep::InferringKind(id) | ErrorStep::CheckingKind(id) => { + self.stabilized.syntax_ptr(*id)? + } + ErrorStep::InferringBinder(id) | ErrorStep::CheckingBinder(id) => { + self.stabilized.syntax_ptr(*id)? + } + ErrorStep::InferringExpression(id) | ErrorStep::CheckingExpression(id) => { + self.stabilized.syntax_ptr(*id)? + } + ErrorStep::TermDeclaration(id) => { + self.indexed.term_item_ptr(self.stabilized, *id).next()? + } + ErrorStep::TypeDeclaration(id) => { + self.indexed.type_item_ptr(self.stabilized, *id).next()? 
+ } + ErrorStep::InferringDoBind(id) + | ErrorStep::InferringDoDiscard(id) + | ErrorStep::CheckingDoLet(id) => self.stabilized.syntax_ptr(*id)?, + ErrorStep::InferringAdoMap(id) + | ErrorStep::InferringAdoApply(id) + | ErrorStep::CheckingAdoLet(id) => self.stabilized.syntax_ptr(*id)?, + ErrorStep::CheckingLetName(id) => { + let group = self.lowered.info.get_let_binding_group(*id); + + let signature = group + .signature + .as_slice() + .iter() + .filter_map(|signature| self.stabilized.syntax_ptr(*signature)); + + let equations = group + .equations + .as_ref() + .iter() + .filter_map(|equation| self.stabilized.syntax_ptr(*equation)); + + return pointers_span(self, signature.chain(equations)); + } + }; + self.span_from_syntax_ptr(&ptr) + } + + pub fn primary_span_from_steps(&self, steps: &[ErrorStep]) -> Option { + steps.last().and_then(|step| self.span_from_error_step(step)) + } +} diff --git a/compiler-core/diagnostics/src/convert.rs b/compiler-core/diagnostics/src/convert.rs new file mode 100644 index 000000000..c1056f2b2 --- /dev/null +++ b/compiler-core/diagnostics/src/convert.rs @@ -0,0 +1,371 @@ +use checking::error::{CheckError, ErrorKind}; +use indexing::TypeItemKind; +use itertools::Itertools; +use lowering::LoweringError; +use resolving::ResolvingError; +use rowan::ast::AstNode; + +use crate::{Diagnostic, DiagnosticsContext, Severity}; + +pub trait ToDiagnostics { + fn to_diagnostics(&self, ctx: &DiagnosticsContext<'_>) -> Vec; +} + +impl ToDiagnostics for LoweringError { + fn to_diagnostics(&self, ctx: &DiagnosticsContext<'_>) -> Vec { + match self { + LoweringError::NotInScope(not_in_scope) => { + let (ptr, name) = match not_in_scope { + lowering::NotInScope::ExprConstructor { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::ExprVariable { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::ExprOperatorName { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + 
lowering::NotInScope::TypeConstructor { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::TypeVariable { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::TypeOperatorName { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::NegateFn { id } => { + (ctx.stabilized.syntax_ptr(*id), Some("negate")) + } + lowering::NotInScope::DoFn { kind, id } => ( + ctx.stabilized.syntax_ptr(*id), + match kind { + lowering::DoFn::Bind => Some("bind"), + lowering::DoFn::Discard => Some("discard"), + }, + ), + lowering::NotInScope::AdoFn { kind, id } => ( + ctx.stabilized.syntax_ptr(*id), + match kind { + lowering::AdoFn::Map => Some("map"), + lowering::AdoFn::Apply => Some("apply"), + lowering::AdoFn::Pure => Some("pure"), + }, + ), + lowering::NotInScope::TermOperator { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + lowering::NotInScope::TypeOperator { id } => { + (ctx.stabilized.syntax_ptr(*id), None) + } + }; + + let Some(ptr) = ptr else { return vec![] }; + let Some(span) = ctx.span_from_syntax_ptr(&ptr) else { return vec![] }; + + let message = if let Some(name) = name { + format!("'{name}' is not in scope") + } else { + let text = ctx.text_of(span).trim(); + format!("'{text}' is not in scope") + }; + + vec![Diagnostic::error("NotInScope", message, span, "lowering")] + } + + LoweringError::RecursiveSynonym(group) => convert_recursive_group( + ctx, + &group.group, + "RecursiveSynonym", + "Invalid type synonym cycle", + ), + + LoweringError::RecursiveKinds(group) => { + convert_recursive_group(ctx, &group.group, "RecursiveKinds", "Invalid kind cycle") + } + } + } +} + +fn convert_recursive_group( + ctx: &DiagnosticsContext<'_>, + group: &[indexing::TypeItemId], + code: &'static str, + message: &'static str, +) -> Vec { + let spans = group.iter().filter_map(|id| { + let ptr = match ctx.indexed.items[*id].kind { + TypeItemKind::Synonym { equation, .. 
} => ctx.stabilized.syntax_ptr(equation?)?, + TypeItemKind::Data { equation, .. } => ctx.stabilized.syntax_ptr(equation?)?, + TypeItemKind::Newtype { equation, .. } => ctx.stabilized.syntax_ptr(equation?)?, + _ => return None, + }; + ctx.span_from_syntax_ptr(&ptr) + }); + + let spans = spans.collect_vec(); + + let Some(&primary) = spans.first() else { return vec![] }; + + let mut diagnostic = Diagnostic::error(code, message, primary, "lowering"); + + for &span in &spans[1..] { + diagnostic = diagnostic.with_related(span, "Includes this type"); + } + + vec![diagnostic] +} + +impl ToDiagnostics for ResolvingError { + fn to_diagnostics(&self, ctx: &DiagnosticsContext<'_>) -> Vec { + match self { + ResolvingError::TermExportConflict { .. } + | ResolvingError::TypeExportConflict { .. } + | ResolvingError::ExistingTerm { .. } + | ResolvingError::ExistingType { .. } => { + vec![] + } + + ResolvingError::InvalidImportStatement { id } => { + let Some(ptr) = ctx.stabilized.ast_ptr(*id) else { return vec![] }; + + let message = { + let cst = ptr.to_node(ctx.root); + let name = cst.module_name().map(|cst| { + let range = cst.syntax().text_range(); + ctx.content[range].trim() + }); + let name = name.unwrap_or(""); + format!("Cannot import module '{name}'") + }; + + let Some(span) = ctx.span_from_ast_ptr(&ptr) else { return vec![] }; + + vec![Diagnostic::error("InvalidImportStatement", message, span, "resolving")] + } + + ResolvingError::InvalidImportItem { id } => { + let Some(ptr) = ctx.stabilized.syntax_ptr(*id) else { return vec![] }; + let Some(span) = ctx.span_from_syntax_ptr(&ptr) else { return vec![] }; + + let text = ctx.text_of(span).trim(); + let message = format!("Cannot import item '{text}'"); + + vec![Diagnostic::error("InvalidImportItem", message, span, "resolving")] + } + } + } +} + +impl ToDiagnostics for CheckError { + fn to_diagnostics(&self, context: &DiagnosticsContext<'_>) -> Vec { + let Some(primary) = context.primary_span_from_steps(&self.step) else { + 
return vec![]; + }; + + let lookup_message = |id| context.checked.error_messages[id].as_str(); + + let (severity, code, message) = match &self.kind { + ErrorKind::AmbiguousConstraint { constraint } => { + let msg = lookup_message(*constraint); + (Severity::Error, "AmbiguousConstraint", format!("Ambiguous constraint: {msg}")) + } + ErrorKind::CannotDeriveClass { .. } => { + (Severity::Error, "CannotDeriveClass", "Cannot derive this class".to_string()) + } + ErrorKind::CannotDeriveForType { type_message } => { + let msg = lookup_message(*type_message); + (Severity::Error, "CannotDeriveForType", format!("Cannot derive for type: {msg}")) + } + ErrorKind::ContravariantOccurrence { type_message } => { + let msg = lookup_message(*type_message); + ( + Severity::Error, + "ContravariantOccurrence", + format!("Type variable occurs in contravariant position: {msg}"), + ) + } + ErrorKind::CovariantOccurrence { type_message } => { + let msg = lookup_message(*type_message); + ( + Severity::Error, + "CovariantOccurrence", + format!("Type variable occurs in covariant position: {msg}"), + ) + } + ErrorKind::CannotUnify { t1, t2 } => { + let t1 = lookup_message(*t1); + let t2 = lookup_message(*t2); + (Severity::Error, "CannotUnify", format!("Cannot unify '{t1}' with '{t2}'")) + } + ErrorKind::DeriveInvalidArity { expected, actual, .. 
} => ( + Severity::Error, + "DeriveInvalidArity", + format!("Invalid arity for derive: expected {expected}, got {actual}"), + ), + ErrorKind::DeriveMissingFunctor => ( + Severity::Error, + "DeriveMissingFunctor", + "Deriving Functor requires Data.Functor to be in scope".to_string(), + ), + ErrorKind::EmptyAdoBlock => { + (Severity::Error, "EmptyAdoBlock", "Empty ado block".to_string()) + } + ErrorKind::EmptyDoBlock => { + (Severity::Error, "EmptyDoBlock", "Empty do block".to_string()) + } + ErrorKind::InvalidFinalBind => ( + Severity::Warning, + "InvalidFinalBind", + "Invalid final bind statement in do expression".to_string(), + ), + ErrorKind::InvalidFinalLet => ( + Severity::Error, + "InvalidFinalLet", + "Invalid final let statement in do expression".to_string(), + ), + ErrorKind::InstanceHeadMismatch { expected, actual, .. } => ( + Severity::Error, + "InstanceHeadMismatch", + format!("Instance head mismatch: expected {expected} arguments, got {actual}"), + ), + ErrorKind::InstanceHeadLabeledRow { position, type_message, .. } => { + let type_msg = lookup_message(*type_message); + ( + Severity::Error, + "InstanceHeadLabeledRow", + format!( + "Instance argument at position {position} contains a labeled row, \ + but this position is not determined by any functional dependency. \ + Only the `( | r )` form is allowed. Got '{type_msg}' instead." 
+ ), + ) + } + ErrorKind::InstanceMemberTypeMismatch { expected, actual } => { + let expected = lookup_message(*expected); + let actual = lookup_message(*actual); + ( + Severity::Error, + "InstanceMemberTypeMismatch", + format!("Instance member type mismatch: expected '{expected}', got '{actual}'"), + ) + } + ErrorKind::InvalidTypeApplication { function_type, function_kind, argument_type } => { + let function_type = lookup_message(*function_type); + let function_kind = lookup_message(*function_kind); + let argument_type = lookup_message(*argument_type); + ( + Severity::Error, + "InvalidTypeApplication", + format!( + "Cannot apply type '{function_type}' to '{argument_type}'. \ + '{function_type}' has kind '{function_kind}', which is not a function kind." + ), + ) + } + ErrorKind::InvalidTypeOperator { kind_message } => { + let msg = lookup_message(*kind_message); + ( + Severity::Error, + "InvalidTypeOperator", + format!("Invalid type operator kind: {msg}"), + ) + } + ErrorKind::ExpectedNewtype { type_message } => { + let msg = lookup_message(*type_message); + (Severity::Error, "ExpectedNewtype", format!("Expected a newtype, got: {msg}")) + } + ErrorKind::NoInstanceFound { constraint } => { + let msg = lookup_message(*constraint); + (Severity::Error, "NoInstanceFound", format!("No instance found for: {msg}")) + } + ErrorKind::PartialSynonymApplication { .. } => ( + Severity::Error, + "PartialSynonymApplication", + "Partial type synonym application".to_string(), + ), + ErrorKind::RecursiveSynonymExpansion { .. } => ( + Severity::Error, + "RecursiveSynonymExpansion", + "Recursive type synonym expansion".to_string(), + ), + ErrorKind::TooManyBinders { expected, actual, .. } => ( + Severity::Error, + "TooManyBinders", + format!("Too many binders: expected {expected}, got {actual}"), + ), + ErrorKind::TypeSignatureVariableMismatch { expected, actual, .. 
} => ( + Severity::Error, + "TypeSignatureVariableMismatch", + format!( + "Type signature variable mismatch: expected {expected} variables, got {actual}" + ), + ), + ErrorKind::InvalidRoleDeclaration { declared, inferred, .. } => ( + Severity::Error, + "InvalidRoleDeclaration", + format!("Invalid role declaration: declared {declared:?}, inferred {inferred:?}"), + ), + ErrorKind::CoercibleConstructorNotInScope { .. } => ( + Severity::Error, + "CoercibleConstructorNotInScope", + "Constructor not in scope for Coercible".to_string(), + ), + ErrorKind::InvalidNewtypeDeriveSkolemArguments => ( + Severity::Error, + "InvalidNewtypeDeriveSkolemArguments", + "Cannot derive newtype instance where skolemised arguments do not appear trailing in the inner type." + .to_string(), + ), + ErrorKind::RedundantPatterns { patterns } => { + let patterns = patterns.join(", "); + ( + Severity::Warning, + "RedundantPattern", + format!("Pattern match has redundant patterns: {patterns}"), + ) + } + ErrorKind::MissingPatterns { patterns } => { + let patterns = patterns.join(", "); + ( + Severity::Warning, + "MissingPatterns", + format!("Pattern match is not exhaustive. 
Missing: {patterns}"), + ) + } + ErrorKind::CustomWarning { message_id } => { + let msg = lookup_message(*message_id); + (Severity::Warning, "CustomWarning", msg.to_string()) + } + ErrorKind::CustomFailure { message_id } => { + let msg = lookup_message(*message_id); + (Severity::Error, "CustomFailure", msg.to_string()) + } + ErrorKind::PropertyIsMissing { labels } => { + let labels_str = labels.join(", "); + ( + Severity::Error, + "PropertyIsMissing", + format!("Missing required properties: {labels_str}"), + ) + } + ErrorKind::AdditionalProperty { labels } => { + let labels_str = labels.join(", "); + ( + Severity::Error, + "AdditionalProperty", + format!("Additional properties not allowed: {labels_str}"), + ) + } + }; + + vec![Diagnostic { + severity, + code: crate::DiagnosticCode::new(code), + message, + primary, + related: vec![], + source: "checking", + }] + } +} diff --git a/compiler-core/diagnostics/src/lib.rs b/compiler-core/diagnostics/src/lib.rs new file mode 100644 index 000000000..f16805633 --- /dev/null +++ b/compiler-core/diagnostics/src/lib.rs @@ -0,0 +1,9 @@ +mod context; +mod convert; +mod model; +mod render; + +pub use context::DiagnosticsContext; +pub use convert::ToDiagnostics; +pub use model::{Diagnostic, DiagnosticCode, RelatedSpan, Severity, Span}; +pub use render::{format_rustc, format_rustc_with_path, format_text, to_lsp_diagnostic}; diff --git a/compiler-core/diagnostics/src/model.rs b/compiler-core/diagnostics/src/model.rs new file mode 100644 index 000000000..1302b0513 --- /dev/null +++ b/compiler-core/diagnostics/src/model.rs @@ -0,0 +1,94 @@ +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct Span { + pub start: u32, + pub end: u32, +} + +impl Span { + pub fn new(start: u32, end: u32) -> Span { + Span { start, end } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct RelatedSpan { + pub span: Span, + pub message: String, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum Severity { + Error, + Warning, +} + +use 
std::fmt; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct DiagnosticCode(&'static str); + +impl DiagnosticCode { + pub fn new(code: &'static str) -> DiagnosticCode { + DiagnosticCode(code) + } +} + +impl fmt::Display for DiagnosticCode { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.0) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Diagnostic { + pub severity: Severity, + pub code: DiagnosticCode, + pub message: String, + pub primary: Span, + pub related: Vec, + pub source: &'static str, +} + +impl Diagnostic { + pub fn error( + code: &'static str, + message: impl Into, + primary: Span, + source: &'static str, + ) -> Diagnostic { + let message = message.into(); + let related = vec![]; + Diagnostic { + severity: Severity::Error, + code: DiagnosticCode::new(code), + message, + primary, + related, + source, + } + } + + pub fn warning( + code: &'static str, + message: impl Into, + primary: Span, + source: &'static str, + ) -> Diagnostic { + let message = message.into(); + let related = vec![]; + Diagnostic { + severity: Severity::Warning, + code: DiagnosticCode::new(code), + message, + primary, + related, + source, + } + } + + pub fn with_related(mut self, span: Span, message: impl Into) -> Diagnostic { + let message = message.into(); + self.related.push(RelatedSpan { span, message }); + self + } +} diff --git a/compiler-core/diagnostics/src/render.rs b/compiler-core/diagnostics/src/render.rs new file mode 100644 index 000000000..0d7e0413a --- /dev/null +++ b/compiler-core/diagnostics/src/render.rs @@ -0,0 +1,217 @@ +use itertools::Itertools; +use line_index::{LineCol, LineIndex}; +use lsp_types::{ + DiagnosticRelatedInformation, DiagnosticSeverity, Location, NumberOrString, Position, Range, +}; +use rowan::TextSize; + +use crate::{Diagnostic, Severity, Span}; + +pub fn format_text(diagnostics: &[Diagnostic]) -> String { + let mut output = String::new(); + + for diagnostic in diagnostics { + let severity = 
match diagnostic.severity { + Severity::Error => "error", + Severity::Warning => "warning", + }; + + output.push_str(&format!( + "{severity}[{}] at {}..{}: {}\n", + diagnostic.code, diagnostic.primary.start, diagnostic.primary.end, diagnostic.message + )); + + for related in &diagnostic.related { + output.push_str(&format!( + " note at {}..{}: {}\n", + related.span.start, related.span.end, related.message + )); + } + } + + output +} + +fn line_text<'a>(line_index: &LineIndex, content: &'a str, line: u32) -> Option<&'a str> { + let range = line_index.line(line)?; + let text = &content[range]; + Some(text.trim_end_matches(['\n', '\r'])) +} + +fn caret_marker(line: &str, start_col: u32, end_col: Option) -> String { + let line_len = line.chars().count() as u32; + let start = start_col.min(line_len); + let end = end_col.unwrap_or(line_len).min(line_len); + + if end <= start { + format!("{}^", " ".repeat(start as usize)) + } else { + let tilde_count = (end - start).saturating_sub(1) as usize; + format!("{}^{}", " ".repeat(start as usize), "~".repeat(tilde_count)) + } +} + +fn span_location( + line_index: &LineIndex, + content: &str, + span: Span, +) -> Option<((u32, u32), (u32, u32))> { + let start = offset_to_position(line_index, content, TextSize::from(span.start))?; + let end = offset_to_position(line_index, content, TextSize::from(span.end))?; + Some(((start.line, start.character), (end.line, end.character))) +} + +pub fn format_rustc(diagnostics: &[Diagnostic], content: &str) -> String { + format_rustc_inner(diagnostics, content, None) +} + +/// Renders diagnostics in rustc style with a file path in the `-->` lines. 
+pub fn format_rustc_with_path(diagnostics: &[Diagnostic], content: &str, path: &str) -> String { + format_rustc_inner(diagnostics, content, Some(path)) +} + +fn format_rustc_inner(diagnostics: &[Diagnostic], content: &str, path: Option<&str>) -> String { + let line_index = LineIndex::new(content); + let mut output = String::new(); + + for diagnostic in diagnostics { + let severity = match diagnostic.severity { + Severity::Error => "error", + Severity::Warning => "warning", + }; + + output.push_str(&format!("{severity}[{}]: {}\n", diagnostic.code, diagnostic.message)); + + if let Some(((start_line, start_col), (end_line, end_col))) = + span_location(&line_index, content, diagnostic.primary) + { + let display_start_line = start_line + 1; + let display_start_col = start_col + 1; + let display_end_line = end_line + 1; + let display_end_col = end_col + 1; + + if let Some(path) = path { + output.push_str(&format!( + " --> {path}:{}:{}..{}:{}\n", + display_start_line, display_start_col, display_end_line, display_end_col + )); + } else { + output.push_str(&format!( + " --> {}:{}..{}:{}\n", + display_start_line, display_start_col, display_end_line, display_end_col + )); + } + + if let Some(line) = line_text(&line_index, content, start_line) { + let line_num_width = display_start_line.to_string().len(); + output.push_str(&format!("{:>width$} |\n", "", width = line_num_width)); + output.push_str(&format!("{} | {}\n", display_start_line, line)); + + let marker_end_col = if start_line == end_line { Some(end_col) } else { None }; + let marker = caret_marker(line, start_col, marker_end_col); + output.push_str(&format!("{:>width$} | {}\n", "", marker, width = line_num_width)); + } + } + + for related in &diagnostic.related { + output.push_str(&format!(" note: {}\n", related.message)); + + if let Some(((start_line, start_col), (end_line, end_col))) = + span_location(&line_index, content, related.span) + { + let display_start_line = start_line + 1; + let display_start_col = 
start_col + 1; + let display_end_line = end_line + 1; + let display_end_col = end_col + 1; + + if let Some(path) = path { + output.push_str(&format!( + " --> {path}:{}:{}..{}:{}\n", + display_start_line, display_start_col, display_end_line, display_end_col + )); + } else { + output.push_str(&format!( + " --> {}:{}..{}:{}\n", + display_start_line, display_start_col, display_end_line, display_end_col + )); + } + + if let Some(line) = line_text(&line_index, content, start_line) { + let line_num_width = display_start_line.to_string().len(); + output.push_str(&format!(" {:>width$} |\n", "", width = line_num_width)); + output.push_str(&format!(" {} | {}\n", display_start_line, line)); + + let marker_end_col = if start_line == end_line { Some(end_col) } else { None }; + let marker = caret_marker(line, start_col, marker_end_col); + output.push_str(&format!( + " {:>width$} | {}\n", + "", + marker, + width = line_num_width + )); + } + } + } + } + + output +} + +fn offset_to_position(line_index: &LineIndex, content: &str, offset: TextSize) -> Option { + let LineCol { line, col } = line_index.line_col(offset); + + let line_text_range = line_index.line(line)?; + let line_content = &content[line_text_range]; + + let until_col = &line_content[..col as usize]; + let character = until_col.chars().count() as u32; + + Some(Position { line, character }) +} + +pub fn to_lsp_diagnostic( + diagnostic: &Diagnostic, + content: &str, + uri: &lsp_types::Url, +) -> Option { + let line_index = LineIndex::new(content); + + let to_position = + |offset: u32| offset_to_position(&line_index, content, TextSize::from(offset)); + + let start = to_position(diagnostic.primary.start)?; + let end = to_position(diagnostic.primary.end)?; + let range = Range { start, end }; + + let severity = match diagnostic.severity { + Severity::Error => DiagnosticSeverity::ERROR, + Severity::Warning => DiagnosticSeverity::WARNING, + }; + + let related_information = diagnostic.related.iter().filter_map(|related| { + let 
start = to_position(related.span.start)?; + let end = to_position(related.span.end)?; + Some(DiagnosticRelatedInformation { + location: Location { uri: uri.clone(), range: Range { start, end } }, + message: related.message.clone(), + }) + }); + + let related_information = related_information.collect_vec(); + + Some(lsp_types::Diagnostic { + range, + severity: Some(severity), + code: Some(NumberOrString::String(diagnostic.code.to_string())), + code_description: None, + source: Some(format!("analyzer/{}", diagnostic.source)), + message: diagnostic.message.clone(), + related_information: if related_information.is_empty() { + None + } else { + Some(related_information) + }, + tags: None, + data: None, + }) +} diff --git a/compiler-core/lowering/src/algorithm.rs b/compiler-core/lowering/src/algorithm.rs index 780d2bd0d..0a9018ad1 100644 --- a/compiler-core/lowering/src/algorithm.rs +++ b/compiler-core/lowering/src/algorithm.rs @@ -43,6 +43,8 @@ pub(crate) struct State { pub(crate) synonym_edges: FxHashSet<(TypeItemId, TypeItemId)>, pub(crate) let_binding_graph: ItemGraph, + pub(crate) in_constraint: bool, + pub(crate) errors: Vec, } @@ -114,6 +116,10 @@ impl State { self.nodes.type_node.insert(id, node); } + fn associate_do_statement(&mut self, id: DoStatementId, statement: DoStatement) { + self.info.do_statement.insert(id, statement); + } + fn associate_let_binding_name(&mut self, id: LetBindingNameGroupId, info: LetBindingName) { self.info.let_binding_name.insert(id, info); let Some(node) = self.graph_scope else { return }; @@ -262,6 +268,33 @@ impl State { Some((file_id, type_id)) } + fn resolve_class_reference( + &mut self, + context: &Context, + qualifier: Option<&str>, + name: &str, + ) -> Option<(FileId, TypeItemId)> { + let (file_id, type_id) = context.lookup_class(qualifier, name)?; + + if context.file_id == file_id + && let Some(current_id) = self.current_type + { + self.type_edges.insert((current_id, type_id)); + + if let Some(synonym_id) = 
self.current_synonym + && let TypeItemKind::Synonym { .. } = context.indexed.items[type_id].kind + { + self.synonym_edges.insert((synonym_id, type_id)); + } + + if let Some(kind_id) = self.current_kind { + self.kind_edges.insert((kind_id, type_id)); + } + } + + Some((file_id, type_id)) + } + fn resolve_type_variable(&mut self, id: TypeId, name: &str) -> Option { let node = self.graph_scope?; if let GraphNode::Implicit { collecting, bindings, .. } = &mut self.graph.inner[node] { @@ -320,6 +353,16 @@ impl Context<'_> { let name = name.as_ref(); self.resolved.lookup_type(self.prim, qualifier, name) } + + fn lookup_class(&self, qualifier: Option, name: N) -> Option<(FileId, TypeItemId)> + where + Q: AsRef, + N: AsRef, + { + let qualifier = qualifier.as_ref().map(Q::as_ref); + let name = name.as_ref(); + self.resolved.lookup_class(self.prim, qualifier, name) + } } pub(super) fn lower_module( @@ -368,7 +411,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it let qualified = head.qualified()?; let (qualifier, name) = recursive::lower_qualified_name(&qualified, cst::QualifiedName::upper)?; - state.resolve_type_reference(context, qualifier.as_deref(), &name) + state.resolve_class_reference(context, qualifier.as_deref(), &name) }); state.push_implicit_scope(); @@ -381,6 +424,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it .collect() }; + state.in_constraint = true; let constraints = recover! { cst.as_ref()? .instance_constraints()? 
@@ -388,6 +432,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it .map(|cst| recursive::lower_type(state, context, &cst)) .collect() }; + state.in_constraint = false; state.finish_implicit_scope(); let kind = TermItemIr::Derive { newtype, constraints, resolution, arguments }; @@ -414,7 +459,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it let qualified = head.qualified()?; let (qualifier, name) = recursive::lower_qualified_name(&qualified, cst::QualifiedName::upper)?; - state.resolve_type_reference(context, qualifier.as_deref(), &name) + state.resolve_class_reference(context, qualifier.as_deref(), &name) }); state.push_implicit_scope(); @@ -427,6 +472,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it .collect() }; + state.in_constraint = true; let constraints = recover! { cst.as_ref()? .instance_constraints()? @@ -434,6 +480,7 @@ fn lower_term_item(state: &mut State, context: &Context, item_id: TermItemId, it .map(|cst| recursive::lower_type(state, context, &cst)) .collect() }; + state.in_constraint = false; state.finish_implicit_scope(); let members = recover! { @@ -630,12 +677,14 @@ fn lower_type_item(state: &mut State, context: &Context, item_id: TypeItemId, it .collect() }; + state.in_constraint = true; let constraints = recover! { cst.class_constraints()? 
.children() .map(|cst| recursive::lower_type(state, context, &cst)) .collect() }; + state.in_constraint = false; let variable_map: FxHashMap<&str, u8> = variables .iter() diff --git a/compiler-core/lowering/src/algorithm/recursive.rs b/compiler-core/lowering/src/algorithm/recursive.rs index 0daaa43f3..021ca2759 100644 --- a/compiler-core/lowering/src/algorithm/recursive.rs +++ b/compiler-core/lowering/src/algorithm/recursive.rs @@ -1,7 +1,7 @@ use std::mem; use std::sync::Arc; -use itertools::Itertools; +use itertools::{Itertools, Position}; use petgraph::algo::tarjan_scc; use rowan::ast::AstNode; use rustc_hash::FxHashMap; @@ -45,8 +45,25 @@ fn lower_binder_kind( .collect(); BinderKind::OperatorChain { head, tail } } - cst::Binder::BinderInteger(_) => BinderKind::Integer, - cst::Binder::BinderNumber(_) => BinderKind::Number, + cst::Binder::BinderInteger(cst) => { + let value = cst.integer_token().and_then(|token| { + let text = token.text(); + let integer = if let Some(hex) = text.strip_prefix("0x") { + let clean = hex.replace_smolstr("_", ""); + i32::from_str_radix(&clean, 16).ok()? + } else { + let clean = text.replace_smolstr("_", ""); + clean.parse().ok()? 
+ }; + if cst.minus_token().is_some() { Some(-integer) } else { Some(integer) } + }); + BinderKind::Integer { value } + } + cst::Binder::BinderNumber(cst) => { + let negative = cst.minus_token().is_some(); + let value = cst.number_token().map(|token| SmolStr::from(token.text())); + BinderKind::Number { negative, value } + } cst::Binder::BinderConstructor(cst) => { let resolution = cst.name().and_then(|cst| { let (qualifier, name) = lower_qualified_name(&cst, cst::QualifiedName::upper)?; @@ -77,8 +94,50 @@ fn lower_binder_kind( BinderKind::Named { named, binder } } cst::Binder::BinderWildcard(_) => BinderKind::Wildcard, - cst::Binder::BinderString(_) => BinderKind::String, - cst::Binder::BinderChar(_) => BinderKind::Char, + cst::Binder::BinderString(cst) => { + let (kind, value) = if let Some(token) = cst.string() { + let text = token.text(); + let value = text + .strip_prefix('"') + .and_then(|text| text.strip_suffix('"')) + .map(SmolStr::from); + (StringKind::String, value) + } else if let Some(token) = cst.raw_string() { + let text = token.text(); + let value = text + .strip_prefix("\"\"\"") + .and_then(|text| text.strip_suffix("\"\"\"")) + .map(SmolStr::from); + (StringKind::RawString, value) + } else { + (StringKind::String, None) + }; + BinderKind::String { kind, value } + } + cst::Binder::BinderChar(cst) => { + let value = cst.char_token().and_then(|token| { + let text = token.text(); + let inner = text.strip_prefix('\'')?.strip_suffix('\'')?; + if let Some(escaped) = inner.strip_prefix('\\') { + match escaped.chars().next()? 
{ + 'n' => Some('\n'), + 'r' => Some('\r'), + 't' => Some('\t'), + '\\' => Some('\\'), + '\'' => Some('\''), + '0' => Some('\0'), + 'x' if escaped.len() >= 3 => { + let hex = &escaped[1..3]; + u8::from_str_radix(hex, 16).ok().map(|b| b as char) + } + _ => None, + } + } else { + inner.chars().next() + } + }); + BinderKind::Char { value } + } cst::Binder::BinderTrue(_) => BinderKind::Boolean { boolean: true }, cst::Binder::BinderFalse(_) => BinderKind::Boolean { boolean: false }, cst::Binder::BinderArray(cst) => { @@ -274,26 +333,44 @@ fn lower_expression_kind( cst::Expression::ExpressionDo(cst) => state.with_scope(|state| { let qualifier = cst.qualifier().and_then(|cst| { let token = cst.text()?; - let text = token.text(); + let text = token.text().trim_end_matches('.'); Some(SmolStr::from(text)) }); - let bind = state.resolve_term_full(context, qualifier.as_deref(), "bind"); - let discard = state.resolve_term_full(context, qualifier.as_deref(), "discard"); + // Scan statements to determine which rebindable functions are needed: + // - `bind` is needed if there's at least one `<-` statement + // - `discard` is needed if there's a non-final discard statement + let (has_bind, has_discard) = cst.statements().map_or((false, false), |statements| { + let mut has_bind = false; + let mut has_discard = false; + + for (position, statement) in statements.children().with_position() { + let is_final = matches!(position, Position::Last | Position::Only); + match statement { + cst::DoStatement::DoStatementBind(_) => has_bind = true, + cst::DoStatement::DoStatementDiscard(_) if !is_final => has_discard = true, + _ => {} + } + } + + (has_bind, has_discard) + }); - if bind.is_none() { - let id = context.stabilized.lookup_cst(cst).expect_id(); - state - .errors - .push(LoweringError::NotInScope(NotInScope::DoFn { kind: DoFn::Bind, id })); - } + let mut resolve_do_fn = |kind: DoFn| { + let name = match kind { + DoFn::Bind => "bind", + DoFn::Discard => "discard", + }; + let resolution = 
state.resolve_term_full(context, qualifier.as_deref(), name); + if resolution.is_none() { + let id = context.stabilized.lookup_cst(cst).expect_id(); + state.errors.push(LoweringError::NotInScope(NotInScope::DoFn { kind, id })); + } + resolution + }; - if discard.is_none() { - let id = context.stabilized.lookup_cst(cst).expect_id(); - state - .errors - .push(LoweringError::NotInScope(NotInScope::DoFn { kind: DoFn::Discard, id })); - } + let bind = if has_bind { resolve_do_fn(DoFn::Bind) } else { None }; + let discard = if has_discard { resolve_do_fn(DoFn::Discard) } else { None }; let statements = recover! { cst.statements()? @@ -307,34 +384,51 @@ fn lower_expression_kind( cst::Expression::ExpressionAdo(cst) => state.with_scope(|state| { let qualifier = cst.qualifier().and_then(|cst| { let token = cst.text()?; - let text = token.text(); + let text = token.text().trim_end_matches('.'); Some(SmolStr::from(text)) }); - let map = state.resolve_term_full(context, qualifier.as_deref(), "map"); - let apply = state.resolve_term_full(context, qualifier.as_deref(), "apply"); - let pure = state.resolve_term_full(context, qualifier.as_deref(), "pure"); + // Count action statements (Bind/Discard, ignoring Let) to determine + // which rebindable functions are needed: + // - 0 actions: only `pure` needed + // - 1 action: only `map` needed + // - 2+ actions: `map` and `apply` needed + let action_count = cst.statements().map_or(0, |statements| { + statements + .children() + .filter(|s| { + matches!( + s, + cst::DoStatement::DoStatementBind(_) + | cst::DoStatement::DoStatementDiscard(_) + ) + }) + .count() + }); - if map.is_none() { - let id = context.stabilized.lookup_cst(cst).expect_id(); - state - .errors - .push(LoweringError::NotInScope(NotInScope::AdoFn { kind: AdoFn::Map, id })); - } + let (needs_pure, needs_map, needs_apply) = match action_count { + 0 => (true, false, false), + 1 => (false, true, false), + _ => (false, true, true), + }; - if apply.is_none() { - let id = 
context.stabilized.lookup_cst(cst).expect_id(); - state - .errors - .push(LoweringError::NotInScope(NotInScope::AdoFn { kind: AdoFn::Apply, id })); - } + let mut resolve_ado_fn = |kind: AdoFn| { + let name = match kind { + AdoFn::Map => "map", + AdoFn::Apply => "apply", + AdoFn::Pure => "pure", + }; + let resolution = state.resolve_term_full(context, qualifier.as_deref(), name); + if resolution.is_none() { + let id = context.stabilized.lookup_cst(cst).expect_id(); + state.errors.push(LoweringError::NotInScope(NotInScope::AdoFn { kind, id })); + } + resolution + }; - if pure.is_none() { - let id = context.stabilized.lookup_cst(cst).expect_id(); - state - .errors - .push(LoweringError::NotInScope(NotInScope::AdoFn { kind: AdoFn::Pure, id })); - } + let map = if needs_map { resolve_ado_fn(AdoFn::Map) } else { None }; + let apply = if needs_apply { resolve_ado_fn(AdoFn::Apply) } else { None }; + let pure = if needs_pure { resolve_ado_fn(AdoFn::Pure) } else { None }; let statements = recover! { cst.statements()? 
@@ -725,8 +819,13 @@ pub(crate) fn lower_equation_like( }) } -fn lower_do_statement(state: &mut State, context: &Context, cst: &cst::DoStatement) -> DoStatement { - match cst { +fn lower_do_statement( + state: &mut State, + context: &Context, + cst: &cst::DoStatement, +) -> DoStatementId { + let id = context.stabilized.lookup_cst(cst).expect_id(); + let statement = match cst { cst::DoStatement::DoStatementBind(cst) => { let expression = cst.expression().map(|cst| lower_expression(state, context, &cst)); state.push_binder_scope(); @@ -741,7 +840,9 @@ fn lower_do_statement(state: &mut State, context: &Context, cst: &cst::DoStateme let expression = cst.expression().map(|cst| lower_expression(state, context, &cst)); DoStatement::Discard { expression } } - } + }; + state.associate_do_statement(id, statement); + id } fn lower_record_updates( @@ -789,9 +890,11 @@ fn lower_type_kind( ) -> TypeKind { match cst { cst::Type::TypeApplicationChain(cst) => { - let mut children = cst.children().map(|cst| lower_type(state, context, &cst)); - let function = children.next(); - let arguments = children.collect(); + let mut children = cst.children(); + let function = children.next().map(|cst| lower_type(state, context, &cst)); + let in_constraint = mem::replace(&mut state.in_constraint, false); + let arguments = children.map(|cst| lower_type(state, context, &cst)).collect(); + state.in_constraint = in_constraint; TypeKind::ApplicationChain { function, arguments } } cst::Type::TypeArrow(cst) => { @@ -801,15 +904,23 @@ fn lower_type_kind( TypeKind::Arrow { argument, result } } cst::Type::TypeConstrained(cst) => { - let mut children = cst.children().map(|cst| lower_type(state, context, &cst)); - let constraint = children.next(); - let constrained = children.next(); + let mut children = cst.children(); + let in_constraint = mem::replace(&mut state.in_constraint, true); + let constraint = children.next().map(|cst| lower_type(state, context, &cst)); + state.in_constraint = in_constraint; + 
let constrained = children.next().map(|cst| lower_type(state, context, &cst)); TypeKind::Constrained { constraint, constrained } } cst::Type::TypeConstructor(cst) => { let resolution = cst.name().and_then(|cst| { let (qualifier, name) = lower_qualified_name(&cst, cst::QualifiedName::upper)?; - state.resolve_type_reference(context, qualifier.as_deref(), &name) + if state.in_constraint { + state.resolve_class_reference(context, qualifier.as_deref(), &name).or_else( + || state.resolve_type_reference(context, qualifier.as_deref(), &name), + ) + } else { + state.resolve_type_reference(context, qualifier.as_deref(), &name) + } }); if resolution.is_none() { let id = context.stabilized.lookup_cst(cst).expect_id(); diff --git a/compiler-core/lowering/src/intermediate.rs b/compiler-core/lowering/src/intermediate.rs index 21c73438a..15ceaff48 100644 --- a/compiler-core/lowering/src/intermediate.rs +++ b/compiler-core/lowering/src/intermediate.rs @@ -26,14 +26,14 @@ pub enum BinderRecordItem { pub enum BinderKind { Typed { binder: Option, type_: Option }, OperatorChain { head: Option, tail: Arc<[OperatorPair]> }, - Integer, - Number, + Integer { value: Option }, + Number { negative: bool, value: Option }, Constructor { resolution: Option<(FileId, TermItemId)>, arguments: Arc<[BinderId]> }, Variable { variable: Option }, Named { named: Option, binder: Option }, Wildcard, - String, - Char, + String { kind: StringKind, value: Option }, + Char { value: Option }, Boolean { boolean: bool }, Array { array: Arc<[BinderId]> }, Record { record: Arc<[BinderRecordItem]> }, @@ -113,13 +113,13 @@ pub enum ExpressionKind { Do { bind: Option, discard: Option, - statements: Arc<[DoStatement]>, + statements: Arc<[DoStatementId]>, }, Ado { map: Option, apply: Option, pure: Option, - statements: Arc<[DoStatement]>, + statements: Arc<[DoStatementId]>, expression: Option, }, Constructor { @@ -418,6 +418,7 @@ pub struct LoweringInfo { pub(crate) term_item: FxHashMap, pub(crate) type_item: FxHashMap, 
+ pub(crate) do_statement: FxHashMap, pub(crate) let_binding: Arena, pub(crate) let_binding_name: ArenaMap, @@ -438,6 +439,10 @@ impl LoweringInfo { self.type_kind.iter().map(|(k, v)| (*k, v)) } + pub fn iter_do_statement(&self) -> impl Iterator { + self.do_statement.iter().map(|(k, v)| (*k, v)) + } + pub fn iter_term_operator(&self) -> impl Iterator { self.term_operator.iter().map(|(o_id, (f_id, t_id))| (*o_id, *f_id, *t_id)) } @@ -458,6 +463,10 @@ impl LoweringInfo { self.type_kind.get(&id) } + pub fn get_do_statement(&self, id: DoStatementId) -> Option<&DoStatement> { + self.do_statement.get(&id) + } + pub fn get_term_item(&self, id: TermItemId) -> Option<&TermItemIr> { self.term_item.get(&id) } diff --git a/compiler-core/parsing/src/parser/names.rs b/compiler-core/parsing/src/parser/names.rs index e4760058d..bc5e6ce99 100644 --- a/compiler-core/parsing/src/parser/names.rs +++ b/compiler-core/parsing/src/parser/names.rs @@ -1,24 +1,8 @@ -use syntax::{SyntaxKind, TokenSet}; +use syntax::SyntaxKind; -use super::Parser; - -pub(super) const LOWER: TokenSet = - TokenSet::new(&[SyntaxKind::LOWER, SyntaxKind::AS, SyntaxKind::HIDING, SyntaxKind::ROLE]) - .union(ROLE); - -pub(super) const ROLE: TokenSet = - TokenSet::new(&[SyntaxKind::NOMINAL, SyntaxKind::PHANTOM, SyntaxKind::REPRESENTATIONAL]); - -pub(super) const OPERATOR: TokenSet = TokenSet::new(&[ - SyntaxKind::OPERATOR, - SyntaxKind::COLON, - SyntaxKind::MINUS, - SyntaxKind::DOUBLE_PERIOD, - SyntaxKind::LEFT_THICK_ARROW, -]); +pub(super) use syntax::names::{KEYWORD, LOWER, OPERATOR, OPERATOR_NAME, RECORD_LABEL, ROLE}; -pub(super) const OPERATOR_NAME: TokenSet = - TokenSet::new(&[SyntaxKind::OPERATOR_NAME, SyntaxKind::DOUBLE_PERIOD_OPERATOR_NAME]); +use super::Parser; pub(super) fn module_name(p: &mut Parser) { p.annotate(); @@ -60,37 +44,6 @@ pub(super) fn operator_name(p: &mut Parser) { m.end(p, SyntaxKind::QualifiedName); } -pub(super) const KEYWORD: TokenSet = TokenSet::new(&[ - SyntaxKind::MODULE, - 
SyntaxKind::WHERE, - SyntaxKind::IMPORT, - SyntaxKind::ADO, - SyntaxKind::DO, - SyntaxKind::IF, - SyntaxKind::THEN, - SyntaxKind::ELSE, - SyntaxKind::LET, - SyntaxKind::IN, - SyntaxKind::CASE, - SyntaxKind::OF, - SyntaxKind::DATA, - SyntaxKind::NEWTYPE, - SyntaxKind::FORALL, - SyntaxKind::TYPE, - SyntaxKind::CLASS, - SyntaxKind::INSTANCE, - SyntaxKind::DERIVE, - SyntaxKind::FOREIGN, - SyntaxKind::INFIXL, - SyntaxKind::INFIXR, - SyntaxKind::INFIX, - SyntaxKind::TRUE, - SyntaxKind::FALSE, -]); - -pub(super) const RECORD_LABEL: TokenSet = - TokenSet::new(&[SyntaxKind::STRING, SyntaxKind::RAW_STRING]).union(LOWER).union(KEYWORD); - pub(super) fn label(p: &mut Parser) { let mut m = p.start(); p.expect_in(RECORD_LABEL, SyntaxKind::TEXT, "Expected RECORD_LABEL"); diff --git a/compiler-core/resolving/src/algorithm.rs b/compiler-core/resolving/src/algorithm.rs index 214d09453..5aaf5f3fe 100644 --- a/compiler-core/resolving/src/algorithm.rs +++ b/compiler-core/resolving/src/algorithm.rs @@ -1,8 +1,8 @@ use building_types::QueryResult; use files::FileId; use indexing::{ - ExportKind, ImplicitItems, ImportId, ImportKind, IndexedModule, IndexingImport, TermItemId, - TermItemKind, TypeItemId, TypeItemKind, + ExportKind, ImplicitItems, ImportKind, IndexedModule, IndexingImport, TermItemId, TermItemKind, + TypeItemId, TypeItemKind, }; use smol_str::SmolStr; @@ -56,13 +56,14 @@ fn resolve_imports( if let Some(alias) = &indexing_import.alias { let alias = SmolStr::clone(alias); - let resolved_import = state.qualified.entry(alias).or_insert(resolved_import); + let imports = state.qualified.entry(alias).or_default(); + imports.push(resolved_import); + let resolved_import = imports.last_mut().unwrap(); resolve_import( queries, &mut state.errors, &mut state.class, resolved_import, - indexing_import_id, indexing_import, import_file_id, )?; @@ -73,7 +74,6 @@ fn resolve_imports( &mut state.errors, &mut state.class, &mut resolved_import, - indexing_import_id, indexing_import, 
import_file_id, )?; @@ -89,7 +89,6 @@ fn resolve_import( errors: &mut Vec, class_members: &mut ResolvedClassMembers, resolved: &mut ResolvedImport, - indexing_import_id: ImportId, indexing_import: &IndexingImport, import_file_id: FileId, ) -> QueryResult<()> { @@ -103,9 +102,12 @@ fn resolve_import( let terms = import_resolved.exports.iter_terms().map(|(name, file, id)| (name, file, id, kind)); let types = import_resolved.exports.iter_types().map(|(name, file, id)| (name, file, id, kind)); + let classes = + import_resolved.exports.iter_classes().map(|(name, file, id)| (name, file, id, kind)); - add_imported_terms(errors, resolved, indexing_import_id, terms); - add_imported_types(errors, resolved, indexing_import_id, types); + add_imported_terms(resolved, terms); + add_imported_types(resolved, types); + add_imported_classes(resolved, classes); // Adjust import kinds for explicit/hidden imports BEFORE copying class members if !matches!(indexing_import.kind, ImportKind::Implicit) { @@ -123,6 +125,8 @@ fn resolve_import( let Some(implicit) = implicit else { continue }; let item = (*file, *id, implicit); resolve_implicit(queries, resolved, indexing_import, item)?; + } else if let Some((_, _, kind)) = resolved.classes.get_mut(name) { + *kind = indexing_import.kind; } else { errors.push(ResolvingError::InvalidImportItem { id }); }; @@ -130,7 +134,7 @@ fn resolve_import( } // Copy class members AFTER kind adjustments so hidden types are properly filtered - for (_, _, type_id, import_kind) in resolved.iter_types() { + for (_, _, type_id, import_kind) in resolved.iter_classes() { if matches!(import_kind, ImportKind::Hidden) { continue; } @@ -178,70 +182,38 @@ fn resolve_implicit( } fn add_imported_terms<'a>( - errors: &mut Vec, resolved: &mut ResolvedImport, - indexing_import_id: ImportId, terms: impl Iterator, ) { let (additional, _) = terms.size_hint(); resolved.terms.reserve(additional); - terms.for_each(|(name, file, id, kind)| { - add_imported_term(errors, resolved, 
indexing_import_id, name, file, id, kind); - }); + for (name, file, id, kind) in terms { + let name = SmolStr::clone(name); + resolved.terms.insert(name, (file, id, kind)); + } } fn add_imported_types<'a>( - errors: &mut Vec, resolved: &mut ResolvedImport, - indexing_import_id: ImportId, - terms: impl Iterator, + types: impl Iterator, ) { - let (additional, _) = terms.size_hint(); - resolved.terms.reserve(additional); - terms.for_each(|(name, file, id, kind)| { - add_imported_type(errors, resolved, indexing_import_id, name, file, id, kind); - }); -} - -fn add_imported_term( - errors: &mut Vec, - resolved: &mut ResolvedImport, - indexing_import_id: ImportId, - name: &SmolStr, - file: FileId, - id: TermItemId, - kind: ImportKind, -) { - if let Some((existing_file, existing_term, _)) = resolved.terms.get(name) { - let duplicate = (file, id, indexing_import_id); - let existing = (*existing_file, *existing_term, resolved.id); - if duplicate != existing { - errors.push(ResolvingError::TermImportConflict { existing, duplicate }); - } - } else { + let (additional, _) = types.size_hint(); + resolved.types.reserve(additional); + for (name, file, id, kind) in types { let name = SmolStr::clone(name); - resolved.terms.insert(name, (file, id, kind)); + resolved.types.insert(name, (file, id, kind)); } } -fn add_imported_type( - errors: &mut Vec, +fn add_imported_classes<'a>( resolved: &mut ResolvedImport, - indexing_import_id: ImportId, - name: &SmolStr, - file: FileId, - id: TypeItemId, - kind: ImportKind, + classes: impl Iterator, ) { - if let Some((existing_file, existing_term, _)) = resolved.types.get(name) { - let duplicate = (file, id, indexing_import_id); - let existing = (*existing_file, *existing_term, resolved.id); - if duplicate != existing { - errors.push(ResolvingError::TypeImportConflict { existing, duplicate }); - } - } else { + let (additional, _) = classes.size_hint(); + resolved.classes.reserve(additional); + for (name, file, id, kind) in classes { let name = 
SmolStr::clone(name); - resolved.types.insert(name, (file, id, kind)); + resolved.classes.insert(name, (file, id, kind)); } } @@ -326,6 +298,36 @@ fn add_local_type( } } +fn add_local_classes<'k>( + items: &mut ResolvedLocals, + errors: &mut Vec, + iterator: impl Iterator, +) { + let (additional, _) = iterator.size_hint(); + items.classes.reserve(additional); + iterator.for_each(move |(name, file, id)| { + add_local_class(items, errors, name, file, id); + }); +} + +fn add_local_class( + items: &mut ResolvedLocals, + errors: &mut Vec, + name: &SmolStr, + file: FileId, + id: TypeItemId, +) { + if let Some(&existing) = items.classes.get(name) { + let duplicate = (file, id); + if existing != duplicate { + errors.push(ResolvingError::ExistingType { existing, duplicate }); + } + } else { + let name = SmolStr::clone(name); + items.classes.insert(name, (file, id)); + } +} + fn export_module_items(state: &mut State, indexed: &IndexedModule, file: FileId) { let local_terms = indexed.items.iter_terms().filter_map(|(id, item)| { let name = item.name.as_ref()?; @@ -334,11 +336,18 @@ fn export_module_items(state: &mut State, indexed: &IndexedModule, file: FileId) let local_types = indexed.items.iter_types().filter_map(|(id, item)| { let name = item.name.as_ref()?; - Some((name, file, id)) + Some((name, file, id, &item.kind)) }); + let (local_class_items, local_type_items): (Vec<_>, Vec<_>) = + local_types.partition(|(_, _, _, kind)| matches!(kind, TypeItemKind::Class { .. 
})); + + let local_types = local_type_items.into_iter().map(|(name, file, id, _)| (name, file, id)); + let local_classes = local_class_items.into_iter().map(|(name, file, id, _)| (name, file, id)); + add_local_terms(&mut state.locals, &mut state.errors, local_terms); add_local_types(&mut state.locals, &mut state.errors, local_types); + add_local_classes(&mut state.locals, &mut state.errors, local_classes); let exported_terms = indexed.items.iter_terms().filter_map(|(id, item)| { // Instances cannot be to referred directly by their given name yet. @@ -358,11 +367,22 @@ fn export_module_items(state: &mut State, indexed: &IndexedModule, file: FileId) return None; } let name = item.name.as_ref()?; - Some((name, file, id, ExportSource::Local)) + Some((name, file, id, ExportSource::Local, &item.kind)) }); + let (exported_class_items, exported_type_items): (Vec<_>, Vec<_>) = + exported_types.partition(|(_, _, _, _, kind)| matches!(kind, TypeItemKind::Class { .. })); + + let exported_types = + exported_type_items.into_iter().map(|(name, file, id, source, _)| (name, file, id, source)); + + let exported_classes = exported_class_items + .into_iter() + .map(|(name, file, id, source, _)| (name, file, id, source)); + add_export_terms(&mut state.exports, &mut state.errors, exported_terms); add_export_types(&mut state.exports, &mut state.errors, exported_types); + add_export_classes(&mut state.exports, &mut state.errors, exported_classes); } fn export_module_imports(state: &mut State, indexed: &IndexedModule) { @@ -371,7 +391,7 @@ fn export_module_imports(state: &mut State, indexed: &IndexedModule) { } let unqualified = state.unqualified.values().flatten(); - let qualified = state.qualified.values(); + let qualified = state.qualified.values().flatten(); let imports = unqualified.chain(qualified); for import in imports { @@ -393,8 +413,16 @@ fn export_module_imports(state: &mut State, indexed: &IndexedModule) { None } }); + let classes = import.iter_classes().filter_map(|(k, f, i, 
d)| { + if matches!(d, ImportKind::Implicit | ImportKind::Explicit) { + Some((k, f, i, source)) + } else { + None + } + }); add_export_terms(&mut state.exports, &mut state.errors, terms); add_export_types(&mut state.exports, &mut state.errors, types); + add_export_classes(&mut state.exports, &mut state.errors, classes); } } @@ -459,3 +487,34 @@ fn add_export_type( items.types.insert(name, (file, id, source)); } } + +fn add_export_classes<'k>( + items: &mut ResolvedExports, + errors: &mut Vec, + iterator: impl Iterator, +) { + let (additional, _) = iterator.size_hint(); + items.classes.reserve(additional); + iterator.for_each(move |(name, file, id, source)| { + add_export_class(items, errors, name, file, id, source); + }); +} + +fn add_export_class( + items: &mut ResolvedExports, + errors: &mut Vec, + name: &SmolStr, + file: FileId, + id: TypeItemId, + source: ExportSource, +) { + if let Some(&existing) = items.classes.get(name) { + let duplicate = (file, id, source); + if existing != duplicate { + errors.push(ResolvingError::TypeExportConflict { existing, duplicate }); + } + } else { + let name = SmolStr::clone(name); + items.classes.insert(name, (file, id, source)); + } +} diff --git a/compiler-core/resolving/src/error.rs b/compiler-core/resolving/src/error.rs index ef5f281a6..4ee624ba8 100644 --- a/compiler-core/resolving/src/error.rs +++ b/compiler-core/resolving/src/error.rs @@ -6,15 +6,6 @@ use crate::ExportSource; /// The kind of errors produced during name resolution. 
#[derive(Debug, PartialEq, Eq)] pub enum ResolvingError { - TermImportConflict { - existing: (FileId, TermItemId, ImportId), - duplicate: (FileId, TermItemId, ImportId), - }, - TypeImportConflict { - existing: (FileId, TypeItemId, ImportId), - duplicate: (FileId, TypeItemId, ImportId), - }, - TermExportConflict { existing: (FileId, TermItemId, ExportSource), duplicate: (FileId, TermItemId, ExportSource), diff --git a/compiler-core/resolving/src/lib.rs b/compiler-core/resolving/src/lib.rs index 0fc900c1c..ba08ad4ef 100644 --- a/compiler-core/resolving/src/lib.rs +++ b/compiler-core/resolving/src/lib.rs @@ -62,6 +62,29 @@ pub struct ResolvedModule { } impl ResolvedModule { + fn lookup_qualified( + &self, + qualifier: &str, + lookup: LookupFn, + default: DefaultFn, + ) -> Option<(FileId, ItemId)> + where + LookupFn: Fn(&ResolvedImport) -> Option<(FileId, ItemId, ImportKind)>, + DefaultFn: FnOnce() -> Option<(FileId, ItemId)>, + { + if let Some(imports) = self.qualified.get(qualifier) { + let (file_id, item_id, _) = imports + .iter() + .filter_map(&lookup) + .find(|(_, _, kind)| !matches!(kind, ImportKind::Hidden))?; + Some((file_id, item_id)) + } else if qualifier == "Prim" { + default() + } else { + None + } + } + fn lookup_unqualified(&self, lookup: LookupFn) -> Option<(FileId, ItemId)> where LookupFn: Fn(&ResolvedImport) -> Option<(FileId, ItemId, ImportKind)>, @@ -103,9 +126,9 @@ impl ResolvedModule { name: &str, ) -> Option<(FileId, TermItemId)> { if let Some(qualifier) = qualifier { - let import = self.qualified.get(qualifier)?; - let (file, id, kind) = import.lookup_term(name)?; - if matches!(kind, ImportKind::Hidden) { None } else { Some((file, id)) } + let lookup_item = |import: &ResolvedImport| import.lookup_term(name); + let lookup_prim = || prim.exports.lookup_term(name); + self.lookup_qualified(qualifier, lookup_item, lookup_prim) } else { let lookup_item = |import: &ResolvedImport| import.lookup_term(name); let lookup_prim = || 
prim.exports.lookup_term(name); @@ -122,9 +145,9 @@ impl ResolvedModule { name: &str, ) -> Option<(FileId, TypeItemId)> { if let Some(qualifier) = qualifier { - let import = self.qualified.get(qualifier)?; - let (file, id, kind) = import.lookup_type(name)?; - if matches!(kind, ImportKind::Hidden) { None } else { Some((file, id)) } + let lookup_item = |import: &ResolvedImport| import.lookup_type(name); + let lookup_prim = || prim.exports.lookup_type(name); + self.lookup_qualified(qualifier, lookup_item, lookup_prim) } else { let lookup_item = |import: &ResolvedImport| import.lookup_type(name); let lookup_prim = || prim.exports.lookup_type(name); @@ -134,6 +157,25 @@ impl ResolvedModule { } } + pub fn lookup_class( + &self, + prim: &ResolvedModule, + qualifier: Option<&str>, + name: &str, + ) -> Option<(FileId, TypeItemId)> { + if let Some(qualifier) = qualifier { + let lookup_item = |import: &ResolvedImport| import.lookup_class(name); + let lookup_prim = || prim.exports.lookup_class(name); + self.lookup_qualified(qualifier, lookup_item, lookup_prim) + } else { + let lookup_item = |import: &ResolvedImport| import.lookup_class(name); + let lookup_prim = || prim.exports.lookup_class(name); + None.or_else(|| self.locals.lookup_class(name)) + .or_else(|| self.lookup_unqualified(lookup_item)) + .or_else(|| self.lookup_prim_import(lookup_item, lookup_prim)) + } + } + pub fn lookup_class_member( &self, class_id: TypeItemId, @@ -160,12 +202,33 @@ impl ResolvedModule { } } - for import in self.qualified.values() { - if import.contains_term(file_id, item_id) { - return true; + for imports in self.qualified.values() { + for import in imports { + if import.contains_term(file_id, item_id) { + return true; + } } } + // If an unqualified Prim import exists, use its import list; + if let Some(prim_imports) = self.unqualified.get("Prim") { + for prim_import in prim_imports { + if prim_import.contains_term(file_id, item_id) { + return true; + } + } + } + + // if a qualified Prim 
import exists, use its import list; + if let Some(prim_imports) = self.qualified.get("Prim") { + for prim_import in prim_imports { + if prim_import.contains_term(file_id, item_id) { + return true; + } + } + } + + // if there are no Prim imports, use the export list. if prim.exports.contains_term(file_id, item_id) { return true; } @@ -175,12 +238,13 @@ impl ResolvedModule { } type ResolvedImportsUnqualified = FxHashMap>; -type ResolvedImportsQualified = FxHashMap; +type ResolvedImportsQualified = FxHashMap>; #[derive(Debug, Default, PartialEq, Eq)] pub struct ResolvedLocals { terms: FxHashMap, types: FxHashMap, + classes: FxHashMap, } impl ResolvedLocals { @@ -203,6 +267,14 @@ impl ResolvedLocals { pub fn iter_types(&self) -> impl Iterator { self.types.iter().map(|(k, (f, i))| (k, *f, *i)) } + + pub fn lookup_class(&self, name: &str) -> Option<(FileId, TypeItemId)> { + self.classes.get(name).copied() + } + + pub fn iter_classes(&self) -> impl Iterator { + self.classes.iter().map(|(k, (f, i))| (k, *f, *i)) + } } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -215,6 +287,7 @@ pub enum ExportSource { pub struct ResolvedExports { terms: FxHashMap, types: FxHashMap, + classes: FxHashMap, } impl ResolvedExports { @@ -237,6 +310,14 @@ impl ResolvedExports { pub fn iter_types(&self) -> impl Iterator { self.types.iter().map(|(k, (f, i, _))| (k, *f, *i)) } + + pub fn lookup_class(&self, name: &str) -> Option<(FileId, TypeItemId)> { + self.classes.get(name).copied().map(|(f, i, _)| (f, i)) + } + + pub fn iter_classes(&self) -> impl Iterator { + self.classes.iter().map(|(k, (f, i, _))| (k, *f, *i)) + } } #[derive(Debug, PartialEq, Eq)] @@ -247,13 +328,15 @@ pub struct ResolvedImport { pub exported: bool, terms: FxHashMap, types: FxHashMap, + classes: FxHashMap, } impl ResolvedImport { fn new(id: ImportId, file: FileId, kind: ImportKind, exported: bool) -> ResolvedImport { let terms = FxHashMap::default(); let types = FxHashMap::default(); - ResolvedImport { id, file, kind, 
exported, terms, types } + let classes = FxHashMap::default(); + ResolvedImport { id, file, kind, exported, terms, types, classes } } pub fn lookup_term(&self, name: &str) -> Option<(FileId, TermItemId, ImportKind)> { @@ -277,6 +360,14 @@ impl ResolvedImport { pub fn iter_types(&self) -> impl Iterator { self.types.iter().map(|(k, (f, i, d))| (k, *f, *i, *d)) } + + pub fn lookup_class(&self, name: &str) -> Option<(FileId, TypeItemId, ImportKind)> { + self.classes.get(name).copied() + } + + pub fn iter_classes(&self) -> impl Iterator { + self.classes.iter().map(|(k, (f, i, d))| (k, *f, *i, *d)) + } } pub fn resolve_module(queries: &impl ExternalQueries, file: FileId) -> QueryResult { diff --git a/compiler-core/sugar/src/bracketing.rs b/compiler-core/sugar/src/bracketing.rs index 94d1b8f5c..d9c75aef3 100644 --- a/compiler-core/sugar/src/bracketing.rs +++ b/compiler-core/sugar/src/bracketing.rs @@ -91,9 +91,16 @@ where } /// Translates [`Associativity`] and precedence into binding power. +/// +/// Each precedence level occupies two binding power slots so that +/// adjacent precedence levels never overlap. Without the `2*p` scaling, +/// `infixr 3` and `infix 4` both produce a binding power of 5, which +/// prevents the Pratt parser from breaking out of a higher-precedence +/// recursive call when it encounters a lower-precedence operator. 
fn binding_power(associativity: Associativity, precedence: u8) -> (u8, u8) { - let bp_0 = precedence.saturating_add(1); - let bp_1 = precedence.saturating_add(2); + let base = precedence.saturating_mul(2); + let bp_0 = base.saturating_add(1); + let bp_1 = base.saturating_add(2); match associativity { Associativity::None => (bp_0, bp_0), Associativity::Left => (bp_0, bp_1), @@ -257,3 +264,105 @@ pub fn bracketed( Ok(Bracketed { binders, expressions, types }) } + +#[cfg(test)] +mod tests { + use super::*; + + const ALL_ASSOCIATIVITIES: [Associativity; 3] = + [Associativity::None, Associativity::Left, Associativity::Right]; + + /// PureScript precedences range from 0 to 9. + const PRECEDENCE_RANGE: std::ops::RangeInclusive = 0..=9; + + /// A lower-precedence operator must always yield to a recursive call + /// from a higher-precedence operator. Without sufficient spacing in + /// the binding power encoding, adjacent precedences can collide. + /// + /// Let `bp(a, p) = (l, r)` denote the binding power function. Then: + /// + /// ```text + /// forall p1, p2 in 0..=9, a1, a2 in {None, Left, Right}. + /// p1 < p2 => fst(bp(a1, p1)) < snd(bp(a2, p2)) + /// ``` + #[test] + fn lower_precedence_yields() { + for p_high in PRECEDENCE_RANGE { + for p_low in 0..p_high { + for &assoc_high in &ALL_ASSOCIATIVITIES { + let (_, right_bp_high) = binding_power(assoc_high, p_high); + for &assoc_low in &ALL_ASSOCIATIVITIES { + let (left_bp_low, _) = binding_power(assoc_low, p_low); + assert!( + left_bp_low < right_bp_high, + "precedence {p_low} ({assoc_low:?}) should yield to \ + precedence {p_high} ({assoc_high:?}): \ + lbp {left_bp_low} must be < rbp {right_bp_high}" + ); + } + } + } + } + } + + /// A higher-precedence operator must bind tighter than a recursive + /// call from a lower-precedence operator. + /// + /// ```text + /// forall p1, p2 in 0..=9, a1, a2 in {None, Left, Right}. 
+ /// p1 < p2 => fst(bp(a2, p2)) >= snd(bp(a1, p1)) + /// ``` + #[test] + fn higher_precedence_binds() { + for p_low in PRECEDENCE_RANGE { + for p_high in (p_low + 1)..=9 { + for &assoc_low in &ALL_ASSOCIATIVITIES { + let (_, right_bp_low) = binding_power(assoc_low, p_low); + for &assoc_high in &ALL_ASSOCIATIVITIES { + let (left_bp_high, _) = binding_power(assoc_high, p_high); + assert!( + left_bp_high >= right_bp_low, + "precedence {p_high} ({assoc_high:?}) should bind inside \ + precedence {p_low} ({assoc_low:?}): \ + lbp {left_bp_high} must be >= rbp {right_bp_low}" + ); + } + } + } + } + } + + /// Left-associative operators at the same precedence continue the + /// current branch rather than recursing deeper. + /// + /// ```text + /// forall p in 0..=9. fst(bp(Left, p)) < snd(bp(Left, p)) + /// ``` + #[test] + fn left_associative_chains_left() { + for p in PRECEDENCE_RANGE { + let (left_bp, right_bp) = binding_power(Associativity::Left, p); + assert!( + left_bp < right_bp, + "left-associative at precedence {p}: lbp {left_bp} must be < rbp {right_bp}" + ); + } + } + + /// Right-associative operators at the same precedence recurse deeper + /// into the right subtree. + /// + /// ```text + /// forall p in 0..=9. 
fst(bp(Right, p)) > snd(bp(Right, p)) + /// ``` + #[test] + fn right_associative_chains_right() { + for p in PRECEDENCE_RANGE { + let (left_bp, right_bp) = binding_power(Associativity::Right, p); + assert!( + left_bp > right_bp, + "right-associative at precedence {p}: lbp {left_bp} must be > rbp {right_bp}" + ); + } + } +} diff --git a/compiler-core/syntax/src/cst.rs b/compiler-core/syntax/src/cst.rs index a7658851d..a2faa918e 100644 --- a/compiler-core/syntax/src/cst.rs +++ b/compiler-core/syntax/src/cst.rs @@ -1,5 +1,7 @@ use rowan::ast::AstNode; +use crate::names; + #[macro_use] mod macros; @@ -18,11 +20,15 @@ has_token!( has_token!( QualifiedName | upper() -> UPPER - | lower() -> LOWER | operator() -> OPERATOR | operator_name() -> OPERATOR_NAME ); +has_token_set!( + QualifiedName + | lower() -> names::LOWER +); + has_child!( QualifiedName | qualifier() -> Qualifier @@ -987,14 +993,14 @@ has_children!( | children() -> Binder ); -has_token!( +has_token_set!( BinderVariable - | name_token() -> LOWER + | name_token() -> names::LOWER ); -has_token!( +has_token_set!( BinderNamed - | name_token() -> LOWER + | name_token() -> names::LOWER ); has_child!( @@ -1169,3 +1175,26 @@ has_token!( | string() -> STRING | raw_string() -> RAW_STRING ); + +has_token!( + BinderInteger + | minus_token() -> MINUS + | integer_token() -> INTEGER +); + +has_token!( + BinderNumber + | minus_token() -> MINUS + | number_token() -> NUMBER +); + +has_token!( + BinderString + | string() -> STRING + | raw_string() -> RAW_STRING +); + +has_token!( + BinderChar + | char_token() -> CHAR +); diff --git a/compiler-core/syntax/src/cst/macros.rs b/compiler-core/syntax/src/cst/macros.rs index 952febd28..90f0ef58d 100644 --- a/compiler-core/syntax/src/cst/macros.rs +++ b/compiler-core/syntax/src/cst/macros.rs @@ -97,6 +97,21 @@ macro_rules! has_token { }; } +macro_rules! 
has_token_set { + ($kind:ident $(|$name:ident() -> $set:expr)+) => { + impl $kind { + $( + pub fn $name(&self) -> Option { + self.syntax() + .children_with_tokens() + .filter_map(|element| element.into_token()) + .find(|token| $set.contains(token.kind())) + } + )+ + } + }; +} + macro_rules! has_tokens { ($kind:ident $(|$name:ident() -> $token:ident)+) => { impl $kind { diff --git a/compiler-core/syntax/src/lib.rs b/compiler-core/syntax/src/lib.rs index 924947bb3..a08a45925 100644 --- a/compiler-core/syntax/src/lib.rs +++ b/compiler-core/syntax/src/lib.rs @@ -1,4 +1,5 @@ pub mod cst; +pub mod names; mod token_set; pub use token_set::TokenSet; diff --git a/compiler-core/syntax/src/names.rs b/compiler-core/syntax/src/names.rs new file mode 100644 index 000000000..4b4731287 --- /dev/null +++ b/compiler-core/syntax/src/names.rs @@ -0,0 +1,50 @@ +use crate::{SyntaxKind, TokenSet}; + +pub const ROLE: TokenSet = + TokenSet::new(&[SyntaxKind::NOMINAL, SyntaxKind::PHANTOM, SyntaxKind::REPRESENTATIONAL]); + +pub const LOWER: TokenSet = + TokenSet::new(&[SyntaxKind::LOWER, SyntaxKind::AS, SyntaxKind::HIDING, SyntaxKind::ROLE]) + .union(ROLE); + +pub const OPERATOR: TokenSet = TokenSet::new(&[ + SyntaxKind::OPERATOR, + SyntaxKind::COLON, + SyntaxKind::MINUS, + SyntaxKind::DOUBLE_PERIOD, + SyntaxKind::LEFT_THICK_ARROW, +]); + +pub const OPERATOR_NAME: TokenSet = + TokenSet::new(&[SyntaxKind::OPERATOR_NAME, SyntaxKind::DOUBLE_PERIOD_OPERATOR_NAME]); + +pub const KEYWORD: TokenSet = TokenSet::new(&[ + SyntaxKind::MODULE, + SyntaxKind::WHERE, + SyntaxKind::IMPORT, + SyntaxKind::ADO, + SyntaxKind::DO, + SyntaxKind::IF, + SyntaxKind::THEN, + SyntaxKind::ELSE, + SyntaxKind::LET, + SyntaxKind::IN, + SyntaxKind::CASE, + SyntaxKind::OF, + SyntaxKind::DATA, + SyntaxKind::NEWTYPE, + SyntaxKind::FORALL, + SyntaxKind::TYPE, + SyntaxKind::CLASS, + SyntaxKind::INSTANCE, + SyntaxKind::DERIVE, + SyntaxKind::FOREIGN, + SyntaxKind::INFIXL, + SyntaxKind::INFIXR, + SyntaxKind::INFIX, + 
SyntaxKind::TRUE, + SyntaxKind::FALSE, +]); + +pub const RECORD_LABEL: TokenSet = + TokenSet::new(&[SyntaxKind::STRING, SyntaxKind::RAW_STRING]).union(LOWER).union(KEYWORD); diff --git a/compiler-lsp/analyzer/src/completion/edit.rs b/compiler-lsp/analyzer/src/completion/edit.rs index 628fe7e51..b211ddcdf 100644 --- a/compiler-lsp/analyzer/src/completion/edit.rs +++ b/compiler-lsp/analyzer/src/completion/edit.rs @@ -125,6 +125,7 @@ pub(super) fn type_import_item( |import| { import .lookup_type(type_name) + .or_else(|| import.lookup_class(type_name)) .and_then(|(f, t, k)| if (f, t) == (file_id, type_id) { Some(k) } else { None }) }, |import_indexed| type_import_name(import_indexed, type_name, type_id), diff --git a/compiler-lsp/analyzer/src/completion/prelude.rs b/compiler-lsp/analyzer/src/completion/prelude.rs index 334a2d572..4637a12ed 100644 --- a/compiler-lsp/analyzer/src/completion/prelude.rs +++ b/compiler-lsp/analyzer/src/completion/prelude.rs @@ -75,6 +75,7 @@ impl Context<'_, '_> { pub fn has_type_import(&self, qualifier: Option<&str>, name: &str) -> bool { self.resolved.lookup_type(self.prim_resolved, qualifier, name).is_some() + || self.resolved.lookup_class(self.prim_resolved, qualifier, name).is_some() } } diff --git a/compiler-lsp/analyzer/src/completion/sources.rs b/compiler-lsp/analyzer/src/completion/sources.rs index 9e0bc696b..4577b11cd 100644 --- a/compiler-lsp/analyzer/src/completion/sources.rs +++ b/compiler-lsp/analyzer/src/completion/sources.rs @@ -35,7 +35,8 @@ impl CompletionSource for QualifiedModules { let source = context.resolved.qualified.iter(); let source = source.filter(move |(name, _)| filter.matches(name)); - for (name, import) in source { + for (name, imports) in source { + let Some(import) = imports.first() else { continue }; let (parsed, _) = context.engine.parsed(import.file)?; let description = parsed.module_name().map(|name| name.to_string()); @@ -117,6 +118,22 @@ impl CompletionSource for LocalTypes { items.push(item.build()) 
} + let source = context.resolved.locals.iter_classes(); + let source = source.filter(move |(name, _, _)| filter.matches(name)); + + for (name, file_id, type_id) in source { + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::STRUCT, + CompletionResolveData::TypeItem(file_id, type_id), + ); + + item.label_description("Local".to_string()); + + items.push(item.build()) + } + Ok(()) } } @@ -198,6 +215,27 @@ impl CompletionSource for ImportedTypes { items.push(item.build()) } + + let source = import.iter_classes().filter(move |(name, _, _, kind)| { + filter.matches(name) && !matches!(kind, ImportKind::Hidden) + }); + for (name, f, t, _) in source { + let (parsed, _) = context.engine.parsed(f)?; + let description = parsed.module_name().map(|name| name.to_string()); + + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::STRUCT, + CompletionResolveData::TypeItem(f, t), + ); + + if let Some(description) = description { + item.label_description(description); + } + + items.push(item.build()) + } } Ok(()) @@ -216,31 +254,33 @@ impl CompletionSource for QualifiedTerms<'_> { filter: F, items: &mut Vec, ) -> Result { - let Some(import) = context.resolved.qualified.get(self.0) else { + let Some(imports) = context.resolved.qualified.get(self.0) else { return Ok(()); }; - let source = import.iter_terms().filter(move |(name, _, _, kind)| { - filter.matches(name) && !matches!(kind, ImportKind::Hidden) - }); + for import in imports { + let source = import.iter_terms().filter(|(name, _, _, kind)| { + filter.matches(name) && !matches!(kind, ImportKind::Hidden) + }); - for (name, file_id, term_id, _) in source { - let (parsed, _) = context.engine.parsed(file_id)?; - let description = parsed.module_name().map(|name| name.to_string()); + for (name, file_id, term_id, _) in source { + let (parsed, _) = context.engine.parsed(file_id)?; + let description = parsed.module_name().map(|name| 
name.to_string()); - let mut item = CompletionItemSpec::new( - name.to_string(), - context.range, - CompletionItemKind::VALUE, - CompletionResolveData::TermItem(file_id, term_id), - ); + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::VALUE, + CompletionResolveData::TermItem(file_id, term_id), + ); - item.edit_text(format!("{}.{name}", self.0)); - if let Some(description) = description { - item.label_description(description); - } + item.edit_text(format!("{}.{name}", self.0)); + if let Some(description) = description { + item.label_description(description); + } - items.push(item.build()) + items.push(item.build()) + } } Ok(()) @@ -259,31 +299,56 @@ impl CompletionSource for QualifiedTypes<'_> { filter: F, items: &mut Vec, ) -> Result { - let Some(import) = context.resolved.qualified.get(self.0) else { + let Some(imports) = context.resolved.qualified.get(self.0) else { return Ok(()); }; - let source = import.iter_types().filter(move |(name, _, _, kind)| { - filter.matches(name) && !matches!(kind, ImportKind::Hidden) - }); + for import in imports { + let source = import.iter_types().filter(|(name, _, _, kind)| { + filter.matches(name) && !matches!(kind, ImportKind::Hidden) + }); - for (name, file_id, type_id, _) in source { - let (parsed, _) = context.engine.parsed(file_id)?; - let description = parsed.module_name().map(|name| name.to_string()); + for (name, file_id, type_id, _) in source { + let (parsed, _) = context.engine.parsed(file_id)?; + let description = parsed.module_name().map(|name| name.to_string()); - let mut item = CompletionItemSpec::new( - name.to_string(), - context.range, - CompletionItemKind::STRUCT, - CompletionResolveData::TypeItem(file_id, type_id), - ); + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::STRUCT, + CompletionResolveData::TypeItem(file_id, type_id), + ); - item.edit_text(format!("{}.{name}", self.0)); - if let Some(description) = 
description { - item.label_description(description); + item.edit_text(format!("{}.{name}", self.0)); + if let Some(description) = description { + item.label_description(description); + } + + items.push(item.build()) } - items.push(item.build()) + let source = import.iter_classes().filter(|(name, _, _, kind)| { + filter.matches(name) && !matches!(kind, ImportKind::Hidden) + }); + + for (name, file_id, type_id, _) in source { + let (parsed, _) = context.engine.parsed(file_id)?; + let description = parsed.module_name().map(|name| name.to_string()); + + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::STRUCT, + CompletionResolveData::TypeItem(file_id, type_id), + ); + + item.edit_text(format!("{}.{name}", self.0)); + if let Some(description) = description { + item.label_description(description); + } + + items.push(item.build()) + } } Ok(()) @@ -369,7 +434,7 @@ impl SuggestionsHelper for SuggestedTypes { fn exports( resolved: &ResolvedModule, ) -> impl Iterator { - resolved.exports.iter_types() + resolved.exports.iter_types().chain(resolved.exports.iter_classes()) } fn candidate( @@ -543,6 +608,25 @@ impl CompletionSource for PrimTypes { items.push(item.build()) } + let source = context + .prim_resolved + .exports + .iter_classes() + .filter(move |(name, _, _)| filter.matches(name)); + + for (name, file_id, type_item) in source { + let mut item = CompletionItemSpec::new( + name.to_string(), + context.range, + CompletionItemKind::STRUCT, + CompletionResolveData::TypeItem(file_id, type_item), + ); + + item.label_description("Prim".to_string()); + + items.push(item.build()) + } + Ok(()) } } @@ -603,7 +687,7 @@ impl SuggestionsHelper for QualifiedTypesSuggestions<'_> { fn exports( resolved: &ResolvedModule, ) -> impl Iterator { - resolved.exports.iter_types() + resolved.exports.iter_types().chain(resolved.exports.iter_classes()) } fn candidate( @@ -648,7 +732,12 @@ fn suggestions_candidates_qualified( filter: impl Filter, 
items: &mut Vec, ) -> Result<(), AnalyzerError> { - let has_prim = context.resolved.qualified.values().any(|import| import.file == context.prim_id); + let has_prim = context + .resolved + .qualified + .values() + .flatten() + .any(|import| import.file == context.prim_id); let file_ids = context.files.iter_id().filter(move |&id| { let not_self = id != context.current_file; diff --git a/compiler-lsp/analyzer/src/definition.rs b/compiler-lsp/analyzer/src/definition.rs index 18fece002..ae89ae156 100644 --- a/compiler-lsp/analyzer/src/definition.rs +++ b/compiler-lsp/analyzer/src/definition.rs @@ -129,8 +129,21 @@ fn definition_import( let goto_type = |engine: &QueryEngine, files: &Files, name: &str| { let name = name.trim_start_matches("(").trim_end_matches(")"); - let (f_id, t_id) = - import_resolved.exports.lookup_type(name).ok_or(AnalyzerError::NonFatal)?; + let (f_id, t_id) = import_resolved + .exports + .lookup_type(name) + .or_else(|| import_resolved.exports.lookup_class(name)) + .ok_or(AnalyzerError::NonFatal)?; + definition_file_type(engine, files, f_id, t_id) + }; + + let goto_class = |engine: &QueryEngine, files: &Files, name: &str| { + let name = name.trim_start_matches("(").trim_end_matches(")"); + let (f_id, t_id) = import_resolved + .exports + .lookup_class(name) + .or_else(|| import_resolved.exports.lookup_type(name)) + .ok_or(AnalyzerError::NonFatal)?; definition_file_type(engine, files, f_id, t_id) }; @@ -143,7 +156,7 @@ fn definition_import( cst::ImportItem::ImportClass(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; let name = token.text(); - goto_type(engine, files, name) + goto_class(engine, files, name) } cst::ImportItem::ImportType(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; diff --git a/compiler-lsp/analyzer/src/hover.rs b/compiler-lsp/analyzer/src/hover.rs index 1cd12429b..8450b18b5 100644 --- a/compiler-lsp/analyzer/src/hover.rs +++ b/compiler-lsp/analyzer/src/hover.rs @@ -113,8 +113,21 @@ fn 
hover_import( let hover_type_import = |engine: &QueryEngine, name: &str| { let name = name.trim_start_matches("(").trim_end_matches(")"); - let (f_id, t_id) = - import_resolved.exports.lookup_type(name).ok_or(AnalyzerError::NonFatal)?; + let (f_id, t_id) = import_resolved + .exports + .lookup_type(name) + .or_else(|| import_resolved.exports.lookup_class(name)) + .ok_or(AnalyzerError::NonFatal)?; + hover_file_type(engine, f_id, t_id) + }; + + let hover_class_import = |engine: &QueryEngine, name: &str| { + let name = name.trim_start_matches("(").trim_end_matches(")"); + let (f_id, t_id) = import_resolved + .exports + .lookup_class(name) + .or_else(|| import_resolved.exports.lookup_type(name)) + .ok_or(AnalyzerError::NonFatal)?; hover_file_type(engine, f_id, t_id) }; @@ -127,7 +140,7 @@ fn hover_import( cst::ImportItem::ImportClass(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; let name = token.text(); - hover_type_import(engine, name) + hover_class_import(engine, name) } cst::ImportItem::ImportType(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; diff --git a/compiler-lsp/analyzer/src/references.rs b/compiler-lsp/analyzer/src/references.rs index 27ea5ccaf..ee2a2649a 100644 --- a/compiler-lsp/analyzer/src/references.rs +++ b/compiler-lsp/analyzer/src/references.rs @@ -132,8 +132,21 @@ fn references_import( let references_type = |engine: &QueryEngine, files: &Files, name: &str| { let name = name.trim_start_matches("(").trim_end_matches(")"); - let (f_id, t_id) = - import_resolved.exports.lookup_type(name).ok_or(AnalyzerError::NonFatal)?; + let (f_id, t_id) = import_resolved + .exports + .lookup_type(name) + .or_else(|| import_resolved.exports.lookup_class(name)) + .ok_or(AnalyzerError::NonFatal)?; + references_file_type(engine, files, current_file, f_id, t_id) + }; + + let references_class = |engine: &QueryEngine, files: &Files, name: &str| { + let name = name.trim_start_matches("(").trim_end_matches(")"); + let (f_id, 
t_id) = import_resolved + .exports + .lookup_class(name) + .or_else(|| import_resolved.exports.lookup_type(name)) + .ok_or(AnalyzerError::NonFatal)?; references_file_type(engine, files, current_file, f_id, t_id) }; @@ -146,7 +159,7 @@ fn references_import( cst::ImportItem::ImportClass(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; let name = token.text(); - references_type(engine, files, name) + references_class(engine, files, name) } cst::ImportItem::ImportType(cst) => { let token = cst.name_token().ok_or(AnalyzerError::NonFatal)?; @@ -387,6 +400,8 @@ fn probe_type_references( probe_workspace_imports(engine, files, current_file, file_id, |import| { import.iter_types().any(|(_, f_id, t_id, kind)| { kind != ImportKind::Hidden && (f_id, t_id) == (file_id, type_id) + }) || import.iter_classes().any(|(_, f_id, t_id, kind)| { + kind != ImportKind::Hidden && (f_id, t_id) == (file_id, type_id) }) }) } @@ -408,7 +423,7 @@ fn probe_workspace_imports( let resolved = engine.resolved(workspace_file_id)?; let unqualified = resolved.unqualified.values().flatten(); - let qualified = resolved.qualified.values(); + let qualified = resolved.qualified.values().flatten(); let imports = unqualified.chain(qualified); for import in imports { @@ -432,7 +447,7 @@ fn probe_imports_for( let resolved = engine.resolved(workspace_file_id)?; let unqualified = resolved.unqualified.values().flatten(); - let qualified = resolved.qualified.values(); + let qualified = resolved.qualified.values().flatten(); let imports = unqualified.chain(qualified); for import in imports { diff --git a/compiler-lsp/analyzer/src/symbols.rs b/compiler-lsp/analyzer/src/symbols.rs index d90edebf4..94bb44f57 100644 --- a/compiler-lsp/analyzer/src/symbols.rs +++ b/compiler-lsp/analyzer/src/symbols.rs @@ -93,6 +93,22 @@ fn build_symbol_list( container_name: None, }); } + + for (name, _, type_id) in resolved.locals.iter_classes() { + if !name.to_lowercase().starts_with(query) { + continue; + } + let 
location = common::file_type_location(engine, uri.clone(), file_id, type_id)?; + symbols.push(SymbolInformation { + name: name.to_string(), + kind: SymbolKind::CLASS, + tags: None, + #[allow(deprecated)] + deprecated: None, + location, + container_name: None, + }); + } } Ok(symbols) diff --git a/compiler-scripts/Cargo.toml b/compiler-scripts/Cargo.toml index 6880cb04c..f4703f35a 100644 --- a/compiler-scripts/Cargo.toml +++ b/compiler-scripts/Cargo.toml @@ -4,14 +4,14 @@ version = "0.1.0" edition = "2024" [dependencies] +anyhow = "1.0.100" clap = { version = "4", features = ["derive"] } console = "0.15" +heck = "0.5" serde = { version = "1", features = ["derive"] } serde_json = "1" similar = { version = "2", features = ["inline"] } md-5 = "0.10" walkdir = "2" -[[bin]] -name = "test-checking" -path = "src/bin/test-checking.rs" + diff --git a/compiler-scripts/src/bin/test-checking.rs b/compiler-scripts/src/bin/test-checking.rs deleted file mode 100644 index ab4ec0349..000000000 --- a/compiler-scripts/src/bin/test-checking.rs +++ /dev/null @@ -1,260 +0,0 @@ -use std::collections::HashMap; -use std::path::{Path, PathBuf}; -use std::process::{Command, Stdio}; -use std::time::{Duration, Instant}; -use std::{env, fs}; - -use clap::Parser; -use compiler_scripts::console::style; -use compiler_scripts::fixtures::fixture_env; -use compiler_scripts::snapshots::{print_diff, strip_frontmatter}; -use serde::Deserialize; - -const TRACE_DIR: &str = "target/compiler-tracing"; - -#[derive(Deserialize)] -struct PendingSnapshot { - path: String, -} - -#[derive(Parser)] -#[command(about = "Run type checker integration tests with snapshot diffing")] -struct Config { - /// Test name or number filters - #[arg(num_args = 0..)] - filters: Vec, - - /// Verbose output (show test progress) - #[arg(short, long)] - verbose: bool, - - /// Enable tracing output for debugging - #[arg(long)] - debug: bool, -} - -fn main() { - let config = Config::parse(); - - let (fixture_hashes, hash_duration) = 
hash_fixtures(); - println!("{}", style(format!("Hashed fixtures in {}ms", hash_duration.as_millis())).dim()); - - run_tests(&config, &fixture_hashes); - - let trace_paths = collect_trace_paths(&config); - process_pending_snapshots(&config, &trace_paths); -} - -fn hash_fixtures() -> (HashMap, Duration) { - let start = Instant::now(); - let hashes = fixture_env(); - (hashes, start.elapsed()) -} - -fn build_nextest_command(config: &Config, fixture_hashes: &HashMap) -> Command { - let mut cmd = Command::new("cargo"); - cmd.arg("nextest").arg("run").arg("-p").arg("tests-integration").arg("--test").arg("checking"); - - if config.debug { - cmd.env("TRACE_LEVEL", "debug"); - } - - for filter in &config.filters { - cmd.arg(filter); - } - - if config.verbose { - cmd.arg("--status-level=fail"); - cmd.arg("--color=always"); - } else { - cmd.arg("--status-level=none"); - } - - cmd.env("INSTA_FORCE_PASS", "1"); - for (key, value) in fixture_hashes { - cmd.env(key, value); - } - - cmd -} - -fn run_tests(config: &Config, fixture_hashes: &HashMap) { - let mut cmd = build_nextest_command(config, fixture_hashes); - - if config.verbose { - cmd.status().expect("Failed to run cargo nextest"); - } else { - cmd.stdout(Stdio::null()).stderr(Stdio::null()); - let status = cmd.status().expect("Failed to run cargo nextest"); - - if !status.success() { - eprintln!("{}", style("Tests failed, re-running verbose...").yellow()); - - let verbose_config = - Config { filters: config.filters.clone(), verbose: true, debug: config.debug }; - let mut retry = build_nextest_command(&verbose_config, fixture_hashes); - let _ = retry.status(); - } - } -} - -fn collect_trace_paths(config: &Config) -> Vec { - if !config.debug { - return Vec::new(); - } - - let trace_dir = PathBuf::from(TRACE_DIR); - if !trace_dir.exists() { - return Vec::new(); - } - - let Ok(entries) = fs::read_dir(&trace_dir) else { - return Vec::new(); - }; - - let mut entries: Vec<_> = entries.filter_map(|e| e.ok()).collect(); - 
entries.sort_by_key(|e| e.path()); - - let mut trace_paths = Vec::new(); - for entry in entries { - let path = entry.path(); - if path.extension().is_some_and(|ext| ext == "jsonl") { - let file_name = path.file_name().unwrap_or_default().to_string_lossy(); - - // Skip traces that don't match any filter - if !config.filters.is_empty() && !config.filters.iter().any(|f| file_name.contains(f)) { - continue; - } - - trace_paths.push(path); - } - } - - trace_paths -} - -/// Finds a trace file that matches the given snapshot path. -/// -/// Snapshot paths look like: `.../fixtures/checking/200_int_compare_transitive/Main.snap` -/// Trace files look like: `200_int_compare_transitive_Main.jsonl` -/// -/// We extract the test identifier (e.g., `200_int_compare_transitive`) from the snapshot's -/// parent directory and the module name from the file, then find a matching trace file. -fn find_trace_for_snapshot(snap_path: &str, trace_paths: &[PathBuf]) -> Option { - let path = Path::new(snap_path); - - // Get module name from file (e.g., "Main" from "Main.snap") - let module_name = path.file_stem()?.to_str()?; - - // Get test identifier from parent directory (e.g., "200_int_compare_transitive") - let test_id = path.parent()?.file_name()?.to_str()?; - - // Trace files are named: {test_id}_{module_name}.jsonl - let expected_trace_name = format!("{}_{}.jsonl", test_id, module_name); - - // Find matching trace file - trace_paths - .iter() - .find(|trace_path| { - trace_path - .file_name() - .and_then(|name| name.to_str()) - .is_some_and(|name| name == expected_trace_name) - }) - .cloned() -} - -fn process_pending_snapshots(config: &Config, trace_paths: &[PathBuf]) { - let pending_output = Command::new("cargo") - .arg("insta") - .arg("pending-snapshots") - .arg("--as-json") - .stderr(Stdio::null()) - .output() - .expect("Failed to run cargo insta"); - - let pending = String::from_utf8_lossy(&pending_output.stdout); - let pending = pending.trim(); - - if pending.is_empty() { - 
println!("{}", style("No pending snapshots.").dim()); - return; - } - - println!(); - - let cwd = env::current_dir().unwrap(); - - for line in pending.lines() { - let line = line.trim(); - if line.is_empty() { - continue; - } - - let snapshot_path = if let Ok(snapshot) = serde_json::from_str::(line) { - snapshot.path - } else { - continue; - }; - - // Skip snapshots that don't match any filter - if !config.filters.is_empty() && !config.filters.iter().any(|f| snapshot_path.contains(f)) { - continue; - } - - let short_path = snapshot_path - .strip_prefix(cwd.to_str().unwrap_or("")) - .unwrap_or(&snapshot_path) - .trim_start_matches('/'); - - let snap = Path::new(&snapshot_path); - let snap_new = PathBuf::from(format!("{}.new", snapshot_path)); - let trace_path = find_trace_for_snapshot(&snapshot_path, trace_paths); - - if snap.exists() { - display_snapshot_diff(snap, &snap_new, short_path, trace_path.as_deref()); - } else { - display_new_snapshot(&snap_new, short_path, trace_path.as_deref()); - } - - println!(); - } -} - -fn display_snapshot_diff( - snap: &Path, - snap_new: &Path, - short_path: &str, - trace_path: Option<&Path>, -) { - println!("{} {}", style("UPDATED").yellow().bold(), style(short_path).cyan()); - - if let Some(trace) = trace_path { - println!(" {} {}", style("TRACE").magenta().bold(), style(trace.display()).cyan()); - } - - println!(); - - let old_content = fs::read_to_string(snap).unwrap_or_default(); - let new_content = fs::read_to_string(snap_new).unwrap_or_default(); - - let old_stripped = strip_frontmatter(&old_content); - let new_stripped = strip_frontmatter(&new_content); - - print_diff(old_stripped, new_stripped); -} - -fn display_new_snapshot(snap_new: &Path, short_path: &str, trace_path: Option<&Path>) { - println!("{} {}", style("CREATED").green().bold(), style(short_path).cyan()); - - if let Some(trace) = trace_path { - println!(" {} {}", style("TRACE").magenta().bold(), style(trace.display()).cyan()); - } - - println!(); - - let 
new_content = fs::read_to_string(snap_new).unwrap_or_default(); - for (i, line) in strip_frontmatter(&new_content).lines().enumerate() { - println!("{} {}", style(format!("{:3}", i + 1)).dim(), line); - } -} diff --git a/compiler-scripts/src/lib.rs b/compiler-scripts/src/lib.rs index 7792dc5f3..0ab89a1a2 100644 --- a/compiler-scripts/src/lib.rs +++ b/compiler-scripts/src/lib.rs @@ -1,5 +1,7 @@ pub use console; +pub mod test_runner; + pub mod fixtures { use md5::{Digest, Md5}; use std::collections::HashMap; diff --git a/compiler-scripts/src/main.rs b/compiler-scripts/src/main.rs new file mode 100644 index 000000000..89d361881 --- /dev/null +++ b/compiler-scripts/src/main.rs @@ -0,0 +1,119 @@ +use clap::Parser; +use console::style; + +use compiler_scripts::test_runner::{ + CategoryCommand, DeleteFixtureOutcome, RunArgs, TestCategory, accept_category, create_fixture, + delete_fixture, reject_category, run_category, +}; + +#[derive(Parser)] +#[command(about = "Compiler development scripts")] +struct Cli { + /// Test category: checking (c), lowering (l), resolving (r), lsp + category: TestCategory, + + #[command(flatten)] + args: RunArgs, +} + +fn main() { + let cli = Cli::parse(); + + if let Some(name) = &cli.args.create { + if let Err(error) = create_fixture(cli.category, name) { + eprintln!("{:#}", error); + std::process::exit(1); + } + return; + } + + if let Some(name) = &cli.args.delete { + match delete_fixture(cli.category, name, cli.args.confirm) { + Ok(DeleteFixtureOutcome { fixture_paths, snapshot_paths, confirmed }) => { + if confirmed { + for path in &fixture_paths { + println!( + "{} {}", + style("DELETED").red().bold(), + style(path.display()).cyan() + ); + } + for path in &snapshot_paths { + println!( + "{} {}", + style("DELETED").red().bold(), + style(path.display()).cyan() + ); + } + } else { + println!( + "{} pending deletion(s) in {}", + fixture_paths.len() + snapshot_paths.len(), + style(cli.category.as_str()).cyan() + ); + println!(); + for path in 
&fixture_paths { + println!(" {}", style(path.display()).dim()); + } + for path in &snapshot_paths { + println!(" {}", style(path.display()).dim()); + } + println!(); + println!( + "To delete, run: {}", + style(format!( + "just t {} --delete \"{}\" --confirm", + cli.category.as_str(), + name + )) + .cyan() + ); + } + } + Err(error) => { + eprintln!("{:#}", error); + std::process::exit(1); + } + } + return; + } + + match &cli.args.command { + Some(CategoryCommand::Accept(args)) => { + let outcome = match accept_category(cli.category, args) { + Ok(outcome) => outcome, + Err(error) => { + eprintln!("{:#}", error); + std::process::exit(1); + } + }; + if !outcome.success { + std::process::exit(1); + } + } + Some(CategoryCommand::Reject(args)) => { + let outcome = match reject_category(cli.category, args) { + Ok(outcome) => outcome, + Err(error) => { + eprintln!("{:#}", error); + std::process::exit(1); + } + }; + if !outcome.success { + std::process::exit(1); + } + } + None => { + let outcome = match run_category(cli.category, &cli.args) { + Ok(outcome) => outcome, + Err(error) => { + eprintln!("{:#}", error); + std::process::exit(1); + } + }; + if !outcome.tests_passed { + std::process::exit(1); + } + } + } +} diff --git a/compiler-scripts/src/test_runner.rs b/compiler-scripts/src/test_runner.rs new file mode 100644 index 000000000..1e9834133 --- /dev/null +++ b/compiler-scripts/src/test_runner.rs @@ -0,0 +1,138 @@ +mod category; +mod cli; +mod decision; +mod fixture; +mod nextest; +mod pending; +mod traces; +mod ui; + +pub use category::TestCategory; +pub use cli::{CategoryCommand, RunArgs, SnapshotArgs}; + +use std::path::PathBuf; +use std::time::Instant; + +use console::style; + +use crate::test_runner::ui::NextActionsArgs; + +pub struct TestOutcome { + pub tests_passed: bool, + pub pending_count: usize, + pub trace_paths: Vec, +} + +pub fn run_category(category: TestCategory, args: &RunArgs) -> anyhow::Result { + // 1. 
Hash fixtures and print timing + let start = Instant::now(); + let fixture_hashes = crate::fixtures::fixture_env(); + println!("{}", style(format!("Hashed fixtures in {}ms", start.elapsed().as_millis())).dim()); + + // 2. Run nextest + let tests_passed = nextest::run_nextest(category, args, &fixture_hashes)?; + + // 3. Collect trace paths + let trace_paths = traces::collect_trace_paths(&args.filters, args.debug); + + // 4. Process pending snapshots + let pending_result = pending::process_pending_snapshots(category, args, &trace_paths)?; + + // 5. Print next actions + ui::print_next_actions(NextActionsArgs { + category_name: category.as_str(), + filters: &args.filters, + tests_passed, + pending_count: pending_result.count, + excluded_count: pending_result.excluded_count, + total_lines_changed: pending_result.total_lines_changed, + trace_paths: &trace_paths, + debug: args.debug, + showed_diffs: args.diff, + }); + + Ok(TestOutcome { tests_passed, pending_count: pending_result.count, trace_paths }) +} + +pub struct SnapshotOutcome { + pub success: bool, + pub count: usize, +} + +pub fn accept_category( + category: TestCategory, + args: &SnapshotArgs, +) -> anyhow::Result { + let snapshots = pending::collect_pending_snapshots(category, &args.filters)?; + + if snapshots.is_empty() { + println!("{}", style("No pending snapshots found.").dim()); + return Ok(SnapshotOutcome { success: true, count: 0 }); + } + + if !args.all && args.filters.is_empty() { + println!("{} pending snapshot(s) in {}", snapshots.len(), style(category.as_str()).cyan()); + println!(); + for info in &snapshots { + println!(" {}", style(&info.short_path).dim()); + } + println!(); + println!( + "To accept all, run: {}", + style(format!("just t {} accept --all", category.as_str())).cyan() + ); + return Ok(SnapshotOutcome { success: false, count: 0 }); + } + + let result = pending::accept_snapshots(&snapshots); + println!(); + println!("{}", style(format!("Accepted {} snapshot(s)", 
result.accepted)).green()); + + Ok(SnapshotOutcome { success: result.failed == 0, count: result.accepted }) +} + +pub fn reject_category( + category: TestCategory, + args: &SnapshotArgs, +) -> anyhow::Result { + let snapshots = pending::collect_pending_snapshots(category, &args.filters)?; + + if snapshots.is_empty() { + println!("{}", style("No pending snapshots found.").dim()); + return Ok(SnapshotOutcome { success: true, count: 0 }); + } + + if !args.all && args.filters.is_empty() { + println!("{} pending snapshot(s) in {}", snapshots.len(), style(category.as_str()).cyan()); + println!(); + for info in &snapshots { + println!(" {}", style(&info.short_path).dim()); + } + println!(); + println!( + "To reject all, run: {}", + style(format!("just t {} reject --all", category.as_str())).cyan() + ); + return Ok(SnapshotOutcome { success: false, count: 0 }); + } + + let result = pending::reject_snapshots(&snapshots); + println!(); + println!("{}", style(format!("Rejected {} snapshot(s)", result.rejected)).red()); + + Ok(SnapshotOutcome { success: result.failed == 0, count: result.rejected }) +} + +pub use fixture::DeleteFixtureOutcome; + +pub fn create_fixture(category: TestCategory, name: &str) -> anyhow::Result<()> { + fixture::create_fixture(category, name).map(|_| ()) +} + +pub fn delete_fixture( + category: TestCategory, + name: &str, + confirm: bool, +) -> anyhow::Result { + fixture::delete_fixture(category, name, confirm) +} diff --git a/compiler-scripts/src/test_runner/category.rs b/compiler-scripts/src/test_runner/category.rs new file mode 100644 index 000000000..932770e25 --- /dev/null +++ b/compiler-scripts/src/test_runner/category.rs @@ -0,0 +1,64 @@ +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +use anyhow::bail; + +#[derive(Copy, Clone, Debug)] +pub enum TestCategory { + Checking, + Lowering, + Resolving, + Lsp, +} + +impl TestCategory { + pub fn as_str(&self) -> &'static str { + match self { + TestCategory::Checking => "checking", + 
TestCategory::Lowering => "lowering", + TestCategory::Resolving => "resolving", + TestCategory::Lsp => "lsp", + } + } + + pub fn fixtures_subdir_fragment(&self) -> String { + format!("tests-integration/fixtures/{}", self.as_str()) + } + + pub fn snapshot_path_fragments(&self) -> Vec { + vec![ + format!("tests-integration/fixtures/{}", self.as_str()), + format!("tests-integration/tests/snapshots/{}__", self.as_str()), + ] + } + + pub fn extra_env(&self, debug: bool) -> Vec<(&'static str, String)> { + if debug { vec![("TRACE_LEVEL", "debug".to_string())] } else { vec![] } + } + + pub fn trace_for_snapshot(&self, snap_path: &Path, trace_paths: &[PathBuf]) -> Option { + match self { + TestCategory::Checking => { + crate::test_runner::traces::match_checking_trace(snap_path, trace_paths) + } + _ => None, + } + } +} + +impl FromStr for TestCategory { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "checking" | "c" => Ok(TestCategory::Checking), + "lowering" | "l" => Ok(TestCategory::Lowering), + "resolving" | "r" => Ok(TestCategory::Resolving), + "lsp" => Ok(TestCategory::Lsp), + _ => bail!( + "unknown test category '{}', expected: checking (c), lowering (l), resolving (r), lsp", + s + ), + } + } +} diff --git a/compiler-scripts/src/test_runner/cli.rs b/compiler-scripts/src/test_runner/cli.rs new file mode 100644 index 000000000..fae93f76d --- /dev/null +++ b/compiler-scripts/src/test_runner/cli.rs @@ -0,0 +1,63 @@ +use clap::{Args, Subcommand}; + +#[derive(Subcommand, Clone, Debug)] +pub enum CategoryCommand { + /// Accept pending snapshots for this category + Accept(SnapshotArgs), + /// Reject pending snapshots for this category + Reject(SnapshotArgs), +} + +#[derive(Args, Clone, Debug)] +pub struct SnapshotArgs { + /// Snapshot filters (same as test filters) + #[arg(num_args = 0..)] + pub filters: Vec, + + /// Accept/reject all pending snapshots (required when no filters provided) + #[arg(long)] + pub all: bool, +} 
+ +#[derive(Args, Clone, Debug)] +pub struct RunArgs { + /// Create a new fixture directory with a template file + #[arg(long, value_name = "NAME")] + pub create: Option, + + /// Delete a fixture directory (dry-run unless --confirm) + #[arg(long, value_name = "NAME")] + pub delete: Option, + + /// Confirm deletion for --delete + #[arg(long)] + pub confirm: bool, + + /// Subcommand (accept/reject) or test filters + #[command(subcommand)] + pub command: Option, + + /// Test name or number filters (passed through to nextest) + #[arg(num_args = 0..)] + pub filters: Vec, + + /// Verbose output (show test progress) + #[arg(short, long)] + pub verbose: bool, + + /// Enable tracing output for debugging + #[arg(long)] + pub debug: bool, + + /// Show full diffs (by default only shows summary to reduce output) + #[arg(long)] + pub diff: bool, + + /// Maximum number of snapshots to show (default: 3) + #[arg(long, default_value = "3")] + pub count: usize, + + /// Exclude snapshots matching pattern (substring match, repeatable) + #[arg(long)] + pub exclude: Vec, +} diff --git a/compiler-scripts/src/test_runner/decision.rs b/compiler-scripts/src/test_runner/decision.rs new file mode 100644 index 000000000..78ba6540e --- /dev/null +++ b/compiler-scripts/src/test_runner/decision.rs @@ -0,0 +1,205 @@ +/// Decision logic for test runner output, separated from rendering for testability. 
+ +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct DecisionInput { + pub tests_passed: bool, + pub pending_count: usize, + pub total_lines_changed: usize, + pub showed_diffs: bool, + pub ran_all: bool, + pub debug: bool, + pub trace_count: usize, + pub max_count: usize, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Outcome { + Clean, + Failure(FailureDecision), + Pending(PendingDecision), +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct FailureDecision { + pub show_debug_hint: bool, + pub show_trace_hint: bool, + pub max_traces_to_show: usize, + pub pending_note: Option, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PendingDecision { + pub show_lines_changed: bool, + pub next_action: NextAction, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum NextAction { + AcceptOrReject, + ReviewSubset, + ShowDiff, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct SnapshotDisplayLimits { + pub max_shown: usize, +} + +pub fn decide_outcome(input: &DecisionInput) -> Outcome { + let has_pending = input.pending_count > 0; + let has_traces = input.trace_count > 0; + + if input.tests_passed && !has_pending { + return Outcome::Clean; + } + + if !input.tests_passed { + return Outcome::Failure(FailureDecision { + show_debug_hint: !input.debug, + show_trace_hint: input.debug && has_traces, + max_traces_to_show: 3, + pending_note: if has_pending { Some(input.pending_count) } else { None }, + }); + } + + let many_pending = input.ran_all && input.pending_count > 3; + + let next_action = if input.showed_diffs { + NextAction::AcceptOrReject + } else if many_pending { + NextAction::ReviewSubset + } else { + NextAction::ShowDiff + }; + + Outcome::Pending(PendingDecision { + show_lines_changed: input.total_lines_changed > 50, + next_action, + }) +} + +pub fn decide_snapshot_limits(input: &DecisionInput) -> SnapshotDisplayLimits { + let max_shown = if input.showed_diffs { usize::MAX } else { input.max_count }; + + SnapshotDisplayLimits { max_shown } +} + 
+#[cfg(test)] +mod tests { + use super::*; + + fn base_input() -> DecisionInput { + DecisionInput { + tests_passed: true, + pending_count: 0, + total_lines_changed: 0, + showed_diffs: false, + ran_all: true, + debug: false, + trace_count: 0, + max_count: 3, + } + } + + #[test] + fn clean_when_passed_no_pending() { + let input = base_input(); + assert_eq!(decide_outcome(&input), Outcome::Clean); + } + + #[test] + fn failure_suggests_debug_when_not_in_debug_mode() { + let input = DecisionInput { tests_passed: false, ..base_input() }; + let Outcome::Failure(decision) = decide_outcome(&input) else { + panic!("expected Failure"); + }; + assert!(decision.show_debug_hint); + assert!(!decision.show_trace_hint); + } + + #[test] + fn failure_suggests_traces_when_in_debug_mode_with_traces() { + let input = + DecisionInput { tests_passed: false, debug: true, trace_count: 2, ..base_input() }; + let Outcome::Failure(decision) = decide_outcome(&input) else { + panic!("expected Failure"); + }; + assert!(!decision.show_debug_hint); + assert!(decision.show_trace_hint); + } + + #[test] + fn failure_notes_pending_count() { + let input = DecisionInput { tests_passed: false, pending_count: 5, ..base_input() }; + let Outcome::Failure(decision) = decide_outcome(&input) else { + panic!("expected Failure"); + }; + assert_eq!(decision.pending_note, Some(5)); + } + + #[test] + fn pending_accept_reject_after_diff() { + let input = DecisionInput { pending_count: 1, showed_diffs: true, ..base_input() }; + let Outcome::Pending(decision) = decide_outcome(&input) else { + panic!("expected Pending"); + }; + assert_eq!(decision.next_action, NextAction::AcceptOrReject); + } + + #[test] + fn pending_review_subset_when_many() { + let input = DecisionInput { pending_count: 10, ran_all: true, ..base_input() }; + let Outcome::Pending(decision) = decide_outcome(&input) else { + panic!("expected Pending"); + }; + assert_eq!(decision.next_action, NextAction::ReviewSubset); + } + + #[test] + fn 
pending_show_diff_for_small_batch() { + let input = DecisionInput { pending_count: 2, ran_all: true, ..base_input() }; + let Outcome::Pending(decision) = decide_outcome(&input) else { + panic!("expected Pending"); + }; + assert_eq!(decision.next_action, NextAction::ShowDiff); + } + + #[test] + fn pending_shows_lines_changed_when_large() { + let input = DecisionInput { pending_count: 1, total_lines_changed: 100, ..base_input() }; + let Outcome::Pending(decision) = decide_outcome(&input) else { + panic!("expected Pending"); + }; + assert!(decision.show_lines_changed); + } + + #[test] + fn snapshot_limits_max_when_diff_enabled() { + let input = DecisionInput { showed_diffs: true, ..base_input() }; + let limits = decide_snapshot_limits(&input); + assert_eq!(limits.max_shown, usize::MAX); + } + + #[test] + fn snapshot_limits_uses_max_count() { + let input = DecisionInput { max_count: 10, ..base_input() }; + let limits = decide_snapshot_limits(&input); + assert_eq!(limits.max_shown, 10); + } + + #[test] + fn snapshot_limits_default_count() { + let input = base_input(); + let limits = decide_snapshot_limits(&input); + assert_eq!(limits.max_shown, 3); + } + + #[test] + fn filtered_run_not_considered_many() { + let input = DecisionInput { pending_count: 10, ran_all: false, ..base_input() }; + let Outcome::Pending(decision) = decide_outcome(&input) else { + panic!("expected Pending"); + }; + assert_eq!(decision.next_action, NextAction::ShowDiff); + } +} diff --git a/compiler-scripts/src/test_runner/fixture.rs b/compiler-scripts/src/test_runner/fixture.rs new file mode 100644 index 000000000..4f79772fe --- /dev/null +++ b/compiler-scripts/src/test_runner/fixture.rs @@ -0,0 +1,188 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use anyhow::{Context, bail}; +use console::style; +use heck::ToSnakeCase; + +use crate::test_runner::TestCategory; + +const MAIN_TEMPLATE: &str = "module Main where\n\n"; + +pub fn create_fixture(category: TestCategory, name: &str) -> 
anyhow::Result { + let fixtures_dir = PathBuf::from(category.fixtures_subdir_fragment()); + if !fixtures_dir.is_dir() { + bail!("fixtures directory '{}' does not exist", fixtures_dir.display()); + } + + let next_number = next_fixture_number(&fixtures_dir)?; + let slug = slugify(name)?; + let folder_name = format!("{:03}_{}", next_number, slug); + let folder_path = fixtures_dir.join(&folder_name); + + if folder_path.exists() { + bail!("fixture '{}' already exists", folder_path.display()); + } + + fs::create_dir_all(&folder_path).with_context(|| { + format!("failed to create fixture directory '{}'", folder_path.display()) + })?; + + let main_path = folder_path.join("Main.purs"); + fs::write(&main_path, MAIN_TEMPLATE) + .with_context(|| format!("failed to write '{}'", main_path.display()))?; + + println!("{} {}", style("CREATED").green().bold(), style(main_path.display()).cyan()); + println!(); + println!( + " {} {}", + style("Next:").dim(), + style(format!("just t {} {:03}", category.as_str(), next_number)).cyan() + ); + + Ok(folder_path) +} + +pub struct DeleteFixtureOutcome { + pub fixture_paths: Vec, + pub snapshot_paths: Vec, + pub confirmed: bool, +} + +pub fn delete_fixture( + category: TestCategory, + name: &str, + confirm: bool, +) -> anyhow::Result { + let fixtures_dir = PathBuf::from(category.fixtures_subdir_fragment()); + if !fixtures_dir.is_dir() { + bail!("fixtures directory '{}' does not exist", fixtures_dir.display()); + } + + let fixture_paths = resolve_fixture_paths(&fixtures_dir, name)?; + let mut snapshot_paths = Vec::new(); + for fixture_path in &fixture_paths { + snapshot_paths.extend(find_snapshot_paths(category, fixture_path)?); + } + + if confirm { + for fixture_path in &fixture_paths { + if fixture_path.exists() { + fs::remove_dir_all(fixture_path).with_context(|| { + format!("failed to delete fixture directory '{}'", fixture_path.display()) + })?; + } + } + + for snapshot_path in &snapshot_paths { + if snapshot_path.exists() { + 
fs::remove_file(snapshot_path).with_context(|| { + format!("failed to delete snapshot '{}'", snapshot_path.display()) + })?; + } + } + } + + Ok(DeleteFixtureOutcome { fixture_paths, snapshot_paths, confirmed: confirm }) +} + +fn next_fixture_number(fixtures_dir: &Path) -> anyhow::Result { + let mut max_number = 0; + let entries = fs::read_dir(fixtures_dir) + .with_context(|| format!("failed to read '{}'", fixtures_dir.display()))?; + + for entry in entries { + let entry = entry.context("failed to read entry")?; + let name = entry.file_name(); + let name = name.to_string_lossy(); + let Some((prefix, _)) = name.split_once('_') else { + continue; + }; + if prefix.len() != 3 || !prefix.chars().all(|ch| ch.is_ascii_digit()) { + continue; + } + if let Ok(number) = prefix.parse::() { + max_number = max_number.max(number); + } + } + + Ok(max_number + 1) +} + +fn resolve_fixture_paths(fixtures_dir: &Path, name: &str) -> anyhow::Result> { + let slug = slugify(name)?; + let mut matches = find_matching_fixtures(fixtures_dir, &slug)?; + if matches.is_empty() && name.chars().all(|ch| ch.is_ascii_digit()) { + matches = find_matching_fixtures(fixtures_dir, name)?; + } + + if matches.is_empty() { + bail!("no fixture found matching '{}' in '{}'", name, fixtures_dir.display()); + } + + matches.sort(); + Ok(matches) +} + +fn find_matching_fixtures(fixtures_dir: &Path, needle: &str) -> anyhow::Result> { + let mut matches = Vec::new(); + let entries = fs::read_dir(fixtures_dir) + .with_context(|| format!("failed to read '{}'", fixtures_dir.display()))?; + + for entry in entries { + let entry = entry.context("failed to read entry")?; + if !entry.path().is_dir() { + continue; + } + let name = entry.file_name(); + let name = name.to_string_lossy(); + if name.contains(needle) { + matches.push(entry.path()); + } + } + + Ok(matches) +} + +fn find_snapshot_paths( + category: TestCategory, + fixture_path: &Path, +) -> anyhow::Result> { + let fixture_name = fixture_path + .file_name() + 
.and_then(|name| name.to_str()) + .context("fixture path is missing a valid folder name")?; + let fixture_slug = slugify(fixture_name).unwrap_or_else(|_| fixture_name.to_string()); + let mut paths = Vec::new(); + + for fragment in category.snapshot_path_fragments() { + let base = PathBuf::from(fragment); + if !base.exists() { + continue; + } + for entry in + fs::read_dir(&base).with_context(|| format!("failed to read '{}'", base.display()))? + { + let entry = entry.context("failed to read entry")?; + if !entry.path().is_file() { + continue; + } + let file_name = entry.file_name(); + let file_name = file_name.to_string_lossy(); + if file_name.contains(&fixture_slug) { + paths.push(entry.path()); + } + } + } + + paths.sort(); + paths.dedup(); + Ok(paths) +} + +fn slugify(input: &str) -> anyhow::Result { + if input.is_empty() { + bail!("fixture name must not be empty"); + } + Ok(input.to_snake_case()) +} diff --git a/compiler-scripts/src/test_runner/nextest.rs b/compiler-scripts/src/test_runner/nextest.rs new file mode 100644 index 000000000..94db04286 --- /dev/null +++ b/compiler-scripts/src/test_runner/nextest.rs @@ -0,0 +1,81 @@ +use std::collections::HashMap; +use std::process::{Command, Stdio}; + +use anyhow::Context; +use console::style; + +use crate::test_runner::category::TestCategory; +use crate::test_runner::cli::RunArgs; + +pub fn build_nextest_command( + category: TestCategory, + args: &RunArgs, + fixture_hashes: &HashMap, +) -> Command { + let mut cmd = Command::new("cargo"); + cmd.arg("nextest") + .arg("run") + .arg("-p") + .arg("tests-integration") + .arg("--test") + .arg(category.as_str()); + + for (key, value) in category.extra_env(args.debug) { + cmd.env(key, value); + } + + for filter in &args.filters { + cmd.arg(filter); + } + + if args.verbose { + cmd.arg("--status-level=fail"); + cmd.arg("--color=always"); + } else { + cmd.arg("--status-level=none"); + } + + cmd.env("INSTA_FORCE_PASS", "1"); + for (key, value) in fixture_hashes { + cmd.env(key, 
value); + } + + cmd +} + +pub fn run_nextest( + category: TestCategory, + args: &RunArgs, + fixture_hashes: &HashMap, +) -> anyhow::Result { + let mut cmd = build_nextest_command(category, args, fixture_hashes); + + if args.verbose { + let status = cmd.status().context("failed to run cargo nextest")?; + Ok(status.success()) + } else { + cmd.stdout(Stdio::null()).stderr(Stdio::null()); + let status = cmd.status().context("failed to run cargo nextest")?; + + if !status.success() { + eprintln!("{}", style("Tests failed, re-running verbose...").yellow()); + + let verbose_args = RunArgs { + create: None, + delete: None, + confirm: false, + command: None, + filters: args.filters.clone(), + verbose: true, + debug: args.debug, + diff: args.diff, + count: args.count, + exclude: args.exclude.clone(), + }; + let mut retry = build_nextest_command(category, &verbose_args, fixture_hashes); + let _ = retry.status(); + } + + Ok(status.success()) + } +} diff --git a/compiler-scripts/src/test_runner/pending.rs b/compiler-scripts/src/test_runner/pending.rs new file mode 100644 index 000000000..9de121789 --- /dev/null +++ b/compiler-scripts/src/test_runner/pending.rs @@ -0,0 +1,276 @@ +use std::path::{Path, PathBuf}; +use std::process::{Command, Stdio}; +use std::{env, fs}; + +use anyhow::Context; +use console::style; +use serde::Deserialize; + +use crate::test_runner::category::TestCategory; +use crate::test_runner::cli::RunArgs; +use crate::test_runner::decision::DecisionInput; +use crate::test_runner::{decision, ui}; + +#[derive(Deserialize)] +struct PendingSnapshotJson { + path: String, +} + +pub struct PendingResult { + pub count: usize, + pub excluded_count: usize, + pub total_lines_changed: usize, +} + +pub struct SnapshotInfo { + pub snapshot_path: String, + pub short_path: String, + pub snap_new: PathBuf, + pub is_update: bool, + pub trace_path: Option, +} + +impl SnapshotInfo { + /// Returns the short path with `.new` suffix for display (e.g., `foo.snap.new`) + pub fn 
short_path_new(&self) -> String { + format!("{}.new", self.short_path) + } +} + +/// Collect pending snapshots for a category, optionally filtered. +pub fn collect_pending_snapshots( + category: TestCategory, + filters: &[String], +) -> anyhow::Result> { + let pending_output = Command::new("cargo") + .arg("insta") + .arg("pending-snapshots") + .arg("--as-json") + .stderr(Stdio::null()) + .output() + .context("failed to run cargo insta")?; + + let pending = String::from_utf8_lossy(&pending_output.stdout); + let pending = pending.trim(); + + let cwd = env::current_dir().context("failed to get working directory")?; + let path_fragments = category.snapshot_path_fragments(); + + let mut snapshots = Vec::new(); + + for line in pending.lines() { + let line = line.trim(); + if line.is_empty() { + continue; + } + + let snapshot_path = if let Ok(snapshot) = serde_json::from_str::(line) + { + snapshot.path + } else { + continue; + }; + + if !path_fragments.iter().any(|f| snapshot_path.contains(f)) { + continue; + } + + if !filters.is_empty() && !filters.iter().any(|f| snapshot_path.contains(f)) { + continue; + } + + let short_path = snapshot_path + .strip_prefix(cwd.to_str().unwrap_or("")) + .unwrap_or(&snapshot_path) + .trim_start_matches('/') + .to_string(); + + let snap_new = PathBuf::from(format!("{}.new", snapshot_path)); + let is_update = Path::new(&snapshot_path).exists(); + + snapshots.push(SnapshotInfo { + snapshot_path, + short_path, + snap_new, + is_update, + trace_path: None, // Populated separately if needed + }); + } + + Ok(snapshots) +} + +fn collect_exclusion_patterns(args: &RunArgs) -> Vec { + let mut patterns = args.exclude.clone(); + + if let Ok(env_patterns) = env::var("EXCLUDE_SNAPSHOTS") { + for pattern in env_patterns.split(',') { + let pattern = pattern.trim(); + if !pattern.is_empty() { + patterns.push(pattern.to_string()); + } + } + } + + patterns +} + +fn apply_exclusions( + snapshots: Vec, + patterns: &[String], +) -> (Vec, usize) { + if 
patterns.is_empty() { + return (snapshots, 0); + } + + let (excluded, visible): (Vec<_>, Vec<_>) = snapshots + .into_iter() + .partition(|info| patterns.iter().any(|p| info.short_path.contains(p))); + + (visible, excluded.len()) +} + +pub fn process_pending_snapshots( + category: TestCategory, + args: &RunArgs, + trace_paths: &[PathBuf], +) -> anyhow::Result { + let mut snapshots = collect_pending_snapshots(category, &args.filters)?; + + // Populate trace paths + for info in &mut snapshots { + info.trace_path = category.trace_for_snapshot(Path::new(&info.snapshot_path), trace_paths); + } + + // Apply exclusion filters + let exclusion_patterns = collect_exclusion_patterns(args); + let (visible, excluded_count) = apply_exclusions(snapshots, &exclusion_patterns); + + if excluded_count > 0 { + println!( + "{}", + style(format!("info: excluded {} snapshot(s) by pattern", excluded_count)).dim() + ); + } + + let pending_count = visible.len(); + let mut total_lines_changed = 0; + + let limits = decision::decide_snapshot_limits(&DecisionInput { + tests_passed: true, // not relevant for snapshot limits + pending_count, + total_lines_changed: 0, // not known yet, not relevant for limits + showed_diffs: args.diff, + ran_all: args.filters.is_empty(), + debug: args.debug, + trace_count: trace_paths.len(), + max_count: args.count, + }); + + let max_shown = limits.max_shown; + + for info in visible.iter().take(max_shown) { + let snap = Path::new(&info.snapshot_path); + let stats = if info.is_update { + ui::display_snapshot_diff( + snap, + &info.snap_new, + &info.short_path_new(), + info.trace_path.as_deref(), + args.diff, + ) + } else { + ui::display_new_snapshot( + &info.snap_new, + &info.short_path_new(), + info.trace_path.as_deref(), + args.diff, + ) + }; + total_lines_changed += stats.added + stats.removed; + } + + if pending_count > max_shown { + let hidden = pending_count - max_shown; + println!( + "{}", + style(format!("...and {} more pending snapshot(s) not shown", 
hidden)).dim() + ); + } + + Ok(PendingResult { count: visible.len(), excluded_count, total_lines_changed }) +} + +pub struct AcceptRejectResult { + pub accepted: usize, + pub rejected: usize, + pub failed: usize, +} + +pub fn accept_snapshots(snapshots: &[SnapshotInfo]) -> AcceptRejectResult { + let mut accepted = 0; + let mut failed = 0; + + for info in snapshots { + if !info.snap_new.exists() { + println!( + "{} {} (missing .new file)", + style("SKIP").yellow().bold(), + style(&info.short_path).cyan() + ); + failed += 1; + continue; + } + + let snap_path = Path::new(&info.snapshot_path); + + // Remove existing snapshot if present + if snap_path.exists() + && let Err(e) = fs::remove_file(snap_path) + { + println!("{} {} ({})", style("FAIL").red().bold(), style(&info.short_path).cyan(), e); + failed += 1; + continue; + } + + // Rename .new to .snap + if let Err(e) = fs::rename(&info.snap_new, snap_path) { + println!("{} {} ({})", style("FAIL").red().bold(), style(&info.short_path).cyan(), e); + failed += 1; + continue; + } + + println!("{} {}", style("ACCEPTED").green().bold(), style(&info.short_path).cyan()); + accepted += 1; + } + + AcceptRejectResult { accepted, rejected: 0, failed } +} + +pub fn reject_snapshots(snapshots: &[SnapshotInfo]) -> AcceptRejectResult { + let mut rejected = 0; + let mut failed = 0; + + for info in snapshots { + if !info.snap_new.exists() { + println!( + "{} {} (missing .new file)", + style("SKIP").yellow().bold(), + style(&info.short_path).cyan() + ); + failed += 1; + continue; + } + + if let Err(e) = fs::remove_file(&info.snap_new) { + println!("{} {} ({})", style("FAIL").red().bold(), style(&info.short_path).cyan(), e); + failed += 1; + continue; + } + + println!("{} {}", style("REJECTED").red().bold(), style(&info.short_path).cyan()); + rejected += 1; + } + + AcceptRejectResult { accepted: 0, rejected, failed } +} diff --git a/compiler-scripts/src/test_runner/traces.rs b/compiler-scripts/src/test_runner/traces.rs new file mode 
100644 index 000000000..4e2bc888a --- /dev/null +++ b/compiler-scripts/src/test_runner/traces.rs @@ -0,0 +1,62 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +pub const TRACE_DIR: &str = "target/compiler-tracing"; + +pub fn collect_trace_paths(filters: &[String], debug: bool) -> Vec { + if !debug { + return Vec::new(); + } + + let trace_dir = PathBuf::from(TRACE_DIR); + if !trace_dir.exists() { + return Vec::new(); + } + + let Ok(entries) = fs::read_dir(&trace_dir) else { + return Vec::new(); + }; + + let mut entries: Vec<_> = entries.filter_map(|e| e.ok()).collect(); + entries.sort_by_key(|e| e.path()); + + let mut trace_paths = Vec::new(); + for entry in entries { + let path = entry.path(); + if path.extension().is_some_and(|ext| ext == "jsonl") { + let file_name = path.file_name().unwrap_or_default().to_string_lossy(); + + if !filters.is_empty() && !filters.iter().any(|f| file_name.contains(f)) { + continue; + } + + trace_paths.push(path); + } + } + + trace_paths +} + +/// Finds a trace file that matches the given snapshot path. +/// +/// Snapshot paths look like: `.../fixtures/checking/200_int_compare_transitive/Main.snap` +/// Trace files look like: `200_int_compare_transitive_Main.jsonl` +/// +/// We extract the test identifier (e.g., `200_int_compare_transitive`) from the snapshot's +/// parent directory and the module name from the file, then find a matching trace file. 
+pub fn match_checking_trace(snap_path: &Path, trace_paths: &[PathBuf]) -> Option { + let module_name = snap_path.file_stem()?.to_str()?; + let test_id = snap_path.parent()?.file_name()?.to_str()?; + + let expected_trace_name = format!("{}_{}.jsonl", test_id, module_name); + + trace_paths + .iter() + .find(|trace_path| { + trace_path + .file_name() + .and_then(|name| name.to_str()) + .is_some_and(|name| name == expected_trace_name) + }) + .cloned() +} diff --git a/compiler-scripts/src/test_runner/ui.rs b/compiler-scripts/src/test_runner/ui.rs new file mode 100644 index 000000000..74a3c56d3 --- /dev/null +++ b/compiler-scripts/src/test_runner/ui.rs @@ -0,0 +1,252 @@ +use std::fs; +use std::path::{Path, PathBuf}; + +use similar::{ChangeTag, TextDiff}; + +use crate::console::style; +use crate::snapshots::{print_diff, strip_frontmatter}; +use crate::test_runner::decision; +use crate::test_runner::decision::{ + DecisionInput, FailureDecision, NextAction, Outcome, PendingDecision, +}; + +pub struct SnapshotStats { + pub added: usize, + pub removed: usize, +} + +fn count_diff_lines(old: &str, new: &str) -> SnapshotStats { + let diff = TextDiff::from_lines(old, new); + let mut added = 0; + let mut removed = 0; + + for change in diff.iter_all_changes() { + match change.tag() { + ChangeTag::Delete => removed += 1, + ChangeTag::Insert => added += 1, + ChangeTag::Equal => {} + } + } + + SnapshotStats { added, removed } +} + +pub fn display_snapshot_diff( + snap: &Path, + snap_new: &Path, + short_path: &str, + trace_path: Option<&Path>, + show_diff: bool, +) -> SnapshotStats { + let old_content = fs::read_to_string(snap).unwrap_or_default(); + let new_content = fs::read_to_string(snap_new).unwrap_or_default(); + + let old_stripped = strip_frontmatter(&old_content); + let new_stripped = strip_frontmatter(&new_content); + + let stats = count_diff_lines(old_stripped, new_stripped); + + print!("{} {}", style("UPDATED").yellow().bold(), style(short_path).cyan()); + println!( + " 
({}, {})", + style(format!("+{}", stats.added)).green(), + style(format!("-{}", stats.removed)).red() + ); + + if let Some(trace) = trace_path { + println!(" {} {}", style("TRACE").magenta().bold(), style(trace.display()).cyan()); + } + + if show_diff { + println!(); + print_diff(old_stripped, new_stripped); + println!(); + } + + stats +} + +pub fn display_new_snapshot( + snap_new: &Path, + short_path: &str, + trace_path: Option<&Path>, + show_diff: bool, +) -> SnapshotStats { + let new_content = fs::read_to_string(snap_new).unwrap_or_default(); + let new_stripped = strip_frontmatter(&new_content); + let line_count = new_stripped.lines().count(); + + print!("{} {}", style("CREATED").green().bold(), style(short_path).cyan()); + println!(" ({})", style(format!("+{}", line_count)).green()); + + if let Some(trace) = trace_path { + println!(" {} {}", style("TRACE").magenta().bold(), style(trace.display()).cyan()); + } + + if show_diff { + println!(); + for (i, line) in new_stripped.lines().enumerate() { + println!("{} {}", style(format!("{:3}", i + 1)).dim(), line); + } + println!(); + } + + SnapshotStats { added: line_count, removed: 0 } +} + +pub struct NextActionsArgs<'a> { + pub category_name: &'a str, + pub filters: &'a [String], + pub tests_passed: bool, + pub pending_count: usize, + pub excluded_count: usize, + pub total_lines_changed: usize, + pub trace_paths: &'a [PathBuf], + pub debug: bool, + pub showed_diffs: bool, +} + +pub fn print_next_actions(args: NextActionsArgs<'_>) { + let NextActionsArgs { + category_name, + filters, + tests_passed, + pending_count, + excluded_count, + total_lines_changed, + trace_paths, + debug, + showed_diffs, + } = args; + + let ran_all = filters.is_empty(); + let filters_str = if ran_all { String::new() } else { format!(" {}", filters.join(" ")) }; + + let input = DecisionInput { + tests_passed, + pending_count, + total_lines_changed, + showed_diffs, + ran_all, + debug, + trace_count: trace_paths.len(), + max_count: 3, // not 
used for outcome decisions + }; + + match decision::decide_outcome(&input) { + Outcome::Clean => { + println!("{}", style("All tests passed, no pending snapshots.").green()); + } + Outcome::Failure(decision) => { + render_failure(&decision, category_name, &filters_str, trace_paths); + } + Outcome::Pending(decision) => { + render_pending( + &decision, + category_name, + &filters_str, + pending_count, + excluded_count, + total_lines_changed, + ); + } + } +} + +fn render_failure( + decision: &FailureDecision, + category_name: &str, + filters_str: &str, + trace_paths: &[PathBuf], +) { + println!("{}", style("-".repeat(60)).dim()); + println!(); + println!("{}", style("Tests failed.").red()); + + if decision.show_debug_hint { + println!( + " Next: {}", + style(format!("just t {} --debug{}", category_name, filters_str)).cyan() + ); + } else if decision.show_trace_hint { + println!(" Next: consult trace files below"); + } + + if !trace_paths.is_empty() { + println!(); + for trace in trace_paths.iter().take(decision.max_traces_to_show) { + println!(" {} {}", style("TRACE").magenta().bold(), style(trace.display()).cyan()); + } + if trace_paths.len() > decision.max_traces_to_show { + let hidden = trace_paths.len() - decision.max_traces_to_show; + println!(" {}", style(format!("...and {} more trace file(s)", hidden)).dim()); + } + } + + if let Some(count) = decision.pending_note { + println!(); + println!( + " {}", + style(format!("Note: {} pending snapshot(s); review after fixing failures", count)) + .dim() + ); + } + + println!(); +} + +fn render_pending( + decision: &PendingDecision, + category_name: &str, + filters_str: &str, + pending_count: usize, + excluded_count: usize, + total_lines_changed: usize, +) { + println!("{}", style("-".repeat(60)).dim()); + println!(); + + let header = if decision.show_lines_changed { + format!("{} pending snapshot(s), {} lines changed", pending_count, total_lines_changed) + } else if excluded_count > 0 { + format!( + "{} pending 
snapshot{} ({} excluded)", + pending_count, + if pending_count == 1 { "" } else { "s" }, + excluded_count + ) + } else { + format!("{} pending snapshot{}", pending_count, if pending_count == 1 { "" } else { "s" }) + }; + println!("{}", header); + println!(); + + match decision.next_action { + NextAction::AcceptOrReject => { + let accept_cmd = format_accept_reject_cmd(category_name, filters_str, "accept"); + let reject_cmd = format_accept_reject_cmd(category_name, filters_str, "reject"); + println!(" Next: {}", style(&accept_cmd).green()); + println!(" Or: {}", style(&reject_cmd).red()); + } + NextAction::ReviewSubset => { + println!(" Next: {}", style(format!("just t {} NNN --diff", category_name)).cyan()); + println!(" {}", style("Hint: Review 1-2 tests at a time").dim()); + } + NextAction::ShowDiff => { + println!( + " Next: {}", + style(format!("just t {}{} --diff", category_name, filters_str)).cyan() + ); + } + } + + println!(); +} + +fn format_accept_reject_cmd(category_name: &str, filters_str: &str, action: &str) -> String { + if filters_str.is_empty() { + format!("just t {} {} --all", category_name, action) + } else { + format!("just t {} {}{}", category_name, action, filters_str) + } +} diff --git a/docs/src/wasm/src/lib.rs b/docs/src/wasm/src/lib.rs index a61260a7d..d35764211 100644 --- a/docs/src/wasm/src/lib.rs +++ b/docs/src/wasm/src/lib.rs @@ -267,19 +267,16 @@ pub fn check(source: &str) -> JsValue { let mut errors = Vec::new(); for error in &checked.errors { + let message = |id| checked.error_messages[id].as_str(); let (kind, message) = match &error.kind { checking::error::ErrorKind::CannotUnify { t1, t2 } => { - let t1_pretty = pretty::print_global(engine, *t1); - let t2_pretty = pretty::print_global(engine, *t2); - ("CannotUnify".to_string(), format!("{t1_pretty} ~ {t2_pretty}")) + ("CannotUnify".to_string(), format!("{} ~ {}", message(*t1), message(*t2))) } checking::error::ErrorKind::NoInstanceFound { constraint } => { - let c_pretty = 
pretty::print_global(engine, *constraint); - ("NoInstanceFound".to_string(), c_pretty) + ("NoInstanceFound".to_string(), message(*constraint).to_string()) } checking::error::ErrorKind::AmbiguousConstraint { constraint } => { - let c_pretty = pretty::print_global(engine, *constraint); - ("AmbiguousConstraint".to_string(), c_pretty) + ("AmbiguousConstraint".to_string(), message(*constraint).to_string()) } _ => (format!("{:?}", error.kind), String::new()), }; diff --git a/justfile b/justfile index 83a26d7a2..643889572 100644 --- a/justfile +++ b/justfile @@ -24,9 +24,12 @@ coverage-html: @integration *args="": cargo nextest run -p tests-integration "$@" --status-level=fail --final-status-level=fail --failure-output=final -[doc("Run checking tests with snapshot diffing. Use --help for options.")] -@tc *args="": - cargo run -q -p compiler-scripts --bin test-checking -- {{args}} +[doc("Run integration tests with snapshot diffing: checking|lowering|resolving|lsp")] +@t *args="": + cargo run -q -p compiler-scripts --release -- "$@" + +@c *args="": + cargo run -q -p compiler-compatibility --release -- "$@" [doc("Apply clippy fixes and format")] fix: diff --git a/tests-integration/Cargo.toml b/tests-integration/Cargo.toml index d1fe8d5cb..48952baba 100644 --- a/tests-integration/Cargo.toml +++ b/tests-integration/Cargo.toml @@ -8,6 +8,7 @@ build = "build.rs" analyzer = { version = "0.1.0", path = "../compiler-lsp/analyzer" } async-lsp = "0.2.2" checking = { version = "0.1.0", path = "../compiler-core/checking" } +diagnostics = { version = "0.1.0", path = "../compiler-core/diagnostics" } tracing = "0.1.44" tracing-subscriber = { version = "0.3.22", features = ["env-filter", "json"] } files = { version = "0.1.0", path = "../compiler-core/files" } @@ -25,7 +26,7 @@ tabled = "0.20.0" url = "2.5.7" [build-dependencies] -convert_case = "0.8.0" +heck = "0.5" itertools = "0.14.0" [dev-dependencies] diff --git a/tests-integration/build.rs b/tests-integration/build.rs index 
036d6871f..f76345e33 100644 --- a/tests-integration/build.rs +++ b/tests-integration/build.rs @@ -2,7 +2,7 @@ use std::fs; use std::io::Write; use std::path::{Path, PathBuf}; -use convert_case::{Case, Converter}; +use heck::ToSnakeCase; use itertools::Itertools; fn read_dir<'output>(path: &Path) -> impl Iterator + use<'output> { @@ -44,10 +44,9 @@ fn run_test(folder: &str, file: &str) {{ ) .unwrap(); - let converter = Converter::new().to_case(Case::Snake); for folder in read_dir(Path::new("./fixtures/lsp")) { let Some(stem) = folder.file_stem() else { continue }; - let folder_name = converter.convert(stem.to_os_string().into_string().unwrap()); + let folder_name = stem.to_os_string().into_string().unwrap().to_snake_case(); writeln!( buffer, r#" @@ -73,14 +72,13 @@ fn run_test(folder: &str, file: &str) {{ settings.bind(|| insta::assert_snapshot!(file, report)); }}"#).unwrap(); - let converter = Converter::new().to_case(Case::Snake); for folder in read_dir(Path::new("./fixtures/lowering")) { let Some(stem) = folder.file_stem() else { continue }; - let folder_name = converter.convert(stem.to_os_string().into_string().unwrap()); + let folder_name = stem.to_os_string().into_string().unwrap().to_snake_case(); for file in read_purs_files(&folder) { let Some(file_stem) = file.file_stem() else { continue }; let file_name = file_stem.to_os_string().into_string().unwrap(); - let test_name = format!("{}_{}", folder_name, converter.convert(&file_name)); + let test_name = format!("{}_{}", folder_name, file_name.to_snake_case()); writeln!( buffer, r#" @@ -107,14 +105,13 @@ fn run_test(folder: &str, file: &str) {{ settings.bind(|| insta::assert_snapshot!(file, report)); }}"#).unwrap(); - let converter = Converter::new().to_case(Case::Snake); for folder in read_dir(Path::new("./fixtures/resolving")) { let Some(stem) = folder.file_stem() else { continue }; - let folder_name = converter.convert(stem.to_os_string().into_string().unwrap()); + let folder_name = 
stem.to_os_string().into_string().unwrap().to_snake_case(); for file in read_purs_files(&folder) { let Some(file_stem) = file.file_stem() else { continue }; let file_name = file_stem.to_os_string().into_string().unwrap(); - let test_name = format!("{}_{}", folder_name, converter.convert(&file_name)); + let test_name = format!("{}_{}", folder_name, file_name.to_snake_case()); writeln!( buffer, r#" @@ -157,10 +154,9 @@ fn run_test(folder: &str, file: &str) {{ settings.bind(|| insta::assert_snapshot!(file, report)); }}"#).unwrap(); - let converter = Converter::new().to_case(Case::Snake); for folder in read_dir(Path::new("./fixtures/checking")) { let Some(stem) = folder.file_stem() else { continue }; - let folder_name = converter.convert(stem.to_os_string().into_string().unwrap()); + let folder_name = stem.to_os_string().into_string().unwrap().to_snake_case(); // Skip the prelude folder - it's shared setup, not a test if folder_name == "prelude" { continue; diff --git a/tests-integration/fixtures/checking/006_type_synonym/Main.snap b/tests-integration/fixtures/checking/006_type_synonym/Main.snap index 47dc5a18b..a6ec3f83a 100644 --- a/tests-integration/fixtures/checking/006_type_synonym/Main.snap +++ b/tests-integration/fixtures/checking/006_type_synonym/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,7 +10,7 @@ Tuple :: Type -> Type -> Type AliasType :: Type AliasTypeType :: Type -> Type InferApply :: - forall (t4 :: Type) (t9 :: Type). ((t9 :: Type) -> (t4 :: Type)) -> (t9 :: Type) -> (t4 :: Type) + forall (t2 :: Type) (t7 :: Type). ((t7 :: Type) -> (t2 :: Type)) -> (t7 :: Type) -> (t2 :: Type) InferTuple :: Type -> Type -> Type CheckApply :: forall (x :: Type) (y :: Type). 
((x :: Type) -> (y :: Type)) -> (x :: Type) -> (y :: Type) @@ -27,8 +28,8 @@ AliasTypeType = Array Kind = :0 Type = :0 -InferApply = forall (t4 :: Type) (t9 :: Type) (f :: (t9 :: Type) -> (t4 :: Type)) (a :: (t9 :: Type)). - (f :: (t9 :: Type) -> (t4 :: Type)) (a :: (t9 :: Type)) +InferApply = forall (t2 :: Type) (t7 :: Type) (f :: (t7 :: Type) -> (t2 :: Type)) (a :: (t7 :: Type)). + (f :: (t7 :: Type) -> (t2 :: Type)) (a :: (t7 :: Type)) Quantified = :2 Kind = :0 Type = :2 diff --git a/tests-integration/fixtures/checking/010_class_basic/Main.snap b/tests-integration/fixtures/checking/010_class_basic/Main.snap index 108405ffb..a3a853a14 100644 --- a/tests-integration/fixtures/checking/010_class_basic/Main.snap +++ b/tests-integration/fixtures/checking/010_class_basic/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,4 +10,4 @@ Types Show :: Type -> Constraint Classes -class Show (&0 :: Type) +class Show (a :: Type) diff --git a/tests-integration/fixtures/checking/011_class_functor/Main.snap b/tests-integration/fixtures/checking/011_class_functor/Main.snap index 343f855fd..452312f99 100644 --- a/tests-integration/fixtures/checking/011_class_functor/Main.snap +++ b/tests-integration/fixtures/checking/011_class_functor/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -14,4 +15,4 @@ Types Functor :: (Type -> Type) -> Constraint Classes -class Functor (&0 :: Type -> Type) +class Functor (f :: Type -> Type) diff --git a/tests-integration/fixtures/checking/012_class_monad_state/Main.snap b/tests-integration/fixtures/checking/012_class_monad_state/Main.snap index 896f426f2..d09dc2593 100644 --- a/tests-integration/fixtures/checking/012_class_monad_state/Main.snap +++ b/tests-integration/fixtures/checking/012_class_monad_state/Main.snap @@ -1,5 +1,6 @@ --- source: 
tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,5 @@ Monad :: (Type -> Type) -> Constraint MonadState :: Type -> (Type -> Type) -> Constraint Classes -class Monad (&0 :: Type -> Type) -class Monad (&1 :: Type -> Type) <= MonadState (&0 :: Type) (&1 :: Type -> Type) +class Monad (m :: Type -> Type) +class Monad (m :: Type -> Type) <= MonadState (s :: Type) (m :: Type -> Type) diff --git a/tests-integration/fixtures/checking/013_class_phantom/Main.snap b/tests-integration/fixtures/checking/013_class_phantom/Main.snap index 014035979..ffc8a79d2 100644 --- a/tests-integration/fixtures/checking/013_class_phantom/Main.snap +++ b/tests-integration/fixtures/checking/013_class_phantom/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,4 +10,4 @@ Types Phantom :: forall (t0 :: Type). (t0 :: Type) -> Constraint Classes -class Phantom (&1 :: (&0 :: Type)) +class Phantom (a :: (t0 :: Type)) diff --git a/tests-integration/fixtures/checking/014_class_with_signature/Main.snap b/tests-integration/fixtures/checking/014_class_with_signature/Main.snap index 343f855fd..452312f99 100644 --- a/tests-integration/fixtures/checking/014_class_with_signature/Main.snap +++ b/tests-integration/fixtures/checking/014_class_with_signature/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -14,4 +15,4 @@ Types Functor :: (Type -> Type) -> Constraint Classes -class Functor (&0 :: Type -> Type) +class Functor (f :: Type -> Type) diff --git a/tests-integration/fixtures/checking/015_class_superclass/Main.snap b/tests-integration/fixtures/checking/015_class_superclass/Main.snap index 4df74fab0..ce05684aa 100644 --- a/tests-integration/fixtures/checking/015_class_superclass/Main.snap +++ b/tests-integration/fixtures/checking/015_class_superclass/Main.snap @@ -1,5 +1,6 @@ --- 
source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,5 +19,5 @@ Functor :: (Type -> Type) -> Constraint Applicative :: (Type -> Type) -> Constraint Classes -class Functor (&0 :: Type -> Type) -class Functor (&0 :: Type -> Type) <= Applicative (&0 :: Type -> Type) +class Functor (f :: Type -> Type) +class Functor (f :: Type -> Type) <= Applicative (f :: Type -> Type) diff --git a/tests-integration/fixtures/checking/026_row_empty/Main.snap b/tests-integration/fixtures/checking/026_row_empty/Main.snap index b2b8902db..f554f0a35 100644 --- a/tests-integration/fixtures/checking/026_row_empty/Main.snap +++ b/tests-integration/fixtures/checking/026_row_empty/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -21,12 +22,12 @@ EmptyRecord = {} Kind = :0 Type = :0 -TailOnly = forall (t7 :: Type) (r :: Row (t7 :: Type)). ( | (r :: Row (t7 :: Type)) ) +TailOnly = forall (t7 :: Type) (r :: Row (t7 :: Type)). (r :: Row (t7 :: Type)) Quantified = :1 Kind = :0 Type = :1 -TailOnlyRecord = forall (r :: Row Type). { | (r :: Row Type) } +TailOnlyRecord = forall (r :: Row Type). 
{| (r :: Row Type) } Quantified = :0 Kind = :0 Type = :1 diff --git a/tests-integration/fixtures/checking/027_type_constrained/Main.snap b/tests-integration/fixtures/checking/027_type_constrained/Main.snap index 0354e07f4..1ab5f67ae 100644 --- a/tests-integration/fixtures/checking/027_type_constrained/Main.snap +++ b/tests-integration/fixtures/checking/027_type_constrained/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -17,4 +18,4 @@ Showable = Show Int => Int Classes -class Show (&0 :: Type) +class Show (a :: Type) diff --git a/tests-integration/fixtures/checking/032_recursive_synonym_expansion/Main.snap b/tests-integration/fixtures/checking/032_recursive_synonym_expansion/Main.snap index 91ec8369d..9cac43c79 100644 --- a/tests-integration/fixtures/checking/032_recursive_synonym_expansion/Main.snap +++ b/tests-integration/fixtures/checking/032_recursive_synonym_expansion/Main.snap @@ -1,11 +1,12 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms -testF :: F -> F -testG :: G -> G -testH :: H -> H +testF :: F @Type -> F @Type +testG :: G @Type -> G @Type +testH :: H @Type -> H @Type testValid :: Int -> Int Types @@ -36,13 +37,64 @@ Valid = Int Type = :0 -Errors -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(0))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(0))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(0))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(1) } at [TermDeclaration(Idx::(1))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(1) } at [TermDeclaration(Idx::(1))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(1) } at [TermDeclaration(Idx::(1))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(2) } at 
[TermDeclaration(Idx::(2))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(2) } at [TermDeclaration(Idx::(2))] -RecursiveSynonymExpansion { file_id: Idx::(30), item_id: Idx::(2) } at [TermDeclaration(Idx::(2))] +Diagnostics +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 8:10..8:11 + | +8 | testF :: F -> F + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 8:15..8:16 + | +8 | testF :: F -> F + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 11:10..11:11 + | +11 | testG :: G -> G + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 11:15..11:16 + | +11 | testG :: G -> G + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 14:10..14:11 + | +14 | testH :: H -> H + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 14:15..14:16 + | +14 | testH :: H -> H + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 9:11..9:12 + | +9 | testF x = x + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 9:11..9:12 + | +9 | testF x = x + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 12:11..12:12 + | +12 | testG x = x + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 12:11..12:12 + | +12 | testG x = x + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 15:11..15:12 + | +15 | testH x = x + | ^ +error[RecursiveSynonymExpansion]: Recursive type synonym expansion + --> 15:11..15:12 + | +15 | testH x = x + | ^ diff --git a/tests-integration/fixtures/checking/040_pattern_guard/Main.purs b/tests-integration/fixtures/checking/040_pattern_guard/Main.purs index 8e16e385d..5b15ea86f 100644 --- a/tests-integration/fixtures/checking/040_pattern_guard/Main.purs +++ b/tests-integration/fixtures/checking/040_pattern_guard/Main.purs @@ -8,4 +8,4 @@ bar s | b <- "hello" = b foo' x | 
c <- 42 = c -bar' s | b <- "hello" = b +bar' s | b <- "hello" = b diff --git a/tests-integration/fixtures/checking/040_pattern_guard/Main.snap b/tests-integration/fixtures/checking/040_pattern_guard/Main.snap index 31b510649..d6894e5ef 100644 --- a/tests-integration/fixtures/checking/040_pattern_guard/Main.snap +++ b/tests-integration/fixtures/checking/040_pattern_guard/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,3 +10,25 @@ foo' :: forall (t1 :: Type). (t1 :: Type) -> Int bar' :: forall (t4 :: Type). (t4 :: Type) -> String Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 3:1..3:18 + | +3 | foo :: Int -> Int + | ^~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 6:1..6:24 + | +6 | bar :: String -> String + | ^~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 9:1..9:21 + | +9 | foo' x | c <- 42 = c + | ^~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 11:1..11:26 + | +11 | bar' s | b <- "hello" = b + | ^~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/041_where_expression/Main.snap b/tests-integration/fixtures/checking/041_where_expression/Main.snap index 26775003a..27e175127 100644 --- a/tests-integration/fixtures/checking/041_where_expression/Main.snap +++ b/tests-integration/fixtures/checking/041_where_expression/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms diff --git a/tests-integration/fixtures/checking/042_where_polymorphic/Main.snap b/tests-integration/fixtures/checking/042_where_polymorphic/Main.snap index c6ee89e2f..92a16ba09 100644 --- a/tests-integration/fixtures/checking/042_where_polymorphic/Main.snap +++ b/tests-integration/fixtures/checking/042_where_polymorphic/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms diff --git a/tests-integration/fixtures/checking/043_binder_named/Main.snap b/tests-integration/fixtures/checking/043_binder_named/Main.snap index f8f87d194..309efad32 100644 --- a/tests-integration/fixtures/checking/043_binder_named/Main.snap +++ b/tests-integration/fixtures/checking/043_binder_named/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -7,9 +8,9 @@ foo :: Int -> Int bar :: String -> String baz :: Int -> Int qux :: Int -foo' :: forall (t3 :: Type). (t3 :: Type) -> (t3 :: Type) -bar' :: forall (t6 :: Type). (t6 :: Type) -> (t6 :: Type) -baz' :: forall (t9 :: Type). (t9 :: Type) -> (t9 :: Type) +foo' :: forall (t2 :: Type). (t2 :: Type) -> (t2 :: Type) +bar' :: forall (t5 :: Type). (t5 :: Type) -> (t5 :: Type) +baz' :: forall (t8 :: Type). 
(t8 :: Type) -> (t8 :: Type) qux' :: Int Types diff --git a/tests-integration/fixtures/checking/053_do_polymorphic/Main.snap b/tests-integration/fixtures/checking/053_do_polymorphic/Main.snap index 2d5f239b7..9de9d4220 100644 --- a/tests-integration/fixtures/checking/053_do_polymorphic/Main.snap +++ b/tests-integration/fixtures/checking/053_do_polymorphic/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -16,7 +17,7 @@ discard :: (m :: Type -> Type) (b :: Type) pure :: forall (m :: Type -> Type) (a :: Type). (a :: Type) -> (m :: Type -> Type) (a :: Type) test :: forall (m :: Type -> Type). (m :: Type -> Type) (Tuple Int String) -test' :: forall (t58 :: Type -> Type). (t58 :: Type -> Type) (Tuple Int String) +test' :: forall (t55 :: Type -> Type). (t55 :: Type -> Type) (Tuple Int String) Types Tuple :: Type -> Type -> Type diff --git a/tests-integration/fixtures/checking/060_array_binder/Main.snap b/tests-integration/fixtures/checking/060_array_binder/Main.snap index fbc354c0f..a2f3d171b 100644 --- a/tests-integration/fixtures/checking/060_array_binder/Main.snap +++ b/tests-integration/fixtures/checking/060_array_binder/Main.snap @@ -1,15 +1,58 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms test1 :: Array Int -> { x :: Int, y :: Int } test1' :: forall (t6 :: Type). Array (t6 :: Type) -> { x :: (t6 :: Type), y :: (t6 :: Type) } test2 :: forall (a :: Type). Array (a :: Type) -> Array (a :: Type) -test2' :: forall (t21 :: Type). Array (t21 :: Type) -> Array (t21 :: Type) +test2' :: forall (t20 :: Type). Array (t20 :: Type) -> Array (t20 :: Type) test3 :: Array Int -> Int test3' :: Array Int -> Int nested :: Array (Array Int) -> Int -nested' :: forall (t53 :: Type). Array (Array (t53 :: Type)) -> (t53 :: Type) +nested' :: forall (t52 :: Type). 
Array (Array (t52 :: Type)) -> (t52 :: Type) Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 3:1..3:45 + | +3 | test1 :: Array Int -> { x :: Int, y :: Int } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 6:1..6:25 + | +6 | test1' [x, y] = { x, y } + | ^~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 8:1..8:38 + | +8 | test2 :: forall a. Array a -> Array a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 11:1..11:29 + | +11 | test2' [x, y, z] = [z, y, x] + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 13:1..13:26 + | +13 | test3 :: Array Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 18:1..18:14 + | +18 | test3' [] = 0 + | ^~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 22:1..22:35 + | +22 | nested :: Array (Array Int) -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 26:1..26:18 + | +26 | nested' [[x]] = x + | ^~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/061_record_binder/Main.snap b/tests-integration/fixtures/checking/061_record_binder/Main.snap index b5887cdb4..d7ceac8ae 100644 --- a/tests-integration/fixtures/checking/061_record_binder/Main.snap +++ b/tests-integration/fixtures/checking/061_record_binder/Main.snap @@ -1,20 +1,25 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms test1 :: { x :: Int, y :: Int } -> Int test1' :: - forall (t10 :: Type) (t11 :: Type). { x :: (t11 :: Type), y :: (t10 :: Type) } -> (t11 :: Type) + forall (t8 :: Type) (t9 :: Row Type) (t10 :: Type). 
+ { x :: (t10 :: Type), y :: (t8 :: Type) | (t9 :: Row Type) } -> (t10 :: Type) test2 :: { x :: Int, y :: String } -> { x :: Int, y :: String } test2' :: - forall (t15 :: Type) (t16 :: Type). - { x :: (t15 :: Type), y :: (t16 :: Type) } -> { x :: (t15 :: Type), y :: (t16 :: Type) } + forall (t12 :: Type) (t13 :: Type) (t14 :: Row Type). + { x :: (t12 :: Type), y :: (t13 :: Type) | (t14 :: Row Type) } -> + { x :: (t12 :: Type), y :: (t13 :: Type) } test3 :: { age :: Int, name :: String } -> String test3' :: - forall (t22 :: Type) (t23 :: Type). - { age :: (t22 :: Type), name :: (t23 :: Type) } -> (t23 :: Type) + forall (t18 :: Type) (t19 :: Row Type) (t20 :: Type). + { age :: (t18 :: Type), name :: (t20 :: Type) | (t19 :: Row Type) } -> (t20 :: Type) nested :: { inner :: { x :: Int } } -> Int -nested' :: forall (t27 :: Type). { inner :: { x :: (t27 :: Type) } } -> (t27 :: Type) +nested' :: + forall (t23 :: Row Type) (t24 :: Row Type) (t25 :: Type). + { inner :: { x :: (t25 :: Type) | (t23 :: Row Type) } | (t24 :: Row Type) } -> (t25 :: Type) Types diff --git a/tests-integration/fixtures/checking/062_case_of/Main.snap b/tests-integration/fixtures/checking/062_case_of/Main.snap index 248a51db8..99df9e300 100644 --- a/tests-integration/fixtures/checking/062_case_of/Main.snap +++ b/tests-integration/fixtures/checking/062_case_of/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,7 +14,7 @@ test1' :: Maybe Int -> Int test2 :: Maybe Int -> Maybe Int -> Int test2' :: Maybe Int -> Maybe Int -> Int test3 :: Either Int String -> Int -test3' :: forall (t46 :: Type). Either Int (t46 :: Type) -> Int +test3' :: forall (t43 :: Type). 
Either Int (t43 :: Type) -> Int test4 :: Tuple (Maybe Int) (Maybe Int) -> Int test4' :: Tuple (Maybe Int) (Maybe Int) -> Int diff --git a/tests-integration/fixtures/checking/066_ado_collector/Main.snap b/tests-integration/fixtures/checking/066_ado_collector/Main.snap index 15125111a..35600fc89 100644 --- a/tests-integration/fixtures/checking/066_ado_collector/Main.snap +++ b/tests-integration/fixtures/checking/066_ado_collector/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -15,15 +16,15 @@ apply :: Collector (Tuple (x :: Type) (y :: Type)) (b :: Type) pure :: forall (a :: Type). (a :: Type) -> Collector (a :: Type) (a :: Type) test1 :: - forall (t18 :: Type). + forall (t19 :: Type). Collector - (Tuple (Tuple (Tuple Int (t18 :: Type)) String) Char) + (Tuple (Tuple (Tuple Int (t19 :: Type)) String) Char) { x :: Int, y :: String, z :: Char } test2 :: - forall (t37 :: Type). - Collector (Tuple (Tuple Char (t37 :: Type)) Boolean) { x :: Char, y :: Boolean } + forall (t39 :: Type). + Collector (Tuple (Tuple Char (t39 :: Type)) Boolean) { x :: Char, y :: Boolean } test3 :: - forall (t51 :: Type). Collector (Tuple (Tuple Int (t51 :: Type)) String) { x :: Int, z :: String } + forall (t54 :: Type). Collector (Tuple (Tuple Int (t54 :: Type)) String) { x :: Int, z :: String } Types Collector :: Type -> Type -> Type diff --git a/tests-integration/fixtures/checking/068_expression_sections/Main.snap b/tests-integration/fixtures/checking/068_expression_sections/Main.snap index 5d3feda10..361326ffd 100644 --- a/tests-integration/fixtures/checking/068_expression_sections/Main.snap +++ b/tests-integration/fixtures/checking/068_expression_sections/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -21,14 +22,14 @@ test7 :: Int -> Int test8 :: Int -> Int test9 :: Int -> Int -> Int test10 :: forall (t41 :: Type). 
((Int -> Int) -> (t41 :: Type)) -> (t41 :: Type) -test11 :: forall (t48 :: Type). (t48 :: Type) -> Array (t48 :: Type) -test12 :: forall (t51 :: Type). (t51 :: Type) -> { foo :: (t51 :: Type) } -test13 :: forall (t54 :: Type). (t54 :: Type) -> (t54 :: Type) -test14 :: forall (t59 :: Type). (t59 :: Type) -> (t59 :: Type) -> Array (t59 :: Type) +test11 :: forall (t49 :: Type). (t49 :: Type) -> Array (t49 :: Type) +test12 :: forall (t52 :: Type). (t52 :: Type) -> { foo :: (t52 :: Type) } +test13 :: forall (t55 :: Type). (t55 :: Type) -> (t55 :: Type) +test14 :: forall (t60 :: Type). (t60 :: Type) -> (t60 :: Type) -> Array (t60 :: Type) test15 :: - forall (t62 :: Type) (t63 :: Type). - (t62 :: Type) -> (t63 :: Type) -> { a :: (t62 :: Type), b :: (t63 :: Type) } -test16 :: forall (t67 :: Type). (t67 :: Type) -> Tuple (t67 :: Type) Int + forall (t63 :: Type) (t64 :: Type). + (t63 :: Type) -> (t64 :: Type) -> { a :: (t63 :: Type), b :: (t64 :: Type) } +test16 :: forall (t68 :: Type). (t68 :: Type) -> Tuple (t68 :: Type) Int test17 :: Tuple String Int test18 :: Int -> Int diff --git a/tests-integration/fixtures/checking/069_expression_sections_inference/Main.snap b/tests-integration/fixtures/checking/069_expression_sections_inference/Main.snap index 8504e122d..f92f71c09 100644 --- a/tests-integration/fixtures/checking/069_expression_sections_inference/Main.snap +++ b/tests-integration/fixtures/checking/069_expression_sections_inference/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -10,9 +11,9 @@ sub :: Int -> Int -> Int identity :: forall (a :: Type). (a :: Type) -> (a :: Type) test1 :: Int -> String -> { a :: Int, b :: String } test2 :: - forall (t13 :: Type) (t16 :: Type) (t17 :: Type). - ((((t16 :: Type) -> (t17 :: Type)) -> (t16 :: Type) -> (t17 :: Type)) -> (t13 :: Type)) -> - (t13 :: Type) + forall (t11 :: Type) (t15 :: Type) (t16 :: Type). 
+ ((((t16 :: Type) -> (t15 :: Type)) -> (t16 :: Type) -> (t15 :: Type)) -> (t11 :: Type)) -> + (t11 :: Type) test3 :: Boolean -> Int -> Int test4 :: Int test5 :: { x :: Int, y :: Int | () } diff --git a/tests-integration/fixtures/checking/070_record_access_sections/Main.snap b/tests-integration/fixtures/checking/070_record_access_sections/Main.snap index e1dbcd677..a1b02114f 100644 --- a/tests-integration/fixtures/checking/070_record_access_sections/Main.snap +++ b/tests-integration/fixtures/checking/070_record_access_sections/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,55 +10,54 @@ map :: apply :: forall (a :: Type) (b :: Type). ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (b :: Type) f :: ({ foo :: Int } -> Int) -> Int test1 :: - forall (t9 :: Type) (t10 :: Row Type). - { prop :: (t9 :: Type) | (t10 :: Row Type) } -> (t9 :: Type) + forall (t8 :: Type) (t9 :: Row Type). { prop :: (t8 :: Type) | (t9 :: Row Type) } -> (t8 :: Type) test2 :: - forall (t15 :: Row Type) (t17 :: Row Type) (t18 :: Type) (t19 :: Row Type). - { a :: { b :: { c :: (t18 :: Type) | (t19 :: Row Type) } | (t17 :: Row Type) } - | (t15 :: Row Type) + forall (t14 :: Row Type) (t16 :: Row Type) (t17 :: Type) (t18 :: Row Type). + { a :: { b :: { c :: (t17 :: Type) | (t18 :: Row Type) } | (t16 :: Row Type) } + | (t14 :: Row Type) } -> - (t18 :: Type) + (t17 :: Type) test3 :: - forall (t23 :: Type) (t24 :: Row Type). - { poly :: (t23 :: Type) | (t24 :: Row Type) } -> (t23 :: Type) + forall (t22 :: Type) (t23 :: Row Type). + { poly :: (t22 :: Type) | (t23 :: Row Type) } -> (t22 :: Type) test4 :: - forall (t28 :: Type) (t31 :: Row Type). - Array { prop :: (t28 :: Type) | (t31 :: Row Type) } -> Array (t28 :: Type) + forall (t27 :: Type) (t29 :: Row Type). 
+ Array { prop :: (t27 :: Type) | (t29 :: Row Type) } -> Array (t27 :: Type) test5 :: Int test6 :: - forall (t39 :: Type) (t41 :: Type) (t45 :: Type) (t46 :: Row Type). - (t39 :: Type) -> - (({ bar :: (t45 :: Type) | (t46 :: Row Type) } -> (t45 :: Type)) -> (t41 :: Type)) -> - (t41 :: Type) + forall (t36 :: Type) (t38 :: Type) (t42 :: Type) (t44 :: Row Type). + (t36 :: Type) -> + (({ bar :: (t42 :: Type) | (t44 :: Row Type) } -> (t42 :: Type)) -> (t38 :: Type)) -> + (t38 :: Type) test7 :: - forall (t51 :: Type) (t52 :: Row Type). - { nonexistent :: (t51 :: Type) | (t52 :: Row Type) } -> (t51 :: Type) + forall (t49 :: Type) (t50 :: Row Type). + { nonexistent :: (t49 :: Type) | (t50 :: Row Type) } -> (t49 :: Type) test8 :: - forall (t57 :: Type) (t59 :: Type) (t60 :: Row Type). - (({ foo :: (t59 :: Type) | (t60 :: Row Type) } -> (t59 :: Type)) -> (t57 :: Type)) -> - (t57 :: Type) + forall (t55 :: Type) (t57 :: Type) (t59 :: Row Type). + (({ foo :: (t57 :: Type) | (t59 :: Row Type) } -> (t57 :: Type)) -> (t55 :: Type)) -> + (t55 :: Type) test9 :: - forall (t64 :: Type) (t69 :: Type) (t70 :: Row Type). - Array (({ prop :: (t69 :: Type) | (t70 :: Row Type) } -> (t69 :: Type)) -> (t64 :: Type)) -> - Array (t64 :: Type) + forall (t63 :: Type) (t67 :: Type) (t69 :: Row Type). + Array (({ prop :: (t67 :: Type) | (t69 :: Row Type) } -> (t67 :: Type)) -> (t63 :: Type)) -> + Array (t63 :: Type) test10 :: - forall (t74 :: Type) (t77 :: Row Type) (t79 :: Row Type). - { a :: { b :: (t74 :: Type) | (t79 :: Row Type) } | (t77 :: Row Type) } -> (t74 :: Type) + forall (t73 :: Type) (t75 :: Row Type) (t77 :: Row Type). + { a :: { b :: (t73 :: Type) | (t77 :: Row Type) } | (t75 :: Row Type) } -> (t73 :: Type) test11 :: - forall (t82 :: Type) (t84 :: Type) (t85 :: Row Type). - (t82 :: Type) -> - { a :: (t82 :: Type), b :: { foo :: (t84 :: Type) | (t85 :: Row Type) } -> (t84 :: Type) } + forall (t80 :: Type) (t82 :: Type) (t83 :: Row Type). 
+ (t80 :: Type) -> + { a :: (t80 :: Type), b :: { foo :: (t82 :: Type) | (t83 :: Row Type) } -> (t82 :: Type) } test12 :: - forall (t91 :: Type) (t92 :: Row Type). - ({ bar :: (t91 :: Type) | (t92 :: Row Type) } -> (t91 :: Type)) -> - Array ({ bar :: (t91 :: Type) | (t92 :: Row Type) } -> (t91 :: Type)) + forall (t89 :: Type) (t90 :: Row Type). + ({ bar :: (t89 :: Type) | (t90 :: Row Type) } -> (t89 :: Type)) -> + Array ({ bar :: (t89 :: Type) | (t90 :: Row Type) } -> (t89 :: Type)) test13 :: - forall (t95 :: Type) (t98 :: Type) (t99 :: Row Type). - (t95 :: Type) -> { prop :: (t98 :: Type) | (t99 :: Row Type) } -> (t98 :: Type) + forall (t93 :: Type) (t96 :: Type) (t97 :: Row Type). + (t93 :: Type) -> { prop :: (t96 :: Type) | (t97 :: Row Type) } -> (t96 :: Type) test14 :: - forall (t105 :: Type) (t110 :: Row Type). + forall (t103 :: Type) (t108 :: Row Type). Boolean -> - { a :: (t105 :: Type) | ( b :: (t105 :: Type) | (t110 :: Row Type) ) } -> - (t105 :: Type) + { a :: (t103 :: Type) | ( b :: (t103 :: Type) | (t108 :: Row Type) ) } -> + (t103 :: Type) Types diff --git a/tests-integration/fixtures/checking/071_record_update_sections/Main.snap b/tests-integration/fixtures/checking/071_record_update_sections/Main.snap index 17a42b0ef..285e6adc1 100644 --- a/tests-integration/fixtures/checking/071_record_update_sections/Main.snap +++ b/tests-integration/fixtures/checking/071_record_update_sections/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -28,28 +29,28 @@ polymorphicRecordUpdate :: forall (t36 :: Type) (t37 :: Row Type). { foo :: (t36 :: Type) | (t37 :: Row Type) } -> { foo :: Int | (t37 :: Row Type) } higherOrderContext :: - forall (t44 :: Type) (t45 :: Row Type). - Array { x :: (t44 :: Type) | (t45 :: Row Type) } -> Array { x :: Int | (t45 :: Row Type) } + forall (t42 :: Type) (t43 :: Row Type). 
+ Array { x :: (t42 :: Type) | (t43 :: Row Type) } -> Array { x :: Int | (t43 :: Row Type) } multipleSectionsInteraction :: - forall (t49 :: Type) (t50 :: Type) (t51 :: Type) (t52 :: Type) (t53 :: Row Type). - { x :: (t51 :: Type), y :: (t52 :: Type) | (t53 :: Row Type) } -> - (t49 :: Type) -> - (t50 :: Type) -> - { x :: (t49 :: Type), y :: (t50 :: Type) | (t53 :: Row Type) } + forall (t47 :: Type) (t48 :: Type) (t49 :: Type) (t50 :: Type) (t51 :: Row Type). + { x :: (t49 :: Type), y :: (t50 :: Type) | (t51 :: Row Type) } -> + (t47 :: Type) -> + (t48 :: Type) -> + { x :: (t47 :: Type), y :: (t48 :: Type) | (t51 :: Row Type) } nestedSectionInteraction :: - forall (t57 :: Type) (t58 :: Type) (t59 :: Row Type) (t60 :: Row Type). - { a :: { b :: (t58 :: Type) | (t59 :: Row Type) } | (t60 :: Row Type) } -> - (t57 :: Type) -> - { a :: { b :: (t57 :: Type) | (t59 :: Row Type) } | (t60 :: Row Type) } + forall (t55 :: Type) (t56 :: Type) (t57 :: Row Type) (t58 :: Row Type). + { a :: { b :: (t56 :: Type) | (t57 :: Row Type) } | (t58 :: Row Type) } -> + (t55 :: Type) -> + { a :: { b :: (t55 :: Type) | (t57 :: Row Type) } | (t58 :: Row Type) } mixedSections :: - forall (t64 :: Type) (t66 :: Type) (t67 :: Row Type). - { a :: (t64 :: Type), b :: (t66 :: Type) | (t67 :: Row Type) } -> - { a :: Int -> Int, b :: Int | (t67 :: Row Type) } + forall (t62 :: Type) (t64 :: Type) (t65 :: Row Type). + { a :: (t62 :: Type), b :: (t64 :: Type) | (t65 :: Row Type) } -> + { a :: Int -> Int, b :: Int | (t65 :: Row Type) } recordAccessSectionUpdate :: - forall (t71 :: Type) (t73 :: Type) (t74 :: Row Type) (t75 :: Row Type). - { a :: (t71 :: Type) | (t75 :: Row Type) } -> - { a :: { b :: (t73 :: Type) | (t74 :: Row Type) } -> (t73 :: Type) | (t75 :: Row Type) } -concreteRecordUpdateSection :: forall (t78 :: Type). (t78 :: Type) -> { a :: (t78 :: Type) | () } + forall (t69 :: Type) (t71 :: Type) (t72 :: Row Type) (t73 :: Row Type). 
+ { a :: (t69 :: Type) | (t73 :: Row Type) } -> + { a :: { b :: (t71 :: Type) | (t72 :: Row Type) } -> (t71 :: Type) | (t73 :: Row Type) } +concreteRecordUpdateSection :: forall (t76 :: Type). (t76 :: Type) -> { a :: (t76 :: Type) | () } map :: forall (a :: Type) (b :: Type). ((a :: Type) -> (b :: Type)) -> Array (a :: Type) -> Array (b :: Type) diff --git a/tests-integration/fixtures/checking/076_inspect_constraints/Main.snap b/tests-integration/fixtures/checking/076_inspect_constraints/Main.snap index d21b8bf8d..17dec3848 100644 --- a/tests-integration/fixtures/checking/076_inspect_constraints/Main.snap +++ b/tests-integration/fixtures/checking/076_inspect_constraints/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -38,5 +39,5 @@ NestedConstraint = forall (a :: Type) (b :: Type). Show (b :: Type) => (a :: Typ Classes -class Show (&0 :: Type) -class Eq (&0 :: Type) +class Show (a :: Type) +class Eq (a :: Type) diff --git a/tests-integration/fixtures/checking/078_inspect_arity_invalid/Main.snap b/tests-integration/fixtures/checking/078_inspect_arity_invalid/Main.snap index 22969a057..2f40d2eec 100644 --- a/tests-integration/fixtures/checking/078_inspect_arity_invalid/Main.snap +++ b/tests-integration/fixtures/checking/078_inspect_arity_invalid/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -35,7 +36,19 @@ Const = forall (a :: Type) (b :: Type). 
(a :: Type) -> (b :: Type) -> (a :: Type Type = :0 -Errors -TooManyBinders { signature: AstId(13), expected: 2, actual: 3 } at [TermDeclaration(Idx::(0))] -TooManyBinders { signature: AstId(39), expected: 2, actual: 3 } at [TermDeclaration(Idx::(1))] -TooManyBinders { signature: AstId(62), expected: 2, actual: 3 } at [TermDeclaration(Idx::(2))] +Diagnostics +error[TooManyBinders]: Too many binders: expected 2, got 3 + --> 7:1..7:33 + | +7 | addWrong :: forall a. BinaryOp a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[TooManyBinders]: Too many binders: expected 2, got 3 + --> 13:1..13:40 + | +13 | composeWrong :: forall a. ReturnUnary a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[TooManyBinders]: Too many binders: expected 2, got 3 + --> 18:1..18:20 + | +18 | constWrong :: Const + | ^~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/080_let_recursive_errors/Main.snap b/tests-integration/fixtures/checking/080_let_recursive_errors/Main.snap index cb4f1672d..2283b3d41 100644 --- a/tests-integration/fixtures/checking/080_let_recursive_errors/Main.snap +++ b/tests-integration/fixtures/checking/080_let_recursive_errors/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -11,9 +12,29 @@ threeWayConflict :: Int -> Int Types -Errors -CannotUnify { Int, String } at [TermDeclaration(Idx::(1)), InferringExpression(AstId(20))] -CannotUnify { String, Int } at [TermDeclaration(Idx::(1)), InferringExpression(AstId(20))] -CannotUnify { ??? -> ???, ??? 
} at [TermDeclaration(Idx::(2)), InferringExpression(AstId(61))] -CannotUnify { Int, String } at [TermDeclaration(Idx::(3)), InferringExpression(AstId(82)), InferringExpression(AstId(102)), CheckingExpression(AstId(105))] -CannotUnify { String, Int } at [TermDeclaration(Idx::(4)), InferringExpression(AstId(115)), InferringExpression(AstId(135)), CheckingExpression(AstId(138))] +Diagnostics +error[CannotUnify]: Cannot unify 'Int' with 'String' + --> 11:13..11:16 + | +11 | g x = f x + | ^~~ +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 9:13..9:16 + | +9 | f x = g x + | ^~~ +error[CannotUnify]: Cannot unify '?2[:0] -> ?3[:0]' with '?3[:0]' + --> 17:7..17:14 + | +17 | let f x = f + | ^~~~~~~ +error[CannotUnify]: Cannot unify 'Int' with 'String' + --> 25:8..25:9 + | +25 | in f n + | ^ +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 32:15..32:21 + | +32 | b x = c "oops" + | ^~~~~~ diff --git a/tests-integration/fixtures/checking/081_prim_rowlist/Main.snap b/tests-integration/fixtures/checking/081_prim_rowlist/Main.snap index 14ef4e470..d03667214 100644 --- a/tests-integration/fixtures/checking/081_prim_rowlist/Main.snap +++ b/tests-integration/fixtures/checking/081_prim_rowlist/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -8,7 +9,7 @@ Types Test :: RowList Type Synonyms -Test = Cons @Type "T" Int Nil +Test = Cons @Type "T" Int (Nil @Type) Quantified = :0 Kind = :0 Type = :0 diff --git a/tests-integration/fixtures/checking/083_instance_basic/Main.snap b/tests-integration/fixtures/checking/083_instance_basic/Main.snap index fe16548c7..3a5720a4e 100644 --- a/tests-integration/fixtures/checking/083_instance_basic/Main.snap +++ b/tests-integration/fixtures/checking/083_instance_basic/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,8 +10,8 @@ Types Eq :: Type -> Constraint 
Classes -class Eq (&0 :: Type) +class Eq (a :: Type) Instances -instance Eq (&0 :: Type) => Eq (Array (&0 :: Type) :: Type) +instance Eq (a :: Type) => Eq (Array (a :: Type) :: Type) chain: 0 diff --git a/tests-integration/fixtures/checking/084_instance_eq/Main.snap b/tests-integration/fixtures/checking/084_instance_eq/Main.snap index 083c9ca9b..3eb4d7b42 100644 --- a/tests-integration/fixtures/checking/084_instance_eq/Main.snap +++ b/tests-integration/fixtures/checking/084_instance_eq/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -10,11 +11,15 @@ Types Eq :: Type -> Constraint Classes -class Eq (&0 :: Type) +class Eq (a :: Type) Instances instance Eq (Int :: Type) chain: 0 -Errors -NoInstanceFound { Eq String } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq String + --> 9:1..9:36 + | +9 | test = [eq 123 456, eq "123" "456"] + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/085_instance_functional_dependency/Main.snap b/tests-integration/fixtures/checking/085_instance_functional_dependency/Main.snap index 3a0e10e7a..1db6b21a3 100644 --- a/tests-integration/fixtures/checking/085_instance_functional_dependency/Main.snap +++ b/tests-integration/fixtures/checking/085_instance_functional_dependency/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -11,7 +12,7 @@ Types Convert :: Type -> Type -> Constraint Classes -class Convert (&0 :: Type) (&1 :: Type) +class Convert (a :: Type) (b :: Type) Instances instance Convert (Int :: Type) (String :: Type) diff --git a/tests-integration/fixtures/checking/086_instance_functional_dependency_transitive/Main.snap b/tests-integration/fixtures/checking/086_instance_functional_dependency_transitive/Main.snap index 29a2bbe6f..495123abe 100644 --- 
a/tests-integration/fixtures/checking/086_instance_functional_dependency_transitive/Main.snap +++ b/tests-integration/fixtures/checking/086_instance_functional_dependency_transitive/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -12,7 +13,7 @@ Types Chain :: forall (t1 :: Type). Type -> (t1 :: Type) -> Type -> Constraint Classes -class Chain (&1 :: Type) (&2 :: (&0 :: Type)) (&3 :: Type) +class Chain (a :: Type) (b :: (t1 :: Type)) (c :: Type) Instances instance Chain (Int :: Type) (String :: Type) (Boolean :: Type) diff --git a/tests-integration/fixtures/checking/087_instance_functional_dependency_multiple/Main.snap b/tests-integration/fixtures/checking/087_instance_functional_dependency_multiple/Main.snap index af72085f9..fa661789c 100644 --- a/tests-integration/fixtures/checking/087_instance_functional_dependency_multiple/Main.snap +++ b/tests-integration/fixtures/checking/087_instance_functional_dependency_multiple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -12,7 +13,7 @@ Types TypeEq :: Type -> Type -> Type -> Constraint Classes -class TypeEq (&0 :: Type) (&1 :: Type) (&2 :: Type) +class TypeEq (a :: Type) (b :: Type) (c :: Type) Instances instance TypeEq (Int :: Type) (Int :: Type) (Boolean :: Type) diff --git a/tests-integration/fixtures/checking/088_given_constraint_matching/Main.snap b/tests-integration/fixtures/checking/088_given_constraint_matching/Main.snap index 58c388743..76d52f9f3 100644 --- a/tests-integration/fixtures/checking/088_given_constraint_matching/Main.snap +++ b/tests-integration/fixtures/checking/088_given_constraint_matching/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -10,4 +11,4 @@ Types Eq :: Type -> Constraint Classes -class Eq (&0 :: Type) +class Eq (a :: Type) 
diff --git a/tests-integration/fixtures/checking/089_no_instance_found/Main.snap b/tests-integration/fixtures/checking/089_no_instance_found/Main.snap index 9595f74e1..3bc5832b0 100644 --- a/tests-integration/fixtures/checking/089_no_instance_found/Main.snap +++ b/tests-integration/fixtures/checking/089_no_instance_found/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -21,7 +22,11 @@ Roles Foo = [] Classes -class Eq (&0 :: Type) +class Eq (a :: Type) -Errors -NoInstanceFound { Eq Foo } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq Foo + --> 9:1..9:23 + | +9 | test :: Foo -> Boolean + | ^~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/090_instance_improve/Main.snap b/tests-integration/fixtures/checking/090_instance_improve/Main.snap index 56b86cde7..daae1dd67 100644 --- a/tests-integration/fixtures/checking/090_instance_improve/Main.snap +++ b/tests-integration/fixtures/checking/090_instance_improve/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -28,8 +29,8 @@ True = [] False = [] Classes -class TypeEq (&1 :: Type) (&2 :: Type) (&3 :: (&0 :: Type)) +class TypeEq (a :: Type) (b :: Type) (r :: (t2 :: Type)) Instances -instance TypeEq ((&0 :: Type) :: Type) ((&0 :: Type) :: Type) (True :: Type) +instance TypeEq ((a :: Type) :: Type) ((a :: Type) :: Type) (True :: Type) chain: 0 diff --git a/tests-integration/fixtures/checking/091_superclass_elaboration/Main.snap b/tests-integration/fixtures/checking/091_superclass_elaboration/Main.snap index 6dea7d9b7..dd192af6a 100644 --- a/tests-integration/fixtures/checking/091_superclass_elaboration/Main.snap +++ b/tests-integration/fixtures/checking/091_superclass_elaboration/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 
expression: report --- Terms @@ -26,5 +27,5 @@ Roles Ordering = [] Classes -class Eq (&0 :: Type) -class Eq (&0 :: Type) <= Ord (&0 :: Type) +class Eq (a :: Type) +class Eq (a :: Type) <= Ord (a :: Type) diff --git a/tests-integration/fixtures/checking/092_ambiguous_constraint/Main.snap b/tests-integration/fixtures/checking/092_ambiguous_constraint/Main.snap index 3629ec608..ba31985e3 100644 --- a/tests-integration/fixtures/checking/092_ambiguous_constraint/Main.snap +++ b/tests-integration/fixtures/checking/092_ambiguous_constraint/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -12,9 +13,17 @@ Read :: Type -> Constraint Show :: Type -> Constraint Classes -class Read (&0 :: Type) -class Show (&0 :: Type) +class Read (a :: Type) +class Show (a :: Type) -Errors -AmbiguousConstraint { Show ??? } at [TermDeclaration(Idx::(2))] -AmbiguousConstraint { Read ??? } at [TermDeclaration(Idx::(2))] +Diagnostics +error[AmbiguousConstraint]: Ambiguous constraint: Show ?5[:0] + --> 11:1..11:23 + | +11 | test s = show (read s) + | ^~~~~~~~~~~~~~~~~~~~~~ +error[AmbiguousConstraint]: Ambiguous constraint: Read ?5[:0] + --> 11:1..11:23 + | +11 | test s = show (read s) + | ^~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/093_constraint_generalization/Main.snap b/tests-integration/fixtures/checking/093_constraint_generalization/Main.snap index c898b9410..2f6573d11 100644 --- a/tests-integration/fixtures/checking/093_constraint_generalization/Main.snap +++ b/tests-integration/fixtures/checking/093_constraint_generalization/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,5 +14,5 @@ Eq :: Type -> Constraint Ord :: Type -> Constraint Classes -class Eq (&0 :: Type) -class Ord (&0 :: Type) +class Eq (a :: Type) +class Ord (a :: Type) diff --git 
a/tests-integration/fixtures/checking/094_let_binding_constraint_error/Main.snap b/tests-integration/fixtures/checking/094_let_binding_constraint_error/Main.snap index a320ac8e4..316ddb453 100644 --- a/tests-integration/fixtures/checking/094_let_binding_constraint_error/Main.snap +++ b/tests-integration/fixtures/checking/094_let_binding_constraint_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -21,7 +22,11 @@ Roles Foo = [] Classes -class Eq (&0 :: Type) +class Eq (a :: Type) -Errors -NoInstanceFound { Eq Foo } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq Foo + --> 10:1..12:11 + | +10 | test = + | ^~~~~~ diff --git a/tests-integration/fixtures/checking/095_given_constraint_arityless/Main.snap b/tests-integration/fixtures/checking/095_given_constraint_arityless/Main.snap index babad5c71..5a92146aa 100644 --- a/tests-integration/fixtures/checking/095_given_constraint_arityless/Main.snap +++ b/tests-integration/fixtures/checking/095_given_constraint_arityless/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,5 @@ Eq :: Type -> Constraint Coercible :: Type -> Type -> Constraint Classes -class Eq (&0 :: Type) -class Coercible (&0 :: Type) (&1 :: Type) +class Eq (a :: Type) +class Coercible (a :: Type) (b :: Type) diff --git a/tests-integration/fixtures/checking/096_given_functional_dependency/Main.snap b/tests-integration/fixtures/checking/096_given_functional_dependency/Main.snap index a4c973e17..eac0f4fcb 100644 --- a/tests-integration/fixtures/checking/096_given_functional_dependency/Main.snap +++ b/tests-integration/fixtures/checking/096_given_functional_dependency/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -16,5 +17,5 @@ Convert :: Type 
-> Type -> Constraint Relate :: Type -> Type -> Type -> Constraint Classes -class Convert (&0 :: Type) (&1 :: Type) -class Relate (&0 :: Type) (&1 :: Type) (&2 :: Type) +class Convert (a :: Type) (b :: Type) +class Relate (a :: Type) (b :: Type) (c :: Type) diff --git a/tests-integration/fixtures/checking/097_instance_chains/Main.snap b/tests-integration/fixtures/checking/097_instance_chains/Main.snap index 042b3c31d..1a3ff3dcf 100644 --- a/tests-integration/fixtures/checking/097_instance_chains/Main.snap +++ b/tests-integration/fixtures/checking/097_instance_chains/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -30,10 +31,10 @@ Roles Proxy = [Phantom] Classes -class TypeEq (&3 :: (&0 :: Type)) (&4 :: (&1 :: Type)) (&5 :: (&2 :: Type)) +class TypeEq (a :: (t1 :: Type)) (b :: (t2 :: Type)) (r :: (t3 :: Type)) Instances -instance forall (&0 :: Type). TypeEq ((&1 :: (&0 :: Type)) :: (&0 :: Type)) ((&1 :: (&0 :: Type)) :: (&0 :: Type)) (True :: Boolean) +instance forall (t19 :: Type). TypeEq ((a :: (t19 :: Type)) :: (t19 :: Type)) ((a :: (t19 :: Type)) :: (t19 :: Type)) (True :: Boolean) chain: 0 -instance forall (&0 :: Type) (&1 :: Type). TypeEq ((&2 :: (&0 :: Type)) :: (&0 :: Type)) ((&3 :: (&1 :: Type)) :: (&1 :: Type)) (False :: Boolean) +instance forall (t24 :: Type) (t23 :: Type). 
TypeEq ((a :: (t23 :: Type)) :: (t23 :: Type)) ((b :: (t24 :: Type)) :: (t24 :: Type)) (False :: Boolean) chain: 1 diff --git a/tests-integration/fixtures/checking/098_fundep_propagation/Main.snap b/tests-integration/fixtures/checking/098_fundep_propagation/Main.snap index ba03d54d1..b68508599 100644 --- a/tests-integration/fixtures/checking/098_fundep_propagation/Main.snap +++ b/tests-integration/fixtures/checking/098_fundep_propagation/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -37,13 +38,13 @@ Z = [] S = [Nominal] Classes -class IsZero (&2 :: (&0 :: Type)) (&3 :: (&1 :: Type)) -class And (&3 :: (&0 :: Type)) (&4 :: (&1 :: Type)) (&5 :: (&2 :: Type)) +class IsZero (n :: (t1 :: Type)) (r :: (t2 :: Type)) +class And (a :: (t3 :: Type)) (b :: (t4 :: Type)) (r :: (t5 :: Type)) Instances instance IsZero (Z :: Type) (True :: Boolean) chain: 0 -instance IsZero (S (&0 :: Type) :: Type) (False :: Boolean) +instance IsZero (S (n :: Type) :: Type) (False :: Boolean) chain: 0 instance And (True :: Boolean) (True :: Boolean) (True :: Boolean) chain: 0 diff --git a/tests-integration/fixtures/checking/102_builtin_row/Main.snap b/tests-integration/fixtures/checking/102_builtin_row/Main.snap index 6ba87762f..bf9a2e598 100644 --- a/tests-integration/fixtures/checking/102_builtin_row/Main.snap +++ b/tests-integration/fixtures/checking/102_builtin_row/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -74,11 +75,11 @@ rowToListThree :: RowToList @Type ( a :: Int, b :: String, c :: Boolean ) (list :: RowList Type) => Proxy @(RowList Type) (list :: RowList Type) solveUnion :: - forall (t147 :: Type). + forall (t145 :: Type). 
{ deriveUnion :: Proxy @(Row Type) ( a :: Int, b :: String ) , deriveUnionLeft :: Proxy @(Row Type) ( a :: Int ) , deriveUnionRight :: Proxy @(Row Type) ( b :: String ) - , unionBothEmpty :: Proxy @(Row (t147 :: Type)) () + , unionBothEmpty :: Proxy @(Row (t145 :: Type)) () , unionEmptyLeft :: Proxy @(Row Type) ( a :: Int ) , unionEmptyRight :: Proxy @(Row Type) ( a :: Int ) , unionMultiple :: Proxy @(Row Type) ( a :: Int, b :: String, c :: Boolean ) @@ -90,18 +91,18 @@ solveCons :: , nestedCons :: Proxy @(Row Type) ( a :: Int, b :: String ) } solveLacks :: - forall (t160 :: Type) (t161 :: (t160 :: Type)) (t164 :: Type) (t166 :: (t164 :: Type)). - { lacksEmpty :: Proxy @(t164 :: Type) (t166 :: (t164 :: Type)) - , lacksSimple :: Proxy @(t160 :: Type) (t161 :: (t160 :: Type)) + forall (t158 :: Type) (t159 :: (t158 :: Type)) (t162 :: Type) (t164 :: (t162 :: Type)). + { lacksEmpty :: Proxy @(t162 :: Type) (t164 :: (t162 :: Type)) + , lacksSimple :: Proxy @(t158 :: Type) (t159 :: (t158 :: Type)) } solveNub :: - forall (t175 :: Type). - { nubEmpty :: Proxy @(Row (t175 :: Type)) () + forall (t173 :: Type). + { nubEmpty :: Proxy @(Row (t173 :: Type)) () , nubNoDuplicates :: Proxy @(Row Type) ( a :: Int, b :: String ) } solveRowToList :: - forall (t183 :: Type). - { rowToListEmpty :: Proxy @(RowList (t183 :: Type)) (Nil @(t183 :: Type)) + forall (t181 :: Type). 
+ { rowToListEmpty :: Proxy @(RowList (t181 :: Type)) (Nil @(t181 :: Type)) , rowToListMultiple :: Proxy @(RowList Type) (Cons @Type "a" Int (Cons @Type "b" String (Nil @Type))) , rowToListSimple :: Proxy @(RowList Type) (Cons @Type "a" Int (Nil @Type)) diff --git a/tests-integration/fixtures/checking/110_row_lacks_invalid_no_instance/Main.snap b/tests-integration/fixtures/checking/110_row_lacks_invalid_no_instance/Main.snap index d4097d77c..7da6b0bd2 100644 --- a/tests-integration/fixtures/checking/110_row_lacks_invalid_no_instance/Main.snap +++ b/tests-integration/fixtures/checking/110_row_lacks_invalid_no_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,9 @@ Proxy Roles Proxy = [Phantom] -Errors -NoInstanceFound { Lacks @Type "b" ( a :: Int, b :: String ) } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Lacks @Type "b" ( a :: Int, b :: String ) + --> 11:1..11:25 + | +11 | forceSolve = { invalid } + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/111_int_add_invalid_no_instance/Main.snap b/tests-integration/fixtures/checking/111_int_add_invalid_no_instance/Main.snap index 8889cc1b9..8e8d9360d 100644 --- a/tests-integration/fixtures/checking/111_int_add_invalid_no_instance/Main.snap +++ b/tests-integration/fixtures/checking/111_int_add_invalid_no_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,9 @@ Proxy Roles Proxy = [Phantom] -Errors -NoInstanceFound { Add 2 3 10 } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Add 2 3 10 + --> 11:1..11:25 + | +11 | forceSolve = { invalid } + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/112_int_mul_invalid_no_instance/Main.snap 
b/tests-integration/fixtures/checking/112_int_mul_invalid_no_instance/Main.snap index ac88bd4d4..193d796b1 100644 --- a/tests-integration/fixtures/checking/112_int_mul_invalid_no_instance/Main.snap +++ b/tests-integration/fixtures/checking/112_int_mul_invalid_no_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,9 @@ Proxy Roles Proxy = [Phantom] -Errors -NoInstanceFound { Mul 2 3 10 } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Mul 2 3 10 + --> 11:1..11:25 + | +11 | forceSolve = { invalid } + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/113_int_compare_invalid_no_instance/Main.snap b/tests-integration/fixtures/checking/113_int_compare_invalid_no_instance/Main.snap index 137d5fb70..dcd765445 100644 --- a/tests-integration/fixtures/checking/113_int_compare_invalid_no_instance/Main.snap +++ b/tests-integration/fixtures/checking/113_int_compare_invalid_no_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,14 @@ Proxy Roles Proxy = [Phantom] -Errors -NoInstanceFound { Compare 5 1 LT } at [TermDeclaration(Idx::(2))] +Diagnostics +error[InvalidImportItem]: Cannot import item 'kind' + --> 4:23..4:27 + | +4 | import Prim.Ordering (kind Ordering, LT) + | ^~~~ +error[NoInstanceFound]: No instance found for: Compare 5 1 LT + --> 12:1..12:25 + | +12 | forceSolve = { invalid } + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/114_int_tostring_invalid_no_instance/Main.snap b/tests-integration/fixtures/checking/114_int_tostring_invalid_no_instance/Main.snap index da18ef47b..2d9ef469e 100644 --- a/tests-integration/fixtures/checking/114_int_tostring_invalid_no_instance/Main.snap +++ b/tests-integration/fixtures/checking/114_int_tostring_invalid_no_instance/Main.snap @@ 
-1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,5 +20,9 @@ Proxy Roles Proxy = [Phantom] -Errors -NoInstanceFound { ToString 42 "999" } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: ToString 42 "999" + --> 11:1..11:25 + | +11 | forceSolve = { invalid } + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/115_empty_do_block/Main.snap b/tests-integration/fixtures/checking/115_empty_do_block/Main.snap index 7eb99fbfb..9ca9b1722 100644 --- a/tests-integration/fixtures/checking/115_empty_do_block/Main.snap +++ b/tests-integration/fixtures/checking/115_empty_do_block/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,6 +19,14 @@ Effect :: Type -> Type Roles Effect = [Nominal] -Errors -EmptyDoBlock at [TermDeclaration(Idx::(3)), InferringExpression(AstId(56))] -CannotUnify { Type, ??? } at [TermDeclaration(Idx::(3))] +Diagnostics +error[EmptyDoBlock]: Empty do block + --> 9:8..9:10 + | +9 | test = do + | ^~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 9:1..9:10 + | +9 | test = do + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/116_empty_ado_block/Main.snap b/tests-integration/fixtures/checking/116_empty_ado_block/Main.snap index 4b2279dc3..7262504dc 100644 --- a/tests-integration/fixtures/checking/116_empty_ado_block/Main.snap +++ b/tests-integration/fixtures/checking/116_empty_ado_block/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,6 +20,14 @@ Effect :: Type -> Type Roles Effect = [Nominal] -Errors -EmptyAdoBlock at [TermDeclaration(Idx::(3)), InferringExpression(AstId(54))] -CannotUnify { Type, ??? 
} at [TermDeclaration(Idx::(3))] +Diagnostics +error[EmptyAdoBlock]: Empty ado block + --> 9:9..9:12 + | +9 | test1 = ado + | ^~~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 9:1..9:12 + | +9 | test1 = ado + | ^~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/117_do_ado_constrained/Main.snap b/tests-integration/fixtures/checking/117_do_ado_constrained/Main.snap index 5c8227e01..b78ad6870 100644 --- a/tests-integration/fixtures/checking/117_do_ado_constrained/Main.snap +++ b/tests-integration/fixtures/checking/117_do_ado_constrained/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -34,17 +35,17 @@ bind :: testDo :: forall (m :: Type -> Type). Monad (m :: Type -> Type) => (m :: Type -> Type) (Tuple Int String) testDo' :: - forall (t55 :: Type -> Type). - Bind (t55 :: Type -> Type) => (t55 :: Type -> Type) (Tuple Int String) + forall (t52 :: Type -> Type). + Bind (t52 :: Type -> Type) => (t52 :: Type -> Type) (Tuple Int String) testAdo :: forall (f :: Type -> Type). Applicative (f :: Type -> Type) => (f :: Type -> Type) (Tuple Int String) testAdo' :: - forall (t85 :: Type -> Type). - Applicative (t85 :: Type -> Type) => (t85 :: Type -> Type) (Tuple Int String) + forall (t91 :: Type -> Type). + Applicative (t91 :: Type -> Type) => (t91 :: Type -> Type) (Tuple Int String) testDoDiscard :: forall (m :: Type -> Type). Monad (m :: Type -> Type) => (m :: Type -> Type) Int testDoDiscard' :: - forall (t101 :: Type -> Type). Discard (t101 :: Type -> Type) => (t101 :: Type -> Type) Int + forall (t111 :: Type -> Type). 
Discard (t111 :: Type -> Type) => (t111 :: Type -> Type) Int Types Tuple :: Type -> Type -> Type @@ -65,12 +66,16 @@ Roles Tuple = [Representational, Representational] Classes -class Functor (&0 :: Type -> Type) -class Functor (&0 :: Type -> Type) <= Apply (&0 :: Type -> Type) -class Apply (&0 :: Type -> Type) <= Applicative (&0 :: Type -> Type) -class Applicative (&0 :: Type -> Type) <= Discard (&0 :: Type -> Type) -class Applicative (&0 :: Type -> Type) <= Bind (&0 :: Type -> Type) -class Bind (&0 :: Type -> Type) <= Monad (&0 :: Type -> Type) +class Functor (f :: Type -> Type) +class Functor (f :: Type -> Type) <= Apply (f :: Type -> Type) +class Apply (f :: Type -> Type) <= Applicative (f :: Type -> Type) +class Applicative (m :: Type -> Type) <= Discard (m :: Type -> Type) +class Applicative (m :: Type -> Type) <= Bind (m :: Type -> Type) +class Bind (m :: Type -> Type) <= Monad (m :: Type -> Type) -Errors -NoInstanceFound { Discard (&0 :: Type -> Type) } at [TermDeclaration(Idx::(10))] +Diagnostics +error[NoInstanceFound]: No instance found for: Discard (m :: Type -> Type) + --> 44:1..44:44 + | +44 | testDoDiscard :: forall m. 
Monad m => m Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/118_instance_member_type_match/Main.snap b/tests-integration/fixtures/checking/118_instance_member_type_match/Main.snap index 196c0477d..fd6040c44 100644 --- a/tests-integration/fixtures/checking/118_instance_member_type_match/Main.snap +++ b/tests-integration/fixtures/checking/118_instance_member_type_match/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,7 +10,7 @@ Types Show :: Type -> Constraint Classes -class Show (&0 :: Type) +class Show (a :: Type) Instances instance Show (Int :: Type) diff --git a/tests-integration/fixtures/checking/119_instance_member_type_mismatch/Main.snap b/tests-integration/fixtures/checking/119_instance_member_type_mismatch/Main.snap index 52e71db9a..219320da9 100644 --- a/tests-integration/fixtures/checking/119_instance_member_type_mismatch/Main.snap +++ b/tests-integration/fixtures/checking/119_instance_member_type_mismatch/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,13 +10,25 @@ Types Show :: Type -> Constraint Classes -class Show (&0 :: Type) +class Show (a :: Type) Instances instance Show (Int :: Type) chain: 0 -Errors -CannotUnify { Int, String } at [TermDeclaration(Idx::(1))] -CannotUnify { Int -> Int, Int -> String } at [TermDeclaration(Idx::(1))] -InstanceMemberTypeMismatch { expected: Int -> String, actual: Int -> Int } at [TermDeclaration(Idx::(1))] +Diagnostics +error[CannotUnify]: Cannot unify 'Int' with 'String' + --> 6:1..8:13 + | +6 | instance Show Int where + | ^~~~~~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Int -> Int' with 'Int -> String' + --> 6:1..8:13 + | +6 | instance Show Int where + | ^~~~~~~~~~~~~~~~~~~~~~~ +error[InstanceMemberTypeMismatch]: Instance member type mismatch: expected 'Int -> String', got 
'Int -> Int' + --> 6:1..8:13 + | +6 | instance Show Int where + | ^~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/120_class_explicit_kind_variable/Main.snap b/tests-integration/fixtures/checking/120_class_explicit_kind_variable/Main.snap index 804860aa8..8edb4804e 100644 --- a/tests-integration/fixtures/checking/120_class_explicit_kind_variable/Main.snap +++ b/tests-integration/fixtures/checking/120_class_explicit_kind_variable/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -11,7 +12,7 @@ Types TypeEq :: forall (k :: Type). Type -> Type -> (k :: Type) -> Constraint Classes -class TypeEq (&1 :: Type) (&2 :: Type) (&3 :: (&0 :: Type)) +class TypeEq (a :: Type) (b :: Type) (r :: (k :: Type)) Instances instance TypeEq (Int :: Type) (Int :: Type) (Int :: Type) diff --git a/tests-integration/fixtures/checking/121_instance_member_inner_forall/Main.snap b/tests-integration/fixtures/checking/121_instance_member_inner_forall/Main.snap index 7aeae948e..c01c94631 100644 --- a/tests-integration/fixtures/checking/121_instance_member_inner_forall/Main.snap +++ b/tests-integration/fixtures/checking/121_instance_member_inner_forall/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -25,7 +26,7 @@ Roles Box = [Representational] Classes -class Functor (&0 :: Type -> Type) +class Functor (f :: Type -> Type) Instances instance Functor (Box :: Type -> Type) diff --git a/tests-integration/fixtures/checking/122_instance_member_inner_forall_constraint/Main.snap b/tests-integration/fixtures/checking/122_instance_member_inner_forall_constraint/Main.snap index 8da20ce0c..ee30515e0 100644 --- a/tests-integration/fixtures/checking/122_instance_member_inner_forall_constraint/Main.snap +++ b/tests-integration/fixtures/checking/122_instance_member_inner_forall_constraint/Main.snap @@ -1,5 +1,6 @@ --- 
source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -36,8 +37,8 @@ Box = [Representational] Maybe = [Representational] Classes -class Show (&0 :: Type) -class Functor (&0 :: Type -> Type) +class Show (a :: Type) +class Functor (f :: Type -> Type) Instances instance Functor (Box :: Type -> Type) diff --git a/tests-integration/fixtures/checking/123_incomplete_instance_head/Main.snap b/tests-integration/fixtures/checking/123_incomplete_instance_head/Main.snap index e4fd1f86e..e732cd17f 100644 --- a/tests-integration/fixtures/checking/123_incomplete_instance_head/Main.snap +++ b/tests-integration/fixtures/checking/123_incomplete_instance_head/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -12,7 +13,7 @@ Types Pair :: Type -> Type -> Constraint Classes -class Pair (&0 :: Type) (&1 :: Type) +class Pair (a :: Type) (b :: Type) Instances instance Pair (Int :: Type) (String :: Type) @@ -20,5 +21,9 @@ instance Pair (Int :: Type) (String :: Type) instance Pair (Int :: Type) chain: 0 -Errors -InstanceHeadMismatch { class_file: Idx::(30), class_item: Idx::(0), expected: 2, actual: 1 } at [TermDeclaration(Idx::(2))] +Diagnostics +error[InstanceHeadMismatch]: Instance head mismatch: expected 2 arguments, got 1 + --> 9:1..10:28 + | +9 | instance Pair Int where + | ^~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/124_instance_member_missing_constraint/Main.snap b/tests-integration/fixtures/checking/124_instance_member_missing_constraint/Main.snap index 1db769036..f28ff718f 100644 --- a/tests-integration/fixtures/checking/124_instance_member_missing_constraint/Main.snap +++ b/tests-integration/fixtures/checking/124_instance_member_missing_constraint/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -27,12 +28,16 @@ Roles Box = 
[Representational] Classes -class Show (&0 :: Type) -class Functor (&0 :: Type -> Type) +class Show (a :: Type) +class Functor (f :: Type -> Type) Instances instance Functor (Box :: Type -> Type) chain: 0 -Errors -NoInstanceFound { Show (~&1 :: Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Show (~a :: Type) + --> 11:1..16:16 + | +11 | instance Functor Box where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/125_instance_member_overly_general/Main.snap b/tests-integration/fixtures/checking/125_instance_member_overly_general/Main.snap index 1cfd3b3cc..b8eaddc37 100644 --- a/tests-integration/fixtures/checking/125_instance_member_overly_general/Main.snap +++ b/tests-integration/fixtures/checking/125_instance_member_overly_general/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -9,12 +10,30 @@ Types Show :: Type -> Constraint Classes -class Show (&0 :: Type) +class Show (a :: Type) Instances instance Show (Boolean :: Type) chain: 0 -Errors -CannotUnify { forall (a :: Type). (a :: Type) -> String, Boolean -> String } at [TermDeclaration(Idx::(1))] -InstanceMemberTypeMismatch { expected: Boolean -> String, actual: forall (a :: Type). (a :: Type) -> String } at [TermDeclaration(Idx::(1))] +Diagnostics +error[CannotUnify]: Cannot unify '(~a :: Type)' with 'Boolean' + --> 6:1..8:18 + | +6 | instance Show Boolean where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify '(~a :: Type) -> String' with 'Boolean -> String' + --> 6:1..8:18 + | +6 | instance Show Boolean where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'forall (a :: Type). 
(a :: Type) -> String' with 'Boolean -> String' + --> 6:1..8:18 + | +6 | instance Show Boolean where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[InstanceMemberTypeMismatch]: Instance member type mismatch: expected 'Boolean -> String', got 'forall (a :: Type). (a :: Type) -> String' + --> 6:1..8:18 + | +6 | instance Show Boolean where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/126_instance_phantom/Main.snap b/tests-integration/fixtures/checking/126_instance_phantom/Main.snap index 991510446..4b40b8e5c 100644 --- a/tests-integration/fixtures/checking/126_instance_phantom/Main.snap +++ b/tests-integration/fixtures/checking/126_instance_phantom/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -22,8 +23,8 @@ Roles Proxy = [Phantom] Classes -class Phantom (&0 :: Type) +class Phantom (a :: Type) Instances -instance forall (&0 :: Type). Phantom (Proxy @(&0 :: Type) (&1 :: (&0 :: Type)) :: Type) +instance forall (t2 :: Type). Phantom (Proxy @(t2 :: Type) (a :: (t2 :: Type)) :: Type) chain: 0 diff --git a/tests-integration/fixtures/checking/127_derive_eq_simple/Main.snap b/tests-integration/fixtures/checking/127_derive_eq_simple/Main.snap index e22867b9a..32798dd8d 100644 --- a/tests-integration/fixtures/checking/127_derive_eq_simple/Main.snap +++ b/tests-integration/fixtures/checking/127_derive_eq_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -32,8 +33,12 @@ NoEq = [] ContainsNoEq = [] Derived -derive forall (&0 :: Type). Eq (Proxy @(&0 :: Type) (&1 :: (&0 :: Type)) :: Type) +derive forall (t1 :: Type). 
Eq (Proxy @(t1 :: Type) (a :: (t1 :: Type)) :: Type) derive Eq (ContainsNoEq :: Type) -Errors -NoInstanceFound { Eq NoEq } at [TermDeclaration(Idx::(4))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq NoEq + --> 13:1..13:32 + | +13 | derive instance Eq ContainsNoEq + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/130_derive_eq_parameterized/Main.snap b/tests-integration/fixtures/checking/130_derive_eq_parameterized/Main.snap index c618f3a58..64ef72c55 100644 --- a/tests-integration/fixtures/checking/130_derive_eq_parameterized/Main.snap +++ b/tests-integration/fixtures/checking/130_derive_eq_parameterized/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,4 +20,4 @@ Roles Maybe = [Representational] Derived -derive Eq (&0 :: Type) => Eq (Maybe (&0 :: Type) :: Type) +derive Eq (a :: Type) => Eq (Maybe (a :: Type) :: Type) diff --git a/tests-integration/fixtures/checking/131_derive_eq_missing_instance/Main.snap b/tests-integration/fixtures/checking/131_derive_eq_missing_instance/Main.snap index 72d81bdac..99f1b9c86 100644 --- a/tests-integration/fixtures/checking/131_derive_eq_missing_instance/Main.snap +++ b/tests-integration/fixtures/checking/131_derive_eq_missing_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -27,5 +28,9 @@ Box = [] Derived derive Eq (Box :: Type) -Errors -NoInstanceFound { Eq NoEq } at [TermDeclaration(Idx::(2))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq NoEq + --> 9:1..9:23 + | +9 | derive instance Eq Box + | ^~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/132_derive_eq_1_higher_kinded/Main.snap b/tests-integration/fixtures/checking/132_derive_eq_1_higher_kinded/Main.snap index 60acc6598..886a1a623 100644 --- 
a/tests-integration/fixtures/checking/132_derive_eq_1_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/132_derive_eq_1_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -31,8 +32,12 @@ Wrap = [Representational, Nominal] WrapNoEq1 = [Representational, Nominal] Derived -derive (Eq1 (&0 :: Type -> Type), Eq (&1 :: Type)) => Eq (Wrap @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive Eq (&1 :: Type) => Eq (WrapNoEq1 @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) +derive (Eq1 (f :: Type -> Type), Eq (a :: Type)) => Eq (Wrap @Type (f :: Type -> Type) (a :: Type) :: Type) +derive Eq (a :: Type) => Eq (WrapNoEq1 @Type (f :: Type -> Type) (a :: Type) :: Type) -Errors -NoInstanceFound { Eq1 (&0 :: Type -> Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq1 (f :: Type -> Type) + --> 12:1..12:43 + | +12 | derive instance Eq a => Eq (WrapNoEq1 f a) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/133_derive_eq_partial/Main.snap b/tests-integration/fixtures/checking/133_derive_eq_partial/Main.snap index f017cb827..88f4cfaba 100644 --- a/tests-integration/fixtures/checking/133_derive_eq_partial/Main.snap +++ b/tests-integration/fixtures/checking/133_derive_eq_partial/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,8 +20,12 @@ Roles Either = [Representational, Representational] Derived -derive Eq (&0 :: Type) => Eq (Either Int (&0 :: Type) :: Type) -derive Eq (Either Int (&0 :: Type) :: Type) +derive Eq (b :: Type) => Eq (Either Int (b :: Type) :: Type) +derive Eq (Either Int (b :: Type) :: Type) -Errors -NoInstanceFound { Eq (&0 :: Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq (b :: Type) + --> 9:1..9:34 + | +9 | 
derive instance Eq (Either Int b) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/134_derive_ord_simple/Main.snap b/tests-integration/fixtures/checking/134_derive_ord_simple/Main.snap index ffce05874..b3a04cd5c 100644 --- a/tests-integration/fixtures/checking/134_derive_ord_simple/Main.snap +++ b/tests-integration/fixtures/checking/134_derive_ord_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -47,5 +48,9 @@ derive Eq (NoOrd :: Type) derive Eq (ContainsNoOrd :: Type) derive Ord (ContainsNoOrd :: Type) -Errors -NoInstanceFound { Ord NoOrd } at [TermDeclaration(Idx::(10))] +Diagnostics +error[NoInstanceFound]: No instance found for: Ord NoOrd + --> 23:1..23:34 + | +23 | derive instance Ord ContainsNoOrd + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/135_derive_ord_1_higher_kinded/Main.snap b/tests-integration/fixtures/checking/135_derive_ord_1_higher_kinded/Main.snap index 2a435bb5d..5f0193200 100644 --- a/tests-integration/fixtures/checking/135_derive_ord_1_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/135_derive_ord_1_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -31,11 +32,19 @@ Wrap = [Representational, Nominal] WrapNoOrd1 = [Representational, Nominal] Derived -derive (Eq1 (&0 :: Type -> Type), Eq (&1 :: Type)) => Eq (Wrap @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive (Ord1 (&0 :: Type -> Type), Ord (&1 :: Type)) => Ord (Wrap @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive (Eq1 (&0 :: Type -> Type), Eq (&1 :: Type)) => Eq (WrapNoOrd1 @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive Ord (&1 :: Type) => Ord (WrapNoOrd1 @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) +derive (Eq1 (f :: Type -> Type), Eq (a :: Type)) => Eq (Wrap @Type (f :: Type 
-> Type) (a :: Type) :: Type) +derive (Ord1 (f :: Type -> Type), Ord (a :: Type)) => Ord (Wrap @Type (f :: Type -> Type) (a :: Type) :: Type) +derive (Eq1 (f :: Type -> Type), Eq (a :: Type)) => Eq (WrapNoOrd1 @Type (f :: Type -> Type) (a :: Type) :: Type) +derive Ord (a :: Type) => Ord (WrapNoOrd1 @Type (f :: Type -> Type) (a :: Type) :: Type) -Errors -NoInstanceFound { Ord1 (&0 :: Type -> Type) } at [TermDeclaration(Idx::(5))] -NoInstanceFound { Eq1 (&0 :: Type -> Type) } at [TermDeclaration(Idx::(5))] +Diagnostics +error[NoInstanceFound]: No instance found for: Ord1 (f :: Type -> Type) + --> 14:1..14:46 + | +14 | derive instance Ord a => Ord (WrapNoOrd1 f a) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Eq1 (f :: Type -> Type) + --> 14:1..14:46 + | +14 | derive instance Ord a => Ord (WrapNoOrd1 f a) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/136_derive_nested_higher_kinded/Main.snap b/tests-integration/fixtures/checking/136_derive_nested_higher_kinded/Main.snap index 2487ddad9..541f8ed1c 100644 --- a/tests-integration/fixtures/checking/136_derive_nested_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/136_derive_nested_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -46,17 +47,33 @@ U = [Representational] V = [Representational, Representational] Derived -derive Eq1 (&0 :: Type -> Type) => Eq (V @Type (&0 :: Type -> Type) (&1 :: Int -> Type) :: Type) -derive Ord1 (&0 :: Type -> Type) => Ord (V @Type (&0 :: Type -> Type) (&1 :: Int -> Type) :: Type) -derive Eq1 (&0 :: Type -> Type) => Eq (T @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type) -derive Ord1 (&0 :: Type -> Type) => Ord (T @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type) -derive Eq (&0 :: Type) => Eq (Maybe (&0 :: Type) :: Type) -derive Ord (&0 :: Type) => Ord (Maybe 
(&0 :: Type) :: Type) -derive Eq1 (&0 :: Type -> Type) => Eq (U (&0 :: Type -> Type) :: Type) -derive Ord1 (&0 :: Type -> Type) => Ord (U (&0 :: Type -> Type) :: Type) - -Errors -NoInstanceFound { Eq ((&1 :: Type -> Type) Int) } at [TermDeclaration(Idx::(1))] -NoInstanceFound { Ord ((&1 :: Type -> Type) Int) } at [TermDeclaration(Idx::(2))] -NoInstanceFound { Eq ((&1 :: Int -> Type) 42) } at [TermDeclaration(Idx::(11))] -NoInstanceFound { Ord ((&1 :: Int -> Type) 42) } at [TermDeclaration(Idx::(12))] +derive Eq1 (f :: Type -> Type) => Eq (V @Type (f :: Type -> Type) (g :: Int -> Type) :: Type) +derive Ord1 (f :: Type -> Type) => Ord (V @Type (f :: Type -> Type) (g :: Int -> Type) :: Type) +derive Eq1 (f :: Type -> Type) => Eq (T @Type (f :: Type -> Type) (g :: Type -> Type) :: Type) +derive Ord1 (f :: Type -> Type) => Ord (T @Type (f :: Type -> Type) (g :: Type -> Type) :: Type) +derive Eq (a :: Type) => Eq (Maybe (a :: Type) :: Type) +derive Ord (a :: Type) => Ord (Maybe (a :: Type) :: Type) +derive Eq1 (f :: Type -> Type) => Eq (U (f :: Type -> Type) :: Type) +derive Ord1 (f :: Type -> Type) => Ord (U (f :: Type -> Type) :: Type) + +Diagnostics +error[NoInstanceFound]: No instance found for: Eq ((g :: Type -> Type) Int) + --> 8:1..8:38 + | +8 | derive instance (Eq1 f) => Eq (T f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Ord ((g :: Type -> Type) Int) + --> 9:1..9:40 + | +9 | derive instance (Ord1 f) => Ord (T f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Eq ((g :: Int -> Type) 42) + --> 23:1..23:38 + | +23 | derive instance (Eq1 f) => Eq (V f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Ord ((g :: Int -> Type) 42) + --> 24:1..24:40 + | +24 | derive instance (Ord1 f) => Ord (V f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/139_derive_newtype_with_given/Main.snap 
b/tests-integration/fixtures/checking/139_derive_newtype_with_given/Main.snap index 82d96445d..692d35e66 100644 --- a/tests-integration/fixtures/checking/139_derive_newtype_with_given/Main.snap +++ b/tests-integration/fixtures/checking/139_derive_newtype_with_given/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,4 +19,4 @@ Roles Identity = [Representational] Derived -derive Show (&0 :: Type) => Show (Identity (&0 :: Type) :: Type) +derive Show (a :: Type) => Show (Identity (a :: Type) :: Type) diff --git a/tests-integration/fixtures/checking/140_derive_newtype_recursive/Main.snap b/tests-integration/fixtures/checking/140_derive_newtype_recursive/Main.snap index 5e036496b..3c7102f02 100644 --- a/tests-integration/fixtures/checking/140_derive_newtype_recursive/Main.snap +++ b/tests-integration/fixtures/checking/140_derive_newtype_recursive/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -19,4 +20,4 @@ Roles Mu = [Representational] Derived -derive Show ((&0 :: Type -> Type) (Mu (&0 :: Type -> Type))) => Show (Mu (&0 :: Type -> Type) :: Type) +derive Show ((f :: Type -> Type) (Mu (f :: Type -> Type))) => Show (Mu (f :: Type -> Type) :: Type) diff --git a/tests-integration/fixtures/checking/141_derive_newtype_phantom/Main.snap b/tests-integration/fixtures/checking/141_derive_newtype_phantom/Main.snap index a18024705..ab38b26f9 100644 --- a/tests-integration/fixtures/checking/141_derive_newtype_phantom/Main.snap +++ b/tests-integration/fixtures/checking/141_derive_newtype_phantom/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,4 +19,4 @@ Roles Vector = [Phantom, Representational] Derived -derive Show (&1 :: Type) => Show (Vector (&0 :: Int) (&1 :: Type) :: Type) +derive Show (a :: Type) => Show (Vector 
(n :: Int) (a :: Type) :: Type) diff --git a/tests-integration/fixtures/checking/142_derive_newtype_not_newtype/Main.snap b/tests-integration/fixtures/checking/142_derive_newtype_not_newtype/Main.snap index b57fee0c8..a46ef31db 100644 --- a/tests-integration/fixtures/checking/142_derive_newtype_not_newtype/Main.snap +++ b/tests-integration/fixtures/checking/142_derive_newtype_not_newtype/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -17,5 +18,9 @@ Foo Roles Foo = [] -Errors -ExpectedNewtype { type_id: Id(9) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[ExpectedNewtype]: Expected a newtype, got: Foo + --> 7:1..7:33 + | +7 | derive newtype instance Show Foo + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/143_derive_newtype_missing_instance/Main.snap b/tests-integration/fixtures/checking/143_derive_newtype_missing_instance/Main.snap index 480797cdc..9ad82503a 100644 --- a/tests-integration/fixtures/checking/143_derive_newtype_missing_instance/Main.snap +++ b/tests-integration/fixtures/checking/143_derive_newtype_missing_instance/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -20,5 +21,9 @@ Identity = [Representational] Derived derive Show (Identity String :: Type) -Errors -NoInstanceFound { Show String } at [TermDeclaration(Idx::(1))] +Diagnostics +error[NoInstanceFound]: No instance found for: Show String + --> 7:1..7:47 + | +7 | derive newtype instance Show (Identity String) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/144_derive_newtype_missing_given/Main.snap b/tests-integration/fixtures/checking/144_derive_newtype_missing_given/Main.snap index 0b2c6fb50..cf69a7908 100644 --- a/tests-integration/fixtures/checking/144_derive_newtype_missing_given/Main.snap +++ 
b/tests-integration/fixtures/checking/144_derive_newtype_missing_given/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,7 +19,11 @@ Roles Identity = [Representational] Derived -derive Show (Identity (&0 :: Type) :: Type) +derive Show (Identity (a :: Type) :: Type) -Errors -NoInstanceFound { Show (&0 :: Type) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[NoInstanceFound]: No instance found for: Show (a :: Type) + --> 7:1..7:42 + | +7 | derive newtype instance Show (Identity a) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/146_derive_functor_simple/Main.snap b/tests-integration/fixtures/checking/146_derive_functor_simple/Main.snap index cde366727..89fa39254 100644 --- a/tests-integration/fixtures/checking/146_derive_functor_simple/Main.snap +++ b/tests-integration/fixtures/checking/146_derive_functor_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -36,5 +37,5 @@ Maybe = [Representational] Derived derive Functor (Identity :: Type -> Type) -derive Functor (Const @Type (&0 :: Type) :: Type -> Type) +derive Functor (Const @Type (e :: Type) :: Type -> Type) derive Functor (Maybe :: Type -> Type) diff --git a/tests-integration/fixtures/checking/147_derive_functor_higher_kinded/Main.snap b/tests-integration/fixtures/checking/147_derive_functor_higher_kinded/Main.snap index 4c3642bd1..5995b6411 100644 --- a/tests-integration/fixtures/checking/147_derive_functor_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/147_derive_functor_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -31,8 +32,12 @@ Wrap = [Representational, Nominal] WrapNoFunctor = [Representational, Nominal] Derived -derive Functor (&0 :: Type -> Type) => 
Functor (Wrap @Type (&0 :: Type -> Type) :: Type -> Type) -derive Functor (WrapNoFunctor @Type (&0 :: Type -> Type) :: Type -> Type) +derive Functor (f :: Type -> Type) => Functor (Wrap @Type (f :: Type -> Type) :: Type -> Type) +derive Functor (WrapNoFunctor @Type (f :: Type -> Type) :: Type -> Type) -Errors -NoInstanceFound { Functor (&0 :: Type -> Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Functor (f :: Type -> Type) + --> 9:1..9:42 + | +9 | derive instance Functor (WrapNoFunctor f) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/148_derive_functor_contravariant_error/Main.snap b/tests-integration/fixtures/checking/148_derive_functor_contravariant_error/Main.snap index 05a05de41..127ad714a 100644 --- a/tests-integration/fixtures/checking/148_derive_functor_contravariant_error/Main.snap +++ b/tests-integration/fixtures/checking/148_derive_functor_contravariant_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -36,8 +37,12 @@ Cont = [Representational, Representational] Derived derive Functor (Predicate :: Type -> Type) -derive Functor (Reader (&0 :: Type) :: Type -> Type) -derive Functor (Cont (&0 :: Type) :: Type -> Type) - -Errors -ContravariantOccurrence { type_id: Id(45) } at [TermDeclaration(Idx::(1))] +derive Functor (Reader (r :: Type) :: Type -> Type) +derive Functor (Cont (r :: Type) :: Type -> Type) + +Diagnostics +error[ContravariantOccurrence]: Type variable occurs in contravariant position: (~a :: Type) + --> 6:1..6:34 + | +6 | derive instance Functor Predicate + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/149_derive_bifunctor_simple/Main.snap b/tests-integration/fixtures/checking/149_derive_bifunctor_simple/Main.snap index c5aab0605..4b46692df 100644 --- 
a/tests-integration/fixtures/checking/149_derive_bifunctor_simple/Main.snap +++ b/tests-integration/fixtures/checking/149_derive_bifunctor_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -38,4 +39,4 @@ Const2 = [Representational, Phantom, Phantom] Derived derive Bifunctor (Either :: Type -> Type -> Type) derive Bifunctor (Pair :: Type -> Type -> Type) -derive Bifunctor (Const2 @Type @Type (&0 :: Type) :: Type -> Type -> Type) +derive Bifunctor (Const2 @Type @Type (e :: Type) :: Type -> Type -> Type) diff --git a/tests-integration/fixtures/checking/150_derive_bifunctor_higher_kinded/Main.snap b/tests-integration/fixtures/checking/150_derive_bifunctor_higher_kinded/Main.snap index 39402b81a..3b5eb99a6 100644 --- a/tests-integration/fixtures/checking/150_derive_bifunctor_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/150_derive_bifunctor_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -47,9 +48,17 @@ WrapBoth = [Representational, Representational, Nominal, Nominal] WrapBothNoConstraint = [Representational, Representational, Nominal, Nominal] Derived -derive (Functor (&0 :: Type -> Type), Functor (&1 :: Type -> Type)) => Bifunctor (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) -derive Bifunctor (WrapBothNoConstraint @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) +derive (Functor (f :: Type -> Type), Functor (g :: Type -> Type)) => Bifunctor (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) +derive Bifunctor (WrapBothNoConstraint @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) -Errors -NoInstanceFound { Functor (&0 :: Type -> Type) } at [TermDeclaration(Idx::(3))] -NoInstanceFound { Functor (&1 :: Type -> Type) } at 
[TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Functor (f :: Type -> Type) + --> 10:1..10:53 + | +10 | derive instance Bifunctor (WrapBothNoConstraint f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Functor (g :: Type -> Type) + --> 10:1..10:53 + | +10 | derive instance Bifunctor (WrapBothNoConstraint f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/151_derive_bifunctor_missing_functor/Main.snap b/tests-integration/fixtures/checking/151_derive_bifunctor_missing_functor/Main.snap index 80506f29b..7abee319b 100644 --- a/tests-integration/fixtures/checking/151_derive_bifunctor_missing_functor/Main.snap +++ b/tests-integration/fixtures/checking/151_derive_bifunctor_missing_functor/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -29,8 +30,16 @@ Roles WrapBoth = [Representational, Representational, Nominal, Nominal] Derived -derive Bifunctor (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) +derive Bifunctor (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) -Errors -DeriveMissingFunctor at [TermDeclaration(Idx::(1))] -DeriveMissingFunctor at [TermDeclaration(Idx::(1))] +Diagnostics +error[DeriveMissingFunctor]: Deriving Functor requires Data.Functor to be in scope + --> 6:1..6:41 + | +6 | derive instance Bifunctor (WrapBoth f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[DeriveMissingFunctor]: Deriving Functor requires Data.Functor to be in scope + --> 6:1..6:41 + | +6 | derive instance Bifunctor (WrapBoth f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/152_derive_contravariant_simple/Main.snap b/tests-integration/fixtures/checking/152_derive_contravariant_simple/Main.snap index 
fb24b6f73..3827168a5 100644 --- a/tests-integration/fixtures/checking/152_derive_contravariant_simple/Main.snap +++ b/tests-integration/fixtures/checking/152_derive_contravariant_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -34,4 +35,4 @@ Op = [Representational, Representational] Derived derive Contravariant (Predicate :: Type -> Type) derive Contravariant (Comparison :: Type -> Type) -derive Contravariant (Op (&0 :: Type) :: Type -> Type) +derive Contravariant (Op (a :: Type) :: Type -> Type) diff --git a/tests-integration/fixtures/checking/153_derive_contravariant_error/Main.snap b/tests-integration/fixtures/checking/153_derive_contravariant_error/Main.snap index 2ee1b9db7..9af133228 100644 --- a/tests-integration/fixtures/checking/153_derive_contravariant_error/Main.snap +++ b/tests-integration/fixtures/checking/153_derive_contravariant_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -28,6 +29,14 @@ Derived derive Contravariant (Identity :: Type -> Type) derive Contravariant (Producer :: Type -> Type) -Errors -CovariantOccurrence { type_id: Id(34) } at [TermDeclaration(Idx::(1))] -CovariantOccurrence { type_id: Id(34) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[CovariantOccurrence]: Type variable occurs in covariant position: (~a :: Type) + --> 7:1..7:39 + | +7 | derive instance Contravariant Identity + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CovariantOccurrence]: Type variable occurs in covariant position: (~a :: Type) + --> 11:1..11:39 + | +11 | derive instance Contravariant Producer + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/154_derive_profunctor_simple/Main.snap b/tests-integration/fixtures/checking/154_derive_profunctor_simple/Main.snap index 48d1414ae..8e9789597 100644 --- 
a/tests-integration/fixtures/checking/154_derive_profunctor_simple/Main.snap +++ b/tests-integration/fixtures/checking/154_derive_profunctor_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -36,5 +37,5 @@ Choice = [Representational, Representational] Derived derive Profunctor (Fn :: Type -> Type -> Type) -derive Profunctor (ConstR @Type (&0 :: Type) :: Type -> Type -> Type) +derive Profunctor (ConstR @Type (r :: Type) :: Type -> Type -> Type) derive Profunctor (Choice :: Type -> Type -> Type) diff --git a/tests-integration/fixtures/checking/155_derive_profunctor_error/Main.snap b/tests-integration/fixtures/checking/155_derive_profunctor_error/Main.snap index cdf626eb1..3112375f2 100644 --- a/tests-integration/fixtures/checking/155_derive_profunctor_error/Main.snap +++ b/tests-integration/fixtures/checking/155_derive_profunctor_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -31,7 +32,19 @@ Derived derive Profunctor (WrongFirst :: Type -> Type -> Type) derive Profunctor (WrongSecond :: Type -> Type -> Type) -Errors -CovariantOccurrence { type_id: Id(46) } at [TermDeclaration(Idx::(1))] -ContravariantOccurrence { type_id: Id(47) } at [TermDeclaration(Idx::(3))] -CovariantOccurrence { type_id: Id(46) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[CovariantOccurrence]: Type variable occurs in covariant position: (~a :: Type) + --> 7:1..7:38 + | +7 | derive instance Profunctor WrongFirst + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[ContravariantOccurrence]: Type variable occurs in contravariant position: (~b :: Type) + --> 11:1..11:39 + | +11 | derive instance Profunctor WrongSecond + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CovariantOccurrence]: Type variable occurs in covariant position: (~a :: Type) + --> 11:1..11:39 + | +11 | derive instance Profunctor WrongSecond 
+ | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/156_derive_bifunctor_insufficient_params/Main.snap b/tests-integration/fixtures/checking/156_derive_bifunctor_insufficient_params/Main.snap index 25024b9a4..567f49ea5 100644 --- a/tests-integration/fixtures/checking/156_derive_bifunctor_insufficient_params/Main.snap +++ b/tests-integration/fixtures/checking/156_derive_bifunctor_insufficient_params/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -22,6 +23,14 @@ Triple = [Representational, Representational, Representational] Derived derive Bifunctor (Triple Int String :: Type -> Type) -Errors -CannotUnify { Type, Type -> Type } at [TermDeclaration(Idx::(1)), CheckingKind(AstId(21))] -CannotDeriveForType { type_id: Id(46) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[CannotUnify]: Cannot unify 'Type' with 'Type -> Type' + --> 6:27..6:46 + | +6 | derive instance Bifunctor (Triple Int String) + | ^~~~~~~~~~~~~~~~~~~ +error[CannotDeriveForType]: Cannot derive for type: Triple Int String + --> 6:1..6:46 + | +6 | derive instance Bifunctor (Triple Int String) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/157_derive_functor_insufficient_params/Main.snap b/tests-integration/fixtures/checking/157_derive_functor_insufficient_params/Main.snap index 7971d5423..7239f1a98 100644 --- a/tests-integration/fixtures/checking/157_derive_functor_insufficient_params/Main.snap +++ b/tests-integration/fixtures/checking/157_derive_functor_insufficient_params/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -28,8 +29,24 @@ Derived derive Functor (Pair Int String :: Type) derive Functor (Unit :: Type) -Errors -CannotUnify { Type, Type -> Type } at [TermDeclaration(Idx::(1)), CheckingKind(AstId(19))] -CannotDeriveForType { 
type_id: Id(35) } at [TermDeclaration(Idx::(1))] -CannotUnify { Type, Type -> Type } at [TermDeclaration(Idx::(3)), CheckingKind(AstId(28))] -CannotDeriveForType { type_id: Id(17) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[CannotUnify]: Cannot unify 'Type' with 'Type -> Type' + --> 6:25..6:42 + | +6 | derive instance Functor (Pair Int String) + | ^~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with 'Type -> Type' + --> 9:25..9:29 + | +9 | derive instance Functor Unit + | ^~~~ +error[CannotDeriveForType]: Cannot derive for type: Pair Int String + --> 6:1..6:42 + | +6 | derive instance Functor (Pair Int String) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CannotDeriveForType]: Cannot derive for type: Unit + --> 9:1..9:29 + | +9 | derive instance Functor Unit + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/158_derive_foldable_simple/Main.snap b/tests-integration/fixtures/checking/158_derive_foldable_simple/Main.snap index 0ab900c4e..60d87ba7c 100644 --- a/tests-integration/fixtures/checking/158_derive_foldable_simple/Main.snap +++ b/tests-integration/fixtures/checking/158_derive_foldable_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -37,4 +38,4 @@ Const = [Representational, Phantom] Derived derive Foldable (Identity :: Type -> Type) derive Foldable (Maybe :: Type -> Type) -derive Foldable (Const @Type (&0 :: Type) :: Type -> Type) +derive Foldable (Const @Type (e :: Type) :: Type -> Type) diff --git a/tests-integration/fixtures/checking/159_derive_foldable_higher_kinded/Main.snap b/tests-integration/fixtures/checking/159_derive_foldable_higher_kinded/Main.snap index f53133c14..98b51c69c 100644 --- a/tests-integration/fixtures/checking/159_derive_foldable_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/159_derive_foldable_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: 
tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -31,8 +32,12 @@ Wrap = [Representational, Nominal] WrapNoFoldable = [Representational, Nominal] Derived -derive Foldable (&0 :: Type -> Type) => Foldable (Wrap @Type (&0 :: Type -> Type) :: Type -> Type) -derive Foldable (WrapNoFoldable @Type (&0 :: Type -> Type) :: Type -> Type) +derive Foldable (f :: Type -> Type) => Foldable (Wrap @Type (f :: Type -> Type) :: Type -> Type) +derive Foldable (WrapNoFoldable @Type (f :: Type -> Type) :: Type -> Type) -Errors -NoInstanceFound { Foldable (&0 :: Type -> Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Foldable (f :: Type -> Type) + --> 9:1..9:44 + | +9 | derive instance Foldable (WrapNoFoldable f) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/160_derive_bifoldable_simple/Main.snap b/tests-integration/fixtures/checking/160_derive_bifoldable_simple/Main.snap index ad951a85f..4dca62dc3 100644 --- a/tests-integration/fixtures/checking/160_derive_bifoldable_simple/Main.snap +++ b/tests-integration/fixtures/checking/160_derive_bifoldable_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -38,4 +39,4 @@ Const2 = [Representational, Phantom, Phantom] Derived derive Bifoldable (Either :: Type -> Type -> Type) derive Bifoldable (Pair :: Type -> Type -> Type) -derive Bifoldable (Const2 @Type @Type (&0 :: Type) :: Type -> Type -> Type) +derive Bifoldable (Const2 @Type @Type (e :: Type) :: Type -> Type -> Type) diff --git a/tests-integration/fixtures/checking/161_derive_bifoldable_higher_kinded/Main.snap b/tests-integration/fixtures/checking/161_derive_bifoldable_higher_kinded/Main.snap index 5e87e9c10..0ed8b63f9 100644 --- a/tests-integration/fixtures/checking/161_derive_bifoldable_higher_kinded/Main.snap +++ 
b/tests-integration/fixtures/checking/161_derive_bifoldable_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -47,9 +48,17 @@ WrapBoth = [Representational, Representational, Nominal, Nominal] WrapBothNoConstraint = [Representational, Representational, Nominal, Nominal] Derived -derive (Foldable (&0 :: Type -> Type), Foldable (&1 :: Type -> Type)) => Bifoldable (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) -derive Bifoldable (WrapBothNoConstraint @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) +derive (Foldable (f :: Type -> Type), Foldable (g :: Type -> Type)) => Bifoldable (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) +derive Bifoldable (WrapBothNoConstraint @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) -Errors -NoInstanceFound { Foldable (&0 :: Type -> Type) } at [TermDeclaration(Idx::(3))] -NoInstanceFound { Foldable (&1 :: Type -> Type) } at [TermDeclaration(Idx::(3))] +Diagnostics +error[NoInstanceFound]: No instance found for: Foldable (f :: Type -> Type) + --> 10:1..10:54 + | +10 | derive instance Bifoldable (WrapBothNoConstraint f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Foldable (g :: Type -> Type) + --> 10:1..10:54 + | +10 | derive instance Bifoldable (WrapBothNoConstraint f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/162_derive_traversable_simple/Main.snap b/tests-integration/fixtures/checking/162_derive_traversable_simple/Main.snap index f21c15a20..63b920279 100644 --- a/tests-integration/fixtures/checking/162_derive_traversable_simple/Main.snap +++ b/tests-integration/fixtures/checking/162_derive_traversable_simple/Main.snap @@ -1,5 +1,6 @@ --- source: 
tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -41,6 +42,6 @@ derive Traversable (Identity :: Type -> Type) derive Functor (Maybe :: Type -> Type) derive Foldable (Maybe :: Type -> Type) derive Traversable (Maybe :: Type -> Type) -derive Functor (Const @Type (&0 :: Type) :: Type -> Type) -derive Foldable (Const @Type (&0 :: Type) :: Type -> Type) -derive Traversable (Const @Type (&0 :: Type) :: Type -> Type) +derive Functor (Const @Type (e :: Type) :: Type -> Type) +derive Foldable (Const @Type (e :: Type) :: Type -> Type) +derive Traversable (Const @Type (e :: Type) :: Type -> Type) diff --git a/tests-integration/fixtures/checking/163_derive_traversable_higher_kinded/Main.snap b/tests-integration/fixtures/checking/163_derive_traversable_higher_kinded/Main.snap index 52ca615bc..ac6d020c6 100644 --- a/tests-integration/fixtures/checking/163_derive_traversable_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/163_derive_traversable_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -27,6 +28,6 @@ Roles Compose = [Representational, Representational, Nominal] Derived -derive (Functor (&0 :: Type -> Type), Functor (&1 :: Type -> Type)) => Functor (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) -derive (Foldable (&0 :: Type -> Type), Foldable (&1 :: Type -> Type)) => Foldable (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) -derive (Traversable (&0 :: Type -> Type), Traversable (&1 :: Type -> Type)) => Traversable (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) +derive (Functor (f :: Type -> Type), Functor (g :: Type -> Type)) => Functor (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) +derive (Foldable (f :: Type -> Type), Foldable (g :: Type -> Type)) => Foldable (Compose @Type @Type 
(f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) +derive (Traversable (f :: Type -> Type), Traversable (g :: Type -> Type)) => Traversable (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) diff --git a/tests-integration/fixtures/checking/165_derive_bitraversable_higher_kinded/Main.snap b/tests-integration/fixtures/checking/165_derive_bitraversable_higher_kinded/Main.snap index e3785f032..fef2082d4 100644 --- a/tests-integration/fixtures/checking/165_derive_bitraversable_higher_kinded/Main.snap +++ b/tests-integration/fixtures/checking/165_derive_bitraversable_higher_kinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -29,6 +30,6 @@ Roles WrapBoth = [Representational, Representational, Nominal, Nominal] Derived -derive (Functor (&0 :: Type -> Type), Functor (&1 :: Type -> Type)) => Bifunctor (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) -derive (Foldable (&0 :: Type -> Type), Foldable (&1 :: Type -> Type)) => Bifoldable (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) -derive (Traversable (&0 :: Type -> Type), Traversable (&1 :: Type -> Type)) => Bitraversable (WrapBoth @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type -> Type) +derive (Functor (f :: Type -> Type), Functor (g :: Type -> Type)) => Bifunctor (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) +derive (Foldable (f :: Type -> Type), Foldable (g :: Type -> Type)) => Bifoldable (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) +derive (Traversable (f :: Type -> Type), Traversable (g :: Type -> Type)) => Bitraversable (WrapBoth @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type -> Type) diff --git a/tests-integration/fixtures/checking/166_derive_traversable_missing_superclass/Main.snap 
b/tests-integration/fixtures/checking/166_derive_traversable_missing_superclass/Main.snap index 085210057..9003689be 100644 --- a/tests-integration/fixtures/checking/166_derive_traversable_missing_superclass/Main.snap +++ b/tests-integration/fixtures/checking/166_derive_traversable_missing_superclass/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -27,8 +28,16 @@ Roles Compose = [Representational, Representational, Nominal] Derived -derive (Traversable (&0 :: Type -> Type), Traversable (&1 :: Type -> Type)) => Traversable (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) +derive (Traversable (f :: Type -> Type), Traversable (g :: Type -> Type)) => Traversable (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) -Errors -NoInstanceFound { Functor (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type)) } at [TermDeclaration(Idx::(1))] -NoInstanceFound { Foldable (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type)) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[NoInstanceFound]: No instance found for: Functor (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type)) + --> 7:1..7:76 + | +7 | derive instance (Traversable f, Traversable g) => Traversable (Compose f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Foldable (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type)) + --> 7:1..7:76 + | +7 | derive instance (Traversable f, Traversable g) => Traversable (Compose f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/167_derive_eq_1/Main.snap b/tests-integration/fixtures/checking/167_derive_eq_1/Main.snap index 889e3cf9f..6f001319e 100644 --- a/tests-integration/fixtures/checking/167_derive_eq_1/Main.snap +++ 
b/tests-integration/fixtures/checking/167_derive_eq_1/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -80,22 +81,30 @@ Either' = [Representational] NoEq = [Representational] Derived -derive Eq1 (&0 :: Type -> Type) => Eq1 (Wrap @Type (&0 :: Type -> Type) :: Type -> Type) -derive forall (&0 :: Type). (Eq1 (&1 :: Type -> Type), Eq ((&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)))) => Eq (Compose @Type @(&0 :: Type) (&1 :: Type -> Type) (&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)) :: Type) -derive Eq1 (&0 :: Type -> Type) => Eq1 (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) -derive Eq (&0 :: Type) => Eq (Id (&0 :: Type) :: Type) -derive Eq (&0 :: Type) => Eq (Either' (&0 :: Type) :: Type) +derive Eq1 (f :: Type -> Type) => Eq1 (Wrap @Type (f :: Type -> Type) :: Type -> Type) +derive forall (t41 :: Type). (Eq1 (f :: Type -> Type), Eq ((g :: (t41 :: Type) -> Type) (a :: (t41 :: Type)))) => Eq (Compose @Type @(t41 :: Type) (f :: Type -> Type) (g :: (t41 :: Type) -> Type) (a :: (t41 :: Type)) :: Type) +derive Eq1 (f :: Type -> Type) => Eq1 (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) +derive Eq (a :: Type) => Eq (Id (a :: Type) :: Type) +derive Eq (a :: Type) => Eq (Either' (a :: Type) :: Type) derive Eq1 (Either' :: Type -> Type) derive Eq1 (NoEq :: Type -> Type) derive Eq1 (Id :: Type -> Type) -derive Eq (&0 :: Type) => Eq (Pair (&0 :: Type) :: Type) +derive Eq (a :: Type) => Eq (Pair (a :: Type) :: Type) derive Eq1 (Pair :: Type -> Type) -derive Eq (&0 :: Type) => Eq (Mixed (&0 :: Type) :: Type) +derive Eq (a :: Type) => Eq (Mixed (a :: Type) :: Type) derive Eq1 (Mixed :: Type -> Type) -derive Eq (&0 :: Type) => Eq (Rec (&0 :: Type) :: Type) +derive Eq (a :: Type) => Eq (Rec (a :: Type) :: Type) derive Eq1 (Rec :: Type -> Type) -derive (Eq1 (&0 :: Type -> Type), Eq (&1 :: Type)) => Eq (Wrap @Type (&0 :: Type 
-> Type) (&1 :: Type) :: Type) +derive (Eq1 (f :: Type -> Type), Eq (a :: Type)) => Eq (Wrap @Type (f :: Type -> Type) (a :: Type) :: Type) -Errors -NoInstanceFound { Eq ((&1 :: Type -> Type) (~&2 :: Type)) } at [TermDeclaration(Idx::(17))] -NoInstanceFound { Eq (NoEq (~&0 :: Type)) } at [TermDeclaration(Idx::(23))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq ((g :: Type -> Type) (~_ :: Type)) + --> 35:1..35:43 + | +35 | derive instance Eq1 f => Eq1 (Compose f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Eq (NoEq (~_ :: Type)) + --> 45:1..45:25 + | +45 | derive instance Eq1 NoEq + | ^~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/168_derive_ord_1/Main.snap b/tests-integration/fixtures/checking/168_derive_ord_1/Main.snap index 60e8c7123..080b13c95 100644 --- a/tests-integration/fixtures/checking/168_derive_ord_1/Main.snap +++ b/tests-integration/fixtures/checking/168_derive_ord_1/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -51,23 +52,35 @@ Compose = [Representational, Representational, Nominal] NoOrd = [Representational] Derived -derive forall (&0 :: Type). (Eq1 (&1 :: Type -> Type), Eq ((&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)))) => Eq (Compose @Type @(&0 :: Type) (&1 :: Type -> Type) (&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)) :: Type) -derive forall (&0 :: Type). 
(Ord1 (&1 :: Type -> Type), Ord ((&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)))) => Ord (Compose @Type @(&0 :: Type) (&1 :: Type -> Type) (&2 :: (&0 :: Type) -> Type) (&3 :: (&0 :: Type)) :: Type) -derive Eq1 (&0 :: Type -> Type) => Eq1 (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) -derive Ord1 (&0 :: Type -> Type) => Ord1 (Compose @Type @Type (&0 :: Type -> Type) (&1 :: Type -> Type) :: Type -> Type) -derive Eq (&0 :: Type) => Eq (NoOrd (&0 :: Type) :: Type) +derive forall (t40 :: Type). (Eq1 (f :: Type -> Type), Eq ((g :: (t40 :: Type) -> Type) (a :: (t40 :: Type)))) => Eq (Compose @Type @(t40 :: Type) (f :: Type -> Type) (g :: (t40 :: Type) -> Type) (a :: (t40 :: Type)) :: Type) +derive forall (t48 :: Type). (Ord1 (f :: Type -> Type), Ord ((g :: (t48 :: Type) -> Type) (a :: (t48 :: Type)))) => Ord (Compose @Type @(t48 :: Type) (f :: Type -> Type) (g :: (t48 :: Type) -> Type) (a :: (t48 :: Type)) :: Type) +derive Eq1 (f :: Type -> Type) => Eq1 (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) +derive Ord1 (f :: Type -> Type) => Ord1 (Compose @Type @Type (f :: Type -> Type) (g :: Type -> Type) :: Type -> Type) +derive Eq (a :: Type) => Eq (NoOrd (a :: Type) :: Type) derive Eq1 (NoOrd :: Type -> Type) derive Ord1 (NoOrd :: Type -> Type) -derive Eq (&0 :: Type) => Eq (Id (&0 :: Type) :: Type) +derive Eq (a :: Type) => Eq (Id (a :: Type) :: Type) derive Eq1 (Id :: Type -> Type) -derive Ord (&0 :: Type) => Ord (Id (&0 :: Type) :: Type) +derive Ord (a :: Type) => Ord (Id (a :: Type) :: Type) derive Ord1 (Id :: Type -> Type) -derive (Eq1 (&0 :: Type -> Type), Eq (&1 :: Type)) => Eq (Wrap @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive Eq1 (&0 :: Type -> Type) => Eq1 (Wrap @Type (&0 :: Type -> Type) :: Type -> Type) -derive (Ord1 (&0 :: Type -> Type), Ord (&1 :: Type)) => Ord (Wrap @Type (&0 :: Type -> Type) (&1 :: Type) :: Type) -derive Ord1 (&0 :: Type -> Type) => Ord1 (Wrap @Type (&0 :: Type -> 
Type) :: Type -> Type) +derive (Eq1 (f :: Type -> Type), Eq (a :: Type)) => Eq (Wrap @Type (f :: Type -> Type) (a :: Type) :: Type) +derive Eq1 (f :: Type -> Type) => Eq1 (Wrap @Type (f :: Type -> Type) :: Type -> Type) +derive (Ord1 (f :: Type -> Type), Ord (a :: Type)) => Ord (Wrap @Type (f :: Type -> Type) (a :: Type) :: Type) +derive Ord1 (f :: Type -> Type) => Ord1 (Wrap @Type (f :: Type -> Type) :: Type -> Type) -Errors -NoInstanceFound { Eq ((&1 :: Type -> Type) (~&2 :: Type)) } at [TermDeclaration(Idx::(13))] -NoInstanceFound { Ord ((&1 :: Type -> Type) (~&2 :: Type)) } at [TermDeclaration(Idx::(14))] -NoInstanceFound { Ord (NoOrd (~&0 :: Type)) } at [TermDeclaration(Idx::(18))] +Diagnostics +error[NoInstanceFound]: No instance found for: Eq ((g :: Type -> Type) (~_ :: Type)) + --> 26:1..26:43 + | +26 | derive instance Eq1 f => Eq1 (Compose f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Ord ((g :: Type -> Type) (~_ :: Type)) + --> 27:1..27:45 + | +27 | derive instance Ord1 f => Ord1 (Compose f g) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Ord (NoOrd (~_ :: Type)) + --> 34:1..34:27 + | +34 | derive instance Ord1 NoOrd + | ^~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/170_derive_newtype_class_parameterized/Main.snap b/tests-integration/fixtures/checking/170_derive_newtype_class_parameterized/Main.snap index 54ed79090..5ae9287b1 100644 --- a/tests-integration/fixtures/checking/170_derive_newtype_class_parameterized/Main.snap +++ b/tests-integration/fixtures/checking/170_derive_newtype_class_parameterized/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -18,4 +19,4 @@ Roles Wrapper = [Representational] Derived -derive Newtype (Wrapper (&0 :: Type) :: Type) ((&0 :: Type) :: Type) +derive Newtype (Wrapper (a :: Type) :: Type) ((a :: Type) :: 
Type) diff --git a/tests-integration/fixtures/checking/171_derive_newtype_class_not_newtype/Main.snap b/tests-integration/fixtures/checking/171_derive_newtype_class_not_newtype/Main.snap index 80d37ac75..5a6910bf0 100644 --- a/tests-integration/fixtures/checking/171_derive_newtype_class_not_newtype/Main.snap +++ b/tests-integration/fixtures/checking/171_derive_newtype_class_not_newtype/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -17,5 +18,9 @@ NotANewtype Roles NotANewtype = [] -Errors -ExpectedNewtype { type_id: Id(9) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[ExpectedNewtype]: Expected a newtype, got: NotANewtype + --> 7:1..7:38 + | +7 | derive instance Newtype NotANewtype _ + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/172_derive_generic_simple/Main.snap b/tests-integration/fixtures/checking/172_derive_generic_simple/Main.snap index af6963ca1..860b4caab 100644 --- a/tests-integration/fixtures/checking/172_derive_generic_simple/Main.snap +++ b/tests-integration/fixtures/checking/172_derive_generic_simple/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -91,7 +92,7 @@ Proxy = [Phantom] Derived derive Generic (Void :: Type) (NoConstructors :: Type) derive Generic (MyUnit :: Type) (Constructor "MyUnit" NoArguments :: Type) -derive Generic (Identity (&0 :: Type) :: Type) (Constructor "Identity" (Argument (&0 :: Type)) :: Type) -derive Generic (Either (&0 :: Type) (&1 :: Type) :: Type) (Sum (Constructor "Left" (Argument (&0 :: Type))) (Constructor "Right" (Argument (&1 :: Type))) :: Type) -derive Generic (Tuple (&0 :: Type) (&1 :: Type) :: Type) (Constructor "Tuple" (Product (Argument (&0 :: Type)) (Argument (&1 :: Type))) :: Type) -derive Generic (Wrapper (&0 :: Type) :: Type) (Constructor "Wrapper" (Argument (&0 :: Type)) :: Type) 
+derive Generic (Identity (a :: Type) :: Type) (Constructor "Identity" (Argument (a :: Type)) :: Type) +derive Generic (Either (a :: Type) (b :: Type) :: Type) (Sum (Constructor "Left" (Argument (a :: Type))) (Constructor "Right" (Argument (b :: Type))) :: Type) +derive Generic (Tuple (a :: Type) (b :: Type) :: Type) (Constructor "Tuple" (Product (Argument (a :: Type)) (Argument (b :: Type))) :: Type) +derive Generic (Wrapper (a :: Type) :: Type) (Constructor "Wrapper" (Argument (a :: Type)) :: Type) diff --git a/tests-integration/fixtures/checking/173_derive_newtype_class_coercible/Main.snap b/tests-integration/fixtures/checking/173_derive_newtype_class_coercible/Main.snap index 0ff2c93d0..510919998 100644 --- a/tests-integration/fixtures/checking/173_derive_newtype_class_coercible/Main.snap +++ b/tests-integration/fixtures/checking/173_derive_newtype_class_coercible/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -30,4 +31,4 @@ Wrapper = [Representational] Derived derive Newtype (UserId :: Type) (Int :: Type) -derive Newtype (Wrapper (&0 :: Type) :: Type) ((&0 :: Type) :: Type) +derive Newtype (Wrapper (a :: Type) :: Type) ((a :: Type) :: Type) diff --git a/tests-integration/fixtures/checking/176_role_inference_nominal_constraint/Main.snap b/tests-integration/fixtures/checking/176_role_inference_nominal_constraint/Main.snap index c6ca57a0d..91b251bfd 100644 --- a/tests-integration/fixtures/checking/176_role_inference_nominal_constraint/Main.snap +++ b/tests-integration/fixtures/checking/176_role_inference_nominal_constraint/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -21,4 +22,4 @@ Roles Shown = [Nominal] Classes -class Show (&0 :: Type) +class Show (a :: Type) diff --git a/tests-integration/fixtures/checking/181_role_declaration_loosen_error/Main.snap 
b/tests-integration/fixtures/checking/181_role_declaration_loosen_error/Main.snap index ddbd408cd..de35971fa 100644 --- a/tests-integration/fixtures/checking/181_role_declaration_loosen_error/Main.snap +++ b/tests-integration/fixtures/checking/181_role_declaration_loosen_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -20,5 +21,9 @@ F Roles F = [Representational, Nominal] -Errors -InvalidRoleDeclaration { type_id: Idx::(0), parameter_index: 1, declared: Phantom, inferred: Nominal } at [] +Diagnostics +error[InvalidRoleDeclaration]: Invalid role declaration: declared Phantom, inferred Nominal + --> 3:1..3:21 + | +3 | data F f a = F (f a) + | ^~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/189_coercible_different_heads_error/Main.snap b/tests-integration/fixtures/checking/189_coercible_different_heads_error/Main.snap index b269ccafd..f754bfe83 100644 --- a/tests-integration/fixtures/checking/189_coercible_different_heads_error/Main.snap +++ b/tests-integration/fixtures/checking/189_coercible_different_heads_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -27,5 +28,9 @@ Roles Maybe = [Representational] Either = [Representational, Representational] -Errors -NoInstanceFound { Coercible @Type (Maybe Int) (Either Int String) } at [TermDeclaration(Idx::(4))] +Diagnostics +error[NoInstanceFound]: No instance found for: Coercible @Type (Maybe Int) (Either Int String) + --> 8:1..8:50 + | +8 | coerceDifferent :: Maybe Int -> Either Int String + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/190_coercible_nominal/Main.snap b/tests-integration/fixtures/checking/190_coercible_nominal/Main.snap index 3858b4984..63c70a844 100644 --- a/tests-integration/fixtures/checking/190_coercible_nominal/Main.snap +++ 
b/tests-integration/fixtures/checking/190_coercible_nominal/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -12,5 +13,9 @@ Nominal :: Type -> Type Roles Nominal = [Nominal] -Errors -NoInstanceFound { Coercible @Type (Nominal Int) (Nominal String) } at [TermDeclaration(Idx::(1))] +Diagnostics +error[NoInstanceFound]: No instance found for: Coercible @Type (Nominal Int) (Nominal String) + --> 12:1..12:56 + | +12 | coerceNominalDifferent :: Nominal Int -> Nominal String + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/191_coercible_newtype_hidden/Main.snap b/tests-integration/fixtures/checking/191_coercible_newtype_hidden/Main.snap index 4fa9f60ed..f4536d2c1 100644 --- a/tests-integration/fixtures/checking/191_coercible_newtype_hidden/Main.snap +++ b/tests-integration/fixtures/checking/191_coercible_newtype_hidden/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -8,8 +9,24 @@ coerceQualified :: Int -> HiddenAge Types -Errors -CoercibleConstructorNotInScope { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(0))] -NoInstanceFound { Coercible @Type Int HiddenAge } at [TermDeclaration(Idx::(0))] -CoercibleConstructorNotInScope { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(1))] -NoInstanceFound { Coercible @Type Int HiddenAge } at [TermDeclaration(Idx::(1))] +Diagnostics +error[CoercibleConstructorNotInScope]: Constructor not in scope for Coercible + --> 7:1..7:33 + | +7 | coerceHidden :: Int -> HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Coercible @Type Int HiddenAge + --> 7:1..7:33 + | +7 | coerceHidden :: Int -> HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CoercibleConstructorNotInScope]: Constructor not in scope for Coercible + --> 
10:1..10:38 + | +10 | coerceQualified :: Int -> L.HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Coercible @Type Int HiddenAge + --> 10:1..10:38 + | +10 | coerceQualified :: Int -> L.HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/193_coercible_newtype_open_hidden/Main.snap b/tests-integration/fixtures/checking/193_coercible_newtype_open_hidden/Main.snap index cffd0690d..e9fde3f18 100644 --- a/tests-integration/fixtures/checking/193_coercible_newtype_open_hidden/Main.snap +++ b/tests-integration/fixtures/checking/193_coercible_newtype_open_hidden/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -7,6 +8,14 @@ coerceOpen :: Int -> HiddenAge Types -Errors -CoercibleConstructorNotInScope { file_id: Idx::(30), item_id: Idx::(0) } at [TermDeclaration(Idx::(0))] -NoInstanceFound { Coercible @Type Int HiddenAge } at [TermDeclaration(Idx::(0))] +Diagnostics +error[CoercibleConstructorNotInScope]: Constructor not in scope for Coercible + --> 6:1..6:31 + | +6 | coerceOpen :: Int -> HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Coercible @Type Int HiddenAge + --> 6:1..6:31 + | +6 | coerceOpen :: Int -> HiddenAge + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/197_coercible_higher_kinded_error/Main.snap b/tests-integration/fixtures/checking/197_coercible_higher_kinded_error/Main.snap index 541c5dcca..305c53988 100644 --- a/tests-integration/fixtures/checking/197_coercible_higher_kinded_error/Main.snap +++ b/tests-integration/fixtures/checking/197_coercible_higher_kinded_error/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -29,5 +30,9 @@ Maybe = [Representational] List = [Representational] Container = [Representational] 
-Errors -NoInstanceFound { Coercible (Maybe (~&0 :: Type)) (List (~&0 :: Type)) } at [TermDeclaration(Idx::(4))] +Diagnostics +error[NoInstanceFound]: No instance found for: Coercible (Maybe (~_ :: Type)) (List (~_ :: Type)) + --> 11:1..11:62 + | +11 | coerceContainerDifferent :: Container Maybe -> Container List + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/199_coercible_higher_kinded_polykinded/Main.snap b/tests-integration/fixtures/checking/199_coercible_higher_kinded_polykinded/Main.snap index 7f300c098..3adc95cf1 100644 --- a/tests-integration/fixtures/checking/199_coercible_higher_kinded_polykinded/Main.snap +++ b/tests-integration/fixtures/checking/199_coercible_higher_kinded_polykinded/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,8 +14,8 @@ MaybeAlias :: forall (k :: Type) (n :: (k :: Type)) (a :: Type). Maybe @(k :: Type) (n :: (k :: Type)) (a :: Type) -> MaybeAlias @(k :: Type) (n :: (k :: Type)) (a :: Type) -coerceContainer :: Container Maybe -> Container MaybeAlias -coerceContainerReverse :: Container MaybeAlias -> Container Maybe +coerceContainer :: Container (Maybe @Type) -> Container (MaybeAlias @Type) +coerceContainerReverse :: Container (MaybeAlias @Type) -> Container (Maybe @Type) Types Maybe :: forall (k :: Type). 
(k :: Type) -> Type -> Type diff --git a/tests-integration/fixtures/checking/202_int_compare_invalid/Main.snap b/tests-integration/fixtures/checking/202_int_compare_invalid/Main.snap index f6f770a20..1e95992b9 100644 --- a/tests-integration/fixtures/checking/202_int_compare_invalid/Main.snap +++ b/tests-integration/fixtures/checking/202_int_compare_invalid/Main.snap @@ -1,6 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs -assertion_line: 12 +assertion_line: 28 expression: report --- Terms @@ -12,11 +12,39 @@ invalidEq :: Proxy @(Row Int) ( left :: 1, right :: 5 ) Types -Errors -NoInstanceFound { Compare 10 5 LT } at [TermDeclaration(Idx::(0))] -NoInstanceFound { Compare 5 1 LT } at [TermDeclaration(Idx::(0))] -NoInstanceFound { Compare 1 5 GT } at [TermDeclaration(Idx::(1))] -NoInstanceFound { Compare 5 10 GT } at [TermDeclaration(Idx::(1))] -NoInstanceFound { Compare 5 1 LT } at [TermDeclaration(Idx::(2))] -NoInstanceFound { Compare 1 5 GT } at [TermDeclaration(Idx::(3))] -NoInstanceFound { Compare 1 5 EQ } at [TermDeclaration(Idx::(4))] +Diagnostics +error[NoInstanceFound]: No instance found for: Compare 10 5 LT + --> 6:1..6:51 + | +6 | invalidTransLt :: Proxy ( left :: 10, right :: 1 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 5 1 LT + --> 6:1..6:51 + | +6 | invalidTransLt :: Proxy ( left :: 10, right :: 1 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 1 5 GT + --> 10:1..10:51 + | +10 | invalidTransGt :: Proxy ( left :: 1, right :: 10 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 5 10 GT + --> 10:1..10:51 + | +10 | invalidTransGt :: Proxy ( left :: 1, right :: 10 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 5 1 LT + --> 14:1..14:45 + | +14 | invalidLt :: Proxy ( left :: 5, right 
:: 1 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 1 5 GT + --> 17:1..17:45 + | +17 | invalidGt :: Proxy ( left :: 1, right :: 5 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[NoInstanceFound]: No instance found for: Compare 1 5 EQ + --> 20:1..20:45 + | +20 | invalidEq :: Proxy ( left :: 1, right :: 5 ) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/205_builtin_warn/Main.snap b/tests-integration/fixtures/checking/205_builtin_warn/Main.snap index d4173f18c..ee4188a9a 100644 --- a/tests-integration/fixtures/checking/205_builtin_warn/Main.snap +++ b/tests-integration/fixtures/checking/205_builtin_warn/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -47,21 +48,45 @@ Proxy Roles Proxy = [Phantom] -Errors -CustomWarning { .. } at [TermDeclaration(Idx::(2))] - This function is deprecated -CustomWarning { .. } at [TermDeclaration(Idx::(4))] - Left Right -CustomWarning { .. } at [TermDeclaration(Idx::(6))] - Line 1 - Line 2 -CustomWarning { .. } at [TermDeclaration(Idx::(8))] - Got type: Int -CustomWarning { .. } at [TermDeclaration(Idx::(10))] - Label: myField -CustomWarning { .. } at [TermDeclaration(Idx::(12))] - Label: "h e l l o" -CustomWarning { .. } at [TermDeclaration(Idx::(14))] - Label: "hel\"lo" -CustomWarning { .. 
} at [TermDeclaration(Idx::(16))] - Label: """raw\nstring""" +Diagnostics +warning[CustomWarning]: This function is deprecated + --> 11:1..11:20 + | +11 | useWarnBasic :: Int + | ^~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Left Right + --> 17:1..17:21 + | +17 | useWarnBeside :: Int + | ^~~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Line 1 +Line 2 + --> 23:1..23:20 + | +23 | useWarnAbove :: Int + | ^~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Got type: Int + --> 29:1..29:26 + | +29 | useWarnQuote :: Proxy Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Label: myField + --> 35:1..35:25 + | +35 | useWarnQuoteLabel :: Int + | ^~~~~~~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Label: "h e l l o" + --> 41:1..41:31 + | +41 | useWarnQuoteLabelSpaces :: Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Label: "hel\"lo" + --> 47:1..47:30 + | +47 | useWarnQuoteLabelQuote :: Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[CustomWarning]: Label: """raw\nstring""" + --> 53:1..53:28 + | +53 | useWarnQuoteLabelRaw :: Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/206_builtin_fail/Main.snap b/tests-integration/fixtures/checking/206_builtin_fail/Main.snap index 559b6812f..25ec6ed14 100644 --- a/tests-integration/fixtures/checking/206_builtin_fail/Main.snap +++ b/tests-integration/fixtures/checking/206_builtin_fail/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -26,9 +27,15 @@ Proxy Roles Proxy = [Phantom] -Errors -CustomFailure { .. } at [TermDeclaration(Idx::(2))] - This operation is not allowed -CustomFailure { .. 
} at [TermDeclaration(Idx::(4))] - Error: - Type String +Diagnostics +error[CustomFailure]: This operation is not allowed + --> 11:1..11:20 + | +11 | useFailBasic :: Int + | ^~~~~~~~~~~~~~~~~~~ +error[CustomFailure]: Error: +Type String + --> 17:1..17:31 + | +17 | useFailComplex :: Proxy String + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/207_operator_class_method/Main.snap b/tests-integration/fixtures/checking/207_operator_class_method/Main.snap index a59f0462e..402809796 100644 --- a/tests-integration/fixtures/checking/207_operator_class_method/Main.snap +++ b/tests-integration/fixtures/checking/207_operator_class_method/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,7 +14,7 @@ Types Semiring :: Type -> Constraint Classes -class Semiring (&0 :: Type) +class Semiring (a :: Type) Instances instance Semiring (Int :: Type) diff --git a/tests-integration/fixtures/checking/208_int_add_constraint/Main.snap b/tests-integration/fixtures/checking/208_int_add_constraint/Main.snap index 7319bae4d..84a1b1668 100644 --- a/tests-integration/fixtures/checking/208_int_add_constraint/Main.snap +++ b/tests-integration/fixtures/checking/208_int_add_constraint/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,8 +14,8 @@ Types Program :: forall (t0 :: Type) (t1 :: Type). 
(t0 :: Type) -> (t1 :: Type) -> Constraint Classes -class Program (&2 :: (&0 :: Type)) (&3 :: (&1 :: Type)) +class Program (n :: (t0 :: Type)) (m :: (t1 :: Type)) Instances -instance (Add (&0 :: Int) 1 (&2 :: Int), Add (&2 :: Int) 1 (&1 :: Int)) => Program ((&0 :: Int) :: Int) ((&1 :: Int) :: Int) +instance (Add (n :: Int) 1 (n1 :: Int), Add (n1 :: Int) 1 (n2 :: Int)) => Program ((n :: Int) :: Int) ((n2 :: Int) :: Int) chain: 0 diff --git a/tests-integration/fixtures/checking/209_int_cons_constraint/Main.snap b/tests-integration/fixtures/checking/209_int_cons_constraint/Main.snap index 45072ba0d..8b2a166a2 100644 --- a/tests-integration/fixtures/checking/209_int_cons_constraint/Main.snap +++ b/tests-integration/fixtures/checking/209_int_cons_constraint/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking/generated.rs +assertion_line: 28 expression: report --- Terms @@ -13,10 +14,14 @@ Types Build :: forall (t0 :: Type) (t1 :: Type). (t0 :: Type) -> (t1 :: Type) -> Constraint Classes -class Build (&2 :: (&0 :: Type)) (&3 :: (&1 :: Type)) +class Build (n :: (t0 :: Type)) (r :: (t1 :: Type)) Instances -instance forall (&0 :: Type). Build (0 :: Int) (() :: Row (&0 :: Type)) +instance forall (t14 :: Type). 
Build (0 :: Int) (() :: Row (t14 :: Type)) chain: 0 -instance (Add (&2 :: Int) 1 (&0 :: Int), ToString (&0 :: Int) (&3 :: Symbol), Append "n" (&3 :: Symbol) (&4 :: Symbol), Build @Int @(Row Int) (&2 :: Int) (&5 :: Row Int), Cons @Int (&4 :: Symbol) (&0 :: Int) (&5 :: Row Int) (&1 :: Row Int)) => Build ((&0 :: Int) :: Int) ((&1 :: Row Int) :: Row Int) +instance (Add (minusOne :: Int) 1 (currentId :: Int), ToString (currentId :: Int) (labelId :: Symbol), Append "n" (labelId :: Symbol) (actualLabel :: Symbol), Build @Int @(Row Int) (minusOne :: Int) (minusOneResult :: Row Int), Cons @Int + (actualLabel :: Symbol) + (currentId :: Int) + (minusOneResult :: Row Int) + (finalResult :: Row Int)) => Build ((currentId :: Int) :: Int) ((finalResult :: Row Int) :: Row Int) chain: 1 diff --git a/tests-integration/fixtures/checking/215_do_bind_error/Main.purs b/tests-integration/fixtures/checking/215_do_bind_error/Main.purs new file mode 100644 index 000000000..441806c95 --- /dev/null +++ b/tests-integration/fixtures/checking/215_do_bind_error/Main.purs @@ -0,0 +1,15 @@ +module Main where + +foreign import data Effect :: Type -> Type + +foreign import pure :: forall a. a -> Effect a +foreign import bind :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import discard :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import add :: Int -> Int -> Int + +test :: Effect Int +test = do + a <- pure 123456 + b <- pure "life" + pure (add a b) + diff --git a/tests-integration/fixtures/checking/215_do_bind_error/Main.snap b/tests-integration/fixtures/checking/215_do_bind_error/Main.snap new file mode 100644 index 000000000..681f2fe2e --- /dev/null +++ b/tests-integration/fixtures/checking/215_do_bind_error/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +bind :: + forall (a :: Type) (b :: Type). 
+ Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +discard :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +add :: Int -> Int -> Int +test :: Effect Int + +Types +Effect :: Type -> Type + +Roles +Effect = [Nominal] + +Diagnostics +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 14:15..14:16 + | +14 | pure (add a b) + | ^ diff --git a/tests-integration/fixtures/checking/216_ado_bind_error/Main.purs b/tests-integration/fixtures/checking/216_ado_bind_error/Main.purs new file mode 100644 index 000000000..ec5f32ebd --- /dev/null +++ b/tests-integration/fixtures/checking/216_ado_bind_error/Main.purs @@ -0,0 +1,13 @@ +module Main where + +foreign import data Effect :: Type -> Type + +foreign import pure :: forall a. a -> Effect a +foreign import map :: forall a b. (a -> b) -> Effect a -> Effect b +foreign import apply :: forall a b. Effect (a -> b) -> Effect a -> Effect b +foreign import add :: Int -> Int -> Int + +test = ado + a <- pure 123456 + b <- pure "life" + in add a b diff --git a/tests-integration/fixtures/checking/216_ado_bind_error/Main.snap b/tests-integration/fixtures/checking/216_ado_bind_error/Main.snap new file mode 100644 index 000000000..1db43b1f4 --- /dev/null +++ b/tests-integration/fixtures/checking/216_ado_bind_error/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +map :: + forall (a :: Type) (b :: Type). + ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +apply :: + forall (a :: Type) (b :: Type). 
+ Effect ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +add :: Int -> Int -> Int +test :: Effect Int + +Types +Effect :: Type -> Type + +Roles +Effect = [Nominal] + +Diagnostics +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 13:12..13:13 + | +13 | in add a b + | ^ diff --git a/tests-integration/fixtures/checking/217_do_monad_error/Main.purs b/tests-integration/fixtures/checking/217_do_monad_error/Main.purs new file mode 100644 index 000000000..7307e2ad0 --- /dev/null +++ b/tests-integration/fixtures/checking/217_do_monad_error/Main.purs @@ -0,0 +1,24 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Aff :: Type -> Type + +foreign import pure :: forall a. a -> Effect a +foreign import affPure :: forall a. a -> Aff a + +foreign import bind :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import discard :: forall a b. Effect a -> (a -> Effect b) -> Effect b + +test :: Effect { a :: Int, b :: String, c :: Int } +test = do + a <- pure 123456 + b <- affPure "life" + c <- pure 123456 + pure { a, b, c } + +test' = do + a <- pure 123456 + b <- affPure "life" + c <- pure 123456 + pure { a, b, c } + diff --git a/tests-integration/fixtures/checking/217_do_monad_error/Main.snap b/tests-integration/fixtures/checking/217_do_monad_error/Main.snap new file mode 100644 index 000000000..252b65fac --- /dev/null +++ b/tests-integration/fixtures/checking/217_do_monad_error/Main.snap @@ -0,0 +1,46 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +affPure :: forall (a :: Type). (a :: Type) -> Aff (a :: Type) +bind :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +discard :: + forall (a :: Type) (b :: Type). 
+ Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +test :: Effect { a :: Int, b :: String, c :: Int } +test' :: forall (t27 :: Type). Effect { a :: Int, b :: (t27 :: Type), c :: Int } + +Types +Effect :: Type -> Type +Aff :: Type -> Type + +Roles +Effect = [Nominal] +Aff = [Nominal] + +Diagnostics +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 15:3..15:22 + | +15 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff String' with 'Effect ?18[:0]' + --> 15:3..15:22 + | +15 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 21:3..21:22 + | +21 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff String' with 'Effect ?37[:0]' + --> 21:3..21:22 + | +21 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/218_ado_monad_error/Main.purs b/tests-integration/fixtures/checking/218_ado_monad_error/Main.purs new file mode 100644 index 000000000..f5794d64d --- /dev/null +++ b/tests-integration/fixtures/checking/218_ado_monad_error/Main.purs @@ -0,0 +1,23 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Aff :: Type -> Type + +foreign import pure :: forall a. a -> Effect a +foreign import affPure :: forall a. a -> Aff a + +foreign import map :: forall a b. (a -> b) -> Effect a -> Effect b +foreign import apply :: forall a b. 
Effect (a -> b) -> Effect a -> Effect b + +test :: Effect { a :: Int, b :: String, c :: Int } +test = ado + a <- pure 123456 + b <- affPure "life" + c <- pure 123456 + in { a, b, c } + +test' = ado + a <- pure 123456 + b <- affPure "life" + c <- pure 123456 + in { a, b, c } diff --git a/tests-integration/fixtures/checking/218_ado_monad_error/Main.snap b/tests-integration/fixtures/checking/218_ado_monad_error/Main.snap new file mode 100644 index 000000000..b17a8e282 --- /dev/null +++ b/tests-integration/fixtures/checking/218_ado_monad_error/Main.snap @@ -0,0 +1,46 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +affPure :: forall (a :: Type). (a :: Type) -> Aff (a :: Type) +map :: + forall (a :: Type) (b :: Type). + ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +apply :: + forall (a :: Type) (b :: Type). + Effect ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +test :: Effect { a :: Int, b :: String, c :: Int } +test' :: forall (t30 :: Type). 
Effect { a :: Int, b :: (t30 :: Type), c :: Int } + +Types +Effect :: Type -> Type +Aff :: Type -> Type + +Roles +Effect = [Nominal] +Aff = [Nominal] + +Diagnostics +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 15:3..15:22 + | +15 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff String' with 'Effect ?15[:0]' + --> 15:3..15:22 + | +15 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 21:3..21:22 + | +21 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff String' with 'Effect ?30[:0]' + --> 21:3..21:22 + | +21 | b <- affPure "life" + | ^~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.purs b/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.purs new file mode 100644 index 000000000..b93bca141 --- /dev/null +++ b/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.purs @@ -0,0 +1,23 @@ +module Main where + +import Control.Applicative (pure) +import Control.Bind (bind, discard) +import Effect (Effect) +import Effect.Aff (Aff) + +foreign import effect :: Effect Int +foreign import aff :: Aff String + +-- Test: error should be attributed to the `aff` line (first conflicting statement) +-- not the `effect` line +test :: Effect { a :: Int, b :: String } +test = do + a <- effect + b <- aff + pure { a, b } + +-- Inference variant +test' = do + a <- effect + b <- aff + pure { a, b } diff --git a/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.snap b/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.snap new file mode 100644 index 000000000..e1290c6f2 --- /dev/null +++ b/tests-integration/fixtures/checking/219_do_mixed_monad_error/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +effect :: Effect Int +aff :: Aff String +test :: Effect { a :: 
Int, b :: String } +test' :: forall (t23 :: Type). Effect (t23 :: Type) + +Types + +Diagnostics +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 16:3..16:11 + | +16 | b <- aff + | ^~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff ?11[:0]' with 'Effect ?8[:0]' + --> 16:3..16:11 + | +16 | b <- aff + | ^~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff' with 'Effect' + --> 22:3..22:11 + | +22 | b <- aff + | ^~~~~~~~ +error[CannotUnify]: Cannot unify 'Aff ?26[:0]' with 'Effect ?23[:0]' + --> 22:3..22:11 + | +22 | b <- aff + | ^~~~~~~~ diff --git a/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.purs b/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.purs new file mode 100644 index 000000000..2aa41d84e --- /dev/null +++ b/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.purs @@ -0,0 +1,28 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Unit :: Type + +foreign import unit :: Unit +foreign import pure :: forall a. a -> Effect a +foreign import bind :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import discard :: forall a. Effect a -> Effect Unit -> Effect Unit + +class Semiring a where + add :: a -> a -> a + +instance Semiring Int where + add = addImpl + +foreign import addImpl :: Int -> Int -> Int + +infixl 6 add as + + +thing1 :: Effect String +thing1 = pure "hello" + +test :: Effect Unit +test = do + a <- thing1 + let f = a + 123 + pure unit diff --git a/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.snap b/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.snap new file mode 100644 index 000000000..d72ef8e90 --- /dev/null +++ b/tests-integration/fixtures/checking/220_do_let_premature_solve/Main.snap @@ -0,0 +1,45 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unit :: Unit +pure :: forall (a :: Type). 
(a :: Type) -> Effect (a :: Type) +bind :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +discard :: forall (a :: Type). Effect (a :: Type) -> Effect Unit -> Effect Unit +add :: forall (a :: Type). Semiring (a :: Type) => (a :: Type) -> (a :: Type) -> (a :: Type) +addImpl :: Int -> Int -> Int ++ :: forall (a :: Type). Semiring (a :: Type) => (a :: Type) -> (a :: Type) -> (a :: Type) +thing1 :: Effect String +test :: Effect Unit + +Types +Effect :: Type -> Type +Unit :: Type +Semiring :: Type -> Constraint + +Roles +Effect = [Nominal] +Unit = [] + +Classes +class Semiring (a :: Type) + +Instances +instance Semiring (Int :: Type) + chain: 0 + +Diagnostics +error[CannotUnify]: Cannot unify 'Int' with 'String' + --> 27:15..27:18 + | +27 | let f = a + 123 + | ^~~ +error[NoInstanceFound]: No instance found for: Semiring String + --> 24:1..24:20 + | +24 | test :: Effect Unit + | ^~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.purs b/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.purs new file mode 100644 index 000000000..9ab3a657a --- /dev/null +++ b/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.purs @@ -0,0 +1,20 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Unit :: Type + +foreign import unit :: Unit +foreign import pure :: forall a. a -> Effect a +foreign import bind :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import discard :: forall a. 
Effect a -> Effect Unit -> Effect Unit + +thing1 :: Effect String +thing1 = pure "hello" + +test :: Effect Unit +test = do + a <- thing1 + let + f :: Int + f = a + pure unit diff --git a/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.snap b/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.snap new file mode 100644 index 000000000..3dfcee3b0 --- /dev/null +++ b/tests-integration/fixtures/checking/221_do_let_annotation_solve/Main.snap @@ -0,0 +1,29 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unit :: Unit +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +bind :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +discard :: forall (a :: Type). Effect (a :: Type) -> Effect Unit -> Effect Unit +thing1 :: Effect String +test :: Effect Unit + +Types +Effect :: Type -> Type +Unit :: Type + +Roles +Effect = [Nominal] +Unit = [] + +Diagnostics +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 19:9..19:10 + | +19 | f = a + | ^ diff --git a/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.purs b/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.purs new file mode 100644 index 000000000..77f9722cc --- /dev/null +++ b/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.purs @@ -0,0 +1,28 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Unit :: Type + +foreign import unit :: Unit +foreign import pure :: forall a. a -> Effect a +foreign import map :: forall a b. (a -> b) -> Effect a -> Effect b +foreign import apply :: forall a b. 
Effect (a -> b) -> Effect a -> Effect b + +class Semiring a where + add :: a -> a -> a + +instance Semiring Int where + add = addImpl + +foreign import addImpl :: Int -> Int -> Int + +infixl 6 add as + + +thing1 :: Effect String +thing1 = pure "hello" + +test :: Effect Unit +test = ado + a <- thing1 + let f = a + 123 + in unit diff --git a/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.snap b/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.snap new file mode 100644 index 000000000..288fca5c9 --- /dev/null +++ b/tests-integration/fixtures/checking/222_ado_let_premature_solve/Main.snap @@ -0,0 +1,47 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unit :: Unit +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +map :: + forall (a :: Type) (b :: Type). + ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +apply :: + forall (a :: Type) (b :: Type). + Effect ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +add :: forall (a :: Type). Semiring (a :: Type) => (a :: Type) -> (a :: Type) -> (a :: Type) +addImpl :: Int -> Int -> Int ++ :: forall (a :: Type). 
Semiring (a :: Type) => (a :: Type) -> (a :: Type) -> (a :: Type) +thing1 :: Effect String +test :: Effect Unit + +Types +Effect :: Type -> Type +Unit :: Type +Semiring :: Type -> Constraint + +Roles +Effect = [Nominal] +Unit = [] + +Classes +class Semiring (a :: Type) + +Instances +instance Semiring (Int :: Type) + chain: 0 + +Diagnostics +error[CannotUnify]: Cannot unify 'Int' with 'String' + --> 27:15..27:18 + | +27 | let f = a + 123 + | ^~~ +error[NoInstanceFound]: No instance found for: Semiring String + --> 24:1..24:20 + | +24 | test :: Effect Unit + | ^~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.purs b/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.purs new file mode 100644 index 000000000..0728bc90b --- /dev/null +++ b/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.purs @@ -0,0 +1,20 @@ +module Main where + +foreign import data Effect :: Type -> Type +foreign import data Unit :: Type + +foreign import unit :: Unit +foreign import pure :: forall a. a -> Effect a +foreign import map :: forall a b. (a -> b) -> Effect a -> Effect b +foreign import apply :: forall a b. Effect (a -> b) -> Effect a -> Effect b + +thing1 :: Effect String +thing1 = pure "hello" + +test :: Effect Unit +test = ado + a <- thing1 + let + f :: Int + f = a + in unit diff --git a/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.snap b/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.snap new file mode 100644 index 000000000..a6f095a78 --- /dev/null +++ b/tests-integration/fixtures/checking/223_ado_let_annotation_solve/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unit :: Unit +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +map :: + forall (a :: Type) (b :: Type). 
+ ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +apply :: + forall (a :: Type) (b :: Type). + Effect ((a :: Type) -> (b :: Type)) -> Effect (a :: Type) -> Effect (b :: Type) +thing1 :: Effect String +test :: Effect Unit + +Types +Effect :: Type -> Type +Unit :: Type + +Roles +Effect = [Nominal] +Unit = [] + +Diagnostics +error[CannotUnify]: Cannot unify 'String' with 'Int' + --> 19:9..19:10 + | +19 | f = a + | ^ diff --git a/tests-integration/fixtures/checking/224_record_shrinking/Main.purs b/tests-integration/fixtures/checking/224_record_shrinking/Main.purs new file mode 100644 index 000000000..9a86121ae --- /dev/null +++ b/tests-integration/fixtures/checking/224_record_shrinking/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int, b :: Int } -> Int +test { a } = a diff --git a/tests-integration/fixtures/checking/224_record_shrinking/Main.snap b/tests-integration/fixtures/checking/224_record_shrinking/Main.snap new file mode 100644 index 000000000..b79528bbb --- /dev/null +++ b/tests-integration/fixtures/checking/224_record_shrinking/Main.snap @@ -0,0 +1,9 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int, b :: Int } -> Int + +Types diff --git a/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.purs b/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.purs new file mode 100644 index 000000000..bc79aeef7 --- /dev/null +++ b/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int } -> Int +test { a, b, c } = a diff --git a/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.snap b/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.snap new file mode 100644 index 000000000..95be51e9a --- /dev/null +++ 
b/tests-integration/fixtures/checking/225_record_binder_additional_property/Main.snap @@ -0,0 +1,16 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int } -> Int + +Types + +Diagnostics +error[AdditionalProperty]: Additional properties not allowed: b, c + --> 4:6..4:17 + | +4 | test { a, b, c } = a + | ^~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.purs b/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.purs new file mode 100644 index 000000000..f14006b44 --- /dev/null +++ b/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { outer :: { x :: Int } } -> Int +test { outer: { x, y } } = x diff --git a/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.snap b/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.snap new file mode 100644 index 000000000..af26c31b5 --- /dev/null +++ b/tests-integration/fixtures/checking/226_record_binder_additional_property_nested/Main.snap @@ -0,0 +1,16 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { outer :: { x :: Int } } -> Int + +Types + +Diagnostics +error[AdditionalProperty]: Additional properties not allowed: y + --> 4:15..4:23 + | +4 | test { outer: { x, y } } = x + | ^~~~~~~~ diff --git a/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.purs b/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.purs new file mode 100644 index 000000000..80032018f --- /dev/null +++ b/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int, b :: Int } +test = { a: 1, b: 2 } diff --git 
a/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.snap b/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.snap new file mode 100644 index 000000000..de1ecb3c8 --- /dev/null +++ b/tests-integration/fixtures/checking/227_record_expression_exact_match/Main.snap @@ -0,0 +1,9 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int, b :: Int } + +Types diff --git a/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.purs b/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.purs new file mode 100644 index 000000000..b38d96678 --- /dev/null +++ b/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int, b :: Int } +test = { a: 1 } diff --git a/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.snap b/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.snap new file mode 100644 index 000000000..661ca0868 --- /dev/null +++ b/tests-integration/fixtures/checking/228_record_expression_missing_field/Main.snap @@ -0,0 +1,16 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int, b :: Int } + +Types + +Diagnostics +error[PropertyIsMissing]: Missing required properties: b + --> 4:8..4:16 + | +4 | test = { a: 1 } + | ^~~~~~~~ diff --git a/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.purs b/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.purs new file mode 100644 index 000000000..2deff69d7 --- /dev/null +++ b/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int } +test = { a: 1, b: 2 } diff --git 
a/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.snap b/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.snap new file mode 100644 index 000000000..3262091eb --- /dev/null +++ b/tests-integration/fixtures/checking/229_record_expression_additional_field/Main.snap @@ -0,0 +1,16 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int } + +Types + +Diagnostics +error[AdditionalProperty]: Additional properties not allowed: b + --> 4:8..4:22 + | +4 | test = { a: 1, b: 2 } + | ^~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.purs b/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.purs new file mode 100644 index 000000000..bdd8fb6d6 --- /dev/null +++ b/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { a :: Int, b :: Int } +test = { a: 1, c: 3 } diff --git a/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.snap b/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.snap new file mode 100644 index 000000000..b47d949dc --- /dev/null +++ b/tests-integration/fixtures/checking/230_record_expression_missing_and_additional/Main.snap @@ -0,0 +1,21 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { a :: Int, b :: Int } + +Types + +Diagnostics +error[PropertyIsMissing]: Missing required properties: b + --> 4:8..4:22 + | +4 | test = { a: 1, c: 3 } + | ^~~~~~~~~~~~~~ +error[AdditionalProperty]: Additional properties not allowed: c + --> 4:8..4:22 + | +4 | test = { a: 1, c: 3 } + | ^~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.purs 
b/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.purs new file mode 100644 index 000000000..e11b1035b --- /dev/null +++ b/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.purs @@ -0,0 +1,4 @@ +module Main where + +test :: { outer :: { x :: Int } } +test = { outer: { x: 1, y: 2 } } diff --git a/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.snap b/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.snap new file mode 100644 index 000000000..224f707c7 --- /dev/null +++ b/tests-integration/fixtures/checking/231_record_expression_nested_additional/Main.snap @@ -0,0 +1,21 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: { outer :: { x :: Int } } + +Types + +Diagnostics +error[AdditionalProperty]: Additional properties not allowed: y + --> 4:17..4:31 + | +4 | test = { outer: { x: 1, y: 2 } } + | ^~~~~~~~~~~~~~ +error[AdditionalProperty]: Additional properties not allowed: y + --> 4:8..4:33 + | +4 | test = { outer: { x: 1, y: 2 } } + | ^~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.purs b/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.purs new file mode 100644 index 000000000..30a2be505 --- /dev/null +++ b/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.purs @@ -0,0 +1,13 @@ +module Main where + +import Prim.RowList as RL + +class ListToRow :: RL.RowList Type -> Constraint +class ListToRow xs + +instance listToRowNil :: ListToRow RL.Nil + +class ListToRow2 :: RL.RowList Type -> RL.RowList Type -> Constraint +class ListToRow2 xs ys + +instance listToRow2Nil :: ListToRow2 RL.Nil RL.Nil diff --git a/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.snap 
b/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.snap new file mode 100644 index 000000000..0cc40bdc1 --- /dev/null +++ b/tests-integration/fixtures/checking/232_instance_head_nil_kind_application/Main.snap @@ -0,0 +1,20 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +ListToRow :: RowList Type -> Constraint +ListToRow2 :: RowList Type -> RowList Type -> Constraint + +Classes +class ListToRow (xs :: RowList Type) +class ListToRow2 (xs :: RowList Type) (ys :: RowList Type) + +Instances +instance ListToRow (Nil @Type :: RowList Type) + chain: 0 +instance ListToRow2 (Nil @Type :: RowList Type) (Nil @Type :: RowList Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/233_record_instance_matching/Main.purs b/tests-integration/fixtures/checking/233_record_instance_matching/Main.purs new file mode 100644 index 000000000..1d49266a1 --- /dev/null +++ b/tests-integration/fixtures/checking/233_record_instance_matching/Main.purs @@ -0,0 +1,21 @@ +module Main where + +class Make :: Type -> Type -> Constraint +class Make a b | a -> b where + make :: a -> b + +instance Make { | r } { | r } where + make x = x + +testMake :: { a :: Int, b :: String } -> { a :: Int, b :: String } +testMake = make + +class Convert :: Type -> Type -> Constraint +class Convert a b | a -> b where + convert :: a -> b + +instance Convert { | r } { converted :: { | r } } where + convert x = { converted: x } + +testConvert :: { x :: Int } -> { converted :: { x :: Int } } +testConvert = convert diff --git a/tests-integration/fixtures/checking/233_record_instance_matching/Main.snap b/tests-integration/fixtures/checking/233_record_instance_matching/Main.snap new file mode 100644 index 000000000..991becc86 --- /dev/null +++ b/tests-integration/fixtures/checking/233_record_instance_matching/Main.snap @@ -0,0 +1,25 @@ +--- +source: tests-integration/tests/checking/generated.rs 
+assertion_line: 28 +expression: report +--- +Terms +make :: forall (a :: Type) (b :: Type). Make (a :: Type) (b :: Type) => (a :: Type) -> (b :: Type) +testMake :: { a :: Int, b :: String } -> { a :: Int, b :: String } +convert :: + forall (a :: Type) (b :: Type). Convert (a :: Type) (b :: Type) => (a :: Type) -> (b :: Type) +testConvert :: { x :: Int } -> { converted :: { x :: Int } } + +Types +Make :: Type -> Type -> Constraint +Convert :: Type -> Type -> Constraint + +Classes +class Make (a :: Type) (b :: Type) +class Convert (a :: Type) (b :: Type) + +Instances +instance Make ({| (r :: Row Type) } :: Type) ({| (r :: Row Type) } :: Type) + chain: 0 +instance Convert ({| (r :: Row Type) } :: Type) ({ converted :: {| (r :: Row Type) } } :: Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/234_record_instance_open_row/Main.purs b/tests-integration/fixtures/checking/234_record_instance_open_row/Main.purs new file mode 100644 index 000000000..314c9740f --- /dev/null +++ b/tests-integration/fixtures/checking/234_record_instance_open_row/Main.purs @@ -0,0 +1,27 @@ +module Main where + +class Clone :: Type -> Type -> Constraint +class Clone a b | a -> b where + clone :: a -> b + +instance Clone { | r } { | r } where + clone x = x + +clonePerson :: { name :: String, age :: Int } -> { name :: String, age :: Int } +clonePerson = clone + +cloneEmpty :: {} -> {} +cloneEmpty = clone + +cloneSingle :: { x :: Int } -> { x :: Int } +cloneSingle = clone + +class Nest :: Type -> Type -> Constraint +class Nest a b | a -> b where + nest :: a -> b + +instance Nest { | r } { inner :: { | r }, outer :: Int } where + nest x = { inner: x, outer: 0 } + +testNest :: { a :: String } -> { inner :: { a :: String }, outer :: Int } +testNest = nest diff --git a/tests-integration/fixtures/checking/234_record_instance_open_row/Main.snap b/tests-integration/fixtures/checking/234_record_instance_open_row/Main.snap new file mode 100644 index 000000000..a3dc99d0b --- /dev/null +++ 
b/tests-integration/fixtures/checking/234_record_instance_open_row/Main.snap @@ -0,0 +1,26 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +clone :: forall (a :: Type) (b :: Type). Clone (a :: Type) (b :: Type) => (a :: Type) -> (b :: Type) +clonePerson :: { age :: Int, name :: String } -> { age :: Int, name :: String } +cloneEmpty :: {} -> {} +cloneSingle :: { x :: Int } -> { x :: Int } +nest :: forall (a :: Type) (b :: Type). Nest (a :: Type) (b :: Type) => (a :: Type) -> (b :: Type) +testNest :: { a :: String } -> { inner :: { a :: String }, outer :: Int } + +Types +Clone :: Type -> Type -> Constraint +Nest :: Type -> Type -> Constraint + +Classes +class Clone (a :: Type) (b :: Type) +class Nest (a :: Type) (b :: Type) + +Instances +instance Clone ({| (r :: Row Type) } :: Type) ({| (r :: Row Type) } :: Type) + chain: 0 +instance Nest ({| (r :: Row Type) } :: Type) ({ inner :: {| (r :: Row Type) }, outer :: Int } :: Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.purs b/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.purs new file mode 100644 index 000000000..461d9b6fc --- /dev/null +++ b/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.purs @@ -0,0 +1,12 @@ +module Main where + +data Proxy :: forall k. k -> Type +data Proxy a = Proxy + +class T :: forall k. 
k -> Type +class T a + +instance T ( a :: Int ) +instance T { a :: Int } +instance T (Proxy ( a :: Int )) +instance T (Proxy { a :: Int }) diff --git a/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.snap b/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.snap new file mode 100644 index 000000000..631bd92ab --- /dev/null +++ b/tests-integration/fixtures/checking/235_instance_head_invalid_row/Main.snap @@ -0,0 +1,55 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Proxy :: forall (k :: Type) (a :: (k :: Type)). Proxy @(k :: Type) (a :: (k :: Type)) + +Types +Proxy :: forall (k :: Type). (k :: Type) -> Type +T :: forall (k :: Type). (k :: Type) -> Type + +Data +Proxy + Quantified = :0 + Kind = :1 + + +Roles +Proxy = [Phantom] + +Classes +class T (a :: (k :: Type)) + +Instances +instance T (( a :: Int ) :: Row Type) + chain: 0 +instance T ({ a :: Int } :: Type) + chain: 0 +instance T (Proxy @(Row Type) ( a :: Int ) :: Type) + chain: 0 +instance T (Proxy @Type { a :: Int } :: Type) + chain: 0 + +Diagnostics +error[InstanceHeadLabeledRow]: Instance argument at position 0 contains a labeled row, but this position is not determined by any functional dependency. Only the `( | r )` form is allowed. Got '( a :: Int )' instead. + --> 9:1..9:24 + | +9 | instance T ( a :: Int ) + | ^~~~~~~~~~~~~~~~~~~~~~~ +error[InstanceHeadLabeledRow]: Instance argument at position 0 contains a labeled row, but this position is not determined by any functional dependency. Only the `( | r )` form is allowed. Got '{ a :: Int }' instead. + --> 10:1..10:24 + | +10 | instance T { a :: Int } + | ^~~~~~~~~~~~~~~~~~~~~~~ +error[InstanceHeadLabeledRow]: Instance argument at position 0 contains a labeled row, but this position is not determined by any functional dependency. Only the `( | r )` form is allowed. Got 'Proxy @(Row Type) ( a :: Int )' instead. 
+ --> 11:1..11:32 + | +11 | instance T (Proxy ( a :: Int )) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[InstanceHeadLabeledRow]: Instance argument at position 0 contains a labeled row, but this position is not determined by any functional dependency. Only the `( | r )` form is allowed. Got 'Proxy @Type { a :: Int }' instead. + --> 12:1..12:32 + | +12 | instance T (Proxy { a :: Int }) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/236_category_function_instance/Main.purs b/tests-integration/fixtures/checking/236_category_function_instance/Main.purs new file mode 100644 index 000000000..98a83df5b --- /dev/null +++ b/tests-integration/fixtures/checking/236_category_function_instance/Main.purs @@ -0,0 +1,21 @@ +module Main where + +class Semigroupoid :: forall k. (k -> k -> Type) -> Constraint +class Semigroupoid a where + compose :: forall b c d. a c d -> a b c -> a b d + +class Category :: forall k. (k -> k -> Type) -> Constraint +class Semigroupoid a <= Category a where + identity :: forall t. a t t + +instance semigroupoidFn :: Semigroupoid (->) where + compose f g x = f (g x) + +instance categoryFn :: Category (->) where + identity x = x + +test :: forall a. a -> a +test = identity + +test2 :: Int -> Int +test2 = identity diff --git a/tests-integration/fixtures/checking/236_category_function_instance/Main.snap b/tests-integration/fixtures/checking/236_category_function_instance/Main.snap new file mode 100644 index 000000000..c28a8d682 --- /dev/null +++ b/tests-integration/fixtures/checking/236_category_function_instance/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +compose :: + forall (k :: Type) (a :: (k :: Type) -> (k :: Type) -> Type) (b :: (k :: Type)) (c :: (k :: Type)) + (d :: (k :: Type)). 
+ Semigroupoid (a :: (k :: Type) -> (k :: Type) -> Type) => + (a :: (k :: Type) -> (k :: Type) -> Type) (c :: (k :: Type)) (d :: (k :: Type)) -> + (a :: (k :: Type) -> (k :: Type) -> Type) (b :: (k :: Type)) (c :: (k :: Type)) -> + (a :: (k :: Type) -> (k :: Type) -> Type) (b :: (k :: Type)) (d :: (k :: Type)) +identity :: + forall (k :: Type) (a :: (k :: Type) -> (k :: Type) -> Type) (t :: (k :: Type)). + Category (a :: (k :: Type) -> (k :: Type) -> Type) => + (a :: (k :: Type) -> (k :: Type) -> Type) (t :: (k :: Type)) (t :: (k :: Type)) +test :: forall (a :: Type). (a :: Type) -> (a :: Type) +test2 :: Int -> Int + +Types +Semigroupoid :: forall (k :: Type). ((k :: Type) -> (k :: Type) -> Type) -> Constraint +Category :: forall (k :: Type). ((k :: Type) -> (k :: Type) -> Type) -> Constraint + +Classes +class Semigroupoid (a :: (k :: Type) -> (k :: Type) -> Type) +class Semigroupoid @(k :: Type) (a :: (k :: Type) -> (k :: Type) -> Type) <= Category (a :: (k :: Type) -> (k :: Type) -> Type) + +Instances +instance Semigroupoid (Function :: Type -> Type -> Type) + chain: 0 +instance Category (Function :: Type -> Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/237_bound_variable_unification/Main.purs b/tests-integration/fixtures/checking/237_bound_variable_unification/Main.purs new file mode 100644 index 000000000..83638e681 --- /dev/null +++ b/tests-integration/fixtures/checking/237_bound_variable_unification/Main.purs @@ -0,0 +1,20 @@ +module Main where + +-- Test based on Data.Bounded's boundedRecordCons pattern where +-- a bound row variable appears in a type annotation within a where clause. + +foreign import unsafeSet :: forall r1 r2 a. 
String -> a -> Record r1 -> Record r2 + +class BuildRecord :: Row Type -> Row Type -> Constraint +class BuildRecord row subrow | row -> subrow where + buildIt :: Record subrow + +instance buildRecordImpl :: BuildRecord row subrow where + buildIt = result + where + -- Type annotation references the bound variable `subrow` + result :: Record subrow + result = unsafeSet "x" 42 {} + +test :: forall r s. BuildRecord r s => Record s +test = buildIt diff --git a/tests-integration/fixtures/checking/237_bound_variable_unification/Main.snap b/tests-integration/fixtures/checking/237_bound_variable_unification/Main.snap new file mode 100644 index 000000000..b2cac7d32 --- /dev/null +++ b/tests-integration/fixtures/checking/237_bound_variable_unification/Main.snap @@ -0,0 +1,25 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unsafeSet :: + forall (r1 :: Row Type) (r2 :: Row Type) (a :: Type). + String -> (a :: Type) -> {| (r1 :: Row Type) } -> {| (r2 :: Row Type) } +buildIt :: + forall (row :: Row Type) (subrow :: Row Type). + BuildRecord (row :: Row Type) (subrow :: Row Type) => {| (subrow :: Row Type) } +test :: + forall (r :: Row Type) (s :: Row Type). + BuildRecord (r :: Row Type) (s :: Row Type) => {| (s :: Row Type) } + +Types +BuildRecord :: Row Type -> Row Type -> Constraint + +Classes +class BuildRecord (row :: Row Type) (subrow :: Row Type) + +Instances +instance BuildRecord ((row :: Row Type) :: Row Type) ((subrow :: Row Type) :: Row Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/238_function_application_subtype/Main.purs b/tests-integration/fixtures/checking/238_function_application_subtype/Main.purs new file mode 100644 index 000000000..0354dab64 --- /dev/null +++ b/tests-integration/fixtures/checking/238_function_application_subtype/Main.purs @@ -0,0 +1,14 @@ +module Main where + +class Category :: forall k. 
(k -> k -> Type) -> Constraint +class Category a where + identity :: forall t. a t t + +instance categoryFn :: Category (->) where + identity x = x + +testFnApp :: (->) Int Int +testFnApp = identity + +testAppFn :: Int -> Int +testAppFn = identity diff --git a/tests-integration/fixtures/checking/238_function_application_subtype/Main.snap b/tests-integration/fixtures/checking/238_function_application_subtype/Main.snap new file mode 100644 index 000000000..6d2d4bc81 --- /dev/null +++ b/tests-integration/fixtures/checking/238_function_application_subtype/Main.snap @@ -0,0 +1,22 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +identity :: + forall (k :: Type) (a :: (k :: Type) -> (k :: Type) -> Type) (t :: (k :: Type)). + Category (a :: (k :: Type) -> (k :: Type) -> Type) => + (a :: (k :: Type) -> (k :: Type) -> Type) (t :: (k :: Type)) (t :: (k :: Type)) +testFnApp :: Function Int Int +testAppFn :: Int -> Int + +Types +Category :: forall (k :: Type). ((k :: Type) -> (k :: Type) -> Type) -> Constraint + +Classes +class Category (a :: (k :: Type) -> (k :: Type) -> Type) + +Instances +instance Category (Function :: Type -> Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/245_do_notation_panic/Main.purs b/tests-integration/fixtures/checking/245_do_notation_panic/Main.purs new file mode 100644 index 000000000..9b3a9d8cd --- /dev/null +++ b/tests-integration/fixtures/checking/245_do_notation_panic/Main.purs @@ -0,0 +1,15 @@ +module Main where + +foreign import data Effect :: Type -> Type + +foreign import pure :: forall a. a -> Effect a +foreign import bind :: forall a b. Effect a -> (a -> Effect b) -> Effect b +foreign import discard :: forall a b. 
Effect a -> (a -> Effect b) -> Effect b + +-- Single-statement do block should work without panicking +test :: Effect Int +test = do + pure 1 + +test' = do + pure 1 diff --git a/tests-integration/fixtures/checking/245_do_notation_panic/Main.snap b/tests-integration/fixtures/checking/245_do_notation_panic/Main.snap new file mode 100644 index 000000000..c5f722619 --- /dev/null +++ b/tests-integration/fixtures/checking/245_do_notation_panic/Main.snap @@ -0,0 +1,21 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +pure :: forall (a :: Type). (a :: Type) -> Effect (a :: Type) +bind :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +discard :: + forall (a :: Type) (b :: Type). + Effect (a :: Type) -> ((a :: Type) -> Effect (b :: Type)) -> Effect (b :: Type) +test :: Effect Int +test' :: Effect Int + +Types +Effect :: Type -> Type + +Roles +Effect = [Nominal] diff --git a/tests-integration/fixtures/checking/246_do_bind_only/Main.purs b/tests-integration/fixtures/checking/246_do_bind_only/Main.purs new file mode 100644 index 000000000..2e40cc8ea --- /dev/null +++ b/tests-integration/fixtures/checking/246_do_bind_only/Main.purs @@ -0,0 +1,14 @@ +module Main where + +import Control.Applicative (pure) +import Control.Bind (bind) +import Effect (Effect) + +test :: Effect Int +test = do + x <- pure 1 + pure x + +test' = do + x <- pure 1 + pure x diff --git a/tests-integration/fixtures/checking/246_do_bind_only/Main.snap b/tests-integration/fixtures/checking/246_do_bind_only/Main.snap new file mode 100644 index 000000000..44fed64f5 --- /dev/null +++ b/tests-integration/fixtures/checking/246_do_bind_only/Main.snap @@ -0,0 +1,12 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Effect Int +test' :: + forall (t17 :: Type -> Type). 
+ Bind (t17 :: Type -> Type) => Applicative (t17 :: Type -> Type) => (t17 :: Type -> Type) Int + +Types diff --git a/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.purs b/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.purs new file mode 100644 index 000000000..ee5abbee8 --- /dev/null +++ b/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.purs @@ -0,0 +1,16 @@ +module Main where + +import Control.Applicative (pure) +import Control.Bind (bind) +import Effect (Effect) + +test :: Effect Int +test = do + pure 0 + x <- pure 1 + pure x + +test' = do + pure 0 + x <- pure 1 + pure x diff --git a/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.snap b/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.snap new file mode 100644 index 000000000..b74085011 --- /dev/null +++ b/tests-integration/fixtures/checking/247_do_discard_not_in_scope/Main.snap @@ -0,0 +1,24 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Effect Int +test' :: + forall (t19 :: Type -> Type). 
+ Applicative (t19 :: Type -> Type) => Bind (t19 :: Type -> Type) => (t19 :: Type -> Type) Int + +Types + +Diagnostics +error[NotInScope]: 'discard' is not in scope + --> 8:8..11:9 + | +8 | test = do + | ^~ +error[NotInScope]: 'discard' is not in scope + --> 13:9..16:9 + | +13 | test' = do + | ^~ diff --git a/tests-integration/fixtures/checking/248_do_empty_block/Main.purs b/tests-integration/fixtures/checking/248_do_empty_block/Main.purs new file mode 100644 index 000000000..fc2d819a4 --- /dev/null +++ b/tests-integration/fixtures/checking/248_do_empty_block/Main.purs @@ -0,0 +1,8 @@ +module Main where + +import Effect (Effect) + +test :: Effect Int +test = do + +test' = do diff --git a/tests-integration/fixtures/checking/248_do_empty_block/Main.snap b/tests-integration/fixtures/checking/248_do_empty_block/Main.snap new file mode 100644 index 000000000..4de116a36 --- /dev/null +++ b/tests-integration/fixtures/checking/248_do_empty_block/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Effect Int +test' :: ??? + +Types + +Diagnostics +error[EmptyDoBlock]: Empty do block + --> 6:8..6:10 + | +6 | test = do + | ^~ +error[CannotUnify]: Cannot unify '???' with 'Effect Int' + --> 6:8..6:10 + | +6 | test = do + | ^~ +error[EmptyDoBlock]: Empty do block + --> 8:9..8:11 + | +8 | test' = do + | ^~ +error[CannotUnify]: Cannot unify 'Type' with '???' 
+ --> 8:1..8:11 + | +8 | test' = do + | ^~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/249_do_final_bind/Main.purs b/tests-integration/fixtures/checking/249_do_final_bind/Main.purs new file mode 100644 index 000000000..9c3019d6e --- /dev/null +++ b/tests-integration/fixtures/checking/249_do_final_bind/Main.purs @@ -0,0 +1,12 @@ +module Main where + +import Control.Applicative (pure) +import Control.Bind (bind) +import Effect (Effect) + +test :: Effect Int +test = do + x <- pure 1 + +test' = do + x <- pure 1 diff --git a/tests-integration/fixtures/checking/249_do_final_bind/Main.snap b/tests-integration/fixtures/checking/249_do_final_bind/Main.snap new file mode 100644 index 000000000..3ec4ba806 --- /dev/null +++ b/tests-integration/fixtures/checking/249_do_final_bind/Main.snap @@ -0,0 +1,22 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Effect Int +test' :: forall (t8 :: Type -> Type). Applicative (t8 :: Type -> Type) => (t8 :: Type -> Type) Int + +Types + +Diagnostics +warning[InvalidFinalBind]: Invalid final bind statement in do expression + --> 9:3..9:14 + | +9 | x <- pure 1 + | ^~~~~~~~~~~ +warning[InvalidFinalBind]: Invalid final bind statement in do expression + --> 12:3..12:14 + | +12 | x <- pure 1 + | ^~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/250_do_final_let/Main.purs b/tests-integration/fixtures/checking/250_do_final_let/Main.purs new file mode 100644 index 000000000..bfcd874e2 --- /dev/null +++ b/tests-integration/fixtures/checking/250_do_final_let/Main.purs @@ -0,0 +1,10 @@ +module Main where + +import Effect (Effect) + +test :: Effect Int +test = do + let x = 1 + +test' = do + let x = 1 diff --git a/tests-integration/fixtures/checking/250_do_final_let/Main.snap b/tests-integration/fixtures/checking/250_do_final_let/Main.snap new file mode 100644 index 000000000..bb36dbd18 --- /dev/null +++ 
b/tests-integration/fixtures/checking/250_do_final_let/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Effect Int +test' :: ??? + +Types + +Diagnostics +error[InvalidFinalLet]: Invalid final let statement in do expression + --> 7:3..7:12 + | +7 | let x = 1 + | ^~~~~~~~~ +error[CannotUnify]: Cannot unify '???' with 'Effect Int' + --> 6:8..7:12 + | +6 | test = do + | ^~ +error[InvalidFinalLet]: Invalid final let statement in do expression + --> 10:3..10:12 + | +10 | let x = 1 + | ^~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 9:1..10:12 + | +9 | test' = do + | ^~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.purs b/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.purs new file mode 100644 index 000000000..947e64ab0 --- /dev/null +++ b/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.purs @@ -0,0 +1,14 @@ +module Main where + +import Type.Proxy (Proxy(..)) + +class Identity a where + identity :: a -> a + +instance Identity (Undefined a) where + identity a = + let + proxy :: Proxy a + proxy = Proxy + in + a diff --git a/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.snap b/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.snap new file mode 100644 index 000000000..dede4dd36 --- /dev/null +++ b/tests-integration/fixtures/checking/251_lookup_implicit_panic/Main.snap @@ -0,0 +1,44 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +identity :: forall (a :: Type). Identity (a :: Type) => (a :: Type) -> (a :: Type) + +Types +Identity :: Type -> Constraint + +Classes +class Identity (a :: Type) + +Instances +instance forall (t2 :: ???) (t1 :: Type). Identity (??? (a :: (t2 :: (t1 :: Type))) :: ???) 
+ chain: 0 + +Diagnostics +error[NotInScope]: 'Undefined' is not in scope + --> 8:20..8:29 + | +8 | instance Identity (Undefined a) where + | ^~~~~~~~~ +error[InvalidTypeApplication]: Cannot apply type '???' to '(a :: ?2[:0])'. '???' has kind '???', which is not a function kind. + --> 8:20..8:31 + | +8 | instance Identity (Undefined a) where + | ^~~~~~~~~~~ +error[CannotUnify]: Cannot unify '???' with 'Type' + --> 8:19..8:32 + | +8 | instance Identity (Undefined a) where + | ^~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 8:1..14:8 + | +8 | instance Identity (Undefined a) where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 8:1..14:8 + | +8 | instance Identity (Undefined a) where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.purs b/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.purs new file mode 100644 index 000000000..56548ca02 --- /dev/null +++ b/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.purs @@ -0,0 +1,3 @@ +module Main where + +type Bad = Int String diff --git a/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.snap b/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.snap new file mode 100644 index 000000000..cd9dfc5f0 --- /dev/null +++ b/tests-integration/fixtures/checking/252_invalid_type_application_basic/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +Bad :: ??? + +Synonyms +Bad = Int String + Quantified = :0 + Kind = :0 + Type = :0 + + +Diagnostics +error[InvalidTypeApplication]: Cannot apply type 'Int' to 'String'. 'Int' has kind 'Type', which is not a function kind. + --> 3:12..3:22 + | +3 | type Bad = Int String + | ^~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with '???' 
+ --> 3:12..3:22 + | +3 | type Bad = Int String + | ^~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.purs b/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.purs new file mode 100644 index 000000000..5929643fa --- /dev/null +++ b/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.purs @@ -0,0 +1,3 @@ +module Main where + +type Bad = Array Int String diff --git a/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.snap b/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.snap new file mode 100644 index 000000000..2e9312acc --- /dev/null +++ b/tests-integration/fixtures/checking/253_invalid_type_application_too_many/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +Bad :: ??? + +Synonyms +Bad = Array Int String + Quantified = :0 + Kind = :0 + Type = :0 + + +Diagnostics +error[InvalidTypeApplication]: Cannot apply type 'Array Int' to 'String'. 'Array Int' has kind 'Type', which is not a function kind. + --> 3:12..3:28 + | +3 | type Bad = Array Int String + | ^~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Type' with '???' + --> 3:12..3:28 + | +3 | type Bad = Array Int String + | ^~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.purs b/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.purs new file mode 100644 index 000000000..835832bd8 --- /dev/null +++ b/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.purs @@ -0,0 +1,21 @@ +module Main where + +class IsSymbol (sym :: Symbol) where + reflectSymbol :: Proxy sym -> String + +data Proxy (a :: Symbol) = Proxy + +reifySymbol :: forall r. String -> (forall sym. 
IsSymbol sym => Proxy sym -> r) -> r +reifySymbol s f = coerce f { reflectSymbol: \_ -> s } Proxy + where + coerce + :: (forall sym1. IsSymbol sym1 => Proxy sym1 -> r) + -> { reflectSymbol :: Proxy "" -> String } + -> Proxy "" + -> r + coerce = unsafeCoerce + +foreign import unsafeCoerce :: forall a b. a -> b + +test :: String -> String +test s = reifySymbol s reflectSymbol diff --git a/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.snap b/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.snap new file mode 100644 index 000000000..0f1cb48ef --- /dev/null +++ b/tests-integration/fixtures/checking/254_higher_rank_elaboration/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +reflectSymbol :: forall (sym :: Symbol). IsSymbol (sym :: Symbol) => Proxy (sym :: Symbol) -> String +Proxy :: forall (a :: Symbol). Proxy (a :: Symbol) +reifySymbol :: + forall (r :: Type). + String -> + (forall (sym :: Symbol). IsSymbol (sym :: Symbol) => Proxy (sym :: Symbol) -> (r :: Type)) -> + (r :: Type) +unsafeCoerce :: forall (a :: Type) (b :: Type). 
(a :: Type) -> (b :: Type) +test :: String -> String + +Types +IsSymbol :: Symbol -> Constraint +Proxy :: Symbol -> Type + +Data +Proxy + Quantified = :0 + Kind = :0 + + +Roles +Proxy = [Phantom] + +Classes +class IsSymbol (sym :: Symbol) diff --git a/tests-integration/fixtures/checking/255_exhaustive_basic/Main.purs b/tests-integration/fixtures/checking/255_exhaustive_basic/Main.purs new file mode 100644 index 000000000..9aab358ea --- /dev/null +++ b/tests-integration/fixtures/checking/255_exhaustive_basic/Main.purs @@ -0,0 +1,9 @@ +module Main where + +data Maybe a = Just a | Nothing + +test1 = case _ of + Just _ -> 1 + +test2 = case _ of + Nothing -> 2 diff --git a/tests-integration/fixtures/checking/255_exhaustive_basic/Main.snap b/tests-integration/fixtures/checking/255_exhaustive_basic/Main.snap new file mode 100644 index 000000000..6f813729a --- /dev/null +++ b/tests-integration/fixtures/checking/255_exhaustive_basic/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test1 :: forall (t5 :: Type). Partial => Maybe (t5 :: Type) -> Int +test2 :: forall (t10 :: Type). Partial => Maybe (t10 :: Type) -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 5:9..6:14 + | +5 | test1 = case _ of + | ^~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Just _ + --> 8:9..9:15 + | +8 | test2 = case _ of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.purs b/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.purs new file mode 100644 index 000000000..bfe455291 --- /dev/null +++ b/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.purs @@ -0,0 +1,29 @@ +module Main where + +data Maybe a = Just a | Nothing + +complete = case _, _ of + Just _, Just _ -> 1 + Just _, Nothing -> 2 + Nothing, Just _ -> 3 + Nothing, Nothing -> 4 + +incomplete1 = case _, _ of + Just _, Nothing -> 2 + Nothing, Just _ -> 3 + Nothing, Nothing -> 4 + +incomplete2 = case _, _ of + Just _, Just _ -> 1 + Nothing, Just _ -> 3 + Nothing, Nothing -> 4 + +incomplete3 = case _, _ of + Just _, Just _ -> 1 + Just _, Nothing -> 2 + Nothing, Nothing -> 4 + +incomplete4 = case _, _ of + Just _, Just _ -> 1 + Just _, Nothing -> 2 + Nothing, Just _ -> 3 diff --git a/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.snap b/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.snap new file mode 100644 index 000000000..b6c484879 --- /dev/null +++ b/tests-integration/fixtures/checking/256_exhaustive_multiple/Main.snap @@ -0,0 +1,51 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +complete :: forall (t6 :: Type) (t7 :: Type). Maybe (t6 :: Type) -> Maybe (t7 :: Type) -> Int +incomplete1 :: + forall (t19 :: Type) (t20 :: Type). Partial => Maybe (t19 :: Type) -> Maybe (t20 :: Type) -> Int +incomplete2 :: + forall (t30 :: Type) (t31 :: Type). Partial => Maybe (t30 :: Type) -> Maybe (t31 :: Type) -> Int +incomplete3 :: + forall (t41 :: Type) (t42 :: Type). Partial => Maybe (t41 :: Type) -> Maybe (t42 :: Type) -> Int +incomplete4 :: + forall (t52 :: Type) (t53 :: Type). 
Partial => Maybe (t52 :: Type) -> Maybe (t53 :: Type) -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Just _, Just _ + --> 11:15..14:24 + | +11 | incomplete1 = case _, _ of + | ^~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Just _, Nothing + --> 16:15..19:24 + | +16 | incomplete2 = case _, _ of + | ^~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing, Just _ + --> 21:15..24:24 + | +21 | incomplete3 = case _, _ of + | ^~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing, Nothing + --> 26:15..29:23 + | +26 | incomplete4 = case _, _ of + | ^~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.purs b/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.purs new file mode 100644 index 000000000..9e80fa604 --- /dev/null +++ b/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.purs @@ -0,0 +1,31 @@ +module Main where + +data Tuple a b = Tuple a b + +data Maybe a = Just a | Nothing + +complete = case _ of + Tuple (Just _) (Just _) -> 1 + Tuple (Just _) Nothing -> 2 + Tuple Nothing (Just _) -> 3 + Tuple Nothing Nothing -> 4 + +incomplete1 = case _ of + Tuple (Just _) Nothing -> 2 + Tuple Nothing (Just _) -> 3 + Tuple Nothing Nothing -> 4 + +incomplete2 = case _ of + Tuple (Just _) (Just _) -> 1 + Tuple Nothing (Just _) -> 3 + Tuple Nothing Nothing -> 4 + +incomplete3 = case _ of + Tuple (Just _) (Just _) -> 1 + Tuple (Just _) Nothing -> 2 + Tuple Nothing Nothing -> 4 + +incomplete4 = case _ of + Tuple (Just _) (Just _) -> 1 + Tuple (Just _) Nothing -> 2 + Tuple Nothing (Just _) -> 3 diff --git a/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.snap b/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.snap new file mode 100644 index 
000000000..4f91ed1e4 --- /dev/null +++ b/tests-integration/fixtures/checking/257_exhaustive_tuple/Main.snap @@ -0,0 +1,63 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Tuple :: forall (a :: Type) (b :: Type). (a :: Type) -> (b :: Type) -> Tuple (a :: Type) (b :: Type) +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +complete :: + forall (t9 :: Type) (t10 :: Type). Tuple (Maybe (t9 :: Type)) (Maybe (t10 :: Type)) -> Int +incomplete1 :: + forall (t29 :: Type) (t30 :: Type). + Partial => Tuple (Maybe (t29 :: Type)) (Maybe (t30 :: Type)) -> Int +incomplete2 :: + forall (t45 :: Type) (t46 :: Type). + Partial => Tuple (Maybe (t45 :: Type)) (Maybe (t46 :: Type)) -> Int +incomplete3 :: + forall (t61 :: Type) (t62 :: Type). + Partial => Tuple (Maybe (t61 :: Type)) (Maybe (t62 :: Type)) -> Int +incomplete4 :: + forall (t77 :: Type) (t78 :: Type). + Partial => Tuple (Maybe (t77 :: Type)) (Maybe (t78 :: Type)) -> Int + +Types +Tuple :: Type -> Type -> Type +Maybe :: Type -> Type + +Data +Tuple + Quantified = :0 + Kind = :0 + +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Tuple = [Representational, Representational] +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Tuple (Just _) (Just _) + --> 13:15..16:29 + | +13 | incomplete1 = case _ of + | ^~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Tuple (Just _) Nothing + --> 18:15..21:29 + | +18 | incomplete2 = case _ of + | ^~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Tuple Nothing (Just _) + --> 23:15..26:29 + | +23 | incomplete3 = case _ of + | ^~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Tuple Nothing Nothing + --> 28:15..31:30 + | +28 | incomplete4 = case _ of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/258_redundant_patterns/Main.purs b/tests-integration/fixtures/checking/258_redundant_patterns/Main.purs new file mode 100644 index 000000000..660482d17 --- /dev/null +++ b/tests-integration/fixtures/checking/258_redundant_patterns/Main.purs @@ -0,0 +1,25 @@ +module Main where + +data Unit = Unit + +unit = case _ of + Unit -> 1 + _ -> 2 + Unit -> 3 + +data YesNo = Yes | No + +yes = case _ of + Yes -> 1 + _ -> 2 + Yes -> 3 + +no = case _ of + Yes -> 1 + _ -> 2 + No -> 3 + +yesNo = case _ of + Yes -> 1 + No -> 2 + _ -> 3 diff --git a/tests-integration/fixtures/checking/258_redundant_patterns/Main.snap b/tests-integration/fixtures/checking/258_redundant_patterns/Main.snap new file mode 100644 index 000000000..9514b893e --- /dev/null +++ b/tests-integration/fixtures/checking/258_redundant_patterns/Main.snap @@ -0,0 +1,53 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Unit :: Unit +unit :: Unit -> Int +Yes :: YesNo +No :: YesNo +yes :: YesNo -> Int +no :: YesNo -> Int +yesNo :: YesNo -> Int + +Types +Unit :: Type +YesNo :: Type + +Data +Unit + Quantified = :0 + Kind = :0 + +YesNo + Quantified = :0 + Kind = :0 + + +Roles +Unit = [] +YesNo = [] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: _, Unit + --> 5:8..8:12 + | +5 | unit = case _ of + | ^~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: Yes + --> 12:7..15:11 + | +12 | yes = case _ of + | ^~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: No + --> 17:6..20:10 + | +17 | no = case _ of + | ^~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: _ + --> 22:9..25:9 + | +22 | yesNo = case _ of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.purs 
b/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.purs new file mode 100644 index 000000000..0ff0c5fa1 --- /dev/null +++ b/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.purs @@ -0,0 +1,25 @@ +module Main where + +testPartialTrue :: Boolean -> Int +testPartialTrue b = case b of + true -> 1 + +testPartialFalse :: Boolean -> Int +testPartialFalse b = case b of + false -> 0 + +testExhaustive :: Boolean -> Int +testExhaustive b = case b of + true -> 1 + false -> 0 + +testNestedPartial :: Boolean -> Boolean -> Int +testNestedPartial x y = case x of + true -> case y of + true -> 1 + false -> 0 + +testWildcardBoolean :: Boolean -> Int +testWildcardBoolean b = case b of + true -> 1 + _ -> 0 diff --git a/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.snap b/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.snap new file mode 100644 index 000000000..0a1e1088b --- /dev/null +++ b/tests-integration/fixtures/checking/259_exhaustive_boolean_partial/Main.snap @@ -0,0 +1,45 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testPartialTrue :: Boolean -> Int +testPartialFalse :: Boolean -> Int +testExhaustive :: Boolean -> Int +testNestedPartial :: Boolean -> Boolean -> Int +testWildcardBoolean :: Boolean -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: false + --> 4:21..5:12 + | +4 | testPartialTrue b = case b of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 3:1..3:34 + | +3 | testPartialTrue :: Boolean -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: true + --> 8:22..9:13 + | +8 | testPartialFalse b = case b of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 7:1..7:35 + | +7 | testPartialFalse :: Boolean -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: false + --> 18:11..19:14 + | +18 | true -> case y of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 16:1..16:47 + | +16 | testNestedPartial :: Boolean -> Boolean -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.purs b/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.purs new file mode 100644 index 000000000..d63158652 --- /dev/null +++ b/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.purs @@ -0,0 +1,14 @@ +module Main where + +testPartialZero :: Int -> Int +testPartialZero n = case n of + 0 -> 1 + +testPartialOne :: Int -> Int +testPartialOne n = case n of + 1 -> 1 + +testWildcard :: Int -> Int +testWildcard n = case n of + 0 -> 1 + _ -> 0 diff --git a/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.snap b/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.snap new file mode 100644 index 000000000..9b97290af --- /dev/null +++ b/tests-integration/fixtures/checking/260_exhaustive_integer_partial/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testPartialZero :: Int -> Int +testPartialOne :: Int -> Int +testWildcard :: Int -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 4:21..5:9 + | +4 | testPartialZero n = case n of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 3:1..3:30 + | +3 | testPartialZero :: Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 8:20..9:9 + | +8 | testPartialOne n = case n of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 7:1..7:29 + | +7 | testPartialOne :: Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.purs b/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.purs new file mode 100644 index 000000000..390e63c4d --- /dev/null +++ b/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.purs @@ -0,0 +1,14 @@ +module Main where + +testPartialZero :: Number -> Int +testPartialZero n = case n of + 0.0 -> 1 + +testPartialOneFive :: Number -> Int +testPartialOneFive n = case n of + 1.5 -> 1 + +testWildcard :: Number -> Int +testWildcard n = case n of + 0.0 -> 1 + _ -> 0 diff --git a/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.snap b/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.snap new file mode 100644 index 000000000..0db427372 --- /dev/null +++ b/tests-integration/fixtures/checking/261_exhaustive_number_partial/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testPartialZero :: Number -> Int +testPartialOneFive :: Number -> Int +testWildcard :: Number -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 4:21..5:11 + | +4 | testPartialZero n = case n of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 3:1..3:33 + | +3 | testPartialZero :: Number -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 8:24..9:11 + | +8 | testPartialOneFive n = case n of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 7:1..7:36 + | +7 | testPartialOneFive :: Number -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.purs b/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.purs new file mode 100644 index 000000000..f5b8628cf --- /dev/null +++ b/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.purs @@ -0,0 +1,14 @@ +module Main where + +testPartialA :: Char -> Int +testPartialA c = case c of + 'a' -> 1 + +testPartialB :: Char -> Int +testPartialB c = case c of + 'b' -> 1 + +testWildcard :: Char -> Int +testWildcard c = case c of + 'a' -> 1 + _ -> 0 diff --git a/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.snap b/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.snap new file mode 100644 index 000000000..cca1ff1b5 --- /dev/null +++ b/tests-integration/fixtures/checking/262_exhaustive_char_partial/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testPartialA :: Char -> Int +testPartialB :: Char -> Int +testWildcard :: Char -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 4:18..5:11 + | +4 | testPartialA c = case c of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 3:1..3:28 + | +3 | testPartialA :: Char -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 8:18..9:11 + | +8 | testPartialB c = case c of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 7:1..7:28 + | +7 | testPartialB :: Char -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.purs b/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.purs new file mode 100644 index 000000000..27defd004 --- /dev/null +++ b/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.purs @@ -0,0 +1,14 @@ +module Main where + +testPartialHello :: String -> Int +testPartialHello s = case s of + "hello" -> 1 + +testPartialWorld :: String -> Int +testPartialWorld s = case s of + "world" -> 1 + +testWildcard :: String -> Int +testWildcard s = case s of + "hello" -> 1 + _ -> 0 diff --git a/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.snap b/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.snap new file mode 100644 index 000000000..8cf496310 --- /dev/null +++ b/tests-integration/fixtures/checking/263_exhaustive_string_partial/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testPartialHello :: String -> Int +testPartialWorld :: String -> Int +testWildcard :: String -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 4:22..5:15 + | +4 | testPartialHello s = case s of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 3:1..3:34 + | +3 | testPartialHello :: String -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 8:22..9:15 + | +8 | testPartialWorld s = case s of + | ^~~~~~~~~ +error[NoInstanceFound]: No instance found for: Partial + --> 7:1..7:34 + | +7 | testPartialWorld :: String -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.purs b/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.purs new file mode 100644 index 000000000..f7654cfae --- /dev/null +++ b/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.purs @@ -0,0 +1,9 @@ +module Main where + +data Maybe a = Just a | Nothing + +test1 :: Maybe Int -> Int +test1 (Just _) = 1 + +test2 :: Maybe Int -> Int +test2 Nothing = 2 diff --git a/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.snap b/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.snap new file mode 100644 index 000000000..95159119c --- /dev/null +++ b/tests-integration/fixtures/checking/264_equation_exhaustive_basic/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test1 :: Maybe Int -> Int +test2 :: Maybe Int -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 5:1..5:26 + | +5 | test1 :: Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Just _ + --> 8:1..8:26 + | +8 | test2 :: Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/265_equation_redundant/Main.purs b/tests-integration/fixtures/checking/265_equation_redundant/Main.purs new file mode 100644 index 000000000..943cbf1d3 --- /dev/null +++ b/tests-integration/fixtures/checking/265_equation_redundant/Main.purs @@ -0,0 +1,25 @@ +module Main where + +data Unit = Unit + +unit :: Unit -> Int +unit Unit = 1 +unit _ = 2 +unit Unit = 3 + +data YesNo = Yes | No + +yes :: YesNo -> Int +yes Yes = 1 +yes _ = 2 +yes Yes = 3 + +no :: YesNo -> Int +no Yes = 1 +no _ = 2 +no No = 3 + +yesNo :: YesNo -> Int +yesNo Yes = 1 +yesNo No = 2 +yesNo _ = 3 diff --git a/tests-integration/fixtures/checking/265_equation_redundant/Main.snap b/tests-integration/fixtures/checking/265_equation_redundant/Main.snap new file mode 100644 index 000000000..02b574a83 --- /dev/null +++ b/tests-integration/fixtures/checking/265_equation_redundant/Main.snap @@ -0,0 +1,53 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Unit :: Unit +unit :: Unit -> Int +Yes :: YesNo +No :: YesNo +yes :: YesNo -> Int +no :: YesNo -> Int +yesNo :: YesNo -> Int + +Types +Unit :: Type +YesNo :: Type + +Data +Unit + Quantified = :0 + Kind = :0 + +YesNo + Quantified = :0 + Kind = :0 + + +Roles +Unit = [] +YesNo = [] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: _, Unit + --> 5:1..5:20 + | +5 | unit :: Unit -> Int + | ^~~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: Yes + --> 12:1..12:20 + | +12 | yes :: YesNo -> Int + | ^~~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: No + --> 17:1..17:19 + | +17 | no :: YesNo -> Int + | ^~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: _ + --> 22:1..22:22 + | +22 | yesNo :: YesNo -> Int + | ^~~~~~~~~~~~~~~~~~~~~ 
diff --git a/tests-integration/fixtures/checking/266_equation_guarded/Main.purs b/tests-integration/fixtures/checking/266_equation_guarded/Main.purs new file mode 100644 index 000000000..0bfc36ba0 --- /dev/null +++ b/tests-integration/fixtures/checking/266_equation_guarded/Main.purs @@ -0,0 +1,9 @@ +module Main where + +testGuarded :: Boolean -> Int +testGuarded true | false = 1 +testGuarded false = 0 + +testGuardedBoth :: Boolean -> Int +testGuardedBoth true | true = 1 +testGuardedBoth false | true = 0 diff --git a/tests-integration/fixtures/checking/266_equation_guarded/Main.snap b/tests-integration/fixtures/checking/266_equation_guarded/Main.snap new file mode 100644 index 000000000..ddca9dbd8 --- /dev/null +++ b/tests-integration/fixtures/checking/266_equation_guarded/Main.snap @@ -0,0 +1,17 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testGuarded :: Boolean -> Int +testGuardedBoth :: Boolean -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: true + --> 3:1..3:30 + | +3 | testGuarded :: Boolean -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.purs b/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.purs new file mode 100644 index 000000000..b8ad90eea --- /dev/null +++ b/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.purs @@ -0,0 +1,29 @@ +module Main where + +data Maybe a = Just a | Nothing + +complete :: Maybe Int -> Maybe Int -> Int +complete (Just _) (Just _) = 1 +complete (Just _) Nothing = 2 +complete Nothing (Just _) = 3 +complete Nothing Nothing = 4 + +incomplete1 :: Maybe Int -> Maybe Int -> Int +incomplete1 (Just _) Nothing = 2 +incomplete1 Nothing (Just _) = 3 +incomplete1 Nothing Nothing = 4 + +incomplete2 :: Maybe Int -> Maybe Int -> Int +incomplete2 (Just _) (Just _) = 1 +incomplete2 Nothing (Just _) = 3 +incomplete2 Nothing Nothing = 4 + +incomplete3 :: Maybe Int -> Maybe Int -> Int +incomplete3 (Just _) (Just _) = 1 +incomplete3 (Just _) Nothing = 2 +incomplete3 Nothing Nothing = 4 + +incomplete4 :: Maybe Int -> Maybe Int -> Int +incomplete4 (Just _) (Just _) = 1 +incomplete4 (Just _) Nothing = 2 +incomplete4 Nothing (Just _) = 3 diff --git a/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.snap b/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.snap new file mode 100644 index 000000000..7f029315d --- /dev/null +++ b/tests-integration/fixtures/checking/267_equation_multiple_arguments/Main.snap @@ -0,0 +1,47 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). 
Maybe (a :: Type) +complete :: Maybe Int -> Maybe Int -> Int +incomplete1 :: Maybe Int -> Maybe Int -> Int +incomplete2 :: Maybe Int -> Maybe Int -> Int +incomplete3 :: Maybe Int -> Maybe Int -> Int +incomplete4 :: Maybe Int -> Maybe Int -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Just _, Just _ + --> 11:1..11:45 + | +11 | incomplete1 :: Maybe Int -> Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Just _, Nothing + --> 16:1..16:45 + | +16 | incomplete2 :: Maybe Int -> Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing, Just _ + --> 21:1..21:45 + | +21 | incomplete3 :: Maybe Int -> Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Nothing, Nothing + --> 26:1..26:45 + | +26 | incomplete4 :: Maybe Int -> Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.purs b/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.purs new file mode 100644 index 000000000..c8b080141 --- /dev/null +++ b/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.purs @@ -0,0 +1,17 @@ +module Main where + +data Maybe a = Just a | Nothing + +test :: Int +test = + let + f :: Maybe Int -> Int + f (Just _) = 1 + in f Nothing + +test2 :: Int +test2 = + let + g :: Maybe Int -> Int + g Nothing = 1 + in g (Just 42) diff --git a/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.snap b/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.snap new file mode 100644 index 000000000..1613ff249 --- /dev/null +++ b/tests-integration/fixtures/checking/268_let_equation_exhaustive/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test :: Int +test2 :: Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 8:5..9:19 + | +8 | f :: Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Just _ + --> 15:5..16:18 + | +15 | g :: Maybe Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.purs b/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.purs new file mode 100644 index 000000000..8527ab124 --- /dev/null +++ b/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.purs @@ -0,0 +1,7 @@ +module Main where + +class C a where + c :: a -> Int + +instance cBool :: C Boolean where + c true = 1 diff --git a/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.snap b/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.snap new file mode 100644 index 000000000..c026af80f --- /dev/null +++ b/tests-integration/fixtures/checking/269_instance_equation_exhaustive/Main.snap @@ -0,0 +1,24 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +c :: forall (a :: Type). C (a :: Type) => (a :: Type) -> Int + +Types +C :: Type -> Constraint + +Classes +class C (a :: Type) + +Instances +instance C (Boolean :: Type) + chain: 0 + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: false + --> 6:1..7:13 + | +6 | instance cBool :: C Boolean where + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.purs b/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.purs new file mode 100644 index 000000000..c1bc3f6fc --- /dev/null +++ b/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.purs @@ -0,0 +1,24 @@ +module Main where + +data Maybe a = Just a | Nothing + +-- Simple record pattern should be exhaustive +test1 :: { x :: Int, y :: Int } -> Int +test1 { x, y } = x + +-- Record with nested Maybe constructor should report missing Nothing +test2 :: { x :: Maybe Int } -> Int +test2 { x: Just n } = n + +-- Multiple record patterns - second is redundant since record is exhaustive +test3 :: { x :: Int, y :: Int } -> Int +test3 { x, y } = x +test3 r = 0 + +-- Record with multiple fields containing constructors - missing Nothing cases +test4 :: { a :: Maybe Int, b :: Maybe String } -> Int +test4 { a: Just n, b: Just _ } = n + +-- Nested record patterns should be exhaustive +test5 :: { inner :: { x :: Int } } -> Int +test5 { inner: { x } } = x diff --git a/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.snap b/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.snap new file mode 100644 index 000000000..696d8f24c --- /dev/null +++ b/tests-integration/fixtures/checking/270_record_constructor_exhaustive/Main.snap @@ -0,0 +1,42 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). 
Maybe (a :: Type) +test1 :: { x :: Int, y :: Int } -> Int +test2 :: { x :: Maybe Int } -> Int +test3 :: { x :: Int, y :: Int } -> Int +test4 :: { a :: Maybe Int, b :: Maybe String } -> Int +test5 :: { inner :: { x :: Int } } -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: { x: Nothing } + --> 10:1..10:35 + | +10 | test2 :: { x :: Maybe Int } -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: _ + --> 14:1..14:39 + | +14 | test3 :: { x :: Int, y :: Int } -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: { a: Nothing, b: _ } + --> 19:1..19:54 + | +19 | test4 :: { a :: Maybe Int, b :: Maybe String } -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/271_array_exhaustive/Main.purs b/tests-integration/fixtures/checking/271_array_exhaustive/Main.purs new file mode 100644 index 000000000..2c236189f --- /dev/null +++ b/tests-integration/fixtures/checking/271_array_exhaustive/Main.purs @@ -0,0 +1,18 @@ +module Main where + +-- Non-exhaustive case: [] and [x] don't cover all array lengths +testNonExhaustive :: Array Int -> Int +testNonExhaustive [] = 0 +testNonExhaustive [x] = x + +-- Redundant case: two length-1 patterns, second is redundant +testRedundant :: Array Int -> Int +testRedundant [x] = x +testRedundant [y] = y +testRedundant _ = 0 + +-- Exhaustive case: wildcard covers all remaining cases +testExhaustive :: Array Int -> Int +testExhaustive [] = 0 +testExhaustive [x] = x +testExhaustive _ = 0 diff --git a/tests-integration/fixtures/checking/271_array_exhaustive/Main.snap b/tests-integration/fixtures/checking/271_array_exhaustive/Main.snap new file mode 100644 index 000000000..088d9b6d0 --- /dev/null +++ 
b/tests-integration/fixtures/checking/271_array_exhaustive/Main.snap @@ -0,0 +1,23 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +testNonExhaustive :: Array Int -> Int +testRedundant :: Array Int -> Int +testExhaustive :: Array Int -> Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 4:1..4:38 + | +4 | testNonExhaustive :: Array Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: [_] + --> 9:1..9:34 + | +9 | testRedundant :: Array Int -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/272_array_nested_constructor/Main.purs b/tests-integration/fixtures/checking/272_array_nested_constructor/Main.purs new file mode 100644 index 000000000..07ca081b8 --- /dev/null +++ b/tests-integration/fixtures/checking/272_array_nested_constructor/Main.purs @@ -0,0 +1,32 @@ +module Main where + +data Maybe a = Just a | Nothing + +-- Array with nested Maybe constructor - missing Nothing case +testArrayWithMaybe :: Array (Maybe Int) -> Int +testArrayWithMaybe [Just n] = n + +-- Array with nested Maybe - multiple elements, missing Nothing cases +testArrayWithMultipleMaybe :: Array (Maybe Int) -> Int +testArrayWithMultipleMaybe [Just n, Just m] = n + +-- Exhaustive array with Maybe - wildcard covers remaining cases +testArrayMaybeExhaustive :: Array (Maybe Int) -> Int +testArrayMaybeExhaustive [Just n] = n +testArrayMaybeExhaustive _ = 0 + +-- Nested arrays with Maybe - complex pattern +testNestedArrayMaybe :: Array (Array (Maybe Int)) -> Int +testNestedArrayMaybe [[Just n]] = n + +-- Exhaustive inner case - all Maybe constructors covered plus wildcard for other lengths +testExhaustiveInner :: Array (Maybe Int) -> Int +testExhaustiveInner [Just n] = n +testExhaustiveInner [Nothing] = 0 +testExhaustiveInner _ = 0 + +-- Useless branch with multiple elements - 
second pattern is redundant +testUselessMultiple :: Array (Maybe Int) -> Int +testUselessMultiple [Just n, Just m] = n +testUselessMultiple [Just a, Just b] = a +testUselessMultiple _ = 0 diff --git a/tests-integration/fixtures/checking/272_array_nested_constructor/Main.snap b/tests-integration/fixtures/checking/272_array_nested_constructor/Main.snap new file mode 100644 index 000000000..d3dcf2a2b --- /dev/null +++ b/tests-integration/fixtures/checking/272_array_nested_constructor/Main.snap @@ -0,0 +1,48 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +testArrayWithMaybe :: Array (Maybe Int) -> Int +testArrayWithMultipleMaybe :: Array (Maybe Int) -> Int +testArrayMaybeExhaustive :: Array (Maybe Int) -> Int +testNestedArrayMaybe :: Array (Array (Maybe Int)) -> Int +testExhaustiveInner :: Array (Maybe Int) -> Int +testUselessMultiple :: Array (Maybe Int) -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 6:1..6:47 + | +6 | testArrayWithMaybe :: Array (Maybe Int) -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 10:1..10:55 + | +10 | testArrayWithMultipleMaybe :: Array (Maybe Int) -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 19:1..19:57 + | +19 | testNestedArrayMaybe :: Array (Array (Maybe Int)) -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +warning[RedundantPattern]: Pattern match has redundant patterns: [Just _, Just _] + --> 29:1..29:48 + | +29 | testUselessMultiple :: Array (Maybe Int) -> Int + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/273_class_member_instantiation/Main.purs b/tests-integration/fixtures/checking/273_class_member_instantiation/Main.purs new file mode 100644 index 000000000..8a907bb7d --- /dev/null +++ b/tests-integration/fixtures/checking/273_class_member_instantiation/Main.purs @@ -0,0 +1,24 @@ +module Main where + +data Unit = Unit + +class Monoid a where + mempty :: a + +class Semigroup a where + append :: a -> a -> a + +foreign import apply :: forall a b. (a -> b) -> a -> b +foreign import pure :: forall m a. a -> m a +foreign import lift2 :: forall m a b c. (a -> b -> c) -> m a -> m b -> m c + +test1 :: forall a. Monoid a => Unit -> a +test1 _ = mempty + +test2 = \_ -> mempty + +test3 = apply (\x -> x) mempty + +test4 = pure mempty + +test5 = lift2 append diff --git a/tests-integration/fixtures/checking/273_class_member_instantiation/Main.snap b/tests-integration/fixtures/checking/273_class_member_instantiation/Main.snap new file mode 100644 index 000000000..69264ec1b --- /dev/null +++ b/tests-integration/fixtures/checking/273_class_member_instantiation/Main.snap @@ -0,0 +1,47 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Unit :: Unit +mempty :: forall (a :: Type). Monoid (a :: Type) => (a :: Type) +append :: forall (a :: Type). Semigroup (a :: Type) => (a :: Type) -> (a :: Type) -> (a :: Type) +apply :: forall (a :: Type) (b :: Type). ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (b :: Type) +pure :: forall (m :: Type -> Type) (a :: Type). 
(a :: Type) -> (m :: Type -> Type) (a :: Type) +lift2 :: + forall (m :: Type -> Type) (a :: Type) (b :: Type) (c :: Type). + ((a :: Type) -> (b :: Type) -> (c :: Type)) -> + (m :: Type -> Type) (a :: Type) -> + (m :: Type -> Type) (b :: Type) -> + (m :: Type -> Type) (c :: Type) +test1 :: forall (a :: Type). Monoid (a :: Type) => Unit -> (a :: Type) +test2 :: forall (t22 :: Type) (t23 :: Type). Monoid (t23 :: Type) => (t22 :: Type) -> (t23 :: Type) +test3 :: forall (t24 :: Type). Monoid (t24 :: Type) => (t24 :: Type) +test4 :: + forall (t31 :: Type -> Type) (t32 :: Type). + Monoid (t32 :: Type) => (t31 :: Type -> Type) (t32 :: Type) +test5 :: + forall (t36 :: Type -> Type) (t39 :: Type). + Semigroup (t39 :: Type) => + (t36 :: Type -> Type) (t39 :: Type) -> + (t36 :: Type -> Type) (t39 :: Type) -> + (t36 :: Type -> Type) (t39 :: Type) + +Types +Unit :: Type +Monoid :: Type -> Constraint +Semigroup :: Type -> Constraint + +Data +Unit + Quantified = :0 + Kind = :0 + + +Roles +Unit = [] + +Classes +class Monoid (a :: Type) +class Semigroup (a :: Type) diff --git a/tests-integration/fixtures/checking/274_givens_retained/Main.purs b/tests-integration/fixtures/checking/274_givens_retained/Main.purs new file mode 100644 index 000000000..432660420 --- /dev/null +++ b/tests-integration/fixtures/checking/274_givens_retained/Main.purs @@ -0,0 +1,10 @@ +module Main where + +class Given a where + consume :: a -> a + +testGiven :: forall a. Given a => a -> a +testGiven a = consume a + where + b = consume a + c = consume a diff --git a/tests-integration/fixtures/checking/274_givens_retained/Main.snap b/tests-integration/fixtures/checking/274_givens_retained/Main.snap new file mode 100644 index 000000000..6dfb4f733 --- /dev/null +++ b/tests-integration/fixtures/checking/274_givens_retained/Main.snap @@ -0,0 +1,14 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +consume :: forall (a :: Type). 
Given (a :: Type) => (a :: Type) -> (a :: Type) +testGiven :: forall (a :: Type). Given (a :: Type) => (a :: Type) -> (a :: Type) + +Types +Given :: Type -> Constraint + +Classes +class Given (a :: Type) diff --git a/tests-integration/fixtures/checking/275_givens_scoped/Main.purs b/tests-integration/fixtures/checking/275_givens_scoped/Main.purs new file mode 100644 index 000000000..dbbdec9d6 --- /dev/null +++ b/tests-integration/fixtures/checking/275_givens_scoped/Main.purs @@ -0,0 +1,15 @@ +module Main where + +class Given a where + consume :: a -> a + +testGiven :: forall a. Given a => a -> a +testGiven a = consume a + where + -- b's constraint should be valid in b + b :: Given Int => a + b = let consumeInt = consume 42 in a + + -- b's constraint should not leak to c + c :: Int + c = consume 42 diff --git a/tests-integration/fixtures/checking/275_givens_scoped/Main.snap b/tests-integration/fixtures/checking/275_givens_scoped/Main.snap new file mode 100644 index 000000000..fef05ae09 --- /dev/null +++ b/tests-integration/fixtures/checking/275_givens_scoped/Main.snap @@ -0,0 +1,21 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +consume :: forall (a :: Type). Given (a :: Type) => (a :: Type) -> (a :: Type) +testGiven :: forall (a :: Type). Given (a :: Type) => (a :: Type) -> (a :: Type) + +Types +Given :: Type -> Constraint + +Classes +class Given (a :: Type) + +Diagnostics +error[NoInstanceFound]: No instance found for: Given Int + --> 6:1..6:41 + | +6 | testGiven :: forall a. 
Given a => a -> a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.purs b/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.purs new file mode 100644 index 000000000..93ea28543 --- /dev/null +++ b/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.purs @@ -0,0 +1,17 @@ +module Main where + +import Type.Proxy (Proxy(..)) + +-- Minimal reproduction: where clause referencing outer type variables +-- The `r` in coerce's signature should reference the outer forall's `r` + +foreign import unsafeCoerce :: forall a b. a -> b + +test :: forall t r. t -> (forall v. Proxy v -> r) -> r +test s f = coerce f Proxy + where + coerce + :: (forall v. Proxy v -> r) + -> Proxy _ + -> r + coerce = unsafeCoerce diff --git a/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.snap b/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.snap new file mode 100644 index 000000000..33878c7e7 --- /dev/null +++ b/tests-integration/fixtures/checking/276_where_clause_outer_scope/Main.snap @@ -0,0 +1,14 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unsafeCoerce :: forall (a :: Type) (b :: Type). (a :: Type) -> (b :: Type) +test :: + forall (t6 :: Type) (t :: Type) (r :: Type). + (t :: Type) -> + (forall (v :: (t6 :: Type)). Proxy @(t6 :: Type) (v :: (t6 :: Type)) -> (r :: Type)) -> + (r :: Type) + +Types diff --git a/tests-integration/fixtures/checking/277_keyword_as_variable/Main.purs b/tests-integration/fixtures/checking/277_keyword_as_variable/Main.purs new file mode 100644 index 000000000..7448ea784 --- /dev/null +++ b/tests-integration/fixtures/checking/277_keyword_as_variable/Main.purs @@ -0,0 +1,15 @@ +module Main where + +foreign import data Test1 :: forall as. as -> as +foreign import data Test2 :: forall phantom. 
phantom -> phantom +foreign import data Test3 :: forall nominal. nominal -> nominal +foreign import data Test4 :: forall representational. representational -> representational +foreign import data Test5 :: forall hiding. hiding -> hiding +foreign import data Test6 :: forall role. role -> role + +test1 = \as -> as +test2 = \phantom -> phantom +test3 = \nominal -> nominal +test4 = \representational -> representational +test5 = \hiding -> hiding +test6 = \role -> role diff --git a/tests-integration/fixtures/checking/277_keyword_as_variable/Main.snap b/tests-integration/fixtures/checking/277_keyword_as_variable/Main.snap new file mode 100644 index 000000000..b242a5bfe --- /dev/null +++ b/tests-integration/fixtures/checking/277_keyword_as_variable/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test1 :: forall (t8 :: Type). (t8 :: Type) -> (t8 :: Type) +test2 :: forall (t11 :: Type). (t11 :: Type) -> (t11 :: Type) +test3 :: forall (t14 :: Type). (t14 :: Type) -> (t14 :: Type) +test4 :: forall (t17 :: Type). (t17 :: Type) -> (t17 :: Type) +test5 :: forall (t20 :: Type). (t20 :: Type) -> (t20 :: Type) +test6 :: forall (t23 :: Type). (t23 :: Type) -> (t23 :: Type) + +Types +Test1 :: forall (as :: Type). (as :: Type) -> (as :: Type) +Test2 :: forall (phantom :: Type). (phantom :: Type) -> (phantom :: Type) +Test3 :: forall (nominal :: Type). (nominal :: Type) -> (nominal :: Type) +Test4 :: forall (representational :: Type). (representational :: Type) -> (representational :: Type) +Test5 :: forall (hiding :: Type). (hiding :: Type) -> (hiding :: Type) +Test6 :: forall (role :: Type). 
(role :: Type) -> (role :: Type) + +Roles +Test1 = [Nominal] +Test2 = [Nominal] +Test3 = [Nominal] +Test4 = [Nominal] +Test5 = [Nominal] +Test6 = [Nominal] diff --git a/tests-integration/fixtures/checking/278_partial_case_nested/Main.purs b/tests-integration/fixtures/checking/278_partial_case_nested/Main.purs new file mode 100644 index 000000000..cbf235179 --- /dev/null +++ b/tests-integration/fixtures/checking/278_partial_case_nested/Main.purs @@ -0,0 +1,17 @@ +module Main where + +import Partial.Unsafe (unsafePartial) + +partialCase :: Partial => Int +partialCase = case 123 of + 123 -> 123 + +partialCase' = case 123 of + 123 -> 123 + +partialNested :: Int +partialNested = unsafePartial (case 123 of + 123 -> 123) + +partialNested' = unsafePartial (case 123 of + 123 -> 123) diff --git a/tests-integration/fixtures/checking/278_partial_case_nested/Main.snap b/tests-integration/fixtures/checking/278_partial_case_nested/Main.snap new file mode 100644 index 000000000..9dd3e74d8 --- /dev/null +++ b/tests-integration/fixtures/checking/278_partial_case_nested/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +partialCase :: Partial => Int +partialCase' :: Partial => Int +partialNested :: Int +partialNested' :: Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 6:15..7:13 + | +6 | partialCase = case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 9:16..10:13 + | +9 | partialCase' = case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 13:32..14:13 + | +13 | partialNested = unsafePartial (case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 16:33..17:13 + | +16 | partialNested' = unsafePartial (case 123 of + | ^~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/279_partial_let_where/Main.purs b/tests-integration/fixtures/checking/279_partial_let_where/Main.purs new file mode 100644 index 000000000..e6383cc07 --- /dev/null +++ b/tests-integration/fixtures/checking/279_partial_let_where/Main.purs @@ -0,0 +1,27 @@ +module Main where + +partialLet :: Partial => Int +partialLet = + let + value = case 123 of + 123 -> 123 + in + value + +partialLet' = + let + value = case 123 of + 123 -> 123 + in + value + +partialWhere :: Partial => Int +partialWhere = value + where + value = case 123 of + 123 -> 123 + +partialWhere' = value + where + value = case 123 of + 123 -> 123 diff --git a/tests-integration/fixtures/checking/279_partial_let_where/Main.snap b/tests-integration/fixtures/checking/279_partial_let_where/Main.snap new file mode 100644 index 000000000..190dcc64d --- /dev/null +++ b/tests-integration/fixtures/checking/279_partial_let_where/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +partialLet :: Partial => Int +partialLet' :: Partial => Int +partialWhere :: Partial => Int +partialWhere' :: Partial => Int + +Types + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 6:13..7:17 + | +6 | value = case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 13:13..14:17 + | +13 | value = case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: _ + --> 21:11..22:15 + | +21 | value = case 123 of + | ^~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: _ + --> 26:11..27:15 + | +26 | value = case 123 of + | ^~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/280_partial_case_variable/Main.purs b/tests-integration/fixtures/checking/280_partial_case_variable/Main.purs new file mode 100644 index 000000000..8332b7fe2 --- /dev/null +++ b/tests-integration/fixtures/checking/280_partial_case_variable/Main.purs @@ -0,0 +1,9 @@ +module Main where + +data Maybe a = Just a | Nothing + +test a = case a of + Just 123 -> 123 + +test' = case _ of + Just 123 -> 123 diff --git a/tests-integration/fixtures/checking/280_partial_case_variable/Main.snap b/tests-integration/fixtures/checking/280_partial_case_variable/Main.snap new file mode 100644 index 000000000..90196e14c --- /dev/null +++ b/tests-integration/fixtures/checking/280_partial_case_variable/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test :: Partial => Maybe Int -> Int +test' :: Partial => Maybe Int -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 5:10..6:18 + | +5 | test a = case a of + | ^~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Nothing + --> 8:9..9:18 + | +8 | test' = case _ of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.purs b/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.purs new file mode 100644 index 000000000..9a0aae6f0 --- /dev/null +++ b/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.purs @@ -0,0 +1,9 @@ +module Main where + +data Unit = Unit + +class Example a where + example :: a + +test = case _ of + Unit -> example diff --git a/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.snap b/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.snap new file mode 100644 index 000000000..23737b729 --- /dev/null +++ b/tests-integration/fixtures/checking/281_sectioned_constraint_generation/Main.snap @@ -0,0 +1,25 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Unit :: Unit +example :: forall (a :: Type). Example (a :: Type) => (a :: Type) +test :: forall (t4 :: Type). Example (t4 :: Type) => Unit -> (t4 :: Type) + +Types +Unit :: Type +Example :: Type -> Constraint + +Data +Unit + Quantified = :0 + Kind = :0 + + +Roles +Unit = [] + +Classes +class Example (a :: Type) diff --git a/tests-integration/fixtures/checking/282_higher_rank_unification/Lib.purs b/tests-integration/fixtures/checking/282_higher_rank_unification/Lib.purs new file mode 100644 index 000000000..ead390097 --- /dev/null +++ b/tests-integration/fixtures/checking/282_higher_rank_unification/Lib.purs @@ -0,0 +1,12 @@ +module Lib where + +data Maybe a = Nothing | Just a + +isJust :: forall a. Maybe a -> Boolean +isJust (Just _) = true +isJust Nothing = false + +foreign import data Fn2 :: Type -> Type -> Type -> Type +foreign import data Fn3 :: Type -> Type -> Type -> Type -> Type +foreign import runFn2 :: forall a b c. 
Fn2 a b c -> a -> b -> c +foreign import runFn3 :: forall a b c d. Fn3 a b c d -> a -> b -> c -> d diff --git a/tests-integration/fixtures/checking/282_higher_rank_unification/Main.purs b/tests-integration/fixtures/checking/282_higher_rank_unification/Main.purs new file mode 100644 index 000000000..a65d9cca0 --- /dev/null +++ b/tests-integration/fixtures/checking/282_higher_rank_unification/Main.purs @@ -0,0 +1,16 @@ +module Main where + +import Lib (Maybe(..), isJust, Fn2, Fn3, runFn2, runFn3) + +foreign import findImpl + :: forall a b. Fn2 (forall c. Maybe c) (a -> Maybe b) (Maybe b) + +findMap :: forall a b. (a -> Maybe b) -> Maybe b +findMap = runFn2 findImpl Nothing + +foreign import findMapImpl + :: forall a b + . Fn3 (forall c. Maybe c) (forall c. Maybe c -> Boolean) (a -> Maybe b) (Maybe b) + +findMap' :: forall a b. (a -> Maybe b) -> Maybe b +findMap' = runFn3 findMapImpl Nothing isJust diff --git a/tests-integration/fixtures/checking/282_higher_rank_unification/Main.snap b/tests-integration/fixtures/checking/282_higher_rank_unification/Main.snap new file mode 100644 index 000000000..652139c26 --- /dev/null +++ b/tests-integration/fixtures/checking/282_higher_rank_unification/Main.snap @@ -0,0 +1,23 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +findImpl :: + forall (a :: Type) (b :: Type). + Fn2 + (forall (c :: Type). Maybe (c :: Type)) + ((a :: Type) -> Maybe (b :: Type)) + (Maybe (b :: Type)) +findMap :: forall (a :: Type) (b :: Type). ((a :: Type) -> Maybe (b :: Type)) -> Maybe (b :: Type) +findMapImpl :: + forall (a :: Type) (b :: Type). + Fn3 + (forall (c :: Type). Maybe (c :: Type)) + (forall (c :: Type). Maybe (c :: Type) -> Boolean) + ((a :: Type) -> Maybe (b :: Type)) + (Maybe (b :: Type)) +findMap' :: forall (a :: Type) (b :: Type). 
((a :: Type) -> Maybe (b :: Type)) -> Maybe (b :: Type) + +Types diff --git a/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.purs b/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.purs new file mode 100644 index 000000000..cc87032eb --- /dev/null +++ b/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.purs @@ -0,0 +1,15 @@ +module Main where + +data Maybe a = Just a | Nothing + +type NaturalTransformation f g = forall a. f a -> g a + +infixr 4 type NaturalTransformation as ~> + +test :: Maybe ~> Maybe +test (Just a) = Just a +test Nothing = Nothing + +test' :: NaturalTransformation Maybe Maybe +test' (Just a) = Just a +test' Nothing = Nothing diff --git a/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.snap b/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.snap new file mode 100644 index 000000000..cd6643e09 --- /dev/null +++ b/tests-integration/fixtures/checking/283_type_operator_synonym_expansion/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test :: Maybe ~> Maybe +test' :: NaturalTransformation Maybe Maybe + +Types +Maybe :: Type -> Type +NaturalTransformation :: + forall (t13 :: Type). ((t13 :: Type) -> Type) -> ((t13 :: Type) -> Type) -> Type +~> :: forall (t13 :: Type). ((t13 :: Type) -> Type) -> ((t13 :: Type) -> Type) -> Type + +Synonyms +NaturalTransformation = forall (t13 :: Type) (f :: (t13 :: Type) -> Type) (g :: (t13 :: Type) -> Type) (a :: (t13 :: Type)). 
+ (f :: (t13 :: Type) -> Type) (a :: (t13 :: Type)) -> + (g :: (t13 :: Type) -> Type) (a :: (t13 :: Type)) + Quantified = :1 + Kind = :0 + Type = :2 + + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] diff --git a/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.purs b/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.purs new file mode 100644 index 000000000..f5600b31a --- /dev/null +++ b/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.purs @@ -0,0 +1,11 @@ +module Main where + +data List a = Cons a (List a) | Nil + +type Transform f g = forall a. f a -> g a + +infixr 4 type Transform as ~> + +head :: List ~> List +head (Cons a _) = Cons a Nil +head Nil = Nil diff --git a/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.snap b/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.snap new file mode 100644 index 000000000..bfd0ac9af --- /dev/null +++ b/tests-integration/fixtures/checking/284_type_operator_synonym_with_binders/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Cons :: forall (a :: Type). (a :: Type) -> List (a :: Type) -> List (a :: Type) +Nil :: forall (a :: Type). List (a :: Type) +head :: List ~> List + +Types +List :: Type -> Type +Transform :: forall (t14 :: Type). ((t14 :: Type) -> Type) -> ((t14 :: Type) -> Type) -> Type +~> :: forall (t14 :: Type). ((t14 :: Type) -> Type) -> ((t14 :: Type) -> Type) -> Type + +Synonyms +Transform = forall (t14 :: Type) (f :: (t14 :: Type) -> Type) (g :: (t14 :: Type) -> Type) (a :: (t14 :: Type)). 
+ (f :: (t14 :: Type) -> Type) (a :: (t14 :: Type)) -> + (g :: (t14 :: Type) -> Type) (a :: (t14 :: Type)) + Quantified = :1 + Kind = :0 + Type = :2 + + +Data +List + Quantified = :0 + Kind = :0 + + +Roles +List = [Representational] diff --git a/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.purs b/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.purs new file mode 100644 index 000000000..c39d2fd45 --- /dev/null +++ b/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.purs @@ -0,0 +1,11 @@ +module Main where + +class Empty f where + empty :: f Int + +instance Empty Array where + empty = [] + +newtype Wrapper a = Wrapper (Array a) + +derive newtype instance Empty Wrapper diff --git a/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.snap b/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.snap new file mode 100644 index 000000000..6cef90660 --- /dev/null +++ b/tests-integration/fixtures/checking/285_derive_newtype_higher_kinded/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +empty :: forall (f :: Type -> Type). Empty (f :: Type -> Type) => (f :: Type -> Type) Int +Wrapper :: forall (a :: Type). 
Array (a :: Type) -> Wrapper (a :: Type) + +Types +Empty :: (Type -> Type) -> Constraint +Wrapper :: Type -> Type + +Data +Wrapper + Quantified = :0 + Kind = :0 + + +Roles +Wrapper = [Representational] + +Classes +class Empty (f :: Type -> Type) + +Instances +instance Empty (Array :: Type -> Type) + chain: 0 + +Derived +derive Empty (Wrapper :: Type -> Type) diff --git a/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.purs b/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.purs new file mode 100644 index 000000000..6222d1a44 --- /dev/null +++ b/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.purs @@ -0,0 +1,13 @@ +module Main where + +class Empty f where + empty :: f Int + +instance Empty Array where + empty = [] + +newtype Vector n a = Vector (Array a) +derive newtype instance Empty (Vector n) + +newtype InvalidVector a n = InvalidVector (Array a) +derive newtype instance Empty (InvalidVector Int) diff --git a/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.snap b/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.snap new file mode 100644 index 000000000..8973c82f9 --- /dev/null +++ b/tests-integration/fixtures/checking/286_invalid_vector_newtype_derive/Main.snap @@ -0,0 +1,49 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +empty :: forall (f :: Type -> Type). Empty (f :: Type -> Type) => (f :: Type -> Type) Int +Vector :: + forall (t5 :: Type) (n :: (t5 :: Type)) (a :: Type). + Array (a :: Type) -> Vector @(t5 :: Type) (n :: (t5 :: Type)) (a :: Type) +InvalidVector :: + forall (t8 :: Type) (a :: Type) (n :: (t8 :: Type)). + Array (a :: Type) -> InvalidVector @(t8 :: Type) (a :: Type) (n :: (t8 :: Type)) + +Types +Empty :: (Type -> Type) -> Constraint +Vector :: forall (t5 :: Type). (t5 :: Type) -> Type -> Type +InvalidVector :: forall (t8 :: Type). 
Type -> (t8 :: Type) -> Type + +Data +Vector + Quantified = :1 + Kind = :0 + +InvalidVector + Quantified = :1 + Kind = :0 + + +Roles +Vector = [Phantom, Representational] +InvalidVector = [Representational, Phantom] + +Classes +class Empty (f :: Type -> Type) + +Instances +instance Empty (Array :: Type -> Type) + chain: 0 + +Derived +derive forall (t9 :: Type). Empty (Vector @(t9 :: Type) (n :: (t9 :: Type)) :: Type -> Type) + +Diagnostics +error[InvalidNewtypeDeriveSkolemArguments]: Cannot derive newtype instance where skolemised arguments do not appear trailing in the inner type. + --> 13:1..13:50 + | +13 | derive newtype instance Empty (InvalidVector Int) + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/287_lambda_partial/Main.purs b/tests-integration/fixtures/checking/287_lambda_partial/Main.purs new file mode 100644 index 000000000..b92323ac9 --- /dev/null +++ b/tests-integration/fixtures/checking/287_lambda_partial/Main.purs @@ -0,0 +1,11 @@ +module Main where + +import Partial.Unsafe (unsafePartial) + +data Maybe a = Just a | Nothing + +isJust = \(Just _) -> true +isNothing = \Nothing -> true + +unsafeIsJust = unsafePartial isJust +unsafeIsNothing = unsafePartial isNothing diff --git a/tests-integration/fixtures/checking/287_lambda_partial/Main.snap b/tests-integration/fixtures/checking/287_lambda_partial/Main.snap new file mode 100644 index 000000000..e4bbb736c --- /dev/null +++ b/tests-integration/fixtures/checking/287_lambda_partial/Main.snap @@ -0,0 +1,36 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +isJust :: forall (t4 :: Type). Partial => Maybe (t4 :: Type) -> Boolean +isNothing :: forall (t8 :: Type). Partial => Maybe (t8 :: Type) -> Boolean +unsafeIsJust :: forall (t12 :: Type). 
Maybe (t12 :: Type) -> Boolean +unsafeIsNothing :: forall (t16 :: Type). Maybe (t16 :: Type) -> Boolean + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 7:10..7:27 + | +7 | isJust = \(Just _) -> true + | ^~~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Just _ + --> 8:13..8:29 + | +8 | isNothing = \Nothing -> true + | ^~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.purs b/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.purs new file mode 100644 index 000000000..5500a8bf8 --- /dev/null +++ b/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.purs @@ -0,0 +1,21 @@ +module Main where + +import Partial.Unsafe (unsafePartial) + +data Maybe a = Just a | Nothing + +fromJust :: forall a. Partial => Maybe a -> a +fromJust (Just a) = a + +-- unsafePartial discharging Partial from an applied expression +test :: Int +test = unsafePartial (fromJust (Just 42)) + +test' = unsafePartial (fromJust (Just 42)) + +-- unsafePartial with partial application in map position +mapPartial :: (Int -> Boolean) -> Array Int -> Maybe Int +mapPartial = unsafePartial mapPartialImpl + where + mapPartialImpl :: Partial => (Int -> Boolean) -> Array Int -> Maybe Int + mapPartialImpl _ _ = Just 0 diff --git a/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.snap b/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.snap new file mode 100644 index 000000000..74e0ee7eb --- /dev/null +++ b/tests-integration/fixtures/checking/288_unsafe_partial_application/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). 
(a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +fromJust :: forall (a :: Type). Partial => Maybe (a :: Type) -> (a :: Type) +test :: Int +test' :: Int +mapPartial :: (Int -> Boolean) -> Array Int -> Maybe Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 7:1..7:46 + | +7 | fromJust :: forall a. Partial => Maybe a -> a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.purs b/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.purs new file mode 100644 index 000000000..3542131b4 --- /dev/null +++ b/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.purs @@ -0,0 +1,15 @@ +module Main where + +class MyConstraint :: Constraint +class MyConstraint + +constrained :: MyConstraint => Int +constrained = 42 + +removeConstraint :: forall a. (MyConstraint => a) -> a +removeConstraint x = x + +test :: Int +test = removeConstraint constrained + +test' = removeConstraint constrained diff --git a/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.snap b/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.snap new file mode 100644 index 000000000..08e1a5067 --- /dev/null +++ b/tests-integration/fixtures/checking/289_custom_constraint_discharge/Main.snap @@ -0,0 +1,23 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +constrained :: MyConstraint => Int +removeConstraint :: forall (a :: Type). 
(MyConstraint => (a :: Type)) -> (a :: Type) +test :: Int +test' :: Int + +Types +MyConstraint :: Constraint + +Classes +class MyConstraint + +Diagnostics +error[NoInstanceFound]: No instance found for: MyConstraint + --> 9:1..9:55 + | +9 | removeConstraint :: forall a. (MyConstraint => a) -> a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.purs b/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.purs new file mode 100644 index 000000000..4104cb667 --- /dev/null +++ b/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.purs @@ -0,0 +1,26 @@ +module Main where + +import Partial.Unsafe (unsafePartial) + +data Maybe a = Just a | Nothing + +fromJust :: forall a. Partial => Maybe a -> a +fromJust (Just a) = a + +deleteAt :: forall a. Int -> Array a -> Maybe (Array a) +deleteAt _ _ = Nothing + +apply :: forall a b. (a -> b) -> a -> b +apply f x = f x + +infixr 0 apply as $ + +-- apply ($) discharging Partial from a simple expression +test :: Int +test = unsafePartial $ fromJust (Just 42) + +test' = unsafePartial $ fromJust (Just 42) + +-- apply ($) discharging Partial from a more complex expression +test2 :: Array Int -> Array Int +test2 ys = unsafePartial $ fromJust (deleteAt 0 ys) diff --git a/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.snap b/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.snap new file mode 100644 index 000000000..901607398 --- /dev/null +++ b/tests-integration/fixtures/checking/290_apply_constraint_discharge/Main.snap @@ -0,0 +1,34 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +fromJust :: forall (a :: Type). Partial => Maybe (a :: Type) -> (a :: Type) +deleteAt :: forall (a :: Type). 
Int -> Array (a :: Type) -> Maybe (Array (a :: Type)) +apply :: forall (a :: Type) (b :: Type). ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (b :: Type) +$ :: forall (a :: Type) (b :: Type). ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (b :: Type) +test :: Int +test' :: Int +test2 :: Array Int -> Array Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 7:1..7:46 + | +7 | fromJust :: forall a. Partial => Maybe a -> a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.purs b/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.purs new file mode 100644 index 000000000..07cc6e7b4 --- /dev/null +++ b/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.purs @@ -0,0 +1,22 @@ +module Main where + +import Partial.Unsafe (unsafePartial) + +data Maybe a = Just a | Nothing + +fromJust :: forall a. Partial => Maybe a -> a +fromJust (Just a) = a + +compose :: forall a b c. (b -> c) -> (a -> b) -> a -> c +compose f g x = f (g x) + +infixr 9 compose as <<< + +toArray :: forall a. Maybe a -> Array a +toArray _ = [] + +-- compose (<<<) discharging Partial through operator chain +test :: forall a b. 
(Array a -> Maybe b) -> Maybe a -> b +test f = unsafePartial (fromJust <<< f <<< toArray) + +test' f = unsafePartial (fromJust <<< f <<< toArray) diff --git a/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.snap b/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.snap new file mode 100644 index 000000000..800943747 --- /dev/null +++ b/tests-integration/fixtures/checking/291_compose_constraint_discharge/Main.snap @@ -0,0 +1,41 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +fromJust :: forall (a :: Type). Partial => Maybe (a :: Type) -> (a :: Type) +compose :: + forall (a :: Type) (b :: Type) (c :: Type). + ((b :: Type) -> (c :: Type)) -> ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (c :: Type) +<<< :: + forall (a :: Type) (b :: Type) (c :: Type). + ((b :: Type) -> (c :: Type)) -> ((a :: Type) -> (b :: Type)) -> (a :: Type) -> (c :: Type) +toArray :: forall (a :: Type). Maybe (a :: Type) -> Array (a :: Type) +test :: + forall (a :: Type) (b :: Type). + (Array (a :: Type) -> Maybe (b :: Type)) -> Maybe (a :: Type) -> (b :: Type) +test' :: + forall (t24 :: Type) (t29 :: Type). + (Array (t29 :: Type) -> Maybe (t24 :: Type)) -> Maybe (t29 :: Type) -> (t24 :: Type) + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 7:1..7:46 + | +7 | fromJust :: forall a. 
Partial => Maybe a -> a + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Lib.purs b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Lib.purs new file mode 100644 index 000000000..d3e704d70 --- /dev/null +++ b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Lib.purs @@ -0,0 +1,18 @@ +module Lib where + +class Apply (f :: Type -> Type) where + apply :: forall a b. f (a -> b) -> f a -> f b + +class Functor (f :: Type -> Type) where + map :: forall a b. (a -> b) -> f a -> f b + +class Foldable (f :: Type -> Type) where + foldr :: forall a b. (a -> b -> b) -> b -> f a -> b + +data Fn2 a b c = Fn2 + +foreign import runFn2 :: forall a b c. Fn2 a b c -> a -> b -> c + +data Fn3 a b c d = Fn3 + +foreign import runFn3 :: forall a b c d. Fn3 a b c d -> a -> b -> c -> d diff --git a/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.purs b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.purs new file mode 100644 index 000000000..ad39437b5 --- /dev/null +++ b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.purs @@ -0,0 +1,27 @@ +module Main where + +import Lib (class Apply, apply, class Functor, map, class Foldable, foldr, Fn2, Fn3, runFn2, runFn3) + +-- When a class method like `apply` (Apply f => ...) is passed as an +-- argument to a function expecting a higher-rank type without constraints +-- (forall a' b'. m (a' -> b') -> m a' -> m b'), the constraint must be +-- peeled as a wanted rather than leaking into unification. Reproduces +-- the traverse1Impl pattern from Data.Array.NonEmpty.Internal. +foreign import impl3 + :: forall m a b + . Fn3 + (forall a' b'. (m (a' -> b') -> m a' -> m b')) + (forall a' b'. (a' -> b') -> m a' -> m b') + (a -> m b) + (m b) + +test :: forall m a b. 
Apply m => Functor m => (a -> m b) -> m b +test f = runFn3 impl3 apply map f + +-- Similar pattern with Foldable: fromFoldable = runFn2 impl2 foldr +foreign import impl2 + :: forall f a + . Fn2 (forall b. (a -> b -> b) -> b -> f a -> b) (f a) (Int) + +test2 :: forall f a. Foldable f => f a -> Int +test2 = runFn2 impl2 foldr diff --git a/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.snap b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.snap new file mode 100644 index 000000000..a5ca71aec --- /dev/null +++ b/tests-integration/fixtures/checking/292_higher_rank_constraint_discharge/Main.snap @@ -0,0 +1,39 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +impl3 :: + forall (m :: Type -> Type) (a :: Type) (b :: Type). + Fn3 @Type @Type @Type @Type + (forall (a' :: Type) (b' :: Type). + (m :: Type -> Type) ((a' :: Type) -> (b' :: Type)) -> + (m :: Type -> Type) (a' :: Type) -> + (m :: Type -> Type) (b' :: Type)) + (forall (a' :: Type) (b' :: Type). + ((a' :: Type) -> (b' :: Type)) -> + (m :: Type -> Type) (a' :: Type) -> + (m :: Type -> Type) (b' :: Type)) + ((a :: Type) -> (m :: Type -> Type) (b :: Type)) + ((m :: Type -> Type) (b :: Type)) +test :: + forall (m :: Type -> Type) (a :: Type) (b :: Type). + Apply (m :: Type -> Type) => + Functor (m :: Type -> Type) => + ((a :: Type) -> (m :: Type -> Type) (b :: Type)) -> (m :: Type -> Type) (b :: Type) +impl2 :: + forall (f :: Type -> Type) (a :: Type). + Fn2 @Type @Type @Type + (forall (b :: Type). + ((a :: Type) -> (b :: Type) -> (b :: Type)) -> + (b :: Type) -> + (f :: Type -> Type) (a :: Type) -> + (b :: Type)) + ((f :: Type -> Type) (a :: Type)) + Int +test2 :: + forall (f :: Type -> Type) (a :: Type). 
+ Foldable (f :: Type -> Type) => (f :: Type -> Type) (a :: Type) -> Int + +Types diff --git a/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.purs b/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.purs new file mode 100644 index 000000000..c8c99ad54 --- /dev/null +++ b/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.purs @@ -0,0 +1,27 @@ +module Main where + +import Data.Boolean (otherwise) + +foreign import lessThan :: Int -> Int -> Boolean + +test :: Int -> Int +test x = case x of + n + | lessThan n 0 -> 0 + | otherwise -> n + +test' x = case x of + n + | lessThan n 0 -> 0 + | otherwise -> n + +test2 :: Int -> Int +test2 x = case x of + n + | lessThan n 0 -> 0 + | true -> n + +test2' x = case x of + n + | lessThan n 0 -> 0 + | true -> n diff --git a/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.snap b/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.snap new file mode 100644 index 000000000..5d7d8f55a --- /dev/null +++ b/tests-integration/fixtures/checking/293_exhaustive_guards_otherwise_true/Main.snap @@ -0,0 +1,13 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +lessThan :: Int -> Int -> Boolean +test :: Int -> Int +test' :: Int -> Int +test2 :: Int -> Int +test2' :: Int -> Int + +Types diff --git a/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.purs b/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.purs new file mode 100644 index 000000000..ac020bb03 --- /dev/null +++ b/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.purs @@ -0,0 +1,16 @@ +module Main where + +data NonEmpty a = NonEmpty a (Array a) + +infixr 5 NonEmpty as :| + +test1 (x :| _) = x + +test2 (NonEmpty x _) = x + +data List a = Cons a (List a) | Nil + +infixr 5 Cons as : + +test3 = case _ of + (x 
: _) -> x diff --git a/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.snap b/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.snap new file mode 100644 index 000000000..da34b2bb6 --- /dev/null +++ b/tests-integration/fixtures/checking/294_exhaustive_operator_constructor/Main.snap @@ -0,0 +1,39 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +NonEmpty :: forall (a :: Type). (a :: Type) -> Array (a :: Type) -> NonEmpty (a :: Type) +:| :: forall (a :: Type). (a :: Type) -> Array (a :: Type) -> NonEmpty (a :: Type) +test1 :: forall (t5 :: Type). NonEmpty (t5 :: Type) -> (t5 :: Type) +test2 :: forall (t8 :: Type). NonEmpty (t8 :: Type) -> (t8 :: Type) +Cons :: forall (a :: Type). (a :: Type) -> List (a :: Type) -> List (a :: Type) +Nil :: forall (a :: Type). List (a :: Type) +: :: forall (a :: Type). (a :: Type) -> List (a :: Type) -> List (a :: Type) +test3 :: forall (t12 :: Type). Partial => List (t12 :: Type) -> (t12 :: Type) + +Types +NonEmpty :: Type -> Type +List :: Type -> Type + +Data +NonEmpty + Quantified = :0 + Kind = :0 + +List + Quantified = :0 + Kind = :0 + + +Roles +NonEmpty = [Representational] +List = [Representational] + +Diagnostics +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Nil + --> 15:9..16:15 + | +15 | test3 = case _ of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.purs b/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.purs new file mode 100644 index 000000000..2836f830a --- /dev/null +++ b/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.purs @@ -0,0 +1,28 @@ +module Main where + +import Control.Applicative (class Applicative, pure) +import Control.Bind (class Bind, bind) +import Control.Monad (class Monad) +import Control.Monad.Rec (class MonadRec, tailRecM) +import Data.Functor (class Functor, map) + +-- Where-binding uses `pure` with the outer MonadRec constraint. +-- The where-binding's type variable is only unified with the +-- outer skolem after its body is checked, so the constraint +-- solver must emit equalities for stuck given positions. +test :: forall m a. MonadRec m => a -> m a +test a = go a + where + go x = pure x + +-- needs Bind, via MonadRec => Monad => Bind +test2 :: forall m a. MonadRec m => m a -> m a +test2 ma = go ma + where + go x = bind x pure + +-- needs Functor, via MonadRec => Monad => Apply => Functor +test3 :: forall m. MonadRec m => m Int -> m Int +test3 mi = go mi + where + go x = map (\y -> y) x diff --git a/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.snap b/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.snap new file mode 100644 index 000000000..3ce205810 --- /dev/null +++ b/tests-integration/fixtures/checking/295_superclass_entailment_where_binding/Main.snap @@ -0,0 +1,18 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: + forall (m :: Type -> Type) (a :: Type). + MonadRec (m :: Type -> Type) => (a :: Type) -> (m :: Type -> Type) (a :: Type) +test2 :: + forall (m :: Type -> Type) (a :: Type). 
+ MonadRec (m :: Type -> Type) => + (m :: Type -> Type) (a :: Type) -> (m :: Type -> Type) (a :: Type) +test3 :: + forall (m :: Type -> Type). + MonadRec (m :: Type -> Type) => (m :: Type -> Type) Int -> (m :: Type -> Type) Int + +Types diff --git a/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.purs b/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.purs new file mode 100644 index 000000000..51a8a2416 --- /dev/null +++ b/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.purs @@ -0,0 +1,13 @@ +module Main where + +type Transform f g = forall a. f a -> g a + +infixr 4 type Transform as ~> + +data Box a = Box a + +unbox :: Box ~> Array +unbox (Box a) = [a] + +test :: Array Int +test = unbox (Box 1) diff --git a/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.snap b/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.snap new file mode 100644 index 000000000..3d503d7fd --- /dev/null +++ b/tests-integration/fixtures/checking/296_type_operator_synonym_in_application/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Box :: forall (a :: Type). (a :: Type) -> Box (a :: Type) +unbox :: Box ~> Array +test :: Array Int + +Types +Transform :: forall (t12 :: Type). ((t12 :: Type) -> Type) -> ((t12 :: Type) -> Type) -> Type +~> :: forall (t12 :: Type). ((t12 :: Type) -> Type) -> ((t12 :: Type) -> Type) -> Type +Box :: Type -> Type + +Synonyms +Transform = forall (t12 :: Type) (f :: (t12 :: Type) -> Type) (g :: (t12 :: Type) -> Type) (a :: (t12 :: Type)). 
+ (f :: (t12 :: Type) -> Type) (a :: (t12 :: Type)) -> + (g :: (t12 :: Type) -> Type) (a :: (t12 :: Type)) + Quantified = :1 + Kind = :0 + Type = :2 + + +Data +Box + Quantified = :0 + Kind = :0 + + +Roles +Box = [Representational] diff --git a/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.purs b/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.purs new file mode 100644 index 000000000..cc32af416 --- /dev/null +++ b/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.purs @@ -0,0 +1,28 @@ +module Main where + +import Data.Semigroupoid ((<<<)) + +data Maybe a = Just a | Nothing + +-- When `<<<` solves its type variable `p` to `Function`, the result +-- type is `Application(Application(Function, a), b)` rather than +-- the native `Function(a, b)`. This must be decomposed correctly +-- during function-application checking. + +class Foldable f where + foldMap :: forall a m. (a -> m) -> f a -> m + +class Foldable f <= FoldableWithIndex i f where + foldMapWithIndex :: forall a m. (i -> a -> m) -> f a -> m + foldlWithIndex :: forall a b. (i -> b -> a -> b) -> b -> f a -> b + foldrWithIndex :: forall a b. 
(i -> a -> b -> b) -> b -> f a -> b + +data NonEmpty f a = NonEmpty a (f a) + +instance Foldable f => Foldable (NonEmpty f) where + foldMap f (NonEmpty a fa) = f a + +instance FoldableWithIndex i f => FoldableWithIndex (Maybe i) (NonEmpty f) where + foldMapWithIndex f (NonEmpty a fa) = f Nothing a + foldlWithIndex f b (NonEmpty a fa) = foldlWithIndex (f <<< Just) (f Nothing b a) fa + foldrWithIndex f b (NonEmpty a fa) = f Nothing a (foldrWithIndex (f <<< Just) b fa) diff --git a/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.snap b/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.snap new file mode 100644 index 000000000..ae7b7c9ff --- /dev/null +++ b/tests-integration/fixtures/checking/297_applied_function_type_decomposition/Main.snap @@ -0,0 +1,63 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +foldMap :: + forall (f :: Type -> Type) (a :: Type) (m :: Type). + Foldable (f :: Type -> Type) => + ((a :: Type) -> (m :: Type)) -> (f :: Type -> Type) (a :: Type) -> (m :: Type) +foldMapWithIndex :: + forall (i :: Type) (f :: Type -> Type) (a :: Type) (m :: Type). + FoldableWithIndex (i :: Type) (f :: Type -> Type) => + ((i :: Type) -> (a :: Type) -> (m :: Type)) -> (f :: Type -> Type) (a :: Type) -> (m :: Type) +foldlWithIndex :: + forall (i :: Type) (f :: Type -> Type) (a :: Type) (b :: Type). + FoldableWithIndex (i :: Type) (f :: Type -> Type) => + ((i :: Type) -> (b :: Type) -> (a :: Type) -> (b :: Type)) -> + (b :: Type) -> + (f :: Type -> Type) (a :: Type) -> + (b :: Type) +foldrWithIndex :: + forall (i :: Type) (f :: Type -> Type) (a :: Type) (b :: Type). 
+ FoldableWithIndex (i :: Type) (f :: Type -> Type) => + ((i :: Type) -> (a :: Type) -> (b :: Type) -> (b :: Type)) -> + (b :: Type) -> + (f :: Type -> Type) (a :: Type) -> + (b :: Type) +NonEmpty :: + forall (f :: Type -> Type) (a :: Type). + (a :: Type) -> (f :: Type -> Type) (a :: Type) -> NonEmpty (f :: Type -> Type) (a :: Type) + +Types +Maybe :: Type -> Type +Foldable :: (Type -> Type) -> Constraint +FoldableWithIndex :: Type -> (Type -> Type) -> Constraint +NonEmpty :: (Type -> Type) -> Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + +NonEmpty + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] +NonEmpty = [Representational, Nominal] + +Classes +class Foldable (f :: Type -> Type) +class Foldable (f :: Type -> Type) <= FoldableWithIndex (i :: Type) (f :: Type -> Type) + +Instances +instance Foldable (f :: Type -> Type) => Foldable (NonEmpty (f :: Type -> Type) :: Type -> Type) + chain: 0 +instance FoldableWithIndex (i :: Type) (f :: Type -> Type) => FoldableWithIndex (Maybe (i :: Type) :: Type) (NonEmpty (f :: Type -> Type) :: Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.purs b/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.purs new file mode 100644 index 000000000..81bda1175 --- /dev/null +++ b/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.purs @@ -0,0 +1,12 @@ +module Main where + +import Control.Category (class Category, identity) + +-- Operator alias for a class method whose return type +-- is an applied type variable (a t t), not a Function. 
+infixl 4 identity as <<$>> + +test :: (Int -> Int) -> Int -> Int +test f x = f <<$>> x + +test' f x = f <<$>> x diff --git a/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.snap b/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.snap new file mode 100644 index 000000000..bd9bfb5e4 --- /dev/null +++ b/tests-integration/fixtures/checking/298_operator_alias_class_method/Main.snap @@ -0,0 +1,16 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +<<$>> :: + forall (t2 :: Type) (a :: (t2 :: Type) -> (t2 :: Type) -> Type) (t :: (t2 :: Type)). + Category (a :: (t2 :: Type) -> (t2 :: Type) -> Type) => + (a :: (t2 :: Type) -> (t2 :: Type) -> Type) (t :: (t2 :: Type)) (t :: (t2 :: Type)) +test :: (Int -> Int) -> Int -> Int +test' :: + forall (t8 :: Type) (t12 :: Type). + ((t12 :: Type) -> (t8 :: Type)) -> (t12 :: Type) -> (t8 :: Type) + +Types diff --git a/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.purs b/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.purs new file mode 100644 index 000000000..b7b49d4b6 --- /dev/null +++ b/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.purs @@ -0,0 +1,11 @@ +module Main where + +import Data.Eq (class Eq) + +data DurationComponent = Hours | Minutes | Seconds + +data Duration = Duration DurationComponent Int + +-- Eq Duration depends on Eq DurationComponent, which is derived later. 
+derive instance Eq Duration +derive instance Eq DurationComponent diff --git a/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.snap b/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.snap new file mode 100644 index 000000000..94f3ae4f9 --- /dev/null +++ b/tests-integration/fixtures/checking/299_derive_mutual_visibility_same_module/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Hours :: DurationComponent +Minutes :: DurationComponent +Seconds :: DurationComponent +Duration :: DurationComponent -> Int -> Duration + +Types +DurationComponent :: Type +Duration :: Type + +Data +DurationComponent + Quantified = :0 + Kind = :0 + +Duration + Quantified = :0 + Kind = :0 + + +Roles +DurationComponent = [] +Duration = [] + +Derived +derive Eq (Duration :: Type) +derive Eq (DurationComponent :: Type) diff --git a/tests-integration/fixtures/checking/300_instance_shift_variables/Main.purs b/tests-integration/fixtures/checking/300_instance_shift_variables/Main.purs new file mode 100644 index 000000000..41f0caab3 --- /dev/null +++ b/tests-integration/fixtures/checking/300_instance_shift_variables/Main.purs @@ -0,0 +1,9 @@ +module Main where + +import Data.Functor (class Functor, map) + +newtype Wrap :: forall k. 
Type -> (k -> Type) -> k -> Type +newtype Wrap e w a = Wrap (w a) + +instance Functor w => Functor (Wrap e w) where + map f (Wrap x) = Wrap (map f x) diff --git a/tests-integration/fixtures/checking/300_instance_shift_variables/Main.snap b/tests-integration/fixtures/checking/300_instance_shift_variables/Main.snap new file mode 100644 index 000000000..212a7bfdd --- /dev/null +++ b/tests-integration/fixtures/checking/300_instance_shift_variables/Main.snap @@ -0,0 +1,26 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Wrap :: + forall (k :: Type) (e :: Type) (w :: (k :: Type) -> Type) (a :: (k :: Type)). + (w :: (k :: Type) -> Type) (a :: (k :: Type)) -> + Wrap @(k :: Type) (e :: Type) (w :: (k :: Type) -> Type) (a :: (k :: Type)) + +Types +Wrap :: forall (k :: Type). Type -> ((k :: Type) -> Type) -> (k :: Type) -> Type + +Data +Wrap + Quantified = :0 + Kind = :1 + + +Roles +Wrap = [Phantom, Representational, Nominal] + +Instances +instance Functor (w :: Type -> Type) => Functor (Wrap @Type (e :: Type) (w :: Type -> Type) :: Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/301_coercible_symmetry/Main.purs b/tests-integration/fixtures/checking/301_coercible_symmetry/Main.purs new file mode 100644 index 000000000..f197ac855 --- /dev/null +++ b/tests-integration/fixtures/checking/301_coercible_symmetry/Main.purs @@ -0,0 +1,10 @@ +module Main where + +import Data.Newtype (class Newtype, wrap) + +newtype Age = Age Int + +derive instance Newtype Age _ + +wrapAge :: Int -> Age +wrapAge = wrap diff --git a/tests-integration/fixtures/checking/301_coercible_symmetry/Main.snap b/tests-integration/fixtures/checking/301_coercible_symmetry/Main.snap new file mode 100644 index 000000000..fe88ee232 --- /dev/null +++ b/tests-integration/fixtures/checking/301_coercible_symmetry/Main.snap @@ -0,0 +1,23 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: 
report +--- +Terms +Age :: Int -> Age +wrapAge :: Int -> Age + +Types +Age :: Type + +Data +Age + Quantified = :0 + Kind = :0 + + +Roles +Age = [] + +Derived +derive Newtype (Age :: Type) (Int :: Type) diff --git a/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.purs b/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.purs new file mode 100644 index 000000000..e34a355cc --- /dev/null +++ b/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.purs @@ -0,0 +1,29 @@ +module Main where + +import Safe.Coerce (class Coercible, coerce) +import Data.Newtype (class Newtype) + +newtype Age = Age Int + +derive instance Newtype Age _ + +coerceFn :: (Age -> Int) -> (Int -> Age) +coerceFn = coerce + +over :: forall t a s b. Newtype t a => Newtype s b => (a -> t) -> (a -> b) -> t -> s +over _ = coerce + +under :: forall t a s b. Newtype t a => Newtype s b => (a -> t) -> (t -> s) -> a -> b +under _ = coerce + +alaF + :: forall f g t a s b + . Coercible (f t) (f a) + => Coercible (g s) (g b) + => Newtype t a + => Newtype s b + => (a -> t) + -> (f t -> g s) + -> f a + -> g b +alaF _ = coerce diff --git a/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.snap b/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.snap new file mode 100644 index 000000000..663869e59 --- /dev/null +++ b/tests-integration/fixtures/checking/302_coercible_function_decomposition/Main.snap @@ -0,0 +1,46 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Age :: Int -> Age +coerceFn :: (Age -> Int) -> Int -> Age +over :: + forall (t :: Type) (a :: Type) (s :: Type) (b :: Type). 
+ Newtype @Type (t :: Type) (a :: Type) => + Newtype @Type (s :: Type) (b :: Type) => + ((a :: Type) -> (t :: Type)) -> ((a :: Type) -> (b :: Type)) -> (t :: Type) -> (s :: Type) +under :: + forall (t :: Type) (a :: Type) (s :: Type) (b :: Type). + Newtype @Type (t :: Type) (a :: Type) => + Newtype @Type (s :: Type) (b :: Type) => + ((a :: Type) -> (t :: Type)) -> ((t :: Type) -> (s :: Type)) -> (a :: Type) -> (b :: Type) +alaF :: + forall (t37 :: Type) (f :: Type -> Type) (g :: (t37 :: Type) -> Type) (t :: Type) (a :: Type) + (s :: (t37 :: Type)) (b :: (t37 :: Type)). + Coercible @Type ((f :: Type -> Type) (t :: Type)) ((f :: Type -> Type) (a :: Type)) => + Coercible @Type + ((g :: (t37 :: Type) -> Type) (s :: (t37 :: Type))) + ((g :: (t37 :: Type) -> Type) (b :: (t37 :: Type))) => + Newtype @Type (t :: Type) (a :: Type) => + Newtype @(t37 :: Type) (s :: (t37 :: Type)) (b :: (t37 :: Type)) => + ((a :: Type) -> (t :: Type)) -> + ((f :: Type -> Type) (t :: Type) -> (g :: (t37 :: Type) -> Type) (s :: (t37 :: Type))) -> + (f :: Type -> Type) (a :: Type) -> + (g :: (t37 :: Type) -> Type) (b :: (t37 :: Type)) + +Types +Age :: Type + +Data +Age + Quantified = :0 + Kind = :0 + + +Roles +Age = [] + +Derived +derive Newtype (Age :: Type) (Int :: Type) diff --git a/tests-integration/fixtures/checking/303_instance_given_constraint/Main.purs b/tests-integration/fixtures/checking/303_instance_given_constraint/Main.purs new file mode 100644 index 000000000..0a414e647 --- /dev/null +++ b/tests-integration/fixtures/checking/303_instance_given_constraint/Main.purs @@ -0,0 +1,24 @@ +module Main where + +import Data.Functor (class Functor) + +-- Class members using a type synonym with forall, combined with +-- fundeps and given constraints from the instance context. + +type Transform :: (Type -> Type) -> (Type -> Type) -> Type +type Transform f g = forall a. 
f a -> g a + +infixr 4 type Transform as ~> + +class (Functor m, Functor f) <= Parallel (f :: Type -> Type) (m :: Type -> Type) | m -> f, f -> m where + parallel :: m ~> f + sequential :: f ~> m + +newtype ReaderT r (m :: Type -> Type) a = ReaderT (r -> m a) + +mapReaderT :: forall r m n a b. (m a -> n b) -> ReaderT r m a -> ReaderT r n b +mapReaderT f (ReaderT g) = ReaderT (\r -> f (g r)) + +instance (Parallel f m) => Parallel (ReaderT e f) (ReaderT e m) where + parallel = mapReaderT parallel + sequential = mapReaderT sequential diff --git a/tests-integration/fixtures/checking/303_instance_given_constraint/Main.snap b/tests-integration/fixtures/checking/303_instance_given_constraint/Main.snap new file mode 100644 index 000000000..736dfd759 --- /dev/null +++ b/tests-integration/fixtures/checking/303_instance_given_constraint/Main.snap @@ -0,0 +1,51 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +parallel :: + forall (f :: Type -> Type) (m :: Type -> Type). + Parallel (f :: Type -> Type) (m :: Type -> Type) => (m :: Type -> Type) ~> (f :: Type -> Type) +sequential :: + forall (f :: Type -> Type) (m :: Type -> Type). + Parallel (f :: Type -> Type) (m :: Type -> Type) => (f :: Type -> Type) ~> (m :: Type -> Type) +ReaderT :: + forall (r :: Type) (m :: Type -> Type) (a :: Type). + ((r :: Type) -> (m :: Type -> Type) (a :: Type)) -> + ReaderT (r :: Type) (m :: Type -> Type) (a :: Type) +mapReaderT :: + forall (r :: Type) (m :: Type -> Type) (n :: Type -> Type) (a :: Type) (b :: Type). 
+ ((m :: Type -> Type) (a :: Type) -> (n :: Type -> Type) (b :: Type)) -> + ReaderT (r :: Type) (m :: Type -> Type) (a :: Type) -> + ReaderT (r :: Type) (n :: Type -> Type) (b :: Type) + +Types +Transform :: (Type -> Type) -> (Type -> Type) -> Type +~> :: (Type -> Type) -> (Type -> Type) -> Type +Parallel :: (Type -> Type) -> (Type -> Type) -> Constraint +ReaderT :: Type -> (Type -> Type) -> Type -> Type + +Synonyms +Transform = forall (f :: Type -> Type) (g :: Type -> Type) (a :: Type). + (f :: Type -> Type) (a :: Type) -> (g :: Type -> Type) (a :: Type) + Quantified = :0 + Kind = :0 + Type = :2 + + +Data +ReaderT + Quantified = :0 + Kind = :0 + + +Roles +ReaderT = [Representational, Representational, Nominal] + +Classes +class Functor (m :: Type -> Type), Functor (f :: Type -> Type) <= Parallel (f :: Type -> Type) (m :: Type -> Type) + +Instances +instance Parallel (f :: Type -> Type) (m :: Type -> Type) => Parallel (ReaderT (e :: Type) (f :: Type -> Type) :: Type -> Type) (ReaderT (e :: Type) (m :: Type -> Type) :: Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/305_type_operator_unification/Main.purs b/tests-integration/fixtures/checking/305_type_operator_unification/Main.purs new file mode 100644 index 000000000..2b112ff83 --- /dev/null +++ b/tests-integration/fixtures/checking/305_type_operator_unification/Main.purs @@ -0,0 +1,11 @@ +module Main where + +data Either a b = Left a | Right b + +infixr 6 type Either as \/ + +in1 :: forall a z. a -> a \/ z +in1 = Left + +in2 :: forall a b z. 
b -> a \/ b \/ z +in2 v = Right (Left v) diff --git a/tests-integration/fixtures/checking/305_type_operator_unification/Main.snap b/tests-integration/fixtures/checking/305_type_operator_unification/Main.snap new file mode 100644 index 000000000..54fe134d2 --- /dev/null +++ b/tests-integration/fixtures/checking/305_type_operator_unification/Main.snap @@ -0,0 +1,25 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Left :: forall (a :: Type) (b :: Type). (a :: Type) -> Either (a :: Type) (b :: Type) +Right :: forall (a :: Type) (b :: Type). (b :: Type) -> Either (a :: Type) (b :: Type) +in1 :: forall (a :: Type) (z :: Type). (a :: Type) -> (a :: Type) \/ (z :: Type) +in2 :: + forall (a :: Type) (b :: Type) (z :: Type). + (b :: Type) -> (a :: Type) \/ (b :: Type) \/ (z :: Type) + +Types +Either :: Type -> Type -> Type +\/ :: Type -> Type -> Type + +Data +Either + Quantified = :0 + Kind = :0 + + +Roles +Either = [Representational, Representational] diff --git a/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.purs b/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.purs new file mode 100644 index 000000000..53105fa87 --- /dev/null +++ b/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.purs @@ -0,0 +1,11 @@ +module Main where + +import Data.Newtype (class Newtype, unwrap) + +newtype Endo :: forall k. (k -> k -> Type) -> k -> Type +newtype Endo c a = Endo (c a a) + +instance Newtype (Endo c a) (c a a) + +test :: forall b. 
Endo Function b -> b -> b +test x = unwrap x diff --git a/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.snap b/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.snap new file mode 100644 index 000000000..cc7e2a340 --- /dev/null +++ b/tests-integration/fixtures/checking/306_kind_application_instance_matching/Main.snap @@ -0,0 +1,27 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Endo :: + forall (k :: Type) (c :: (k :: Type) -> (k :: Type) -> Type) (a :: (k :: Type)). + (c :: (k :: Type) -> (k :: Type) -> Type) (a :: (k :: Type)) (a :: (k :: Type)) -> + Endo @(k :: Type) (c :: (k :: Type) -> (k :: Type) -> Type) (a :: (k :: Type)) +test :: forall (b :: Type). Endo @Type Function (b :: Type) -> (b :: Type) -> (b :: Type) + +Types +Endo :: forall (k :: Type). ((k :: Type) -> (k :: Type) -> Type) -> (k :: Type) -> Type + +Data +Endo + Quantified = :0 + Kind = :1 + + +Roles +Endo = [Representational, Nominal] + +Instances +instance forall (t4 :: Type). Newtype (Endo @(t4 :: Type) (c :: (t4 :: Type) -> (t4 :: Type) -> Type) (a :: (t4 :: Type)) :: Type) ((c :: (t4 :: Type) -> (t4 :: Type) -> Type) (a :: (t4 :: Type)) (a :: (t4 :: Type)) :: Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/307_where_let_interaction/Main.purs b/tests-integration/fixtures/checking/307_where_let_interaction/Main.purs new file mode 100644 index 000000000..5975571f0 --- /dev/null +++ b/tests-integration/fixtures/checking/307_where_let_interaction/Main.purs @@ -0,0 +1,18 @@ +module Main where + +class MyClass a where + method :: a -> Int + +instance MyClass Int where + method _ = 42 + +test :: forall a. MyClass a => a -> Int +test _ = + let go x = method x + in go 42 + +test2 :: forall a. 
MyClass a => a -> Int +test2 _ = + let go :: _ -> _ + go x = method x + in go 42 diff --git a/tests-integration/fixtures/checking/307_where_let_interaction/Main.snap b/tests-integration/fixtures/checking/307_where_let_interaction/Main.snap new file mode 100644 index 000000000..09c47702f --- /dev/null +++ b/tests-integration/fixtures/checking/307_where_let_interaction/Main.snap @@ -0,0 +1,19 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +method :: forall (a :: Type). MyClass (a :: Type) => (a :: Type) -> Int +test :: forall (a :: Type). MyClass (a :: Type) => (a :: Type) -> Int +test2 :: forall (a :: Type). MyClass (a :: Type) => (a :: Type) -> Int + +Types +MyClass :: Type -> Constraint + +Classes +class MyClass (a :: Type) + +Instances +instance MyClass (Int :: Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.purs b/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.purs new file mode 100644 index 000000000..8a627f956 --- /dev/null +++ b/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.purs @@ -0,0 +1,17 @@ +module Main where + +class MyClass a where + method :: a -> Int + +instance MyClass Int where + method _ = 42 + +test :: forall a. MyClass a => a -> Int +test x = + let + bar y = method y + + baz :: MyClass Int => Int + baz = method 42 + in + bar x diff --git a/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.snap b/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.snap new file mode 100644 index 000000000..2047c008a --- /dev/null +++ b/tests-integration/fixtures/checking/308_let_constraint_scoping/Main.snap @@ -0,0 +1,18 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +method :: forall (a :: Type). MyClass (a :: Type) => (a :: Type) -> Int +test :: forall (a :: Type). 
MyClass (a :: Type) => (a :: Type) -> Int + +Types +MyClass :: Type -> Constraint + +Classes +class MyClass (a :: Type) + +Instances +instance MyClass (Int :: Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.purs b/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.purs new file mode 100644 index 000000000..7b0132c9b --- /dev/null +++ b/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.purs @@ -0,0 +1,9 @@ +module Main where + +data Box (f :: Type -> Type) (a :: Type) = Box (f a) + +type Wrap :: (Type -> Type) -> Type -> Type +type Wrap f = Box f + +test :: forall f. Wrap f Int -> Wrap f Int +test x = x diff --git a/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.snap b/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.snap new file mode 100644 index 000000000..6d4c97e13 --- /dev/null +++ b/tests-integration/fixtures/checking/309_synonym_function_result_kind/Main.snap @@ -0,0 +1,30 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Box :: + forall (f :: Type -> Type) (a :: Type). + (f :: Type -> Type) (a :: Type) -> Box (f :: Type -> Type) (a :: Type) +test :: forall (f :: Type -> Type). Wrap (f :: Type -> Type) Int -> Wrap (f :: Type -> Type) Int + +Types +Box :: (Type -> Type) -> Type -> Type +Wrap :: (Type -> Type) -> Type -> Type + +Synonyms +Wrap = forall (f :: Type -> Type). 
Box (f :: Type -> Type) + Quantified = :0 + Kind = :0 + Type = :1 + + +Data +Box + Quantified = :0 + Kind = :0 + + +Roles +Box = [Representational, Nominal] diff --git a/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.purs b/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.purs new file mode 100644 index 000000000..9a8c39a56 --- /dev/null +++ b/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.purs @@ -0,0 +1,6 @@ +module Main where + +type NatTrans f g = forall a. f a -> g a + +apply :: forall f g. NatTrans f g -> f Int -> g Int +apply nat fa = nat fa diff --git a/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.snap b/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.snap new file mode 100644 index 000000000..cd7455473 --- /dev/null +++ b/tests-integration/fixtures/checking/310_synonym_forall_expansion/Main.snap @@ -0,0 +1,22 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +apply :: + forall (f :: Type -> Type) (g :: Type -> Type). + NatTrans (f :: Type -> Type) (g :: Type -> Type) -> + (f :: Type -> Type) Int -> + (g :: Type -> Type) Int + +Types +NatTrans :: forall (t12 :: Type). ((t12 :: Type) -> Type) -> ((t12 :: Type) -> Type) -> Type + +Synonyms +NatTrans = forall (t12 :: Type) (f :: (t12 :: Type) -> Type) (g :: (t12 :: Type) -> Type) (a :: (t12 :: Type)). 
+ (f :: (t12 :: Type) -> Type) (a :: (t12 :: Type)) -> + (g :: (t12 :: Type) -> Type) (a :: (t12 :: Type)) + Quantified = :1 + Kind = :0 + Type = :2 diff --git a/tests-integration/fixtures/checking/311_prim_qualified/Main.purs b/tests-integration/fixtures/checking/311_prim_qualified/Main.purs new file mode 100644 index 000000000..51cfc9317 --- /dev/null +++ b/tests-integration/fixtures/checking/311_prim_qualified/Main.purs @@ -0,0 +1,3 @@ +module Main where + +foreign import data Test :: Array Prim.Int diff --git a/tests-integration/fixtures/checking/311_prim_qualified/Main.snap b/tests-integration/fixtures/checking/311_prim_qualified/Main.snap new file mode 100644 index 000000000..e0d9f9494 --- /dev/null +++ b/tests-integration/fixtures/checking/311_prim_qualified/Main.snap @@ -0,0 +1,12 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +Test :: Array Int + +Roles +Test = [] diff --git a/tests-integration/fixtures/checking/312_prim_qualified_override/Main.purs b/tests-integration/fixtures/checking/312_prim_qualified_override/Main.purs new file mode 100644 index 000000000..1a0883284 --- /dev/null +++ b/tests-integration/fixtures/checking/312_prim_qualified_override/Main.purs @@ -0,0 +1,5 @@ +module Main where + +import Prim (String) as Prim + +foreign import data Test :: Array Prim.Int diff --git a/tests-integration/fixtures/checking/312_prim_qualified_override/Main.snap b/tests-integration/fixtures/checking/312_prim_qualified_override/Main.snap new file mode 100644 index 000000000..4e741bdf6 --- /dev/null +++ b/tests-integration/fixtures/checking/312_prim_qualified_override/Main.snap @@ -0,0 +1,24 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +Test :: Array ??? 
+ +Roles +Test = [] + +Diagnostics +error[NotInScope]: 'Prim.Int' is not in scope + --> 5:35..5:43 + | +5 | foreign import data Test :: Array Prim.Int + | ^~~~~~~~ +error[CannotUnify]: Cannot unify '???' with 'Type' + --> 5:35..5:43 + | +5 | foreign import data Test :: Array Prim.Int + | ^~~~~~~~ diff --git a/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.purs b/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.purs new file mode 100644 index 000000000..fbcb6fb62 --- /dev/null +++ b/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.purs @@ -0,0 +1,14 @@ +module Main where + +class Generate f where + generate :: forall a b. (b -> a) -> b -> f a + +data List a = Nil | Cons a + +instance Generate List where + generate f b = Cons (f b) + +test :: Int -> List Int +test start + | true = generate (\x -> x) start + | true = generate (\x -> x) start diff --git a/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.snap b/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.snap new file mode 100644 index 000000000..8b9bc3333 --- /dev/null +++ b/tests-integration/fixtures/checking/313_guarded_constraint_propagation/Main.snap @@ -0,0 +1,33 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +generate :: + forall (f :: Type -> Type) (a :: Type) (b :: Type). + Generate (f :: Type -> Type) => + ((b :: Type) -> (a :: Type)) -> (b :: Type) -> (f :: Type -> Type) (a :: Type) +Nil :: forall (a :: Type). List (a :: Type) +Cons :: forall (a :: Type). 
(a :: Type) -> List (a :: Type) +test :: Int -> List Int + +Types +Generate :: (Type -> Type) -> Constraint +List :: Type -> Type + +Data +List + Quantified = :0 + Kind = :0 + + +Roles +List = [Representational] + +Classes +class Generate (f :: Type -> Type) + +Instances +instance Generate (List :: Type -> Type) + chain: 0 diff --git a/tests-integration/fixtures/checking/314_derive_newtype_function/Main.purs b/tests-integration/fixtures/checking/314_derive_newtype_function/Main.purs new file mode 100644 index 000000000..4fe996dd8 --- /dev/null +++ b/tests-integration/fixtures/checking/314_derive_newtype_function/Main.purs @@ -0,0 +1,9 @@ +module Main where + +import Data.Semigroupoid (class Semigroupoid) +import Control.Category (class Category) + +newtype Builder a b = Builder (a -> b) + +derive newtype instance Semigroupoid Builder +derive newtype instance Category Builder diff --git a/tests-integration/fixtures/checking/314_derive_newtype_function/Main.snap b/tests-integration/fixtures/checking/314_derive_newtype_function/Main.snap new file mode 100644 index 000000000..47fb4186a --- /dev/null +++ b/tests-integration/fixtures/checking/314_derive_newtype_function/Main.snap @@ -0,0 +1,24 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Builder :: + forall (a :: Type) (b :: Type). 
((a :: Type) -> (b :: Type)) -> Builder (a :: Type) (b :: Type) + +Types +Builder :: Type -> Type -> Type + +Data +Builder + Quantified = :0 + Kind = :0 + + +Roles +Builder = [Representational, Representational] + +Derived +derive Semigroupoid (Builder :: Type -> Type -> Type) +derive Category (Builder :: Type -> Type -> Type) diff --git a/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.purs b/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.purs new file mode 100644 index 000000000..670241154 --- /dev/null +++ b/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.purs @@ -0,0 +1,24 @@ +module Main where + +import Data.Eq (class Eq) + +eq :: forall a. Eq a => a -> a -> Boolean +eq _ _ = true + +conj :: Boolean -> Boolean -> Boolean +conj _ _ = true + +infix 4 eq as == +infixr 3 conj as && + +-- Single operator: should work +test1 :: Int -> Boolean +test1 h = h == 2 + +-- Single &&: should work +test2 :: Boolean +test2 = true && true + +-- Mixed chain: the failing case +test3 :: Int -> Int -> Boolean +test3 h rh = h == 2 && rh == 1 diff --git a/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.snap b/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.snap new file mode 100644 index 000000000..a6b303614 --- /dev/null +++ b/tests-integration/fixtures/checking/315_operator_chain_mixed_fixity/Main.snap @@ -0,0 +1,15 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +eq :: forall (a :: Type). Eq (a :: Type) => (a :: Type) -> (a :: Type) -> Boolean +conj :: Boolean -> Boolean -> Boolean +== :: forall (a :: Type). 
Eq (a :: Type) => (a :: Type) -> (a :: Type) -> Boolean +&& :: Boolean -> Boolean -> Boolean +test1 :: Int -> Boolean +test2 :: Boolean +test3 :: Int -> Int -> Boolean + +Types diff --git a/tests-integration/fixtures/checking/316_synonym_derive/Main.purs b/tests-integration/fixtures/checking/316_synonym_derive/Main.purs new file mode 100644 index 000000000..5fc20cd0c --- /dev/null +++ b/tests-integration/fixtures/checking/316_synonym_derive/Main.purs @@ -0,0 +1,12 @@ +module Main where + +import Data.Eq (class Eq) + +foreign import data State :: Type + +instance Eq State where + eq _ _ = true + +newtype Test = Test State + +derive newtype instance Eq Test diff --git a/tests-integration/fixtures/checking/316_synonym_derive/Main.snap b/tests-integration/fixtures/checking/316_synonym_derive/Main.snap new file mode 100644 index 000000000..f698b2914 --- /dev/null +++ b/tests-integration/fixtures/checking/316_synonym_derive/Main.snap @@ -0,0 +1,28 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Test :: State -> Test + +Types +State :: Type +Test :: Type + +Data +Test + Quantified = :0 + Kind = :0 + + +Roles +State = [] +Test = [] + +Instances +instance Eq (State :: Type) + chain: 0 + +Derived +derive Eq (Test :: Type) diff --git a/tests-integration/fixtures/checking/317_higher_rank_fields/Main.purs b/tests-integration/fixtures/checking/317_higher_rank_fields/Main.purs new file mode 100644 index 000000000..d88ac11bd --- /dev/null +++ b/tests-integration/fixtures/checking/317_higher_rank_fields/Main.purs @@ -0,0 +1,9 @@ +module Main where + +type Test = { identity :: forall a. 
a -> a } + +test1 :: Test -> Int +test1 t = t.identity 42 + +test2 :: Test -> Int +test2 { identity } = identity 42 diff --git a/tests-integration/fixtures/checking/317_higher_rank_fields/Main.snap b/tests-integration/fixtures/checking/317_higher_rank_fields/Main.snap new file mode 100644 index 000000000..290639f67 --- /dev/null +++ b/tests-integration/fixtures/checking/317_higher_rank_fields/Main.snap @@ -0,0 +1,17 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test1 :: { identity :: forall (a :: Type). (a :: Type) -> (a :: Type) } -> Int +test2 :: { identity :: forall (a :: Type). (a :: Type) -> (a :: Type) } -> Int + +Types +Test :: Type + +Synonyms +Test = { identity :: forall (a :: Type). (a :: Type) -> (a :: Type) } + Quantified = :0 + Kind = :0 + Type = :0 diff --git a/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.purs b/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.purs new file mode 100644 index 000000000..d08c40393 --- /dev/null +++ b/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.purs @@ -0,0 +1,32 @@ +module Main where + +foreign import unsafeCoerce :: forall a b. a -> b + +data Query input a = Query input a +data HM state m a = HM state (m a) +data Solid input m + +type Spec state input m = + { eval :: forall a. Query input a -> HM state m a + } + +mkSpec + :: forall state input m + . Spec state input m + -> Solid input m +mkSpec = unsafeCoerce + +unSpec + :: forall input m a + . (forall state. Spec state input m -> a) + -> Solid input m + -> a +unSpec = unsafeCoerce + +hoist + :: forall input m + . 
Solid input m + -> Solid input m +hoist = unSpec \c -> mkSpec + { eval: c.eval + } diff --git a/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.snap b/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.snap new file mode 100644 index 000000000..590cebd88 --- /dev/null +++ b/tests-integration/fixtures/checking/322_phantom_kind_inference/Main.snap @@ -0,0 +1,66 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unsafeCoerce :: forall (a :: Type) (b :: Type). (a :: Type) -> (b :: Type) +Query :: + forall (input :: Type) (a :: Type). + (input :: Type) -> (a :: Type) -> Query (input :: Type) (a :: Type) +HM :: + forall (t7 :: Type) (state :: Type) (m :: (t7 :: Type) -> Type) (a :: (t7 :: Type)). + (state :: Type) -> + (m :: (t7 :: Type) -> Type) (a :: (t7 :: Type)) -> + HM @(t7 :: Type) (state :: Type) (m :: (t7 :: Type) -> Type) (a :: (t7 :: Type)) +mkSpec :: + forall (state :: Type) (input :: Type) (m :: Type -> Type). + Spec (state :: Type) (input :: Type) (m :: Type -> Type) -> + Solid @Type @(Type -> Type) (input :: Type) (m :: Type -> Type) +unSpec :: + forall (input :: Type) (m :: Type -> Type) (a :: Type). + (forall (state :: Type). + Spec (state :: Type) (input :: Type) (m :: Type -> Type) -> (a :: Type)) -> + Solid @Type @(Type -> Type) (input :: Type) (m :: Type -> Type) -> + (a :: Type) +hoist :: + forall (input :: Type) (m :: Type -> Type). + Solid @Type @(Type -> Type) (input :: Type) (m :: Type -> Type) -> + Solid @Type @(Type -> Type) (input :: Type) (m :: Type -> Type) + +Types +Query :: Type -> Type -> Type +HM :: forall (t7 :: Type). Type -> ((t7 :: Type) -> Type) -> (t7 :: Type) -> Type +Solid :: forall (t9 :: Type) (t10 :: Type). (t9 :: Type) -> (t10 :: Type) -> Type +Spec :: Type -> Type -> (Type -> Type) -> Type + +Synonyms +Spec = forall (state :: Type) (input :: Type) (m :: Type -> Type). + { eval :: + forall (a :: Type). 
+ Query (input :: Type) (a :: Type) -> + HM @Type (state :: Type) (m :: Type -> Type) (a :: Type) + } + Quantified = :0 + Kind = :0 + Type = :3 + + +Data +Query + Quantified = :0 + Kind = :0 + +HM + Quantified = :1 + Kind = :0 + +Solid + Quantified = :2 + Kind = :0 + + +Roles +Query = [Representational, Representational] +HM = [Representational, Representational, Nominal] +Solid = [Phantom, Phantom] diff --git a/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.purs b/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.purs new file mode 100644 index 000000000..54e17173b --- /dev/null +++ b/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.purs @@ -0,0 +1,16 @@ +module Main where + +foreign import data Component :: forall k. k -> Type + +data ForceTypeType :: (Type -> Type) -> Type +data ForceTypeType f = ForceTypeType + +knownInEquation :: forall m. Component m -> Component m -> Int +knownInEquation _ _ = + let + forced :: ForceTypeType m + forced = ForceTypeType + in + 42 + +infix 4 knownInEquation as +++ diff --git a/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.snap b/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.snap new file mode 100644 index 000000000..3b71782b8 --- /dev/null +++ b/tests-integration/fixtures/checking/323_operator_deferred_generalise/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +ForceTypeType :: forall (f :: Type -> Type). ForceTypeType (f :: Type -> Type) +knownInEquation :: + forall (m :: Type -> Type). + Component @(Type -> Type) (m :: Type -> Type) -> + Component @(Type -> Type) (m :: Type -> Type) -> + Int ++++ :: + forall (m :: Type -> Type). + Component @(Type -> Type) (m :: Type -> Type) -> + Component @(Type -> Type) (m :: Type -> Type) -> + Int + +Types +Component :: forall (k :: Type). 
(k :: Type) -> Type +ForceTypeType :: (Type -> Type) -> Type + +Data +ForceTypeType + Quantified = :0 + Kind = :0 + + +Roles +Component = [Nominal] +ForceTypeType = [Phantom] diff --git a/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.purs b/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.purs new file mode 100644 index 000000000..be5caf0bd --- /dev/null +++ b/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.purs @@ -0,0 +1,11 @@ +module Main where + +data Identity :: forall k. (k -> k) -> Type +data Identity a = Identity + +foreign import fn :: forall k. Identity k -> Identity k + +-- Foreign values must be generalised immediately +-- to avoid solving them to concrete types on usage. + +test = fn (Identity :: Identity Array) diff --git a/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.snap b/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.snap new file mode 100644 index 000000000..8c0613fc7 --- /dev/null +++ b/tests-integration/fixtures/checking/324_foreign_kind_polymorphism/Main.snap @@ -0,0 +1,26 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Identity :: + forall (k :: Type) (a :: (k :: Type) -> (k :: Type)). + Identity @(k :: Type) (a :: (k :: Type) -> (k :: Type)) +fn :: + forall (t3 :: Type) (k :: (t3 :: Type) -> (t3 :: Type)). + Identity @(t3 :: Type) (k :: (t3 :: Type) -> (t3 :: Type)) -> + Identity @(t3 :: Type) (k :: (t3 :: Type) -> (t3 :: Type)) +test :: Identity @Type Array + +Types +Identity :: forall (k :: Type). 
((k :: Type) -> (k :: Type)) -> Type + +Data +Identity + Quantified = :0 + Kind = :1 + + +Roles +Identity = [Phantom] diff --git a/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.purs b/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.purs new file mode 100644 index 000000000..f7308691f --- /dev/null +++ b/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.purs @@ -0,0 +1,11 @@ +module Main where + +foreign import data Component :: forall k. k -> Type + +data ForceTypeType :: (Type -> Type) -> Type +data ForceTypeType f = ForceTypeType + +data Known :: forall k. Component k -> Type -> Type +data Known a b = Known (ForceTypeType k) + +infix 4 type Known as +++ diff --git a/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.snap b/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.snap new file mode 100644 index 000000000..aff7fe0fa --- /dev/null +++ b/tests-integration/fixtures/checking/325_type_kind_deferred_generalise/Main.snap @@ -0,0 +1,32 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +ForceTypeType :: forall (f :: Type -> Type). ForceTypeType (f :: Type -> Type) +Known :: + forall (k :: Type -> Type) (a :: Component @(Type -> Type) (k :: Type -> Type)) (b :: Type). + ForceTypeType (k :: Type -> Type) -> + Known @(k :: Type -> Type) (a :: Component @(Type -> Type) (k :: Type -> Type)) (b :: Type) + +Types +Component :: forall (k :: Type). (k :: Type) -> Type +ForceTypeType :: (Type -> Type) -> Type +Known :: forall (k :: Type -> Type). Component @(Type -> Type) (k :: Type -> Type) -> Type -> Type ++++ :: forall (k :: Type -> Type). 
Component @(Type -> Type) (k :: Type -> Type) -> Type -> Type + +Data +ForceTypeType + Quantified = :0 + Kind = :0 + +Known + Quantified = :0 + Kind = :1 + + +Roles +Component = [Nominal] +ForceTypeType = [Phantom] +Known = [Phantom, Phantom] diff --git a/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.purs b/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.purs new file mode 100644 index 000000000..e248a6cbc --- /dev/null +++ b/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.purs @@ -0,0 +1,28 @@ +module Main where + +class IxFunctor :: forall ix. (ix -> ix -> Type -> Type) -> Constraint +class IxFunctor f where + imap :: forall a b x y. (a -> b) -> f x y a -> f x y b + +class IxApply :: forall ix. (ix -> ix -> Type -> Type) -> Constraint +class IxFunctor m <= IxApply m where + iapply :: forall a b x y z. m x y (a -> b) -> m y z a -> m x z b + +class IxApplicative :: forall ix. (ix -> ix -> Type -> Type) -> Constraint +class IxApply m <= IxApplicative m where + ipure :: forall a x. a -> m x x a + +class IxBind :: forall ix. (ix -> ix -> Type -> Type) -> Constraint +class IxApply m <= IxBind m where + ibind :: forall a b x y z. m x y a -> (a -> m y z b) -> m x z b + +class IxMonad :: forall ix. (ix -> ix -> Type -> Type) -> Constraint +class (IxApplicative m, IxBind m) <= IxMonad m + +iap :: forall m a b x y z. 
IxMonad m => m x y (a -> b) -> m y z a -> m x z b +iap f a = do + f' <- f + a' <- a + ipure (f' a') + where + bind = ibind diff --git a/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.snap b/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.snap new file mode 100644 index 000000000..594e03286 --- /dev/null +++ b/tests-integration/fixtures/checking/326_let_retain_polymorphism/Main.snap @@ -0,0 +1,91 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +imap :: + forall (ix :: Type) (f :: (ix :: Type) -> (ix :: Type) -> Type -> Type) (a :: Type) (b :: Type) + (x :: (ix :: Type)) (y :: (ix :: Type)). + IxFunctor (f :: (ix :: Type) -> (ix :: Type) -> Type -> Type) => + ((a :: Type) -> (b :: Type)) -> + (f :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (y :: (ix :: Type)) + (a :: Type) -> + (f :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (y :: (ix :: Type)) + (b :: Type) +iapply :: + forall (ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) (a :: Type) (b :: Type) + (x :: (ix :: Type)) (y :: (ix :: Type)) (z :: (ix :: Type)). + IxApply (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) => + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (y :: (ix :: Type)) + ((a :: Type) -> (b :: Type)) -> + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (y :: (ix :: Type)) + (z :: (ix :: Type)) + (a :: Type) -> + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (z :: (ix :: Type)) + (b :: Type) +ipure :: + forall (ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) (a :: Type) + (x :: (ix :: Type)). 
+ IxApplicative (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) => + (a :: Type) -> + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (x :: (ix :: Type)) + (a :: Type) +ibind :: + forall (ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) (a :: Type) (b :: Type) + (x :: (ix :: Type)) (y :: (ix :: Type)) (z :: (ix :: Type)). + IxBind (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) => + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (y :: (ix :: Type)) + (a :: Type) -> + ((a :: Type) -> + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (y :: (ix :: Type)) + (z :: (ix :: Type)) + (b :: Type)) -> + (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) + (x :: (ix :: Type)) + (z :: (ix :: Type)) + (b :: Type) +iap :: + forall (t33 :: Type) (m :: (t33 :: Type) -> (t33 :: Type) -> Type -> Type) (a :: Type) (b :: Type) + (x :: (t33 :: Type)) (y :: (t33 :: Type)) (z :: (t33 :: Type)). + IxMonad @(t33 :: Type) (m :: (t33 :: Type) -> (t33 :: Type) -> Type -> Type) => + (m :: (t33 :: Type) -> (t33 :: Type) -> Type -> Type) + (x :: (t33 :: Type)) + (y :: (t33 :: Type)) + ((a :: Type) -> (b :: Type)) -> + (m :: (t33 :: Type) -> (t33 :: Type) -> Type -> Type) + (y :: (t33 :: Type)) + (z :: (t33 :: Type)) + (a :: Type) -> + (m :: (t33 :: Type) -> (t33 :: Type) -> Type -> Type) + (x :: (t33 :: Type)) + (z :: (t33 :: Type)) + (b :: Type) + +Types +IxFunctor :: forall (ix :: Type). ((ix :: Type) -> (ix :: Type) -> Type -> Type) -> Constraint +IxApply :: forall (ix :: Type). ((ix :: Type) -> (ix :: Type) -> Type -> Type) -> Constraint +IxApplicative :: forall (ix :: Type). ((ix :: Type) -> (ix :: Type) -> Type -> Type) -> Constraint +IxBind :: forall (ix :: Type). ((ix :: Type) -> (ix :: Type) -> Type -> Type) -> Constraint +IxMonad :: forall (ix :: Type). 
((ix :: Type) -> (ix :: Type) -> Type -> Type) -> Constraint + +Classes +class IxFunctor (f :: (ix :: Type) -> (ix :: Type) -> Type -> Type) +class IxFunctor @(ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) <= IxApply (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) +class IxApply @(ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) <= IxApplicative (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) +class IxApply @(ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) <= IxBind (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) +class IxApplicative @(ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type), IxBind @(ix :: Type) (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) <= IxMonad (m :: (ix :: Type) -> (ix :: Type) -> Type -> Type) diff --git a/tests-integration/fixtures/checking/327_qualified_do/Lib.purs b/tests-integration/fixtures/checking/327_qualified_do/Lib.purs new file mode 100644 index 000000000..d577cb296 --- /dev/null +++ b/tests-integration/fixtures/checking/327_qualified_do/Lib.purs @@ -0,0 +1,5 @@ +module Lib where + +foreign import bind :: forall m a b. m a -> (a -> m b) -> m b +foreign import discard :: forall m a b. m a -> (a -> m b) -> m b +foreign import pure :: forall m a. 
a -> m a diff --git a/tests-integration/fixtures/checking/327_qualified_do/Main.purs b/tests-integration/fixtures/checking/327_qualified_do/Main.purs new file mode 100644 index 000000000..fa02ac325 --- /dev/null +++ b/tests-integration/fixtures/checking/327_qualified_do/Main.purs @@ -0,0 +1,7 @@ +module Main where + +import Lib as L + +test = L.do + life <- L.pure 42 + L.pure life diff --git a/tests-integration/fixtures/checking/327_qualified_do/Main.snap b/tests-integration/fixtures/checking/327_qualified_do/Main.snap new file mode 100644 index 000000000..82c5b8f82 --- /dev/null +++ b/tests-integration/fixtures/checking/327_qualified_do/Main.snap @@ -0,0 +1,9 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: forall (t7 :: Type -> Type). (t7 :: Type -> Type) Int + +Types diff --git a/tests-integration/fixtures/checking/328_binder_instantiation/Main.purs b/tests-integration/fixtures/checking/328_binder_instantiation/Main.purs new file mode 100644 index 000000000..ceb764ab6 --- /dev/null +++ b/tests-integration/fixtures/checking/328_binder_instantiation/Main.purs @@ -0,0 +1,25 @@ +module Main where + +data Maybe a = Just a | Nothing +data Id = MkId (forall a. a -> a) + +identity :: forall a. 
Maybe (a -> a) +identity = Nothing + +test :: Partial => Int +test = case identity of + Just f -> let _ = f 42 in f true + +test2 :: Id -> Boolean +test2 x = case x of + MkId f -> let _ = f 42 in f true + +test3 :: Partial => Int +test3 = + let (Just f) = identity + in let _ = f 42 in f true + +test4 :: Id -> Boolean +test4 x = + let (MkId f) = x + in let _ = f 42 in f true diff --git a/tests-integration/fixtures/checking/328_binder_instantiation/Main.snap b/tests-integration/fixtures/checking/328_binder_instantiation/Main.snap new file mode 100644 index 000000000..baa27c7bd --- /dev/null +++ b/tests-integration/fixtures/checking/328_binder_instantiation/Main.snap @@ -0,0 +1,54 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +MkId :: (forall (a :: Type). (a :: Type) -> (a :: Type)) -> Id +identity :: forall (a :: Type). Maybe ((a :: Type) -> (a :: Type)) +test :: Partial => Int +test2 :: Id -> Boolean +test3 :: Partial => Int +test4 :: Id -> Boolean + +Types +Maybe :: Type -> Type +Id :: Type + +Data +Maybe + Quantified = :0 + Kind = :0 + +Id + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] +Id = [] + +Diagnostics +error[CannotUnify]: Cannot unify 'Boolean' with 'Int' + --> 11:31..11:35 + | +11 | Just f -> let _ = f 42 in f true + | ^~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. Missing: Nothing + --> 10:8..11:35 + | +10 | test = case identity of + | ^~~~~~~~~~~~~~~~ +warning[MissingPatterns]: Pattern match is not exhaustive. 
Missing: Nothing + --> 19:3..20:28 + | +19 | let (Just f) = identity + | ^~~~~~~~~~~~~~~~~~~~~~~ +error[CannotUnify]: Cannot unify 'Boolean' with 'Int' + --> 20:24..20:28 + | +20 | in let _ = f 42 in f true + | ^~~~ diff --git a/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.purs b/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.purs new file mode 100644 index 000000000..8a06720e5 --- /dev/null +++ b/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.purs @@ -0,0 +1,8 @@ +module Main where + +data Maybe a = Just a | Nothing + +test x = case x of + Nothing -> 0 + Just { a } -> 0 + Just { a, b } -> 0 diff --git a/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.snap b/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.snap new file mode 100644 index 000000000..d8a2aabe6 --- /dev/null +++ b/tests-integration/fixtures/checking/329_pattern_nothing_first/Main.snap @@ -0,0 +1,30 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Just :: forall (a :: Type). (a :: Type) -> Maybe (a :: Type) +Nothing :: forall (a :: Type). Maybe (a :: Type) +test :: + forall (t7 :: Type) (t11 :: Type) (t13 :: Row Type). 
+ Maybe { a :: (t7 :: Type) | ( b :: (t11 :: Type) | (t13 :: Row Type) ) } -> Int + +Types +Maybe :: Type -> Type + +Data +Maybe + Quantified = :0 + Kind = :0 + + +Roles +Maybe = [Representational] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: Just ({ a: _, b: _ }) + --> 5:10..8:21 + | +5 | test x = case x of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/330_record_subset_labels/Main.purs b/tests-integration/fixtures/checking/330_record_subset_labels/Main.purs new file mode 100644 index 000000000..6204d428f --- /dev/null +++ b/tests-integration/fixtures/checking/330_record_subset_labels/Main.purs @@ -0,0 +1,7 @@ +module Main where + +data Box a = Box a + +test x = case x of + Box { a, b } -> 0 + Box { a } -> 0 diff --git a/tests-integration/fixtures/checking/330_record_subset_labels/Main.snap b/tests-integration/fixtures/checking/330_record_subset_labels/Main.snap new file mode 100644 index 000000000..8ba2aee1a --- /dev/null +++ b/tests-integration/fixtures/checking/330_record_subset_labels/Main.snap @@ -0,0 +1,29 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Box :: forall (a :: Type). (a :: Type) -> Box (a :: Type) +test :: + forall (t6 :: Type) (t7 :: Type) (t12 :: Row Type). 
+ Box { a :: (t6 :: Type), b :: (t7 :: Type) | (t12 :: Row Type) } -> Int + +Types +Box :: Type -> Type + +Data +Box + Quantified = :0 + Kind = :0 + + +Roles +Box = [Representational] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: Box ({ a: _ }) + --> 5:10..7:17 + | +5 | test x = case x of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/331_record_progressive_labels/Main.purs b/tests-integration/fixtures/checking/331_record_progressive_labels/Main.purs new file mode 100644 index 000000000..1338f10ba --- /dev/null +++ b/tests-integration/fixtures/checking/331_record_progressive_labels/Main.purs @@ -0,0 +1,8 @@ +module Main where + +data Box a = Box a + +test x = case x of + Box { a } -> 0 + Box { a, b } -> 0 + Box { a, b, c } -> 0 diff --git a/tests-integration/fixtures/checking/331_record_progressive_labels/Main.snap b/tests-integration/fixtures/checking/331_record_progressive_labels/Main.snap new file mode 100644 index 000000000..c88b44057 --- /dev/null +++ b/tests-integration/fixtures/checking/331_record_progressive_labels/Main.snap @@ -0,0 +1,31 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Box :: forall (a :: Type). (a :: Type) -> Box (a :: Type) +test :: + forall (t6 :: Type) (t16 :: Type) (t17 :: Type) (t20 :: Row Type). 
+ Box + { a :: (t6 :: Type) | ( b :: (t16 :: Type) | ( c :: (t17 :: Type) | (t20 :: Row Type) ) ) } -> + Int + +Types +Box :: Type -> Type + +Data +Box + Quantified = :0 + Kind = :0 + + +Roles +Box = [Representational] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: Box ({ a: _, b: _ }), Box ({ a: _, b: _, c: _ }) + --> 5:10..8:23 + | +5 | test x = case x of + | ^~~~~~~~~ diff --git a/tests-integration/fixtures/checking/332_record_equation_labels/Main.purs b/tests-integration/fixtures/checking/332_record_equation_labels/Main.purs new file mode 100644 index 000000000..75e447030 --- /dev/null +++ b/tests-integration/fixtures/checking/332_record_equation_labels/Main.purs @@ -0,0 +1,6 @@ +module Main where + +data Box a = Box a + +test (Box { a }) = 0 +test (Box { a, b }) = 0 diff --git a/tests-integration/fixtures/checking/332_record_equation_labels/Main.snap b/tests-integration/fixtures/checking/332_record_equation_labels/Main.snap new file mode 100644 index 000000000..e9b0503b2 --- /dev/null +++ b/tests-integration/fixtures/checking/332_record_equation_labels/Main.snap @@ -0,0 +1,29 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Box :: forall (a :: Type). (a :: Type) -> Box (a :: Type) +test :: + forall (t7 :: Type) (t8 :: Type) (t11 :: Row Type). 
+ Box { a :: (t7 :: Type) | ( b :: (t8 :: Type) | (t11 :: Row Type) ) } -> Int + +Types +Box :: Type -> Type + +Data +Box + Quantified = :0 + Kind = :0 + + +Roles +Box = [Representational] + +Diagnostics +warning[RedundantPattern]: Pattern match has redundant patterns: Box ({ a: _, b: _ }) + --> 5:1..5:21 + | +5 | test (Box { a }) = 0 + | ^~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/333_row_open_union/Main.purs b/tests-integration/fixtures/checking/333_row_open_union/Main.purs new file mode 100644 index 000000000..89485f2f1 --- /dev/null +++ b/tests-integration/fixtures/checking/333_row_open_union/Main.purs @@ -0,0 +1,22 @@ +module Main where + +import Prim.Row as Row + +data Proxy :: forall k. k -> Type +data Proxy a = Proxy + +foreign import unsafeCoerce :: forall a b. a -> b + +openLeft :: forall r u. Row.Union (a :: Int | r) (b :: String) u => Proxy u +openLeft = Proxy + +openRight :: forall r u. Row.Union (a :: Int) (b :: String | r) u => Proxy u +openRight = Proxy + +backwardLeft :: forall l r. Row.Union l (b :: String) (a :: Int, b :: String | r) => Proxy l +backwardLeft = Proxy + +backwardRight :: forall r u. Row.Union (a :: Int) r (a :: Int, b :: String | u) => Proxy r +backwardRight = Proxy + +forceSolve = { openLeft, openRight, backwardLeft, backwardRight } diff --git a/tests-integration/fixtures/checking/333_row_open_union/Main.snap b/tests-integration/fixtures/checking/333_row_open_union/Main.snap new file mode 100644 index 000000000..ac1671607 --- /dev/null +++ b/tests-integration/fixtures/checking/333_row_open_union/Main.snap @@ -0,0 +1,44 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Proxy :: forall (k :: Type) (a :: (k :: Type)). Proxy @(k :: Type) (a :: (k :: Type)) +unsafeCoerce :: forall (a :: Type) (b :: Type). (a :: Type) -> (b :: Type) +openLeft :: + forall (r :: Row Type) (u :: Row Type). 
+ Union @Type ( a :: Int | (r :: Row Type) ) ( b :: String ) (u :: Row Type) => + Proxy @(Row Type) (u :: Row Type) +openRight :: + forall (r :: Row Type) (u :: Row Type). + Union @Type ( a :: Int ) ( b :: String | (r :: Row Type) ) (u :: Row Type) => + Proxy @(Row Type) (u :: Row Type) +backwardLeft :: + forall (l :: Row Type) (r :: Row Type). + Union @Type (l :: Row Type) ( b :: String ) ( a :: Int, b :: String | (r :: Row Type) ) => + Proxy @(Row Type) (l :: Row Type) +backwardRight :: + forall (r :: Row Type) (u :: Row Type). + Union @Type ( a :: Int ) (r :: Row Type) ( a :: Int, b :: String | (u :: Row Type) ) => + Proxy @(Row Type) (r :: Row Type) +forceSolve :: + forall (t38 :: Row Type) (t40 :: Row Type) (t43 :: Row Type) (t45 :: Row Type) (t46 :: Row Type). + Union (t38 :: Row Type) ( b :: String ) (t46 :: Row Type) => + { backwardLeft :: Proxy @(Row Type) ( a :: Int | (t43 :: Row Type) ) + , backwardRight :: Proxy @(Row Type) ( b :: String | (t45 :: Row Type) ) + , openLeft :: Proxy @(Row Type) ( a :: Int | (t46 :: Row Type) ) + , openRight :: Proxy @(Row Type) ( a :: Int, b :: String | (t40 :: Row Type) ) + } + +Types +Proxy :: forall (k :: Type). (k :: Type) -> Type + +Data +Proxy + Quantified = :0 + Kind = :1 + + +Roles +Proxy = [Phantom] diff --git a/tests-integration/fixtures/checking/334_row_open_cons/Main.purs b/tests-integration/fixtures/checking/334_row_open_cons/Main.purs new file mode 100644 index 000000000..6a881ce77 --- /dev/null +++ b/tests-integration/fixtures/checking/334_row_open_cons/Main.purs @@ -0,0 +1,17 @@ +module Main where + +import Prim.Row as Row + +data Proxy :: forall k. k -> Type +data Proxy a = Proxy + +consOpen :: forall r row. Row.Cons "x" Int (a :: String | r) row => Proxy row +consOpen = Proxy + +decomposeOpen :: forall t tail r. Row.Cons "x" t tail (x :: Int, a :: String | r) => Proxy t +decomposeOpen = Proxy + +extractTail :: forall tail r. 
Row.Cons "x" Int tail (x :: Int, a :: String | r) => Proxy tail +extractTail = Proxy + +forceSolve = { consOpen, decomposeOpen, extractTail } diff --git a/tests-integration/fixtures/checking/334_row_open_cons/Main.snap b/tests-integration/fixtures/checking/334_row_open_cons/Main.snap new file mode 100644 index 000000000..10ba85eb0 --- /dev/null +++ b/tests-integration/fixtures/checking/334_row_open_cons/Main.snap @@ -0,0 +1,37 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Proxy :: forall (k :: Type) (a :: (k :: Type)). Proxy @(k :: Type) (a :: (k :: Type)) +consOpen :: + forall (r :: Row Type) (row :: Row Type). + Cons @Type "x" Int ( a :: String | (r :: Row Type) ) (row :: Row Type) => + Proxy @(Row Type) (row :: Row Type) +decomposeOpen :: + forall (t :: Type) (tail :: Row Type) (r :: Row Type). + Cons @Type "x" (t :: Type) (tail :: Row Type) ( a :: String, x :: Int | (r :: Row Type) ) => + Proxy @Type (t :: Type) +extractTail :: + forall (tail :: Row Type) (r :: Row Type). + Cons @Type "x" Int (tail :: Row Type) ( a :: String, x :: Int | (r :: Row Type) ) => + Proxy @(Row Type) (tail :: Row Type) +forceSolve :: + forall (t26 :: Row Type) (t32 :: Row Type). + { consOpen :: Proxy @(Row Type) ( a :: String, x :: Int | (t26 :: Row Type) ) + , decomposeOpen :: Proxy @Type Int + , extractTail :: Proxy @(Row Type) ( a :: String | (t32 :: Row Type) ) + } + +Types +Proxy :: forall (k :: Type). (k :: Type) -> Type + +Data +Proxy + Quantified = :0 + Kind = :1 + + +Roles +Proxy = [Phantom] diff --git a/tests-integration/fixtures/checking/335_row_open_lacks/Main.purs b/tests-integration/fixtures/checking/335_row_open_lacks/Main.purs new file mode 100644 index 000000000..84c7849c9 --- /dev/null +++ b/tests-integration/fixtures/checking/335_row_open_lacks/Main.purs @@ -0,0 +1,17 @@ +module Main where + +import Prim.Row as Row + +data Proxy :: forall k. 
k -> Type +data Proxy a = Proxy + +lacksOpen :: forall r. Row.Lacks "missing" (a :: Int, b :: String | r) => Proxy r -> Int +lacksOpen _ = 0 + +lacksPresent :: forall r. Row.Lacks "a" (a :: Int | r) => Proxy r -> Int +lacksPresent _ = 0 + +empty :: Proxy () +empty = Proxy + +forceSolve = { lacksOpen: lacksOpen empty, lacksPresent: lacksPresent empty } diff --git a/tests-integration/fixtures/checking/335_row_open_lacks/Main.snap b/tests-integration/fixtures/checking/335_row_open_lacks/Main.snap new file mode 100644 index 000000000..6e55786a2 --- /dev/null +++ b/tests-integration/fixtures/checking/335_row_open_lacks/Main.snap @@ -0,0 +1,35 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +Proxy :: forall (k :: Type) (a :: (k :: Type)). Proxy @(k :: Type) (a :: (k :: Type)) +lacksOpen :: + forall (r :: Row Type). + Lacks @Type "missing" ( a :: Int, b :: String | (r :: Row Type) ) => + Proxy @(Row Type) (r :: Row Type) -> Int +lacksPresent :: + forall (r :: Row Type). + Lacks @Type "a" ( a :: Int | (r :: Row Type) ) => Proxy @(Row Type) (r :: Row Type) -> Int +empty :: forall (t13 :: Type). Proxy @(Row (t13 :: Type)) () +forceSolve :: { lacksOpen :: Int, lacksPresent :: Int } + +Types +Proxy :: forall (k :: Type). 
(k :: Type) -> Type + +Data +Proxy + Quantified = :0 + Kind = :1 + + +Roles +Proxy = [Phantom] + +Diagnostics +error[NoInstanceFound]: No instance found for: Lacks @Type "a" ( a :: Int | () ) + --> 17:1..17:78 + | +17 | forceSolve = { lacksOpen: lacksOpen empty, lacksPresent: lacksPresent empty } + | ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/tests-integration/fixtures/checking/336_row_open_record/Main.purs b/tests-integration/fixtures/checking/336_row_open_record/Main.purs new file mode 100644 index 000000000..9ab0137a2 --- /dev/null +++ b/tests-integration/fixtures/checking/336_row_open_record/Main.purs @@ -0,0 +1,21 @@ +module Main where + +import Prim.Row as Row + +foreign import unsafeCoerce :: forall a b. a -> b + +union :: forall r1 r2 r3. Row.Union r1 r2 r3 => Record r1 -> Record r2 -> Record r3 +union _ _ = unsafeCoerce {} + +addField :: forall r. { a :: Int | r } -> { a :: Int, b :: String | r } +addField x = union x { b: "hi" } + +test = addField { a: 1, c: true } + +insertX :: forall r. Row.Lacks "x" r => Record r -> Record (x :: Int | r) +insertX _ = unsafeCoerce {} + +insertOpen :: forall r. Row.Lacks "x" r => { a :: Int | r } -> { x :: Int, a :: Int | r } +insertOpen x = insertX x + +test2 = insertOpen { a: 1, b: "hi" } diff --git a/tests-integration/fixtures/checking/336_row_open_record/Main.snap b/tests-integration/fixtures/checking/336_row_open_record/Main.snap new file mode 100644 index 000000000..41376d9d6 --- /dev/null +++ b/tests-integration/fixtures/checking/336_row_open_record/Main.snap @@ -0,0 +1,25 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +unsafeCoerce :: forall (a :: Type) (b :: Type). (a :: Type) -> (b :: Type) +union :: + forall (r1 :: Row Type) (r2 :: Row Type) (r3 :: Row Type). 
+ Union @Type (r1 :: Row Type) (r2 :: Row Type) (r3 :: Row Type) => + {| (r1 :: Row Type) } -> {| (r2 :: Row Type) } -> {| (r3 :: Row Type) } +addField :: + forall (r :: Row Type). + { a :: Int | (r :: Row Type) } -> { a :: Int, b :: String | (r :: Row Type) } +test :: { a :: Int, b :: String | ( c :: Boolean ) } +insertX :: + forall (r :: Row Type). + Lacks @Type "x" (r :: Row Type) => {| (r :: Row Type) } -> { x :: Int | (r :: Row Type) } +insertOpen :: + forall (r :: Row Type). + Lacks @Type "x" (r :: Row Type) => + { a :: Int | (r :: Row Type) } -> { a :: Int, x :: Int | (r :: Row Type) } +test2 :: { a :: Int, x :: Int | ( b :: String ) } + +Types diff --git a/tests-integration/fixtures/checking/337_void_data/Main.purs b/tests-integration/fixtures/checking/337_void_data/Main.purs new file mode 100644 index 000000000..bda30cdba --- /dev/null +++ b/tests-integration/fixtures/checking/337_void_data/Main.purs @@ -0,0 +1,6 @@ +module Main where + +data SList + +foreign import data SCons :: Symbol -> SList -> SList +foreign import data SNil :: SList diff --git a/tests-integration/fixtures/checking/337_void_data/Main.snap b/tests-integration/fixtures/checking/337_void_data/Main.snap new file mode 100644 index 000000000..9e7f9d154 --- /dev/null +++ b/tests-integration/fixtures/checking/337_void_data/Main.snap @@ -0,0 +1,22 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms + +Types +SList :: Type +SCons :: Symbol -> SList -> SList +SNil :: SList + +Data +SList + Quantified = :0 + Kind = :0 + + +Roles +SList = [] +SCons = [Nominal, Nominal] +SNil = [] diff --git a/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsInt.purs b/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsInt.purs new file mode 100644 index 000000000..5f808d9f2 --- /dev/null +++ b/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsInt.purs @@ -0,0 +1,4 @@ +module IsInt where + 
+value :: Int +value = 42 diff --git a/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsString.purs b/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsString.purs new file mode 100644 index 000000000..b621e6d8f --- /dev/null +++ b/tests-integration/fixtures/checking/338_module_export_alias_overlap/IsString.purs @@ -0,0 +1,7 @@ +module IsString where + +value :: String +value = "hello" + +other :: Boolean +other = true diff --git a/tests-integration/fixtures/checking/338_module_export_alias_overlap/Lib.purs b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Lib.purs new file mode 100644 index 000000000..0c49ffb80 --- /dev/null +++ b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Lib.purs @@ -0,0 +1,4 @@ +module Lib (module Exports) where + +import IsInt (value) as Exports +import IsString (other) as Exports diff --git a/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.purs b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.purs new file mode 100644 index 000000000..7c71099a7 --- /dev/null +++ b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.purs @@ -0,0 +1,5 @@ +module Main where + +import Lib (value) + +test = value diff --git a/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.snap b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.snap new file mode 100644 index 000000000..20236616d --- /dev/null +++ b/tests-integration/fixtures/checking/338_module_export_alias_overlap/Main.snap @@ -0,0 +1,9 @@ +--- +source: tests-integration/tests/checking/generated.rs +assertion_line: 28 +expression: report +--- +Terms +test :: Int + +Types diff --git a/tests-integration/fixtures/checking/prelude/Control.Bind.purs b/tests-integration/fixtures/checking/prelude/Control.Bind.purs new file mode 100644 index 000000000..0ee7419fe --- /dev/null +++ 
b/tests-integration/fixtures/checking/prelude/Control.Bind.purs @@ -0,0 +1,9 @@ +module Control.Bind where + +import Control.Apply (class Apply) + +class Apply m <= Bind m where + bind :: forall a b. m a -> (a -> m b) -> m b + +discard :: forall a b m. Bind m => m a -> (a -> m b) -> m b +discard = bind diff --git a/tests-integration/fixtures/checking/prelude/Control.Category.purs b/tests-integration/fixtures/checking/prelude/Control.Category.purs new file mode 100644 index 000000000..7d3c82136 --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Control.Category.purs @@ -0,0 +1,9 @@ +module Control.Category where + +import Data.Semigroupoid (class Semigroupoid) + +class Semigroupoid a <= Category a where + identity :: forall t. a t t + +instance categoryFn :: Category (->) where + identity x = x diff --git a/tests-integration/fixtures/checking/prelude/Control.Monad.Rec.purs b/tests-integration/fixtures/checking/prelude/Control.Monad.Rec.purs new file mode 100644 index 000000000..4483bc38f --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Control.Monad.Rec.purs @@ -0,0 +1,6 @@ +module Control.Monad.Rec where + +import Control.Monad (class Monad) + +class Monad m <= MonadRec m where + tailRecM :: forall a b. 
(a -> m b) -> a -> m b diff --git a/tests-integration/fixtures/checking/prelude/Control.Monad.purs b/tests-integration/fixtures/checking/prelude/Control.Monad.purs new file mode 100644 index 000000000..99eedb050 --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Control.Monad.purs @@ -0,0 +1,6 @@ +module Control.Monad where + +import Control.Applicative (class Applicative) +import Control.Bind (class Bind) + +class (Applicative m, Bind m) <= Monad m diff --git a/tests-integration/fixtures/checking/prelude/Data.Boolean.purs b/tests-integration/fixtures/checking/prelude/Data.Boolean.purs new file mode 100644 index 000000000..75a3e8e69 --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Data.Boolean.purs @@ -0,0 +1,4 @@ +module Data.Boolean where + +otherwise :: Boolean +otherwise = true diff --git a/tests-integration/fixtures/checking/prelude/Data.Semigroupoid.purs b/tests-integration/fixtures/checking/prelude/Data.Semigroupoid.purs new file mode 100644 index 000000000..299046ddd --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Data.Semigroupoid.purs @@ -0,0 +1,9 @@ +module Data.Semigroupoid where + +class Semigroupoid a where + compose :: forall b c d. a c d -> a b c -> a b d + +infixr 9 compose as <<< + +instance semigroupoidFn :: Semigroupoid (->) where + compose f g x = f (g x) diff --git a/tests-integration/fixtures/checking/prelude/Effect.Aff.purs b/tests-integration/fixtures/checking/prelude/Effect.Aff.purs new file mode 100644 index 000000000..4fef90e62 --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Effect.Aff.purs @@ -0,0 +1,28 @@ +module Effect.Aff where + +import Control.Applicative (class Applicative) +import Control.Apply (class Apply) +import Control.Bind (class Bind) +import Control.Monad (class Monad) +import Data.Functor (class Functor) + +foreign import data Aff :: Type -> Type + +foreign import mapAff :: forall a b. (a -> b) -> Aff a -> Aff b +foreign import applyAff :: forall a b. 
Aff (a -> b) -> Aff a -> Aff b +foreign import pureAff :: forall a. a -> Aff a +foreign import bindAff :: forall a b. Aff a -> (a -> Aff b) -> Aff b + +instance Functor Aff where + map = mapAff + +instance Apply Aff where + apply = applyAff + +instance Applicative Aff where + pure = pureAff + +instance Bind Aff where + bind = bindAff + +instance Monad Aff diff --git a/tests-integration/fixtures/checking/prelude/Effect.purs b/tests-integration/fixtures/checking/prelude/Effect.purs new file mode 100644 index 000000000..f634e9ea2 --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Effect.purs @@ -0,0 +1,28 @@ +module Effect where + +import Control.Applicative (class Applicative) +import Control.Apply (class Apply) +import Control.Bind (class Bind) +import Control.Monad (class Monad) +import Data.Functor (class Functor) + +foreign import data Effect :: Type -> Type + +foreign import mapEffect :: forall a b. (a -> b) -> Effect a -> Effect b +foreign import applyEffect :: forall a b. Effect (a -> b) -> Effect a -> Effect b +foreign import pureEffect :: forall a. a -> Effect a +foreign import bindEffect :: forall a b. Effect a -> (a -> Effect b) -> Effect b + +instance Functor Effect where + map = mapEffect + +instance Apply Effect where + apply = applyEffect + +instance Applicative Effect where + pure = pureEffect + +instance Bind Effect where + bind = bindEffect + +instance Monad Effect diff --git a/tests-integration/fixtures/checking/prelude/Partial.Unsafe.purs b/tests-integration/fixtures/checking/prelude/Partial.Unsafe.purs new file mode 100644 index 000000000..b92decc3a --- /dev/null +++ b/tests-integration/fixtures/checking/prelude/Partial.Unsafe.purs @@ -0,0 +1,6 @@ +module Partial.Unsafe where + +import Safe.Coerce (unsafeCoerce) + +unsafePartial :: forall a. 
(Partial => a) -> a +unsafePartial = unsafeCoerce diff --git a/tests-integration/fixtures/checking/prelude/Safe.Coerce.purs b/tests-integration/fixtures/checking/prelude/Safe.Coerce.purs index d701d1430..c24a3e05d 100644 --- a/tests-integration/fixtures/checking/prelude/Safe.Coerce.purs +++ b/tests-integration/fixtures/checking/prelude/Safe.Coerce.purs @@ -1,4 +1,4 @@ -module Safe.Coerce where +module Safe.Coerce (coerce, module Prim.Coerce) where import Prim.Coerce (class Coercible) diff --git a/tests-integration/fixtures/lowering/001_ado_statement_recursion/Main.snap b/tests-integration/fixtures/lowering/001_ado_statement_recursion/Main.snap index 2fe8eb8b3..3d761ef02 100644 --- a/tests-integration/fixtures/lowering/001_ado_statement_recursion/Main.snap +++ b/tests-integration/fixtures/lowering/001_ado_statement_recursion/Main.snap @@ -6,11 +6,11 @@ module Main Expressions: -x'notBinder@Some(Position { line: 4, character: 4 }) - resolves to binder Some(Position { line: 2, character: 10 }) -x'notBinder@Some(Position { line: 3, character: 21 }) - resolves to top-level name -pure@Some(Position { line: 3, character: 16 }) - resolves to top-level name +x'notBinder@4:4 + -> binder@2:10 +x'notBinder@3:21 + -> top-level +pure@3:16 + -> top-level Types: diff --git a/tests-integration/fixtures/lowering/002_class_equation/Main.snap b/tests-integration/fixtures/lowering/002_class_equation/Main.snap index aa353c73e..cf180bece 100644 --- a/tests-integration/fixtures/lowering/002_class_equation/Main.snap +++ b/tests-integration/fixtures/lowering/002_class_equation/Main.snap @@ -9,25 +9,25 @@ Expressions: Types: -k@Some(Position { line: 2, character: 30 }) - resolves to forall Some(Position { line: 2, character: 22 }) -p@Some(Position { line: 4, character: 20 }) - resolves to forall Some(Position { line: 4, character: 17 }) -p@Some(Position { line: 4, character: 27 }) - resolves to forall Some(Position { line: 4, character: 17 }) -k@Some(Position { line: 2, character: 25 }) - 
resolves to forall Some(Position { line: 2, character: 22 }) -a@Some(Position { line: 7, character: 17 }) - resolves to forall Some(Position { line: 6, character: 17 }) -a@Some(Position { line: 6, character: 8 }) - resolves to forall Some(Position { line: 6, character: 17 }) -a@Some(Position { line: 4, character: 22 }) - resolves to forall Some(Position { line: 3, character: 12 }) -k@Some(Position { line: 3, character: 27 }) - resolves to forall Some(Position { line: 2, character: 22 }) -b@Some(Position { line: 4, character: 29 }) - resolves to forall Some(Position { line: 3, character: 21 }) -a@Some(Position { line: 7, character: 12 }) - resolves to forall Some(Position { line: 6, character: 17 }) -k@Some(Position { line: 3, character: 18 }) - resolves to forall Some(Position { line: 2, character: 22 }) +k@2:30 + -> forall@2:22 +p@4:20 + -> forall@4:17 +p@4:27 + -> forall@4:17 +k@2:25 + -> forall@2:22 +a@7:17 + -> forall@6:17 +a@6:8 + -> forall@6:17 +a@4:22 + -> forall@3:12 +k@3:27 + -> forall@2:22 +b@4:29 + -> forall@3:21 +a@7:12 + -> forall@6:17 +k@3:18 + -> forall@2:22 diff --git a/tests-integration/fixtures/lowering/003_data_equation/Main.snap b/tests-integration/fixtures/lowering/003_data_equation/Main.snap index 9595b3c30..60a5e7adb 100644 --- a/tests-integration/fixtures/lowering/003_data_equation/Main.snap +++ b/tests-integration/fixtures/lowering/003_data_equation/Main.snap @@ -9,13 +9,13 @@ Expressions: Types: -a@Some(Position { line: 4, character: 22 }) - resolves to forall Some(Position { line: 4, character: 11 }) -k@Some(Position { line: 6, character: 23 }) - resolves to forall Some(Position { line: 6, character: 20 }) -k@Some(Position { line: 7, character: 16 }) - resolves to forall Some(Position { line: 6, character: 20 }) -b@Some(Position { line: 4, character: 32 }) - resolves to forall Some(Position { line: 4, character: 13 }) -a@Some(Position { line: 2, character: 19 }) - resolves to forall Some(Position { line: 2, character: 10 }) +a@4:22 + -> 
forall@4:11 +k@6:23 + -> forall@6:20 +k@7:16 + -> forall@6:20 +b@4:32 + -> forall@4:13 +a@2:19 + -> forall@2:10 diff --git a/tests-integration/fixtures/lowering/004_derive_declaration/Main.snap b/tests-integration/fixtures/lowering/004_derive_declaration/Main.snap index a2b62a804..9f4e13409 100644 --- a/tests-integration/fixtures/lowering/004_derive_declaration/Main.snap +++ b/tests-integration/fixtures/lowering/004_derive_declaration/Main.snap @@ -9,7 +9,7 @@ Expressions: Types: -a@Some(Position { line: 2, character: 25 }) +a@2:25 introduces a constraint variable "a" -a@Some(Position { line: 3, character: 34 }) +a@3:34 introduces a constraint variable "a" diff --git a/tests-integration/fixtures/lowering/005_do_statement/Main.snap b/tests-integration/fixtures/lowering/005_do_statement/Main.snap index a1b98da72..e37385dad 100644 --- a/tests-integration/fixtures/lowering/005_do_statement/Main.snap +++ b/tests-integration/fixtures/lowering/005_do_statement/Main.snap @@ -6,23 +6,23 @@ module Main Expressions: -x@Some(Position { line: 3, character: 8 }) - resolves to top-level name -pure@Some(Position { line: 5, character: 6 }) - resolves to top-level name -pure@Some(Position { line: 6, character: 12 }) - resolves to top-level name -x@Some(Position { line: 7, character: 8 }) - resolves to equation Some(Position { line: 4, character: 5 }) -action@Some(Position { line: 2, character: 9 }) - resolves to top-level name -y@Some(Position { line: 7, character: 12 }) - resolves to binder Some(Position { line: 4, character: 12 }) -z@Some(Position { line: 7, character: 16 }) - resolves to equation Some(Position { line: 6, character: 5 }) -z@Some(Position { line: 3, character: 12 }) - resolves to top-level name -y@Some(Position { line: 3, character: 10 }) - resolves to top-level name +x@3:8 + -> top-level +pure@5:6 + -> top-level +pure@6:12 + -> top-level +x@7:8 + -> equation@4:5 +action@2:9 + -> top-level +y@7:12 + -> binder@4:12 +z@7:16 + -> equation@6:5 +z@3:12 + -> top-level 
+y@3:10 + -> top-level Types: diff --git a/tests-integration/fixtures/lowering/006_do_statement_recursion/Main.snap b/tests-integration/fixtures/lowering/006_do_statement_recursion/Main.snap index ddc39c037..40c1a7bfd 100644 --- a/tests-integration/fixtures/lowering/006_do_statement_recursion/Main.snap +++ b/tests-integration/fixtures/lowering/006_do_statement_recursion/Main.snap @@ -6,17 +6,17 @@ module Main Expressions: -y'equation@Some(Position { line: 3, character: 27 }) - resolves to equation Some(Position { line: 3, character: 5 }) -pure@Some(Position { line: 4, character: 33 }) - resolves to top-level name -pure@Some(Position { line: 4, character: 16 }) - resolves to top-level name -a'binder@Some(Position { line: 3, character: 38 }) - resolves to binder Some(Position { line: 3, character: 16 }) -x'notBinder@Some(Position { line: 5, character: 6 }) - resolves to binder Some(Position { line: 3, character: 47 }) -x'notBinder@Some(Position { line: 4, character: 21 }) - resolves to top-level name +y'equation@3:27 + -> equation@3:5 +pure@4:33 + -> top-level +pure@4:16 + -> top-level +a'binder@3:38 + -> binder@3:16 +x'notBinder@5:6 + -> binder@3:47 +x'notBinder@4:21 + -> top-level Types: diff --git a/tests-integration/fixtures/lowering/007_instance_declaration/Main.snap b/tests-integration/fixtures/lowering/007_instance_declaration/Main.snap index 82f9670d8..7e4918392 100644 --- a/tests-integration/fixtures/lowering/007_instance_declaration/Main.snap +++ b/tests-integration/fixtures/lowering/007_instance_declaration/Main.snap @@ -6,38 +6,38 @@ module Main Expressions: -eqMaybeImpl@Some(Position { line: 8, character: 6 }) - resolves to top-level name -eqIntImpl@Some(Position { line: 4, character: 6 }) - resolves to top-level name -b@Some(Position { line: 12, character: 11 }) - resolves to binder Some(Position { line: 12, character: 7 }) +eqMaybeImpl@8:6 + -> top-level +eqIntImpl@4:6 + -> top-level +b@12:11 + -> binder@12:7 Types: -a@Some(Position { line: 6, 
character: 11 }) - resolves to a constraint variable "a" - Some(Position { line: 6, character: 26 }) -b@Some(Position { line: 10, character: 21 }) - resolves to a constraint variable "b" - Some(Position { line: 10, character: 19 }) -a@Some(Position { line: 7, character: 24 }) - resolves to a constraint variable "a" - Some(Position { line: 6, character: 26 }) -b@Some(Position { line: 10, character: 19 }) +a@6:11 + -> constraint variable "a" + 6:26 +b@10:21 + -> constraint variable "b" + 10:19 +a@7:24 + -> constraint variable "a" + 6:26 +b@10:19 introduces a constraint variable "b" -a@Some(Position { line: 6, character: 26 }) +a@6:26 introduces a constraint variable "a" -a@Some(Position { line: 7, character: 13 }) - resolves to a constraint variable "a" - Some(Position { line: 6, character: 26 }) -b@Some(Position { line: 11, character: 22 }) - resolves to a constraint variable "b" - Some(Position { line: 10, character: 19 }) -b@Some(Position { line: 11, character: 29 }) - resolves to a constraint variable "b" - Some(Position { line: 10, character: 19 }) -p@Some(Position { line: 11, character: 20 }) - resolves to forall Some(Position { line: 11, character: 17 }) -p@Some(Position { line: 11, character: 27 }) - resolves to forall Some(Position { line: 11, character: 17 }) +a@7:13 + -> constraint variable "a" + 6:26 +b@11:22 + -> constraint variable "b" + 10:19 +b@11:29 + -> constraint variable "b" + 10:19 +p@11:20 + -> forall@11:17 +p@11:27 + -> forall@11:17 diff --git a/tests-integration/fixtures/lowering/008_newtype_equation/Main.snap b/tests-integration/fixtures/lowering/008_newtype_equation/Main.snap index cc8b5dede..8b7dbfc89 100644 --- a/tests-integration/fixtures/lowering/008_newtype_equation/Main.snap +++ b/tests-integration/fixtures/lowering/008_newtype_equation/Main.snap @@ -9,11 +9,11 @@ Expressions: Types: -a@Some(Position { line: 5, character: 29 }) - resolves to forall Some(Position { line: 5, character: 9 }) -k@Some(Position { line: 5, character: 15 }) - 
resolves to forall Some(Position { line: 4, character: 19 }) -k@Some(Position { line: 4, character: 22 }) - resolves to forall Some(Position { line: 4, character: 19 }) -a@Some(Position { line: 2, character: 17 }) - resolves to forall Some(Position { line: 2, character: 10 }) +a@5:29 + -> forall@5:9 +k@5:15 + -> forall@4:19 +k@4:22 + -> forall@4:19 +a@2:17 + -> forall@2:10 diff --git a/tests-integration/fixtures/lowering/009_signature_equation/Main.snap b/tests-integration/fixtures/lowering/009_signature_equation/Main.snap index 91402b586..2f4dbc657 100644 --- a/tests-integration/fixtures/lowering/009_signature_equation/Main.snap +++ b/tests-integration/fixtures/lowering/009_signature_equation/Main.snap @@ -9,9 +9,9 @@ Expressions: Types: -k@Some(Position { line: 3, character: 12 }) - resolves to forall Some(Position { line: 2, character: 16 }) -k@Some(Position { line: 2, character: 19 }) - resolves to forall Some(Position { line: 2, character: 16 }) -a@Some(Position { line: 3, character: 23 }) - resolves to forall Some(Position { line: 3, character: 6 }) +k@3:12 + -> forall@2:16 +k@2:19 + -> forall@2:16 +a@3:23 + -> forall@3:6 diff --git a/tests-integration/fixtures/lowering/010_value_equation/Main.snap b/tests-integration/fixtures/lowering/010_value_equation/Main.snap index 9007d5f31..be30321c6 100644 --- a/tests-integration/fixtures/lowering/010_value_equation/Main.snap +++ b/tests-integration/fixtures/lowering/010_value_equation/Main.snap @@ -6,29 +6,29 @@ module Main Expressions: -a@Some(Position { line: 3, character: 15 }) - resolves to binder Some(Position { line: 3, character: 4 }) -z@Some(Position { line: 10, character: 4 }) - resolves to signature Some(Position { line: 7, character: 5 }) - resolves to equation Some(Position { line: 8, character: 12 }) -add@Some(Position { line: 14, character: 6 }) - resolves to top-level name -a@Some(Position { line: 9, character: 7 }) - resolves to binder Some(Position { line: 6, character: 5 }) +a@3:15 + -> binder@3:4 
+z@10:4 + -> signature@7:5 + -> equation@8:12 +add@14:6 + -> top-level +a@9:7 + -> binder@6:5 Types: -a@Some(Position { line: 3, character: 19 }) - resolves to forall Some(Position { line: 2, character: 12 }) -a@Some(Position { line: 2, character: 20 }) - resolves to forall Some(Position { line: 2, character: 12 }) -a@Some(Position { line: 5, character: 20 }) - resolves to forall Some(Position { line: 5, character: 15 }) -a@Some(Position { line: 2, character: 15 }) - resolves to forall Some(Position { line: 2, character: 12 }) -a@Some(Position { line: 5, character: 30 }) - resolves to forall Some(Position { line: 5, character: 15 }) -a@Some(Position { line: 3, character: 8 }) - resolves to forall Some(Position { line: 2, character: 12 }) -b@Some(Position { line: 5, character: 25 }) - resolves to forall Some(Position { line: 5, character: 17 }) +a@3:19 + -> forall@2:12 +a@2:20 + -> forall@2:12 +a@5:20 + -> forall@5:15 +a@2:15 + -> forall@2:12 +a@5:30 + -> forall@5:15 +a@3:8 + -> forall@2:12 +b@5:25 + -> forall@5:17 diff --git a/tests-integration/fixtures/lowering/011_case_after_let/Main.snap b/tests-integration/fixtures/lowering/011_case_after_let/Main.snap index 653f19bae..2b2312aab 100644 --- a/tests-integration/fixtures/lowering/011_case_after_let/Main.snap +++ b/tests-integration/fixtures/lowering/011_case_after_let/Main.snap @@ -6,12 +6,12 @@ module Main Expressions: -b@Some(Position { line: 12, character: 17 }) - resolves to binder Some(Position { line: 12, character: 12 }) -a@Some(Position { line: 9, character: 17 }) - resolves to binder Some(Position { line: 9, character: 12 }) +b@12:17 + -> binder@12:12 +a@9:17 + -> binder@9:12 Types: -a@Some(Position { line: 2, character: 19 }) - resolves to forall Some(Position { line: 2, character: 10 }) +a@2:19 + -> forall@2:10 diff --git a/tests-integration/fixtures/lowering/012_recursive_synonym/Main.snap b/tests-integration/fixtures/lowering/012_recursive_synonym/Main.snap index 2e3748de5..9a3279282 100644 --- 
a/tests-integration/fixtures/lowering/012_recursive_synonym/Main.snap +++ b/tests-integration/fixtures/lowering/012_recursive_synonym/Main.snap @@ -9,23 +9,23 @@ Expressions: Types: -a@Some(Position { line: 28, character: 26 }) - resolves to forall Some(Position { line: 28, character: 13 }) -a@Some(Position { line: 25, character: 37 }) - resolves to forall Some(Position { line: 25, character: 10 }) -a@Some(Position { line: 20, character: 38 }) - resolves to forall Some(Position { line: 20, character: 14 }) -a@Some(Position { line: 30, character: 22 }) - resolves to nothing -a@Some(Position { line: 19, character: 27 }) - resolves to forall Some(Position { line: 19, character: 13 }) -a@Some(Position { line: 30, character: 33 }) - resolves to nothing -a@Some(Position { line: 27, character: 22 }) - resolves to nothing -a@Some(Position { line: 27, character: 33 }) - resolves to nothing -k@Some(Position { line: 23, character: 38 }) - resolves to forall Some(Position { line: 23, character: 35 }) -a@Some(Position { line: 31, character: 26 }) - resolves to forall Some(Position { line: 31, character: 13 }) +a@28:26 + -> forall@28:13 +a@25:37 + -> forall@25:10 +a@20:38 + -> forall@20:14 +a@30:22 + -> nothing +a@19:27 + -> forall@19:13 +a@30:33 + -> nothing +a@27:22 + -> nothing +a@27:33 + -> nothing +k@23:38 + -> forall@23:35 +a@31:26 + -> forall@31:13 diff --git a/tests-integration/fixtures/lowering/013_ado_statement_let/Main.snap b/tests-integration/fixtures/lowering/013_ado_statement_let/Main.snap index 5b9852cfb..a5e925f8b 100644 --- a/tests-integration/fixtures/lowering/013_ado_statement_let/Main.snap +++ b/tests-integration/fixtures/lowering/013_ado_statement_let/Main.snap @@ -6,38 +6,38 @@ module Main Expressions: -pure@Some(Position { line: 11, character: 6 }) - resolves to top-level name -{ x, y, z }@Some(Position { line: 12, character: 4 }) - resolves to binder Some(Position { line: 8, character: 10 }) -{ x, y, z }@Some(Position { line: 12, character: 4 }) - 
resolves to equation Some(Position { line: 10, character: 5 }) -{ x, y, z }@Some(Position { line: 12, character: 4 }) - resolves to binder Some(Position { line: 10, character: 15 }) -{ x }@Some(Position { line: 10, character: 9 }) - resolves to binder Some(Position { line: 8, character: 10 }) -pure@Some(Position { line: 9, character: 6 }) - resolves to top-level name +pure@11:6 + -> top-level +{ x, y, z }@12:4 + -> binder@8:10 +{ x, y, z }@12:4 + -> equation@10:5 +{ x, y, z }@12:4 + -> binder@10:15 +{ x }@10:9 + -> binder@8:10 +pure@9:6 + -> top-level Types: -b@Some(Position { line: 5, character: 39 }) - resolves to forall Some(Position { line: 5, character: 30 }) -a@Some(Position { line: 6, character: 61 }) - resolves to forall Some(Position { line: 6, character: 30 }) -b@Some(Position { line: 6, character: 73 }) - resolves to forall Some(Position { line: 6, character: 32 }) -b@Some(Position { line: 6, character: 48 }) - resolves to forall Some(Position { line: 6, character: 32 }) -a@Some(Position { line: 5, character: 35 }) - resolves to forall Some(Position { line: 5, character: 28 }) -a@Some(Position { line: 6, character: 44 }) - resolves to forall Some(Position { line: 6, character: 30 }) -a@Some(Position { line: 5, character: 52 }) - resolves to forall Some(Position { line: 5, character: 28 }) -b@Some(Position { line: 5, character: 64 }) - resolves to forall Some(Position { line: 5, character: 30 }) -a@Some(Position { line: 4, character: 32 }) - resolves to forall Some(Position { line: 4, character: 29 }) -a@Some(Position { line: 4, character: 44 }) - resolves to forall Some(Position { line: 4, character: 29 }) +b@5:39 + -> forall@5:30 +a@6:61 + -> forall@6:30 +b@6:73 + -> forall@6:32 +b@6:48 + -> forall@6:32 +a@5:35 + -> forall@5:28 +a@6:44 + -> forall@6:30 +a@5:52 + -> forall@5:28 +b@5:64 + -> forall@5:30 +a@4:32 + -> forall@4:29 +a@4:44 + -> forall@4:29 diff --git a/tests-integration/fixtures/lowering/014_ado_statement_binder/Main.snap 
b/tests-integration/fixtures/lowering/014_ado_statement_binder/Main.snap index e7d2ca40d..61f407e3e 100644 --- a/tests-integration/fixtures/lowering/014_ado_statement_binder/Main.snap +++ b/tests-integration/fixtures/lowering/014_ado_statement_binder/Main.snap @@ -6,36 +6,36 @@ module Main Expressions: -pure@Some(Position { line: 10, character: 6 }) - resolves to top-level name -{ x, y }@Some(Position { line: 11, character: 4 }) - resolves to binder Some(Position { line: 8, character: 10 }) -{ x, y }@Some(Position { line: 11, character: 4 }) - resolves to binder Some(Position { line: 9, character: 14 }) -x@Some(Position { line: 10, character: 11 }) - resolves to binder Some(Position { line: 8, character: 10 }) -pure@Some(Position { line: 9, character: 6 }) - resolves to top-level name +pure@10:6 + -> top-level +{ x, y }@11:4 + -> binder@8:10 +{ x, y }@11:4 + -> binder@9:14 +x@10:11 + -> binder@8:10 +pure@9:6 + -> top-level Types: -b@Some(Position { line: 5, character: 39 }) - resolves to forall Some(Position { line: 5, character: 30 }) -a@Some(Position { line: 6, character: 61 }) - resolves to forall Some(Position { line: 6, character: 30 }) -b@Some(Position { line: 6, character: 73 }) - resolves to forall Some(Position { line: 6, character: 32 }) -b@Some(Position { line: 6, character: 48 }) - resolves to forall Some(Position { line: 6, character: 32 }) -a@Some(Position { line: 5, character: 35 }) - resolves to forall Some(Position { line: 5, character: 28 }) -a@Some(Position { line: 6, character: 44 }) - resolves to forall Some(Position { line: 6, character: 30 }) -a@Some(Position { line: 5, character: 52 }) - resolves to forall Some(Position { line: 5, character: 28 }) -b@Some(Position { line: 5, character: 64 }) - resolves to forall Some(Position { line: 5, character: 30 }) -a@Some(Position { line: 4, character: 32 }) - resolves to forall Some(Position { line: 4, character: 29 }) -a@Some(Position { line: 4, character: 44 }) - resolves to forall Some(Position { 
line: 4, character: 29 }) +b@5:39 + -> forall@5:30 +a@6:61 + -> forall@6:30 +b@6:73 + -> forall@6:32 +b@6:48 + -> forall@6:32 +a@5:35 + -> forall@5:28 +a@6:44 + -> forall@6:30 +a@5:52 + -> forall@5:28 +b@5:64 + -> forall@5:30 +a@4:32 + -> forall@4:29 +a@4:44 + -> forall@4:29 diff --git a/tests-integration/fixtures/lowering/015_instance_constraints/Main.snap b/tests-integration/fixtures/lowering/015_instance_constraints/Main.snap index 75a5dc1af..8d689d773 100644 --- a/tests-integration/fixtures/lowering/015_instance_constraints/Main.snap +++ b/tests-integration/fixtures/lowering/015_instance_constraints/Main.snap @@ -9,18 +9,18 @@ Expressions: Types: -n@Some(Position { line: 6, character: 22 }) - resolves to a constraint variable "n" - Some(Position { line: 6, character: 48 }) -n@Some(Position { line: 6, character: 48 }) +n@6:22 + -> constraint variable "n" + 6:48 +n@6:48 introduces a constraint variable "n" -o@Some(Position { line: 6, character: 36 }) - resolves to a constraint variable "o" - Some(Position { line: 6, character: 50 }) -m@Some(Position { line: 6, character: 24 }) +o@6:36 + -> constraint variable "o" + 6:50 +m@6:24 introduces a constraint variable "m" -o@Some(Position { line: 6, character: 50 }) +o@6:50 introduces a constraint variable "o" -m@Some(Position { line: 6, character: 34 }) - resolves to a constraint variable "m" - Some(Position { line: 6, character: 24 }) +m@6:34 + -> constraint variable "m" + 6:24 diff --git a/tests-integration/fixtures/lowering/016_derive_constraints/Main.snap b/tests-integration/fixtures/lowering/016_derive_constraints/Main.snap index 827685170..eaac4312c 100644 --- a/tests-integration/fixtures/lowering/016_derive_constraints/Main.snap +++ b/tests-integration/fixtures/lowering/016_derive_constraints/Main.snap @@ -9,18 +9,18 @@ Expressions: Types: -m@Some(Position { line: 6, character: 31 }) +m@6:31 introduces a constraint variable "m" -o@Some(Position { line: 6, character: 57 }) +o@6:57 introduces a constraint variable 
"o" -m@Some(Position { line: 6, character: 41 }) - resolves to a constraint variable "m" - Some(Position { line: 6, character: 31 }) -n@Some(Position { line: 6, character: 29 }) - resolves to a constraint variable "n" - Some(Position { line: 6, character: 55 }) -n@Some(Position { line: 6, character: 55 }) +m@6:41 + -> constraint variable "m" + 6:31 +n@6:29 + -> constraint variable "n" + 6:55 +n@6:55 introduces a constraint variable "n" -o@Some(Position { line: 6, character: 43 }) - resolves to a constraint variable "o" - Some(Position { line: 6, character: 57 }) +o@6:43 + -> constraint variable "o" + 6:57 diff --git a/tests-integration/fixtures/lsp/009_completion_suggestion/Main.snap b/tests-integration/fixtures/lsp/009_completion_suggestion/Main.snap index 4e6411a13..76daf648f 100644 --- a/tests-integration/fixtures/lsp/009_completion_suggestion/Main.snap +++ b/tests-integration/fixtures/lsp/009_completion_suggestion/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/lsp/generated.rs +assertion_line: 12 expression: report --- Completion at Position { line: 3, character: 7 } @@ -93,12 +94,12 @@ cls :: C ╭──────────────┬──────────────────────────┬─────────────────────┬─────────────────────────┬───────────────┬──────────────┬────────────────────────────────────────╮ │ label │ label_detail │ label_description │ sort_text │ filter_text │ text_edit │ additional_text_edits │ ├──────────────┼──────────────────────────┼─────────────────────┼─────────────────────────┼───────────────┼──────────────┼────────────────────────────────────────┤ -│ Char │ ... │ Prim │ Char │ Char │ 16:7..16:8 │ ... │ -│ │ │ │ │ │ Char │ │ -├──────────────┼──────────────────────────┼─────────────────────┼─────────────────────────┼───────────────┼──────────────┼────────────────────────────────────────┤ │ Constraint │ ... │ Prim │ Constraint │ Constraint │ 16:7..16:8 │ ... 
│ │ │ │ │ │ │ Constraint │ │ ├──────────────┼──────────────────────────┼─────────────────────┼─────────────────────────┼───────────────┼──────────────┼────────────────────────────────────────┤ +│ Char │ ... │ Prim │ Char │ Char │ 16:7..16:8 │ ... │ +│ │ │ │ │ │ Char │ │ +├──────────────┼──────────────────────────┼─────────────────────┼─────────────────────────┼───────────────┼──────────────┼────────────────────────────────────────┤ │ Coercible │ (import Prim.Coerce) │ Prim.Coerce │ Prim.Coerce.Coercible │ Coercible │ 16:7..16:8 │ 1:0..1:0 │ │ │ │ │ │ │ Coercible │ import Prim.Coerce (class Coercible) │ ├──────────────┼──────────────────────────┼─────────────────────┼─────────────────────────┼───────────────┼──────────────┼────────────────────────────────────────┤ diff --git a/tests-integration/fixtures/lsp/032_completion_cache_exact/Main.snap b/tests-integration/fixtures/lsp/032_completion_cache_exact/Main.snap index 74ff9807d..54143a5d9 100644 --- a/tests-integration/fixtures/lsp/032_completion_cache_exact/Main.snap +++ b/tests-integration/fixtures/lsp/032_completion_cache_exact/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/lsp/generated.rs +assertion_line: 12 expression: report --- Completion at Position { line: 2, character: 14 } @@ -138,27 +139,27 @@ type Qual1 = Data.Maybe. 
│ Text │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Text │ Data.Maybe.Text │ 20:13..20:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Text │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 20:13..20:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 20:13..20:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Beside │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Beside │ Data.Maybe.Beside │ 20:13..20:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Beside │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 20:13..20:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 20:13..20:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ 
├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ QuoteLabel │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.QuoteLabel │ Data.Maybe.QuoteLabel │ 20:13..20:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.QuoteLabel │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 20:13..20:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 20:13..20:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Doc │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Doc │ Data.Maybe.Doc │ 20:13..20:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Doc │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 20:13..20:24 │ 1:0..1:0 │ +│ │ │ │ │ │ 
Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 20:13..20:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Maybe │ (import Data.Maybe as Data.Maybe) │ Data.Maybe │ Data.Maybe.Maybe │ Data.Maybe.Maybe │ 20:13..20:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Maybe │ import Data.Maybe as Data.Maybe │ ╰──────────────┴──────────────────────────────────────────┴─────────────────────┴─────────────────────────────┴─────────────────────────┴─────────────────────────┴───────────────────────────────────────╯ @@ -186,27 +187,27 @@ type Qual2 = Data.Maybe. 
│ Text │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Text │ Data.Maybe.Text │ 23:13..23:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Text │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 23:13..23:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 23:13..23:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Beside │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Beside │ Data.Maybe.Beside │ 23:13..23:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Beside │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 23:13..23:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 23:13..23:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ 
├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ QuoteLabel │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.QuoteLabel │ Data.Maybe.QuoteLabel │ 23:13..23:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.QuoteLabel │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 23:13..23:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 23:13..23:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Doc │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Doc │ Data.Maybe.Doc │ 23:13..23:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Doc │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 23:13..23:24 │ 1:0..1:0 │ +│ │ │ │ │ │ 
Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 23:13..23:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Maybe │ (import Data.Maybe as Data.Maybe) │ Data.Maybe │ Data.Maybe.Maybe │ Data.Maybe.Maybe │ 23:13..23:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Maybe │ import Data.Maybe as Data.Maybe │ ╰──────────────┴──────────────────────────────────────────┴─────────────────────┴─────────────────────────────┴─────────────────────────┴─────────────────────────┴───────────────────────────────────────╯ diff --git a/tests-integration/fixtures/lsp/033_completion_cache_prefix/Main.snap b/tests-integration/fixtures/lsp/033_completion_cache_prefix/Main.snap index 9a00f0f38..8de4baf48 100644 --- a/tests-integration/fixtures/lsp/033_completion_cache_prefix/Main.snap +++ b/tests-integration/fixtures/lsp/033_completion_cache_prefix/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/lsp/generated.rs +assertion_line: 12 expression: report --- Completion at Position { line: 2, character: 14 } @@ -36,12 +37,12 @@ type Step2 = Ma │ Qual1 │ ... │ Local │ Qual1 │ Qual1 │ 5:13..5:15 │ ... │ │ │ │ │ │ │ Qual1 │ │ ├───────────┼────────────────────────┼─────────────────────┼────────────────────┼───────────────┼──────────────┼─────────────────────────────┤ -│ Partial │ ... │ Prim │ Partial │ Partial │ 5:13..5:15 │ ... 
│ -│ │ │ │ │ │ Partial │ │ -├───────────┼────────────────────────┼─────────────────────┼────────────────────┼───────────────┼──────────────┼─────────────────────────────┤ │ Char │ ... │ Prim │ Char │ Char │ 5:13..5:15 │ ... │ │ │ │ │ │ │ Char │ │ ├───────────┼────────────────────────┼─────────────────────┼────────────────────┼───────────────┼──────────────┼─────────────────────────────┤ +│ Partial │ ... │ Prim │ Partial │ Partial │ 5:13..5:15 │ ... │ +│ │ │ │ │ │ Partial │ │ +├───────────┼────────────────────────┼─────────────────────┼────────────────────┼───────────────┼──────────────┼─────────────────────────────┤ │ Maybe │ (import Data.Maybe) │ Data.Maybe │ Data.Maybe.Maybe │ Maybe │ 5:13..5:15 │ 1:0..1:0 │ │ │ │ │ │ │ Maybe │ import Data.Maybe (Maybe) │ ╰───────────┴────────────────────────┴─────────────────────┴────────────────────┴───────────────┴──────────────┴─────────────────────────────╯ @@ -189,27 +190,27 @@ type Qual1 = Data.Maybe. │ Text │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Text │ Data.Maybe.Text │ 26:13..26:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Text │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 26:13..26:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 26:13..26:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Beside │ (import 
Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Beside │ Data.Maybe.Beside │ 26:13..26:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Beside │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 26:13..26:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 26:13..26:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ QuoteLabel │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.QuoteLabel │ Data.Maybe.QuoteLabel │ 26:13..26:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.QuoteLabel │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 26:13..26:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 26:13..26:24 │ 1:0..1:0 │ -│ │ │ │ │ │ 
Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Doc │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Doc │ Data.Maybe.Doc │ 26:13..26:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Doc │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 26:13..26:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 26:13..26:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Maybe │ (import Data.Maybe as Data.Maybe) │ Data.Maybe │ Data.Maybe.Maybe │ Data.Maybe.Maybe │ 26:13..26:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Maybe │ import Data.Maybe as Data.Maybe │ ╰──────────────┴──────────────────────────────────────────┴─────────────────────┴─────────────────────────────┴─────────────────────────┴─────────────────────────┴───────────────────────────────────────╯ @@ -237,27 +238,27 @@ type Qual2 = Data.Maybe. 
│ Text │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Text │ Data.Maybe.Text │ 29:13..29:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Text │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 29:13..29:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 29:13..29:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Beside │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Beside │ Data.Maybe.Beside │ 29:13..29:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Beside │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 29:13..29:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 29:13..29:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ 
├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ QuoteLabel │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.QuoteLabel │ Data.Maybe.QuoteLabel │ 29:13..29:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.QuoteLabel │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Above │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Above │ Data.Maybe.Above │ 29:13..29:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Above │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ -│ Quote │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Quote │ Data.Maybe.Quote │ 29:13..29:24 │ 1:0..1:0 │ -│ │ │ │ │ │ Data.Maybe.Quote │ import Prim.TypeError as Data.Maybe │ -├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Doc │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Doc │ Data.Maybe.Doc │ 29:13..29:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Doc │ import Prim.TypeError as Data.Maybe │ ├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Fail │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Fail │ Data.Maybe.Fail │ 29:13..29:24 │ 1:0..1:0 │ +│ │ │ │ │ │ 
Data.Maybe.Fail │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ +│ Warn │ (import Prim.TypeError as Data.Maybe) │ Prim.TypeError │ Prim.TypeError.Warn │ Data.Maybe.Warn │ 29:13..29:24 │ 1:0..1:0 │ +│ │ │ │ │ │ Data.Maybe.Warn │ import Prim.TypeError as Data.Maybe │ +├──────────────┼──────────────────────────────────────────┼─────────────────────┼─────────────────────────────┼─────────────────────────┼─────────────────────────┼───────────────────────────────────────┤ │ Maybe │ (import Data.Maybe as Data.Maybe) │ Data.Maybe │ Data.Maybe.Maybe │ Data.Maybe.Maybe │ 29:13..29:24 │ 1:0..1:0 │ │ │ │ │ │ │ Data.Maybe.Maybe │ import Data.Maybe as Data.Maybe │ ╰──────────────┴──────────────────────────────────────────┴─────────────────────┴─────────────────────────────┴─────────────────────────┴─────────────────────────┴───────────────────────────────────────╯ diff --git a/tests-integration/fixtures/resolving/001_local_resolution/Explicit.snap b/tests-integration/fixtures/resolving/001_local_resolution/Explicit.snap index f3433e620..c0991dfac 100644 --- a/tests-integration/fixtures/resolving/001_local_resolution/Explicit.snap +++ b/tests-integration/fixtures/resolving/001_local_resolution/Explicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Explicit @@ -18,10 +19,12 @@ Exported Terms: Exported Types: - HiddenConstructor - Id - - Eq - Synonym - Maybe +Exported Classes: + - Eq + Local Terms: - value - eqMaybe @@ -37,10 +40,12 @@ Local Types: - HiddenConstructor - HiddenType - Id - - Eq - Synonym - Maybe +Local Classes: + - Eq + Class Members: - Eq.eq diff --git a/tests-integration/fixtures/resolving/001_local_resolution/ExplicitSelf.snap 
b/tests-integration/fixtures/resolving/001_local_resolution/ExplicitSelf.snap index 256bb8ac7..8838a8baa 100644 --- a/tests-integration/fixtures/resolving/001_local_resolution/ExplicitSelf.snap +++ b/tests-integration/fixtures/resolving/001_local_resolution/ExplicitSelf.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ExplicitSelf @@ -21,10 +22,12 @@ Exported Types: - HiddenConstructor - HiddenType - Id - - Eq - Synonym - Maybe +Exported Classes: + - Eq + Local Terms: - value - eqMaybe @@ -40,10 +43,12 @@ Local Types: - HiddenConstructor - HiddenType - Id - - Eq - Synonym - Maybe +Local Classes: + - Eq + Class Members: - Eq.eq diff --git a/tests-integration/fixtures/resolving/001_local_resolution/Implicit.snap b/tests-integration/fixtures/resolving/001_local_resolution/Implicit.snap index a9b713c54..342764022 100644 --- a/tests-integration/fixtures/resolving/001_local_resolution/Implicit.snap +++ b/tests-integration/fixtures/resolving/001_local_resolution/Implicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Implicit @@ -21,10 +22,12 @@ Exported Types: - HiddenConstructor - HiddenType - Id - - Eq - Synonym - Maybe +Exported Classes: + - Eq + Local Terms: - value - eqMaybe @@ -40,10 +43,12 @@ Local Types: - HiddenConstructor - HiddenType - Id - - Eq - Synonym - Maybe +Local Classes: + - Eq + Class Members: - Eq.eq diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportExplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportExplicit.snap index 55c4666ca..33e0630cc 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportExplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportExplicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- 
module ImportExplicit @@ -12,6 +13,8 @@ Terms: Types: - Id is Explicit +Classes: + Terms: - Id is Implicit @@ -19,6 +22,8 @@ Types: - Hidden is Implicit - Id is Implicit +Classes: + Qualified Imports: QualifiedExplicit Terms: @@ -27,6 +32,8 @@ QualifiedExplicit Terms: QualifiedExplicit Types: - Id is Explicit +QualifiedExplicit Classes: + QualifiedImplicit Terms: - Id is Implicit @@ -34,14 +41,20 @@ QualifiedImplicit Types: - Hidden is Implicit - Id is Implicit +QualifiedImplicit Classes: + Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportForLocalOnly.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportForLocalOnly.snap index c75a43e40..65a312334 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportForLocalOnly.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportForLocalOnly.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportForLocalOnly @@ -11,12 +12,16 @@ Terms: Types: +Classes: + Terms: - Id is Explicit Types: - Id is Explicit +Classes: + Qualified Imports: Exported Terms: @@ -25,10 +30,14 @@ Exported Terms: Exported Types: - Id +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportHiddenConstructor.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportHiddenConstructor.snap index 5c4ab3f61..ecdfa04a2 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportHiddenConstructor.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportHiddenConstructor.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module 
ImportHiddenConstructor @@ -11,16 +12,22 @@ Terms: Types: - Hidden is Explicit +Classes: + Qualified Imports: Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedExplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedExplicit.snap index 50fc4a360..f2f03d5bf 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedExplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedExplicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportQualifiedExplicit @@ -9,20 +10,22 @@ Unqualified Imports: Qualified Imports: L Terms: - - MkLibTy is Explicit - - Ctor3 is Explicit - libFn is Explicit + - MkLibTy is Explicit - Ctor1 is Explicit + - Ctor3 is Explicit L Types: - LibTy is Explicit - MultiTy is Explicit - TypeOnly is Explicit +L Classes: + Exported Terms: + - Ctor1 - libFn - MkLibTy - - Ctor1 - Ctor3 Exported Types: @@ -30,10 +33,14 @@ Exported Types: - MultiTy - TypeOnly +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedHiding.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedHiding.snap index 0da4f753b..60e4404f1 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedHiding.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedHiding.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportQualifiedHiding @@ -9,29 +10,35 @@ Unqualified Imports: Qualified Imports: L Terms: - - MkLibTy is Implicit - - Ctor3 is Implicit - libFn is Implicit + - 
MkLibTy is Implicit - Ctor1 is Implicit + - Ctor3 is Implicit L Types: - LibTy is Implicit - TypeOnly is Implicit +L Classes: + Exported Terms: + - Ctor1 - libFn - MkLibTy - - Ctor1 - Ctor3 Exported Types: - LibTy - TypeOnly +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedImplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedImplicit.snap index 274921994..9afbb1011 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedImplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportQualifiedImplicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportQualifiedImplicit @@ -9,13 +10,13 @@ Unqualified Imports: Qualified Imports: L Terms: - - MkLibTy is Implicit - - Ctor2 is Implicit - - hideMe is Implicit - MkHideTy is Implicit - - Ctor3 is Implicit + - hideMe is Implicit - libFn is Implicit + - MkLibTy is Implicit - Ctor1 is Implicit + - Ctor2 is Implicit + - Ctor3 is Implicit L Types: - LibTy is Implicit @@ -23,13 +24,15 @@ L Types: - TypeOnly is Implicit - HideTy is Implicit +L Classes: + Exported Terms: - MkHideTy - Ctor3 - libFn - - Ctor2 - MkLibTy - Ctor1 + - Ctor2 - hideMe Exported Types: @@ -38,10 +41,14 @@ Exported Types: - TypeOnly - HideTy +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedExplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedExplicit.snap index 30595e6a3..e15208cfc 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedExplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedExplicit.snap @@ -1,5 +1,6 @@ --- 
source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportUnqualifiedExplicit @@ -7,22 +8,24 @@ module ImportUnqualifiedExplicit Unqualified Imports: Terms: - - MkLibTy is Explicit - - Ctor3 is Explicit - libFn is Explicit + - MkLibTy is Explicit - Ctor1 is Explicit + - Ctor3 is Explicit Types: - LibTy is Explicit - MultiTy is Explicit - TypeOnly is Explicit +Classes: + Qualified Imports: Exported Terms: + - Ctor1 - libFn - MkLibTy - - Ctor1 - Ctor3 Exported Types: @@ -30,10 +33,14 @@ Exported Types: - MultiTy - TypeOnly +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedHiding.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedHiding.snap index 8a35beca8..7d1a7cdae 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedHiding.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedHiding.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportUnqualifiedHiding @@ -7,31 +8,37 @@ module ImportUnqualifiedHiding Unqualified Imports: Terms: - - MkLibTy is Implicit - - Ctor3 is Implicit - libFn is Implicit + - MkLibTy is Implicit - Ctor1 is Implicit + - Ctor3 is Implicit Types: - LibTy is Implicit - TypeOnly is Implicit +Classes: + Qualified Imports: Exported Terms: + - Ctor1 - libFn - MkLibTy - - Ctor1 - Ctor3 Exported Types: - LibTy - TypeOnly +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedImplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedImplicit.snap index 5fe83dfa3..d26c0eff5 100644 --- 
a/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedImplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/ImportUnqualifiedImplicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportUnqualifiedImplicit @@ -7,13 +8,13 @@ module ImportUnqualifiedImplicit Unqualified Imports: Terms: - - MkLibTy is Implicit - - Ctor2 is Implicit - - hideMe is Implicit - MkHideTy is Implicit - - Ctor3 is Implicit + - hideMe is Implicit - libFn is Implicit + - MkLibTy is Implicit - Ctor1 is Implicit + - Ctor2 is Implicit + - Ctor3 is Implicit Types: - LibTy is Implicit @@ -21,15 +22,17 @@ Types: - TypeOnly is Implicit - HideTy is Implicit +Classes: + Qualified Imports: Exported Terms: - MkHideTy - Ctor3 - libFn - - Ctor2 - MkLibTy - Ctor1 + - Ctor2 - hideMe Exported Types: @@ -38,10 +41,14 @@ Exported Types: - TypeOnly - HideTy +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/Library.snap b/tests-integration/fixtures/resolving/002_import_resolution/Library.snap index 23f9de747..81a3fa278 100644 --- a/tests-integration/fixtures/resolving/002_import_resolution/Library.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/Library.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Library @@ -23,6 +24,8 @@ Exported Types: - TypeOnly - HideTy +Exported Classes: + Local Terms: - MkHideTy - Ctor3 @@ -38,6 +41,8 @@ Local Types: - TypeOnly - HideTy +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/002_import_resolution/LibraryExplicit.snap b/tests-integration/fixtures/resolving/002_import_resolution/LibraryExplicit.snap index 2b72b3e1c..8784fb7a9 100644 --- 
a/tests-integration/fixtures/resolving/002_import_resolution/LibraryExplicit.snap +++ b/tests-integration/fixtures/resolving/002_import_resolution/LibraryExplicit.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module LibraryExplicit @@ -15,6 +16,8 @@ Exported Types: - Hidden - Id +Exported Classes: + Local Terms: - Hidden - Id @@ -23,6 +26,8 @@ Local Types: - Hidden - Id +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/003_import_errors/DuplicateLocal.snap b/tests-integration/fixtures/resolving/003_import_errors/DuplicateLocal.snap index 50e1f5ab9..374a33e32 100644 --- a/tests-integration/fixtures/resolving/003_import_errors/DuplicateLocal.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/DuplicateLocal.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module DuplicateLocal @@ -14,12 +15,16 @@ Exported Terms: Exported Types: - Data +Exported Classes: + Local Terms: - value Local Types: - Data +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/003_import_errors/DuplicateQualifiedImport.snap b/tests-integration/fixtures/resolving/003_import_errors/DuplicateQualifiedImport.snap index 91a1e1052..306eb304b 100644 --- a/tests-integration/fixtures/resolving/003_import_errors/DuplicateQualifiedImport.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/DuplicateQualifiedImport.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module DuplicateQualifiedImport @@ -8,6 +9,14 @@ Unqualified Imports: Qualified Imports: +Library Terms: + - html is Implicit + +Library Types: + - Html is Implicit + +Library Classes: + Library Terms: - html is Implicit @@ -15,16 +24,20 @@ Library Types: - Css is Implicit - Html is Implicit +Library Classes: + Exported 
Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: - - TermImportConflict { existing: (Idx::(14), Idx::(0), AstId(9)), duplicate: (Idx::(13), Idx::(0), AstId(5)) } - - TypeImportConflict { existing: (Idx::(14), Idx::(0), AstId(9)), duplicate: (Idx::(13), Idx::(0), AstId(5)) } diff --git a/tests-integration/fixtures/resolving/003_import_errors/InvalidConstructor.snap b/tests-integration/fixtures/resolving/003_import_errors/InvalidConstructor.snap index 70a0df0b7..f2d5902a2 100644 --- a/tests-integration/fixtures/resolving/003_import_errors/InvalidConstructor.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/InvalidConstructor.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module InvalidConstructor @@ -13,11 +14,15 @@ Exported Terms: Exported Types: - Invalid +Exported Classes: + Local Terms: Local Types: - Invalid +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/003_import_errors/InvalidImport.snap b/tests-integration/fixtures/resolving/003_import_errors/InvalidImport.snap index e5636e5ae..70bb2c49c 100644 --- a/tests-integration/fixtures/resolving/003_import_errors/InvalidImport.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/InvalidImport.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module InvalidImport @@ -11,22 +12,30 @@ Terms: Types: - Invalid is Explicit +Classes: + Terms: Types: - Css is Explicit - Html is Explicit +Classes: + Qualified Imports: Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/003_import_errors/LibraryA.snap b/tests-integration/fixtures/resolving/003_import_errors/LibraryA.snap index 3cd499743..6a0cdf75c 100644 --- 
a/tests-integration/fixtures/resolving/003_import_errors/LibraryA.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/LibraryA.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module LibraryA @@ -15,6 +16,8 @@ Exported Types: - Css - Html +Exported Classes: + Local Terms: - html @@ -22,6 +25,8 @@ Local Types: - Css - Html +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/003_import_errors/LibraryB.snap b/tests-integration/fixtures/resolving/003_import_errors/LibraryB.snap index dd3ab0e4b..f305805f0 100644 --- a/tests-integration/fixtures/resolving/003_import_errors/LibraryB.snap +++ b/tests-integration/fixtures/resolving/003_import_errors/LibraryB.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module LibraryB @@ -14,12 +15,16 @@ Exported Terms: Exported Types: - Html +Exported Classes: + Local Terms: - html Local Types: - Html +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Internal.snap b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Internal.snap index 3553a677f..2a2a9ca43 100644 --- a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Internal.snap +++ b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Internal.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Internal @@ -14,12 +15,16 @@ Exported Terms: Exported Types: - Internal +Exported Classes: + Local Terms: - Internal Local Types: - Internal +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Library.snap b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Library.snap index 
408902443..c97fcbf1e 100644 --- a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Library.snap +++ b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Library.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Library @@ -14,16 +15,22 @@ Internal Terms: Internal Types: - Internal is Implicit +Internal Classes: + Exported Terms: - Internal Exported Types: - Internal +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Main.snap b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Main.snap index 7f2fa3ade..8c97f6e8c 100644 --- a/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Main.snap +++ b/tests-integration/fixtures/resolving/004_import_re_exported_constructor/Main.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module Main @@ -12,6 +13,8 @@ Terms: Types: - Internal is Explicit +Classes: + Qualified Imports: Exported Terms: @@ -19,11 +22,15 @@ Exported Terms: Exported Types: +Exported Classes: + Local Terms: - internal Local Types: +Local Classes: + Class Members: Errors: diff --git a/tests-integration/fixtures/resolving/005_class_members/ClassLibrary.snap b/tests-integration/fixtures/resolving/005_class_members/ClassLibrary.snap index 7365a8a52..a8f7cb060 100644 --- a/tests-integration/fixtures/resolving/005_class_members/ClassLibrary.snap +++ b/tests-integration/fixtures/resolving/005_class_members/ClassLibrary.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ClassLibrary @@ -15,6 +16,8 @@ Exported Terms: - eq Exported Types: + +Exported Classes: - Show - Ord - Eq @@ -26,6 +29,8 @@ Local Terms: - eq Local Types: + +Local 
Classes: - Show - Ord - Eq diff --git a/tests-integration/fixtures/resolving/005_class_members/HiddenClass.snap b/tests-integration/fixtures/resolving/005_class_members/HiddenClass.snap index 45f1269af..ae92f1d22 100644 --- a/tests-integration/fixtures/resolving/005_class_members/HiddenClass.snap +++ b/tests-integration/fixtures/resolving/005_class_members/HiddenClass.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module HiddenClass @@ -13,6 +14,8 @@ Terms: - eq is Implicit Types: + +Classes: - Show is Implicit - Ord is Implicit @@ -22,10 +25,14 @@ Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: - Show.show (imported) - Ord.compare (imported) diff --git a/tests-integration/fixtures/resolving/005_class_members/ImportedClass.snap b/tests-integration/fixtures/resolving/005_class_members/ImportedClass.snap index cb86bf63d..dabbcfc42 100644 --- a/tests-integration/fixtures/resolving/005_class_members/ImportedClass.snap +++ b/tests-integration/fixtures/resolving/005_class_members/ImportedClass.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ImportedClass @@ -9,6 +10,8 @@ Unqualified Imports: Terms: Types: + +Classes: - Show is Explicit - Eq is Explicit @@ -18,10 +21,14 @@ Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: - Show.show (imported) - Eq.eq (imported) diff --git a/tests-integration/fixtures/resolving/005_class_members/LocalClass.snap b/tests-integration/fixtures/resolving/005_class_members/LocalClass.snap index 3082f572f..c2b319b5d 100644 --- a/tests-integration/fixtures/resolving/005_class_members/LocalClass.snap +++ b/tests-integration/fixtures/resolving/005_class_members/LocalClass.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 
12 expression: report --- module LocalClass @@ -13,6 +14,8 @@ Exported Terms: - apply Exported Types: + +Exported Classes: - Apply - Functor @@ -21,6 +24,8 @@ Local Terms: - apply Local Types: + +Local Classes: - Apply - Functor diff --git a/tests-integration/fixtures/resolving/005_class_members/ReExportConsumer.snap b/tests-integration/fixtures/resolving/005_class_members/ReExportConsumer.snap index cff59af0a..9c896fd4d 100644 --- a/tests-integration/fixtures/resolving/005_class_members/ReExportConsumer.snap +++ b/tests-integration/fixtures/resolving/005_class_members/ReExportConsumer.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ReExportConsumer @@ -9,6 +10,8 @@ Unqualified Imports: Terms: Types: + +Classes: - Show is Explicit - Eq is Explicit @@ -18,10 +21,14 @@ Exported Terms: Exported Types: +Exported Classes: + Local Terms: Local Types: +Local Classes: + Class Members: - Show.show (imported) - Eq.eq (imported) diff --git a/tests-integration/fixtures/resolving/005_class_members/ReExporter.snap b/tests-integration/fixtures/resolving/005_class_members/ReExporter.snap index 0a0059427..95c06ea1b 100644 --- a/tests-integration/fixtures/resolving/005_class_members/ReExporter.snap +++ b/tests-integration/fixtures/resolving/005_class_members/ReExporter.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/resolving/generated.rs +assertion_line: 12 expression: report --- module ReExporter @@ -13,6 +14,8 @@ Terms: - eq is Implicit Types: + +Classes: - Show is Implicit - Ord is Implicit - Eq is Implicit @@ -26,6 +29,8 @@ Exported Terms: - eq Exported Types: + +Exported Classes: - Show - Ord - Eq @@ -34,6 +39,8 @@ Local Terms: Local Types: +Local Classes: + Class Members: - Show.show (imported) - Eq.eq (imported) diff --git a/tests-integration/src/generated/basic.rs b/tests-integration/src/generated/basic.rs index 6dbfa91c6..21abe995f 100644 --- 
a/tests-integration/src/generated/basic.rs +++ b/tests-integration/src/generated/basic.rs @@ -2,6 +2,7 @@ use std::fmt::Write; use analyzer::{QueryEngine, locate}; use checking::core::pretty; +use diagnostics::{DiagnosticsContext, ToDiagnostics, format_rustc}; use files::FileId; use indexing::{ImportKind, TermItem, TypeItem, TypeItemId, TypeItemKind}; use lowering::{ @@ -37,27 +38,47 @@ pub fn report_resolved(engine: &QueryEngine, id: FileId, name: &str) -> String { } writeln!(buffer, " - {name} is {kind:?}").unwrap(); } - } - writeln!(buffer).unwrap(); - writeln!(buffer, "Qualified Imports:").unwrap(); - for (name, import) in &resolved.qualified { writeln!(buffer).unwrap(); - writeln!(buffer, "{name} Terms:").unwrap(); - for (name, _, _, kind) in import.iter_terms() { + writeln!(buffer, "Classes:").unwrap(); + for (name, _, _, kind) in import.iter_classes() { if matches!(kind, ImportKind::Hidden) { continue; } writeln!(buffer, " - {name} is {kind:?}").unwrap(); } + } - writeln!(buffer).unwrap(); - writeln!(buffer, "{name} Types:").unwrap(); - for (name, _, _, kind) in import.iter_types() { - if matches!(kind, ImportKind::Hidden) { - continue; + writeln!(buffer).unwrap(); + writeln!(buffer, "Qualified Imports:").unwrap(); + for (name, imports) in &resolved.qualified { + for import in imports { + writeln!(buffer).unwrap(); + writeln!(buffer, "{name} Terms:").unwrap(); + for (name, _, _, kind) in import.iter_terms() { + if matches!(kind, ImportKind::Hidden) { + continue; + } + writeln!(buffer, " - {name} is {kind:?}").unwrap(); + } + + writeln!(buffer).unwrap(); + writeln!(buffer, "{name} Types:").unwrap(); + for (name, _, _, kind) in import.iter_types() { + if matches!(kind, ImportKind::Hidden) { + continue; + } + writeln!(buffer, " - {name} is {kind:?}").unwrap(); + } + + writeln!(buffer).unwrap(); + writeln!(buffer, "{name} Classes:").unwrap(); + for (name, _, _, kind) in import.iter_classes() { + if matches!(kind, ImportKind::Hidden) { + continue; + } + 
writeln!(buffer, " - {name} is {kind:?}").unwrap(); } - writeln!(buffer, " - {name} is {kind:?}").unwrap(); } } @@ -73,6 +94,12 @@ pub fn report_resolved(engine: &QueryEngine, id: FileId, name: &str) -> String { writeln!(buffer, " - {name}").unwrap(); } + writeln!(buffer).unwrap(); + writeln!(buffer, "Exported Classes:").unwrap(); + for (name, _, _) in resolved.exports.iter_classes() { + writeln!(buffer, " - {name}").unwrap(); + } + writeln!(buffer).unwrap(); writeln!(buffer, "Local Terms:").unwrap(); for (name, _, _) in resolved.locals.iter_terms() { @@ -85,6 +112,12 @@ pub fn report_resolved(engine: &QueryEngine, id: FileId, name: &str) -> String { writeln!(buffer, " - {name}").unwrap(); } + writeln!(buffer).unwrap(); + writeln!(buffer, "Local Classes:").unwrap(); + for (name, _, _) in resolved.locals.iter_classes() { + writeln!(buffer, " - {name}").unwrap(); + } + writeln!(buffer).unwrap(); writeln!(buffer, "Class Members:").unwrap(); let indexed = engine.indexed(id).unwrap(); @@ -160,7 +193,9 @@ pub fn report_lowered(engine: &QueryEngine, id: FileId, name: &str) -> String { macro_rules! 
pos { ($id:expr) => {{ let cst = stabilized.ast_ptr($id).unwrap(); - locate::offset_to_position(&content, cst.syntax_node_ptr().text_range().start()) + let range = cst.syntax_node_ptr().text_range(); + let p = locate::offset_to_position(&content, range.start()).unwrap(); + format!("{}:{}", p.line, p.character) }}; } @@ -173,10 +208,10 @@ pub fn report_lowered(engine: &QueryEngine, id: FileId, name: &str) -> String { let node = cst.syntax_node_ptr().to_node(module.syntax()); let text = node.text().to_string(); - writeln!(buffer, "{}@{:?}", text.trim(), pos!(type_id)).unwrap(); + writeln!(buffer, "{}@{}", text.trim(), pos!(type_id)).unwrap(); match resolution { Some(TypeVariableResolution::Forall(id)) => { - writeln!(buffer, " resolves to forall {:?}", pos!(*id)).unwrap(); + writeln!(buffer, " -> forall@{}", pos!(*id)).unwrap(); } Some(TypeVariableResolution::Implicit(ImplicitTypeVariable { binding, node, id })) => { let GraphNode::Implicit { bindings, .. } = &graph[*node] else { @@ -188,14 +223,14 @@ pub fn report_lowered(engine: &QueryEngine, id: FileId, name: &str) -> String { if *binding { writeln!(buffer, " introduces a constraint variable {name:?}").unwrap(); } else { - writeln!(buffer, " resolves to a constraint variable {name:?}").unwrap(); + writeln!(buffer, " -> constraint variable {name:?}").unwrap(); for &tid in type_ids { - writeln!(buffer, " {:?}", pos!(tid)).unwrap(); + writeln!(buffer, " {}", pos!(tid)).unwrap(); } } } None => { - writeln!(buffer, " resolves to nothing").unwrap(); + writeln!(buffer, " -> nothing").unwrap(); } } } @@ -215,38 +250,40 @@ fn report_on_term( let cst = stabilized.ast_ptr(expression_id).unwrap(); let node = cst.syntax_node_ptr().to_node(module.syntax()); let text = node.text().to_string(); - let position = locate::offset_to_position(content, node.text_range().start()); + let position = locate::offset_to_position(content, node.text_range().start()).unwrap(); - writeln!(buffer, "{}@{:?}", text.trim(), position).unwrap(); + 
writeln!(buffer, "{}@{}:{}", text.trim(), position.line, position.character).unwrap(); macro_rules! pos { ($id:expr) => {{ let cst = stabilized.ast_ptr($id).unwrap(); - locate::offset_to_position(content, cst.syntax_node_ptr().text_range().start()) + let range = cst.syntax_node_ptr().text_range(); + let p = locate::offset_to_position(content, range.start()).unwrap(); + format!("{}:{}", p.line, p.character) }}; } match resolution { Some(TermVariableResolution::Binder(id)) => { - writeln!(buffer, " resolves to binder {:?}", pos!(*id)).unwrap(); + writeln!(buffer, " -> binder@{}", pos!(*id)).unwrap(); } Some(TermVariableResolution::Let(let_binding_id)) => { let let_binding = info.get_let_binding_group(*let_binding_id); if let Some(sig) = let_binding.signature { - writeln!(buffer, " resolves to signature {:?}", pos!(sig)).unwrap(); + writeln!(buffer, " -> signature@{}", pos!(sig)).unwrap(); } for eq in let_binding.equations.iter() { - writeln!(buffer, " resolves to equation {:?}", pos!(*eq)).unwrap(); + writeln!(buffer, " -> equation@{}", pos!(*eq)).unwrap(); } } Some(TermVariableResolution::RecordPun(id)) => { - writeln!(buffer, " resolves to record pun {:?}", pos!(*id)).unwrap(); + writeln!(buffer, " -> record pun@{}", pos!(*id)).unwrap(); } Some(TermVariableResolution::Reference(..)) => { - writeln!(buffer, " resolves to top-level name").unwrap(); + writeln!(buffer, " -> top-level").unwrap(); } None => { - writeln!(buffer, " resolves to nothing").unwrap(); + writeln!(buffer, " -> nothing").unwrap(); } } } @@ -342,12 +379,11 @@ pub fn report_checked(engine: &QueryEngine, id: FileId) -> String { class_line.push_str(name); - // Print class type variables with their kinds - // level = quantified_variables + kind_variables + index (matches localize_class) - for (index, &kind) in class.type_variable_kinds.iter().enumerate() { - let level = class.quantified_variables.0 + class.kind_variables.0 + index as u32; + // Print class type variables with their kinds. 
+ for (name, &kind) in class.type_variable_names.iter().zip(class.type_variable_kinds.iter()) + { let kind_str = pretty::print_global(engine, kind); - class_line.push_str(&format!(" (&{level} :: {kind_str})")); + class_line.push_str(&format!(" ({} :: {kind_str})", name.text)); } writeln!(snapshot, "class {class_line}").unwrap(); @@ -382,51 +418,35 @@ pub fn report_checked(engine: &QueryEngine, id: FileId) -> String { writeln!(snapshot, "derive {forall_prefix}{head}").unwrap(); } - if !checked.errors.is_empty() { - writeln!(snapshot, "\nErrors").unwrap(); + let content = engine.content(id); + + let (parsed, _) = engine.parsed(id).unwrap(); + let root = parsed.syntax_node(); + + let stabilized = engine.stabilized(id).unwrap(); + let lowered = engine.lowered(id).unwrap(); + let resolved = engine.resolved(id).unwrap(); + + let context = + DiagnosticsContext::new(&content, &root, &stabilized, &indexed, &lowered, &checked); + + let mut all_diagnostics = vec![]; + + for error in &lowered.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); } + + for error in &resolved.errors { + all_diagnostics.extend(error.to_diagnostics(&context)); + } + for error in &checked.errors { - use checking::error::ErrorKind::*; - let pp = |t| pretty::print_global(engine, t); - let step = &error.step; - match error.kind { - CannotUnify { t1, t2 } => { - writeln!(snapshot, "CannotUnify {{ {}, {} }} at {step:?}", pp(t1), pp(t2)).unwrap(); - } - NoInstanceFound { constraint } => { - writeln!(snapshot, "NoInstanceFound {{ {} }} at {step:?}", pp(constraint)).unwrap(); - } - AmbiguousConstraint { constraint } => { - writeln!(snapshot, "AmbiguousConstraint {{ {} }} at {step:?}", pp(constraint)) - .unwrap(); - } - InstanceMemberTypeMismatch { expected, actual } => { - writeln!( - snapshot, - "InstanceMemberTypeMismatch {{ expected: {}, actual: {} }} at {step:?}", - pp(expected), - pp(actual) - ) - .unwrap(); - } - CustomWarning { message_id } => { - let message = 
&checked.custom_messages[message_id as usize]; - writeln!(snapshot, "CustomWarning {{ .. }} at {step:?}").unwrap(); - for line in message.lines() { - writeln!(snapshot, " {line}").unwrap(); - } - } - CustomFailure { message_id } => { - let message = &checked.custom_messages[message_id as usize]; - writeln!(snapshot, "CustomFailure {{ .. }} at {step:?}").unwrap(); - for line in message.lines() { - writeln!(snapshot, " {line}").unwrap(); - } - } - _ => { - writeln!(snapshot, "{:?} at {step:?}", error.kind).unwrap(); - } - } + all_diagnostics.extend(error.to_diagnostics(&context)); + } + + if !all_diagnostics.is_empty() { + writeln!(snapshot, "\nDiagnostics").unwrap(); + snapshot.push_str(&format_rustc(&all_diagnostics, &content)); } snapshot @@ -481,16 +501,18 @@ fn format_instance_head( head } -fn format_forall_prefix(engine: &QueryEngine, kind_variables: &[checking::core::TypeId]) -> String { +fn format_forall_prefix( + engine: &QueryEngine, + kind_variables: &[(checking::core::Name, checking::core::TypeId)], +) -> String { if kind_variables.is_empty() { return String::new(); } let binders: Vec<_> = kind_variables .iter() - .enumerate() - .map(|(i, kind)| { + .map(|(name, kind)| { let kind_str = pretty::print_global(engine, *kind); - format!("(&{i} :: {kind_str})") + format!("({} :: {kind_str})", name.text) }) .collect(); format!("forall {}. 
", binders.join(" ")) diff --git a/tests-integration/src/lib.rs b/tests-integration/src/lib.rs index 0cb79b639..795c26319 100644 --- a/tests-integration/src/lib.rs +++ b/tests-integration/src/lib.rs @@ -12,6 +12,7 @@ use url::Url; fn load_file(engine: &mut QueryEngine, files: &mut Files, path: &Path) { let url = Url::from_file_path(path).unwrap(); let file = fs::read_to_string(path).unwrap(); + let file = file.replace("\r\n", "\n"); let uri = url.to_string(); let id = files.insert(uri, file); diff --git a/tests-integration/src/trace.rs b/tests-integration/src/trace.rs index 584fca378..6118c8691 100644 --- a/tests-integration/src/trace.rs +++ b/tests-integration/src/trace.rs @@ -51,9 +51,8 @@ where .with_span_events(FmtSpan::CLOSE) .json(); - let subscriber = tracing_subscriber::registry() - .with(formatter) - .with(EnvFilter::default().add_directive(level.into())); + let filter = EnvFilter::default().add_directive(level.into()); + let subscriber = tracing_subscriber::registry().with(formatter).with(filter); let result = tracing::subscriber::with_default(subscriber, f); file_writer.0.lock().unwrap().flush().expect("failed to flush trace file"); diff --git a/tests-integration/tests/checking.rs b/tests-integration/tests/checking.rs index 66882f226..a962ebdf4 100644 --- a/tests-integration/tests/checking.rs +++ b/tests-integration/tests/checking.rs @@ -7,7 +7,9 @@ use std::num::NonZeroU32; use analyzer::{QueryEngine, prim}; use checking::algorithm::state::{CheckContext, CheckState, UnificationState}; use checking::algorithm::{quantify, unification}; -use checking::core::{ForallBinder, RowField, RowType, Type, TypeId, Variable, debruijn, pretty}; +use checking::core::{ + ForallBinder, Name, RowField, RowType, Type, TypeId, Variable, debruijn, pretty, +}; use files::{FileId, Files}; use itertools::Itertools; use lowering::TypeVariableBindingId; @@ -19,21 +21,21 @@ struct ContextState<'r> { impl<'a> ContextState<'a> { fn new(engine: &'a QueryEngine, id: FileId) -> 
ContextState<'a> { - let mut state = CheckState::default(); + let mut state = CheckState::new(id); let context = CheckContext::new(engine, &mut state, id).unwrap(); ContextState { state, context } } } trait CheckStateExt { - fn bound_variable(&mut self, index: u32, kind: TypeId) -> TypeId; + fn bound_variable(&mut self, name: Name, kind: TypeId) -> TypeId; fn function(&mut self, argument: TypeId, result: TypeId) -> TypeId; } impl CheckStateExt for CheckState { - fn bound_variable(&mut self, index: u32, kind: TypeId) -> TypeId { - let var = Variable::Bound(debruijn::Level(index), kind); + fn bound_variable(&mut self, name: Name, kind: TypeId) -> TypeId { + let var = Variable::Bound(name, kind); self.storage.intern(Type::Variable(var)) } @@ -63,8 +65,18 @@ fn test_solve_simple() { let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); // [a :: Int, b :: String] - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.int); - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.string); + let name_a = state.fresh_name_str("a"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), + context.prim.int, + name_a, + ); + let name_b = state.fresh_name_str("b"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.string, + name_b, + ); let unification = state.fresh_unification_type(context); let Type::Unification(unification_id) = state.storage[unification] else { @@ -91,16 +103,26 @@ fn test_solve_bound() { let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); // [a :: Int, b :: String] - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.int); - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.string); + let name_a = state.fresh_name_str("a"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), 
+ context.prim.int, + name_a.clone(), + ); + let name_b = state.fresh_name_str("b"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.string, + name_b.clone(), + ); let unification = state.fresh_unification_type(context); let Type::Unification(unification_id) = state.storage[unification] else { unreachable!("invariant violated"); }; - let bound_b = state.bound_variable(0, context.prim.int); - let bound_a = state.bound_variable(1, context.prim.string); + let bound_b = state.bound_variable(name_a, context.prim.int); + let bound_a = state.bound_variable(name_b, context.prim.string); let b_to_a = state.function(bound_b, bound_a); unification::solve(state, context, unification_id, b_to_a).unwrap(); @@ -118,31 +140,37 @@ fn test_solve_bound() { } #[test] -fn test_solve_invalid() { +fn test_solve_escaping_variable() { let (engine, id) = empty_engine(); let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); // [a :: Int] - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.int); + let name_a = state.fresh_name_str("a"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), + context.prim.int, + name_a, + ); + // ?u created at depth C = 1 let unification = state.fresh_unification_type(context); let Type::Unification(unification_id) = state.storage[unification] else { unreachable!("invariant violated"); }; - // [a :: Int, b :: String] - let level = state - .type_scope - .bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.string); - - let bound_b = state.bound_variable(0, context.prim.int); - let bound_a = state.bound_variable(1, context.prim.string); - let b_to_a = state.function(bound_b, bound_a); + // [a :: Int, b :: String] S = 2 + let name_b = state.fresh_name_str("b"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.string, + name_b.clone(), + ); - state.type_scope.unbind(level); 
+ // b is at level 1 which is C(1) <= level(1) < S(2) + let bound_b = state.bound_variable(name_b, context.prim.string); - let solve_result = unification::solve(state, context, unification_id, b_to_a).unwrap(); - assert!(solve_result.is_none()); + let solve_result = unification::solve(state, context, unification_id, bound_b).unwrap(); + assert!(solve_result.is_none(), "should reject: b escapes the scope where ?u was created"); } #[test] @@ -151,7 +179,12 @@ fn test_solve_promotion() { let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); // [a :: Int] - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.int); + let name_a = state.fresh_name_str("a"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), + context.prim.int, + name_a, + ); let unification_a = state.fresh_unification_type(context); let Type::Unification(unification_id) = state.storage[unification_a] else { @@ -159,7 +192,12 @@ fn test_solve_promotion() { }; // [a :: Int, b :: String] - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.string); + let name_b = state.fresh_name_str("b"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.string, + name_b, + ); let unification_a_b = state.fresh_unification_type(context); unification::solve(state, context, unification_id, unification_a_b).unwrap(); @@ -169,9 +207,9 @@ fn test_solve_promotion() { let entries: Vec<_> = state.unification.iter().copied().collect(); for (index, entry) in entries.iter().enumerate() { let UnificationState::Solved(solution) = entry.state else { continue }; - let domain = entry.domain; + let depth = entry.depth; let solution = pretty::print_local(state, context, solution); - writeln!(snapshot, "?{index}[{domain}] := {solution}").unwrap(); + writeln!(snapshot, "?{index}[{depth}] := {solution}").unwrap(); } insta::assert_snapshot!(snapshot); @@ -217,10 +255,20 @@ fn 
test_quantify_ordering() { let (engine, id) = empty_engine(); let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.t); + let name_a = state.fresh_name_str("_"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), + context.prim.t, + name_a, + ); let unification_a = state.fresh_unification_type(context); - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.t); + let name_b = state.fresh_name_str("_"); + state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.t, + name_b, + ); let unification_b = state.fresh_unification_type(context); let function = state.storage.intern(Type::Function(unification_b, unification_a)); @@ -279,16 +327,24 @@ fn test_quantify_multiple_scoped() { fn make_forall_a_to_a(context: &CheckContext, state: &mut CheckState) -> TypeId { let fake_id = TypeVariableBindingId::new(FAKE_NONZERO_1); + let unbind_level = debruijn::Level(state.type_scope.size().0); - let level = state.type_scope.bind_forall(fake_id, context.prim.t); + let fresh = state.fresh_name_str("a"); + let name = state.type_scope.bind_forall(fake_id, context.prim.t, fresh); - let bound_a = state.bound_variable(0, context.prim.t); + let bound_a = state.bound_variable(name.clone(), context.prim.t); let a_to_a = state.function(bound_a, bound_a); - let binder = ForallBinder { visible: false, name: "a".into(), level, kind: context.prim.t }; + let binder = ForallBinder { + visible: false, + implicit: false, + text: "a".into(), + variable: name, + kind: context.prim.t, + }; let forall_a_to_a = state.storage.intern(Type::Forall(binder, a_to_a)); - state.type_scope.unbind(level); + state.type_scope.unbind(unbind_level); forall_a_to_a } @@ -329,7 +385,7 @@ fn test_subtype_mono_of_poly_fail() { // Create ∀a. 
a -> a let forall_a_to_a = make_forall_a_to_a(context, state); - // (Int -> Int) <: ∀a. (a -> a) should fail (RHS forall gets skolemized) + // (Int -> Int) <: ∀a. (a -> a) should fail (RHS forall gets skolemised) let int_to_int = state.function(context.prim.int, context.prim.int); let result = unification::subtype(state, context, int_to_int, forall_a_to_a).unwrap(); assert!(!result, "(Int -> Int) <: ∀a. (a -> a) should fail"); @@ -341,27 +397,47 @@ fn test_subtype_nested_forall() { let ContextState { ref context, ref mut state } = ContextState::new(&engine, id); // Create ∀a. ∀b. (a -> b -> a) - let level_a = - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_1), context.prim.t); - let level_b = - state.type_scope.bind_forall(TypeVariableBindingId::new(FAKE_NONZERO_2), context.prim.t); + let unbind_level = debruijn::Level(state.type_scope.size().0); + let fresh_a = state.fresh_name_str("a"); + let name_a = state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_1), + context.prim.t, + fresh_a, + ); + let fresh_b = state.fresh_name_str("b"); + let name_b = state.type_scope.bind_forall( + TypeVariableBindingId::new(FAKE_NONZERO_2), + context.prim.t, + fresh_b, + ); - let bound_a = state.bound_variable(1, context.prim.t); - let bound_b = state.bound_variable(0, context.prim.t); + let bound_a = state.bound_variable(name_a.clone(), context.prim.t); + let bound_b = state.bound_variable(name_b.clone(), context.prim.t); let b_to_a = state.function(bound_b, bound_a); let a_to_b_to_a = state.function(bound_a, b_to_a); let forall_b = state.storage.intern(Type::Forall( - ForallBinder { visible: false, name: "b".into(), level: level_b, kind: context.prim.t }, + ForallBinder { + visible: false, + implicit: false, + text: "b".into(), + variable: name_b, + kind: context.prim.t, + }, a_to_b_to_a, )); - state.type_scope.unbind(level_b); let forall_a_b = state.storage.intern(Type::Forall( - ForallBinder { visible: false, name: "a".into(), level: 
level_a, kind: context.prim.t }, + ForallBinder { + visible: false, + implicit: false, + text: "a".into(), + variable: name_a, + kind: context.prim.t, + }, forall_b, )); - state.type_scope.unbind(level_a); + state.type_scope.unbind(unbind_level); // ∀a. ∀b. (a -> b -> a) <: (Int -> String -> Int) should pass (LHS foralls get instantiated) let string_to_int = state.function(context.prim.string, context.prim.int); diff --git a/tests-integration/tests/checking/generated.rs b/tests-integration/tests/checking/generated.rs index 3c94e8ec3..c21be9b40 100644 --- a/tests-integration/tests/checking/generated.rs +++ b/tests-integration/tests/checking/generated.rs @@ -455,3 +455,237 @@ fn run_test(folder: &str, file: &str) { #[rustfmt::skip] #[test] fn test_213_row_constraint_multiple_main() { run_test("213_row_constraint_multiple", "Main"); } #[rustfmt::skip] #[test] fn test_214_row_nub_left_bias_main() { run_test("214_row_nub_left_bias", "Main"); } + +#[rustfmt::skip] #[test] fn test_215_do_bind_error_main() { run_test("215_do_bind_error", "Main"); } + +#[rustfmt::skip] #[test] fn test_216_ado_bind_error_main() { run_test("216_ado_bind_error", "Main"); } + +#[rustfmt::skip] #[test] fn test_217_do_monad_error_main() { run_test("217_do_monad_error", "Main"); } + +#[rustfmt::skip] #[test] fn test_218_ado_monad_error_main() { run_test("218_ado_monad_error", "Main"); } + +#[rustfmt::skip] #[test] fn test_219_do_mixed_monad_error_main() { run_test("219_do_mixed_monad_error", "Main"); } + +#[rustfmt::skip] #[test] fn test_220_do_let_premature_solve_main() { run_test("220_do_let_premature_solve", "Main"); } + +#[rustfmt::skip] #[test] fn test_221_do_let_annotation_solve_main() { run_test("221_do_let_annotation_solve", "Main"); } + +#[rustfmt::skip] #[test] fn test_222_ado_let_premature_solve_main() { run_test("222_ado_let_premature_solve", "Main"); } + +#[rustfmt::skip] #[test] fn test_223_ado_let_annotation_solve_main() { run_test("223_ado_let_annotation_solve", "Main"); } + 
+#[rustfmt::skip] #[test] fn test_224_record_shrinking_main() { run_test("224_record_shrinking", "Main"); } + +#[rustfmt::skip] #[test] fn test_225_record_binder_additional_property_main() { run_test("225_record_binder_additional_property", "Main"); } + +#[rustfmt::skip] #[test] fn test_226_record_binder_additional_property_nested_main() { run_test("226_record_binder_additional_property_nested", "Main"); } + +#[rustfmt::skip] #[test] fn test_227_record_expression_exact_match_main() { run_test("227_record_expression_exact_match", "Main"); } + +#[rustfmt::skip] #[test] fn test_228_record_expression_missing_field_main() { run_test("228_record_expression_missing_field", "Main"); } + +#[rustfmt::skip] #[test] fn test_229_record_expression_additional_field_main() { run_test("229_record_expression_additional_field", "Main"); } + +#[rustfmt::skip] #[test] fn test_230_record_expression_missing_and_additional_main() { run_test("230_record_expression_missing_and_additional", "Main"); } + +#[rustfmt::skip] #[test] fn test_231_record_expression_nested_additional_main() { run_test("231_record_expression_nested_additional", "Main"); } + +#[rustfmt::skip] #[test] fn test_232_instance_head_nil_kind_application_main() { run_test("232_instance_head_nil_kind_application", "Main"); } + +#[rustfmt::skip] #[test] fn test_233_record_instance_matching_main() { run_test("233_record_instance_matching", "Main"); } + +#[rustfmt::skip] #[test] fn test_234_record_instance_open_row_main() { run_test("234_record_instance_open_row", "Main"); } + +#[rustfmt::skip] #[test] fn test_235_instance_head_invalid_row_main() { run_test("235_instance_head_invalid_row", "Main"); } + +#[rustfmt::skip] #[test] fn test_236_category_function_instance_main() { run_test("236_category_function_instance", "Main"); } + +#[rustfmt::skip] #[test] fn test_237_bound_variable_unification_main() { run_test("237_bound_variable_unification", "Main"); } + +#[rustfmt::skip] #[test] fn test_238_function_application_subtype_main() 
{ run_test("238_function_application_subtype", "Main"); } + +#[rustfmt::skip] #[test] fn test_245_do_notation_panic_main() { run_test("245_do_notation_panic", "Main"); } + +#[rustfmt::skip] #[test] fn test_246_do_bind_only_main() { run_test("246_do_bind_only", "Main"); } + +#[rustfmt::skip] #[test] fn test_247_do_discard_not_in_scope_main() { run_test("247_do_discard_not_in_scope", "Main"); } + +#[rustfmt::skip] #[test] fn test_248_do_empty_block_main() { run_test("248_do_empty_block", "Main"); } + +#[rustfmt::skip] #[test] fn test_249_do_final_bind_main() { run_test("249_do_final_bind", "Main"); } + +#[rustfmt::skip] #[test] fn test_250_do_final_let_main() { run_test("250_do_final_let", "Main"); } + +#[rustfmt::skip] #[test] fn test_251_lookup_implicit_panic_main() { run_test("251_lookup_implicit_panic", "Main"); } + +#[rustfmt::skip] #[test] fn test_252_invalid_type_application_basic_main() { run_test("252_invalid_type_application_basic", "Main"); } + +#[rustfmt::skip] #[test] fn test_253_invalid_type_application_too_many_main() { run_test("253_invalid_type_application_too_many", "Main"); } + +#[rustfmt::skip] #[test] fn test_254_higher_rank_elaboration_main() { run_test("254_higher_rank_elaboration", "Main"); } + +#[rustfmt::skip] #[test] fn test_255_exhaustive_basic_main() { run_test("255_exhaustive_basic", "Main"); } + +#[rustfmt::skip] #[test] fn test_256_exhaustive_multiple_main() { run_test("256_exhaustive_multiple", "Main"); } + +#[rustfmt::skip] #[test] fn test_257_exhaustive_tuple_main() { run_test("257_exhaustive_tuple", "Main"); } + +#[rustfmt::skip] #[test] fn test_258_redundant_patterns_main() { run_test("258_redundant_patterns", "Main"); } + +#[rustfmt::skip] #[test] fn test_259_exhaustive_boolean_partial_main() { run_test("259_exhaustive_boolean_partial", "Main"); } + +#[rustfmt::skip] #[test] fn test_260_exhaustive_integer_partial_main() { run_test("260_exhaustive_integer_partial", "Main"); } + +#[rustfmt::skip] #[test] fn 
test_261_exhaustive_number_partial_main() { run_test("261_exhaustive_number_partial", "Main"); } + +#[rustfmt::skip] #[test] fn test_262_exhaustive_char_partial_main() { run_test("262_exhaustive_char_partial", "Main"); } + +#[rustfmt::skip] #[test] fn test_263_exhaustive_string_partial_main() { run_test("263_exhaustive_string_partial", "Main"); } + +#[rustfmt::skip] #[test] fn test_264_equation_exhaustive_basic_main() { run_test("264_equation_exhaustive_basic", "Main"); } + +#[rustfmt::skip] #[test] fn test_265_equation_redundant_main() { run_test("265_equation_redundant", "Main"); } + +#[rustfmt::skip] #[test] fn test_266_equation_guarded_main() { run_test("266_equation_guarded", "Main"); } + +#[rustfmt::skip] #[test] fn test_267_equation_multiple_arguments_main() { run_test("267_equation_multiple_arguments", "Main"); } + +#[rustfmt::skip] #[test] fn test_268_let_equation_exhaustive_main() { run_test("268_let_equation_exhaustive", "Main"); } + +#[rustfmt::skip] #[test] fn test_269_instance_equation_exhaustive_main() { run_test("269_instance_equation_exhaustive", "Main"); } + +#[rustfmt::skip] #[test] fn test_270_record_constructor_exhaustive_main() { run_test("270_record_constructor_exhaustive", "Main"); } + +#[rustfmt::skip] #[test] fn test_271_array_exhaustive_main() { run_test("271_array_exhaustive", "Main"); } + +#[rustfmt::skip] #[test] fn test_272_array_nested_constructor_main() { run_test("272_array_nested_constructor", "Main"); } + +#[rustfmt::skip] #[test] fn test_273_class_member_instantiation_main() { run_test("273_class_member_instantiation", "Main"); } + +#[rustfmt::skip] #[test] fn test_274_givens_retained_main() { run_test("274_givens_retained", "Main"); } + +#[rustfmt::skip] #[test] fn test_275_givens_scoped_main() { run_test("275_givens_scoped", "Main"); } + +#[rustfmt::skip] #[test] fn test_276_where_clause_outer_scope_main() { run_test("276_where_clause_outer_scope", "Main"); } + +#[rustfmt::skip] #[test] fn test_277_keyword_as_variable_main() { 
run_test("277_keyword_as_variable", "Main"); } + +#[rustfmt::skip] #[test] fn test_278_partial_case_nested_main() { run_test("278_partial_case_nested", "Main"); } + +#[rustfmt::skip] #[test] fn test_279_partial_let_where_main() { run_test("279_partial_let_where", "Main"); } + +#[rustfmt::skip] #[test] fn test_280_partial_case_variable_main() { run_test("280_partial_case_variable", "Main"); } + +#[rustfmt::skip] #[test] fn test_281_sectioned_constraint_generation_main() { run_test("281_sectioned_constraint_generation", "Main"); } + +#[rustfmt::skip] #[test] fn test_282_higher_rank_unification_main() { run_test("282_higher_rank_unification", "Main"); } + +#[rustfmt::skip] #[test] fn test_283_type_operator_synonym_expansion_main() { run_test("283_type_operator_synonym_expansion", "Main"); } + +#[rustfmt::skip] #[test] fn test_284_type_operator_synonym_with_binders_main() { run_test("284_type_operator_synonym_with_binders", "Main"); } + +#[rustfmt::skip] #[test] fn test_285_derive_newtype_higher_kinded_main() { run_test("285_derive_newtype_higher_kinded", "Main"); } + +#[rustfmt::skip] #[test] fn test_286_invalid_vector_newtype_derive_main() { run_test("286_invalid_vector_newtype_derive", "Main"); } + +#[rustfmt::skip] #[test] fn test_287_lambda_partial_main() { run_test("287_lambda_partial", "Main"); } + +#[rustfmt::skip] #[test] fn test_288_unsafe_partial_application_main() { run_test("288_unsafe_partial_application", "Main"); } + +#[rustfmt::skip] #[test] fn test_289_custom_constraint_discharge_main() { run_test("289_custom_constraint_discharge", "Main"); } + +#[rustfmt::skip] #[test] fn test_290_apply_constraint_discharge_main() { run_test("290_apply_constraint_discharge", "Main"); } + +#[rustfmt::skip] #[test] fn test_291_compose_constraint_discharge_main() { run_test("291_compose_constraint_discharge", "Main"); } + +#[rustfmt::skip] #[test] fn test_292_higher_rank_constraint_discharge_main() { run_test("292_higher_rank_constraint_discharge", "Main"); } + 
+#[rustfmt::skip] #[test] fn test_293_exhaustive_guards_otherwise_true_main() { run_test("293_exhaustive_guards_otherwise_true", "Main"); } + +#[rustfmt::skip] #[test] fn test_294_exhaustive_operator_constructor_main() { run_test("294_exhaustive_operator_constructor", "Main"); } + +#[rustfmt::skip] #[test] fn test_295_superclass_entailment_where_binding_main() { run_test("295_superclass_entailment_where_binding", "Main"); } + +#[rustfmt::skip] #[test] fn test_296_type_operator_synonym_in_application_main() { run_test("296_type_operator_synonym_in_application", "Main"); } + +#[rustfmt::skip] #[test] fn test_297_applied_function_type_decomposition_main() { run_test("297_applied_function_type_decomposition", "Main"); } + +#[rustfmt::skip] #[test] fn test_298_operator_alias_class_method_main() { run_test("298_operator_alias_class_method", "Main"); } + +#[rustfmt::skip] #[test] fn test_299_derive_mutual_visibility_same_module_main() { run_test("299_derive_mutual_visibility_same_module", "Main"); } + +#[rustfmt::skip] #[test] fn test_300_instance_shift_variables_main() { run_test("300_instance_shift_variables", "Main"); } + +#[rustfmt::skip] #[test] fn test_301_coercible_symmetry_main() { run_test("301_coercible_symmetry", "Main"); } + +#[rustfmt::skip] #[test] fn test_302_coercible_function_decomposition_main() { run_test("302_coercible_function_decomposition", "Main"); } + +#[rustfmt::skip] #[test] fn test_303_instance_given_constraint_main() { run_test("303_instance_given_constraint", "Main"); } + +#[rustfmt::skip] #[test] fn test_305_type_operator_unification_main() { run_test("305_type_operator_unification", "Main"); } + +#[rustfmt::skip] #[test] fn test_306_kind_application_instance_matching_main() { run_test("306_kind_application_instance_matching", "Main"); } + +#[rustfmt::skip] #[test] fn test_307_where_let_interaction_main() { run_test("307_where_let_interaction", "Main"); } + +#[rustfmt::skip] #[test] fn test_308_let_constraint_scoping_main() { 
run_test("308_let_constraint_scoping", "Main"); } + +#[rustfmt::skip] #[test] fn test_309_synonym_function_result_kind_main() { run_test("309_synonym_function_result_kind", "Main"); } + +#[rustfmt::skip] #[test] fn test_310_synonym_forall_expansion_main() { run_test("310_synonym_forall_expansion", "Main"); } + +#[rustfmt::skip] #[test] fn test_311_prim_qualified_main() { run_test("311_prim_qualified", "Main"); } + +#[rustfmt::skip] #[test] fn test_312_prim_qualified_override_main() { run_test("312_prim_qualified_override", "Main"); } + +#[rustfmt::skip] #[test] fn test_313_guarded_constraint_propagation_main() { run_test("313_guarded_constraint_propagation", "Main"); } + +#[rustfmt::skip] #[test] fn test_314_derive_newtype_function_main() { run_test("314_derive_newtype_function", "Main"); } + +#[rustfmt::skip] #[test] fn test_315_operator_chain_mixed_fixity_main() { run_test("315_operator_chain_mixed_fixity", "Main"); } + +#[rustfmt::skip] #[test] fn test_316_synonym_derive_main() { run_test("316_synonym_derive", "Main"); } + +#[rustfmt::skip] #[test] fn test_317_higher_rank_fields_main() { run_test("317_higher_rank_fields", "Main"); } + +#[rustfmt::skip] #[test] fn test_318_higher_rank_newtype_main() { run_test("318_higher_rank_newtype", "Main"); } + +#[rustfmt::skip] #[test] fn test_319_higher_rank_record_accessor_main() { run_test("319_higher_rank_record_accessor", "Main"); } + +#[rustfmt::skip] #[test] fn test_320_higher_rank_record_binder_main() { run_test("320_higher_rank_record_binder", "Main"); } + +#[rustfmt::skip] #[test] fn test_321_higher_rank_record_literal_main() { run_test("321_higher_rank_record_literal", "Main"); } + +#[rustfmt::skip] #[test] fn test_322_phantom_kind_inference_main() { run_test("322_phantom_kind_inference", "Main"); } + +#[rustfmt::skip] #[test] fn test_323_operator_deferred_generalise_main() { run_test("323_operator_deferred_generalise", "Main"); } + +#[rustfmt::skip] #[test] fn test_324_foreign_kind_polymorphism_main() { 
run_test("324_foreign_kind_polymorphism", "Main"); } + +#[rustfmt::skip] #[test] fn test_325_type_kind_deferred_generalise_main() { run_test("325_type_kind_deferred_generalise", "Main"); } + +#[rustfmt::skip] #[test] fn test_326_let_retain_polymorphism_main() { run_test("326_let_retain_polymorphism", "Main"); } + +#[rustfmt::skip] #[test] fn test_327_qualified_do_main() { run_test("327_qualified_do", "Main"); } + +#[rustfmt::skip] #[test] fn test_328_binder_instantiation_main() { run_test("328_binder_instantiation", "Main"); } + +#[rustfmt::skip] #[test] fn test_329_pattern_nothing_first_main() { run_test("329_pattern_nothing_first", "Main"); } + +#[rustfmt::skip] #[test] fn test_330_record_subset_labels_main() { run_test("330_record_subset_labels", "Main"); } + +#[rustfmt::skip] #[test] fn test_331_record_progressive_labels_main() { run_test("331_record_progressive_labels", "Main"); } + +#[rustfmt::skip] #[test] fn test_332_record_equation_labels_main() { run_test("332_record_equation_labels", "Main"); } + +#[rustfmt::skip] #[test] fn test_333_row_open_union_main() { run_test("333_row_open_union", "Main"); } + +#[rustfmt::skip] #[test] fn test_334_row_open_cons_main() { run_test("334_row_open_cons", "Main"); } + +#[rustfmt::skip] #[test] fn test_335_row_open_lacks_main() { run_test("335_row_open_lacks", "Main"); } + +#[rustfmt::skip] #[test] fn test_336_row_open_record_main() { run_test("336_row_open_record", "Main"); } + +#[rustfmt::skip] #[test] fn test_337_void_data_main() { run_test("337_void_data", "Main"); } + +#[rustfmt::skip] #[test] fn test_338_module_export_alias_overlap_main() { run_test("338_module_export_alias_overlap", "Main"); } diff --git a/tests-integration/tests/snapshots/checking__constrained_invalid_constraint.snap b/tests-integration/tests/snapshots/checking__constrained_invalid_constraint.snap index 61e4b9493..c65154b69 100644 --- a/tests-integration/tests/snapshots/checking__constrained_invalid_constraint.snap +++ 
b/tests-integration/tests/snapshots/checking__constrained_invalid_constraint.snap @@ -5,8 +5,8 @@ expression: checked.errors [ CheckError { kind: CannotUnify { - t1: Id(1), - t2: Id(3), + t1: Id(1), + t2: Id(2), }, step: [ TypeDeclaration( diff --git a/tests-integration/tests/snapshots/checking__constrained_invalid_type.snap b/tests-integration/tests/snapshots/checking__constrained_invalid_type.snap index 0d722e5fb..f3caf3c9f 100644 --- a/tests-integration/tests/snapshots/checking__constrained_invalid_type.snap +++ b/tests-integration/tests/snapshots/checking__constrained_invalid_type.snap @@ -5,8 +5,8 @@ expression: checked.errors [ CheckError { kind: CannotUnify { - t1: Id(16), - t2: Id(1), + t1: Id(1), + t2: Id(2), }, step: [ TypeDeclaration( diff --git a/tests-integration/tests/snapshots/checking__invalid_type_operator_nullary.snap b/tests-integration/tests/snapshots/checking__invalid_type_operator_nullary.snap index b0baa0880..b9cd089af 100644 --- a/tests-integration/tests/snapshots/checking__invalid_type_operator_nullary.snap +++ b/tests-integration/tests/snapshots/checking__invalid_type_operator_nullary.snap @@ -5,7 +5,7 @@ expression: checked.errors [ CheckError { kind: InvalidTypeOperator { - id: Id(1), + kind_message: Id(1), }, step: [], }, diff --git a/tests-integration/tests/snapshots/checking__invalid_type_operator_ternary.snap b/tests-integration/tests/snapshots/checking__invalid_type_operator_ternary.snap index 9ac2b28c8..b9cd089af 100644 --- a/tests-integration/tests/snapshots/checking__invalid_type_operator_ternary.snap +++ b/tests-integration/tests/snapshots/checking__invalid_type_operator_ternary.snap @@ -5,7 +5,7 @@ expression: checked.errors [ CheckError { kind: InvalidTypeOperator { - id: Id(48), + kind_message: Id(1), }, step: [], }, diff --git a/tests-integration/tests/snapshots/checking__invalid_type_operator_unary.snap b/tests-integration/tests/snapshots/checking__invalid_type_operator_unary.snap index dc7c6071f..b9cd089af 100644 --- 
a/tests-integration/tests/snapshots/checking__invalid_type_operator_unary.snap +++ b/tests-integration/tests/snapshots/checking__invalid_type_operator_unary.snap @@ -5,7 +5,7 @@ expression: checked.errors [ CheckError { kind: InvalidTypeOperator { - id: Id(36), + kind_message: Id(1), }, step: [], }, diff --git a/tests-integration/tests/snapshots/checking__partial_synonym.snap b/tests-integration/tests/snapshots/checking__partial_synonym.snap index f7c489b43..3c32f19c7 100644 --- a/tests-integration/tests/snapshots/checking__partial_synonym.snap +++ b/tests-integration/tests/snapshots/checking__partial_synonym.snap @@ -18,8 +18,8 @@ expression: checked.errors }, CheckError { kind: CannotUnify { - t1: Id(1), - t2: Id(14), + t1: Id(1), + t2: Id(2), }, step: [ TypeDeclaration( diff --git a/tests-integration/tests/snapshots/checking__recursive_synonym_expansion_errors.snap b/tests-integration/tests/snapshots/checking__recursive_synonym_expansion_errors.snap index 725a637c3..a28dda92b 100644 --- a/tests-integration/tests/snapshots/checking__recursive_synonym_expansion_errors.snap +++ b/tests-integration/tests/snapshots/checking__recursive_synonym_expansion_errors.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking.rs +assertion_line: 1050 expression: checked.errors --- [ @@ -12,6 +13,12 @@ expression: checked.errors TermDeclaration( Idx::(0), ), + CheckingKind( + AstId(12), + ), + CheckingKind( + AstId(13), + ), ], }, CheckError { @@ -23,16 +30,28 @@ expression: checked.errors TermDeclaration( Idx::(0), ), + CheckingKind( + AstId(12), + ), + CheckingKind( + AstId(14), + ), ], }, CheckError { kind: RecursiveSynonymExpansion { file_id: Idx::(9), - item_id: Idx::(0), + item_id: Idx::(2), }, step: [ TermDeclaration( - Idx::(0), + Idx::(1), + ), + CheckingKind( + AstId(22), + ), + CheckingKind( + AstId(23), ), ], }, @@ -45,6 +64,40 @@ expression: checked.errors TermDeclaration( Idx::(1), ), + CheckingKind( + AstId(22), + ), + CheckingKind( + AstId(24), + ), + ], 
+ }, + CheckError { + kind: RecursiveSynonymExpansion { + file_id: Idx::(9), + item_id: Idx::(0), + }, + step: [ + TermDeclaration( + Idx::(0), + ), + CheckingExpression( + AstId(20), + ), + ], + }, + CheckError { + kind: RecursiveSynonymExpansion { + file_id: Idx::(9), + item_id: Idx::(0), + }, + step: [ + TermDeclaration( + Idx::(0), + ), + CheckingExpression( + AstId(20), + ), ], }, CheckError { @@ -56,6 +109,9 @@ expression: checked.errors TermDeclaration( Idx::(1), ), + CheckingExpression( + AstId(30), + ), ], }, CheckError { @@ -67,6 +123,9 @@ expression: checked.errors TermDeclaration( Idx::(1), ), + CheckingExpression( + AstId(30), + ), ], }, ] diff --git a/tests-integration/tests/snapshots/checking__solve_bound.snap b/tests-integration/tests/snapshots/checking__solve_bound.snap index 8f4e11d65..c01301a9f 100644 --- a/tests-integration/tests/snapshots/checking__solve_bound.snap +++ b/tests-integration/tests/snapshots/checking__solve_bound.snap @@ -1,5 +1,6 @@ --- source: tests-integration/tests/checking.rs +assertion_line: 138 expression: snapshot --- -(&0 :: Int) -> (&1 :: String) :: Type +(a :: Int) -> (b :: String) :: Type diff --git a/tests-integration/tests/snapshots/checking__unification_fail.snap b/tests-integration/tests/snapshots/checking__unification_fail.snap index 41ed19023..96ab08d04 100644 --- a/tests-integration/tests/snapshots/checking__unification_fail.snap +++ b/tests-integration/tests/snapshots/checking__unification_fail.snap @@ -5,8 +5,8 @@ expression: checked.errors [ CheckError { kind: CannotUnify { - t1: Id(8), - t2: Id(1), + t1: Id(1), + t2: Id(2), }, step: [ TypeDeclaration( diff --git a/tests-integration/tests/snapshots/lowering__ado_fn_not_in_scope.snap b/tests-integration/tests/snapshots/lowering__ado_fn_not_in_scope.snap index 521cc22e0..ac3aa4050 100644 --- a/tests-integration/tests/snapshots/lowering__ado_fn_not_in_scope.snap +++ b/tests-integration/tests/snapshots/lowering__ado_fn_not_in_scope.snap @@ -1,5 +1,6 @@ --- source: 
tests-integration/tests/lowering.rs +assertion_line: 186 expression: lowered.errors --- [ @@ -9,18 +10,6 @@ expression: lowered.errors id: AstId(8), }, ), - NotInScope( - AdoFn { - kind: Apply, - id: AstId(8), - }, - ), - NotInScope( - AdoFn { - kind: Pure, - id: AstId(8), - }, - ), NotInScope( ExprVariable { id: AstId(13), diff --git a/tests-integration/tests/snapshots/lowering__do_fn_not_in_scope.snap b/tests-integration/tests/snapshots/lowering__do_fn_not_in_scope.snap index 9ac58c737..48165d7b4 100644 --- a/tests-integration/tests/snapshots/lowering__do_fn_not_in_scope.snap +++ b/tests-integration/tests/snapshots/lowering__do_fn_not_in_scope.snap @@ -1,20 +1,9 @@ --- source: tests-integration/tests/lowering.rs +assertion_line: 161 expression: lowered.errors --- [ - NotInScope( - DoFn { - kind: Bind, - id: AstId(8), - }, - ), - NotInScope( - DoFn { - kind: Discard, - id: AstId(8), - }, - ), NotInScope( ExprVariable { id: AstId(12), diff --git a/vscode/package.json b/vscode/package.json index 80512b7e8..0d70af15e 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -46,5 +46,17 @@ }, "extensionDependencies": [ "nwolverson.language-purescript" - ] + ], + "contributes": { + "configuration": { + "title": "PureScript Analyzer", + "properties": { + "purescriptAnalyzer.sourceCommand": { + "type": "string", + "default": null, + "description": "Command to use to get source files. Setting this also disables the spago.lock integration." 
+ } + } + } + } } diff --git a/vscode/src/extension.ts b/vscode/src/extension.ts index 1b152059d..aa9f19751 100644 --- a/vscode/src/extension.ts +++ b/vscode/src/extension.ts @@ -1,4 +1,4 @@ -import { ExtensionContext } from "vscode"; +import { ExtensionContext, workspace } from "vscode"; import { LanguageClient, @@ -10,8 +10,17 @@ import { let client: LanguageClient; export function activate(context: ExtensionContext) { + const config = workspace.getConfiguration("purescriptAnalyzer"); + const sourceCommand = config.get("sourceCommand"); + + const args: string[] = []; + if (sourceCommand) { + args.push("--source-command", sourceCommand); + } + const serverOptions: ServerOptions = { command: "purescript-analyzer", + args, transport: TransportKind.stdio, };