diff --git a/.dialyzer_ignore.exs b/.dialyzer_ignore.exs new file mode 100644 index 000000000..e69de29bb diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..63d5a024d --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# These are supported funding model platforms + +github: [scohen] diff --git a/.github/workflows/elixir.yml b/.github/workflows/elixir.yml index 5fe50d9dd..8b10adfd0 100644 --- a/.github/workflows/elixir.yml +++ b/.github/workflows/elixir.yml @@ -41,26 +41,18 @@ jobs: - name: Checkout code uses: actions/checkout@v4 - - name: Set Variables - id: set_mix_lock_hash - run: | - mix_lock_hash="${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}" - echo "mix_lock_hash=$mix_lock_hash" >> "$GITHUB_OUTPUT" - # Step: Define how to cache deps. Restores existing cache if present. - name: Cache deps id: cache-deps uses: actions/cache@v3 - env: - cache-name: cache-elixir-deps-1 with: path: | deps _build - key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('**/mix.lock') }} + key: ${{ runner.os }}-mix-${{ env.DEFAULT_ELIXIR }}-${{ env.DEFAULT_OTP }}-${{ hashFiles('**/mix.lock') }} restore-keys: | - ${{ runner.os }}-mix-${{ env.cache-name }}- + ${{ runner.os }}-mix-${{ env.DEFAULT_ELIXIR }}-${{ env.DEFAULT_OTP }}- # Step: Download project dependencies. If unchanged, uses # the cached version. @@ -89,6 +81,7 @@ jobs: project_mix_lock: ${{ format('{0}{1}', github.workspace, '/mix.lock') }} projects_ex_blob: ${{ format('{0}{1}', github.workspace, '/projects/**/*.ex') }} projects_locks_blob: ${{ format('{0}{1}', github.workspace, '/projects/*/mix.lock') }} + MIX_ENV: dev steps: # Step: Setup Elixir + Erlang image as the base. - name: Set up Elixir @@ -120,9 +113,9 @@ jobs: deps _build - key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('**/mix.lock') }} + key: ${{ runner.os }}-mix-${{ env.DEFAULT_ELIXIR }}-${{ env.DEFAULT_OTP }}-${{ hashFiles('**/mix.lock') }} restore-keys: | - ${{ runner.os }}-mix-${{ env.cache-name }}- + ${{ runner.os }}-mix-${{ env.DEFAULT_ELIXIR }}-${{ env.DEFAULT_OTP }}- # Step: Create dialyzer .plt files if they're not present - name: Cache dialyzer plt files @@ -161,14 +154,24 @@ jobs: # and running the workflow steps. matrix: include: - - elixir: "1.15.3-otp-26" - otp: "26.0.2" - - elixir: "1.15.3-otp-25" - otp: "25.3" - - elixir: "1.14.5-otp-25" - otp: "25.3" - - elixir: "1.13.4-otp-25" - otp: "25.3" + - elixir: "1.17" + otp: "27" + - elixir: "1.17" + otp: "26" + - elixir: "1.17" + otp: "25" + - elixir: "1.16" + otp: "26" + - elixir: "1.16" + otp: "25" + - elixir: "1.15.6" + otp: "26" + - elixir: "1.15.6" + otp: "25" + - elixir: "1.14" + otp: "25" + - elixir: "1.13" + otp: "25" steps: # Step: Check out the code. - name: Checkout code @@ -181,26 +184,18 @@ jobs: otp-version: ${{matrix.otp}} elixir-version: ${{matrix.elixir}} - - name: Set Variables - id: set_mix_lock_hash - run: | - mix_lock_hash="${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}" - echo "mix_lock_hash=$mix_lock_hash" >> "$GITHUB_OUTPUT" - # Step: Define how to cache deps. Restores existing cache if present. 
- name: Cache deps id: cache-deps uses: actions/cache@v3 - env: - cache-name: cache-elixir-deps-1 with: path: | deps _build - key: ${{ runner.os }}-mix-${{ env.cache-name }}-${{ hashFiles('**/mix.lock') }} + key: ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-${{ hashFiles('**/mix.lock') }} restore-keys: | - ${{ runner.os }}-mix-${{ env.cache-name }}- + ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}- # Step: Download project dependencies. If unchanged, uses # the cached version. diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index 93a396fce..88351c2fa 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -1,20 +1,22 @@ name: Nix derivation checks on: - push: - branches: ["main"] + pull_request: paths: ["mix.lock"] + workflow_dispatch: jobs: auto-update-nix-hash: + permissions: + contents: write runs-on: ubuntu-latest name: Auto update Nix hash steps: - - uses: actions/checkout@v4 - - uses: DeterminateSystems/nix-installer-action@main - - uses: DeterminateSystems/magic-nix-cache-action@main - - run: | - nix build --no-link .#__fodHashGen 2>&1 | awk '/gen:/ { print $2 }' > nix/hash - git config user.email "" - git config user.name "GitHub Action Bot" - git commit -m 'Update Nix hash of Mix deps' nix/hash && git push || true + - uses: actions/checkout@v4 + - uses: DeterminateSystems/nix-installer-action@main + - uses: DeterminateSystems/magic-nix-cache-action@main + - run: nix run .#update-hash | tee nix/hash + - uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: 'Update Nix hash of Mix deps' + - run: nix build diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1e4721a90..92e3758db 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -50,5 +50,6 @@ jobs: with: artifacts: lexical*.zip makeLatest: true - generateReleaseNotes: true + generateReleaseNotes: false allowUpdates: true + artifactErrorsFailBuild: true diff --git a/.iex.exs b/.iex.exs index b3bca57e8..ba228cdfa 100644 --- a/.iex.exs +++ b/.iex.exs @@ -1 +1,7 @@ use Lexical.Server.IEx.Helpers + +try do + Mix.ensure_application!(:observer) +rescue + _ -> nil +end diff --git a/.iex.namespaced.exs b/.iex.namespaced.exs index e04fd5ded..0cc651b53 100644 --- a/.iex.namespaced.exs +++ b/.iex.namespaced.exs @@ -1 +1,2 @@ use LXical.Server.IEx.Helpers +alias LXical, as: Lexical diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..802ec2064 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,455 @@ +## Unreleased + No changes yet +## v0.7 +We're releasing 0.7 so we can support Elixir 1.17 and Erlang 27. However, those users on Erlang 27 will experience large increases in the amount of memory Lexical takes up [due to this bug](https://github.com/erlang/otp/pull/8683). When that bug is fixed, we'll push out another release that will detect the newer version of Erlang and things should go back to normal. + +Other than supporting the newer versions of Erlang and Elixir, we've added the following features: + +### Features +* Organize aliases code action. When invoked, will alphabetize and flatten all your existing aliases in a module. It will also put all aliases in the same place in a module. +* Remove unused alias code action +* Add alias code action: Type a module, invoke the code action and see a list of modules to alias, select one, and it's added to your module's other aliases. +* Improved script handling: Now lexical won't execute scripts when editing them. Thanks, @zachallaun! 
+* We now show typespecs for struct field completions. Thanks, @kirillrogovoy
+
+### What's Changed
+
+* Completion: Use existing specs to infer function signatures and vice-versa by @zachallaun in https://github.com/lexical-lsp/lexical/pull/802
+* Don't evaluate top-level code in exs files by @zachallaun in https://github.com/lexical-lsp/lexical/pull/798
+* Completion: Don't insert impl attribute if it's already present by @zachallaun in https://github.com/lexical-lsp/lexical/pull/801
+* Organize Aliases by @scohen in https://github.com/lexical-lsp/lexical/pull/725
+* Remove unused aliases by @scohen
+* Refactor: Pass env into completion in remote control by @scohen in https://github.com/lexical-lsp/lexical/pull/733
+* Refactor: Increased type detail by @scohen in https://github.com/lexical-lsp/lexical/pull/734
+* Fix: Edge case for module loading by @scohen in https://github.com/lexical-lsp/lexical/pull/738
+* Improved store error handling by @scohen in https://github.com/lexical-lsp/lexical/pull/737
+* Complete callables without parens if present in locals_without_parens by @zachallaun in https://github.com/lexical-lsp/lexical/pull/739
+* Indexed delegated functions by @scohen in https://github.com/lexical-lsp/lexical/pull/729
+* Fix: Crash when typing English by @scohen in https://github.com/lexical-lsp/lexical/pull/742
+* Fix go to definition behavior for same-name, same-arity functions by directing to the first function by @scottming in https://github.com/lexical-lsp/lexical/pull/746
+* Completion: show type spec for struct fields by @kirillrogovoy in https://github.com/lexical-lsp/lexical/pull/751
+* Code Action: Add alias by @scohen in https://github.com/lexical-lsp/lexical/pull/740
+* Fixed: Go to definition crashes on modules defined via a macro by @scohen in https://github.com/lexical-lsp/lexical/pull/753
+* Increased plugin timeouts by @scohen in https://github.com/lexical-lsp/lexical/pull/757
+* Code Action: Remove unused aliases by @scohen in https://github.com/lexical-lsp/lexical/pull/748
+* Reorder `test` macro completions by @zachallaun in https://github.com/lexical-lsp/lexical/pull/769
+* Added struct definition detection for Ecto schemas by @scohen in https://github.com/lexical-lsp/lexical/pull/758
+* Sorted bang functions after non-bang variants by @scohen in https://github.com/lexical-lsp/lexical/pull/770
+
+## v0.6.1
+
+Small bugfix release. We found an issue regarding Unicode conversion, and although it has existed for a while and no one complained, we felt that it was more likely to surface now that we have workspace symbols.
+
+* Fix conversion of UTF-8 positions to UTF-16 code units by @zachallaun in https://github.com/lexical-lsp/lexical/pull/719
+* Fix Entity.resolve not correctly resolving local function capture calls by @scottming in https://github.com/lexical-lsp/lexical/pull/721
+
+## v0.6.0 `24 April, 2024`
+After multiple people asked, both document and workspace symbols have been implemented.
+Both make heavy use of our indexing infrastructure, which provides extremely fast and
+accurate results.
+
+We've also fixed a number of small inconsistencies and crashes in the indexer, making it more robust and accurate. I especially want to call out the fix @lukad made, which improved indexing performance by 3600x on his large codebase.
+When you update to this release, we strongly recommend re-indexing your project's source code by opening the project's `mix.exs` file and
+running the `Rebuild 's code search index` code action.
+
+In addition, we've improved support for completions in Phoenix controllers, stopped completions inside strings, and changed how we sort completions. The new sorting scheme is a big improvement for usability, and sorts things by how "close" they are to what you're working on. Give it a shot; we think you'll like it.
+
+I'd like to thank all our new contributors, and especially our core team of
+@scottming, @zachallaun, @moosieus and @blond. You've all gone above and beyond.
+
+### What's Changed
+
+* Add Sublime Text instructions to docs by @distefam in https://github.com/lexical-lsp/lexical/pull/633
+* Implement callback completions by @Moosieus in https://github.com/lexical-lsp/lexical/pull/640
+* Fix `do` completing to `defmodule` in VSCode by @Moosieus in https://github.com/lexical-lsp/lexical/pull/642
+* Speed up indexing by not calling `deps_dir` for every file by @lukad in https://github.com/lexical-lsp/lexical/pull/646
+* Fixed bug where blocks weren't popped correctly by @scohen in https://github.com/lexical-lsp/lexical/pull/647
+* Fix crashing in unsaved VSCode files by @Moosieus in https://github.com/lexical-lsp/lexical/pull/644
+* Find references for variables by @scohen in https://github.com/lexical-lsp/lexical/pull/645
+* New completion sorting approach by @Moosieus in https://github.com/lexical-lsp/lexical/pull/653
+* Fixed issue where function definitions were returning references by @scohen in https://github.com/lexical-lsp/lexical/pull/655
+* Document Symbols support by @scohen in https://github.com/lexical-lsp/lexical/pull/652
+* Prevent spurious errors in .heex files by disabling the on-type compiler by @Moosieus in https://github.com/lexical-lsp/lexical/pull/662
+* Fixing crash when dealing with Unicode files by @scohen in https://github.com/lexical-lsp/lexical/pull/672
+* Fix: Non-string test names crash ExUnit indexer by @scohen in https://github.com/lexical-lsp/lexical/pull/676
+* Added module attribute detection by @scohen in https://github.com/lexical-lsp/lexical/pull/679
+* Impl suggest behaviours by @scohen in https://github.com/lexical-lsp/lexical/pull/681
+* Support resolving Phoenix Controller modules
by @scottming in https://github.com/lexical-lsp/lexical/pull/659
+* Completions: Handled nil origin and full name by @scohen in https://github.com/lexical-lsp/lexical/pull/689
+* Fixed crash for in-progress module attributes by @scohen in https://github.com/lexical-lsp/lexical/pull/691
+* For module definitions, use the `Indexer` version instead of `ElixirSense` by @scottming in https://github.com/lexical-lsp/lexical/pull/658
+* Current modules can be nil by @scohen in https://github.com/lexical-lsp/lexical/pull/696
+* Stopped completions inside strings by @scohen in https://github.com/lexical-lsp/lexical/pull/692
+* Workspace symbols support by @scohen in https://github.com/lexical-lsp/lexical/pull/674
+* Fix: Module suggestion was incorrect for files with multiple periods by @scohen in https://github.com/lexical-lsp/lexical/pull/705
+
+**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.5.2...v0.6.0
+
+## v0.5.2 `29 February, 2024`
+This is a bugfix release that fixes the following:
+
+* Updated Credo by @scohen in #617
+* Update Nix hash with new deps by @hauleth in #618
+* Prepare scripts for updating Nix hash by non-Nix users by @hauleth in #619
+* Update the installation documentation for the supported Elixir version by @scottming in #624
+* Fixed unused variable code action by @scohen in #628
+
+## v0.5.1 `22 February, 2024`
+This is a bugfix release that fixes an issue where Lexical wouldn't start on macOS if you're using the default bash and not using one of the supported version managers (asdf, rtx, or mise).
+
+## v0.5.0 `21 February, 2024`
+Admittedly, it's been a long while since the last release, but we've laid the groundwork for some exciting new features.
+
+Since November, we've built out our search and indexing infrastructure, which allows Lexical to find interesting bits of your source code and highlight them for you later. We went through five separate backends before settling on one that is super duper fast, memory efficient(ish), and written in pure Elixir.
+
+Presently, we're using this to power our newest features: find references and go to definition. We've implemented both for modules, module attributes and functions. You'll notice that when Lexical starts, it will index your project's source code. This is a one-time operation that should be quick; indexing Lexical and its dependencies (193,000 lines of source code) takes around 10 seconds. This means that indexing is on by default, and there's no way to turn it off any more. We've crossed the Rubicon, folks, and there's no turning back.
+
+In other good news, because of the indexing infrastructure we no longer have to do a full build when Lexical starts for the first time. This means startup time has dramatically improved: on the Lexical project, it has dropped from 12 seconds to 2.
+
+I'd like to thank @scottming, @zachallaun and @Blond11516 for ensuring that the current state of the code is where it is today. Also, thank you @hauleth for taking care of the Nix flake.
+
+And we've made a ton of bug fixes and usability improvements since 0.4.1 as well.
Some highlights include:
+
+* Support for Elixir 1.16
+* Handled renaming of rtx to mise
+* Multiple improvements to the hover popup
+* Improved ease of writing new code actions
+* Fixed undefined variable diagnostics errors in HEEx templates
+* Code action: Suggested function names
+* Completions for typespecs
+* Improved Nix flake
+
+### What's Changed
+
+* Correctly activate rtx during boot by @zachallaun in https://github.com/lexical-lsp/lexical/pull/430
+* Improve incompatible version errors on boot by @zachallaun in https://github.com/lexical-lsp/lexical/pull/389
+* i96: send and log messages by @jollyjerr in https://github.com/lexical-lsp/lexical/pull/420
+* Put module below call signature for hover funs/types by @zachallaun in https://github.com/lexical-lsp/lexical/pull/434
+* Fix diagnostics issue when `config_env` is called by @scottming in https://github.com/lexical-lsp/lexical/pull/439
+* Find References by @scohen in https://github.com/lexical-lsp/lexical/pull/405
+* Code Actions refactor by @scohen in https://github.com/lexical-lsp/lexical/pull/453
+* Consider arity in function ordering by @yerguden in https://github.com/lexical-lsp/lexical/pull/442
+* Fix: Erlang function calls in pipes were incorrectly formatted by @scohen in https://github.com/lexical-lsp/lexical/pull/476
+* Fix: Stutter when completing inside string interpolations by @scohen in https://github.com/lexical-lsp/lexical/pull/464
+* Fix: Don't raise an exception if the build directory doesn't exist by @scohen in https://github.com/lexical-lsp/lexical/pull/481
+* Add Vim ALE configuration details by @jparise in https://github.com/lexical-lsp/lexical/pull/484
+* Removed unhelpful completion for :: symbol by @mdshamoon in https://github.com/lexical-lsp/lexical/pull/485
+* Add heex to filetype list for neovim by @soundmonster in https://github.com/lexical-lsp/lexical/pull/487
+* Added completions for typespecs by @scohen in https://github.com/lexical-lsp/lexical/pull/478
+* Fix module completion error after a dot by @zachallaun in https://github.com/lexical-lsp/lexical/pull/496
+* Add replacing unknown remote function to code actions by @sheldak in https://github.com/lexical-lsp/lexical/pull/443
+* Move from flake-utils to flake-parts by @hauleth in https://github.com/lexical-lsp/lexical/pull/498
+* Fix Diagnostic.Result to_lsp with 4-elem tuple position by @bangalcat in https://github.com/lexical-lsp/lexical/pull/502
+* Optimise the manual loading of dependent apps and modules
by @scottming in https://github.com/lexical-lsp/lexical/pull/455
+* Add instructions for LunarVim installation by @dimitarvp in https://github.com/lexical-lsp/lexical/pull/510
+* Find function references by @scohen in https://github.com/lexical-lsp/lexical/pull/516
+* Added percentage based progress reporters by @scohen in https://github.com/lexical-lsp/lexical/pull/519
+* Added reindex command by @scohen in https://github.com/lexical-lsp/lexical/pull/522
+* Support mise (new name for rtx) by @x-ji in https://github.com/lexical-lsp/lexical/pull/544
+* Correctly applied code lens options by @scohen in https://github.com/lexical-lsp/lexical/pull/553
+* Support Elixir 1.16 by @scottming in https://github.com/lexical-lsp/lexical/pull/535
+* chore(nix): update deps hash by @hauleth in https://github.com/lexical-lsp/lexical/pull/557
+* Resolve function definitions without parens by @zachallaun in https://github.com/lexical-lsp/lexical/pull/563
+* Correctly resolve imported calls by @zachallaun in https://github.com/lexical-lsp/lexical/pull/565
+* Implemented find references for module attributes by @scohen in https://github.com/lexical-lsp/lexical/pull/558
+* Correctly resolve `&Module.function/arity` syntax by @zachallaun in https://github.com/lexical-lsp/lexical/pull/566
+* Switched over to mise rather than rtx by @scohen in https://github.com/lexical-lsp/lexical/pull/580
+* Struct discovery now uses the index by @scohen in https://github.com/lexical-lsp/lexical/pull/582
+* Detected module references using __MODULE__ by @scohen in https://github.com/lexical-lsp/lexical/pull/603
+* Bumped garbage collection for some of our more intensive processes by @scohen in https://github.com/lexical-lsp/lexical/pull/600
+* Calling find references on a `defstruct` call finds defined structs by @scohen in https://github.com/lexical-lsp/lexical/pull/607
+* Zipped package now keeps file permissions by @scohen in https://github.com/lexical-lsp/lexical/pull/609
+* Excluded source files in build directory by @scohen in https://github.com/lexical-lsp/lexical/pull/610
+
+## v0.4.1 `08 November, 2023`
+This is a small bugfix release for 0.4.0.
+
+It contains the following fixes:
+
+ * Fix: Stutter when completing some items inside strings (`:erl` would complete to `:erlerlang`)
+ * Fix: Undefined variable names in HEEx templates
+ * Fix: Erlang remote calls in pipelines did not have their first parameter removed during completion
+ * Feature: Function names in completions are ordered by name and then arity
+
+## v0.4.0 `24 October, 2023`
+Welcome to Lexical v0.4.0!
+
+The main thrust of v0.4 is hover support and quality of life improvements. Now, when you hover over a module or function, you'll see relevant documentation, types and parameters. We've also spent a lot of time working on completions in #410, which makes them more consistent, fixes some bugs in certain language clients (like eglot adding an extra @ when completing module attributes), and greatly improves their feel in VSCode.
+
+Additionally, quite a few of the changes in this release were about laying the groundwork for our indexing infrastructure, which will debut in the next version. But fear not, this version has indexing disabled.
+
+I want to thank @zachallaun and @scottming for all their hard work on this release. They've made Lexical faster and more friendly, and have removed a bunch of bugs!
+
+Highlights include:
+
+ * Document hover for functions and modules
+ * Improved boot scripts
+ * Automatically updating Nix flake.
Thanks, @hauleth + * Helix editor integration. Thanks @philipgiuliani + * .heex integration + * Massively improved completions (Check out the PR, it's too big to summarize) + +Bugs fixed: + + * Longstanding unicode completion / editing bugs slain. Unicode works perfectly now. + +### What's Changed + +* Suggest a module name for defmodule completion by @scohen in https://github.com/lexical-lsp/lexical/pull/338 +* Add Vanilla Emacs with eglot instruction by @dalugm in https://github.com/lexical-lsp/lexical/pull/343 +* Add elixir boot script to support having spaces in the package path by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/345 +* Centralize general-use AST modules in `common` by @zachallaun in https://github.com/lexical-lsp/lexical/pull/342 +* Allow release workflow to update existing releases by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/346 +* Fixed bug in path namespacing by @scohen in https://github.com/lexical-lsp/lexical/pull/350 +* Convert utf8->latin1 before decoding JSON-RPC payloads by @zachallaun in https://github.com/lexical-lsp/lexical/pull/353 +* Add support for `textDocument/hover` (for modules) by @zachallaun in https://github.com/lexical-lsp/lexical/pull/331 +* Fix markdown formatting for supported versions by @reisub in https://github.com/lexical-lsp/lexical/pull/355 +* Indexing features for modules by @scohen in https://github.com/lexical-lsp/lexical/pull/347 +* Refactor: Moved Dispatch from server to remote_control by @scohen in https://github.com/lexical-lsp/lexical/pull/357 +* Moved dispatch to :gen_event by @scohen in https://github.com/lexical-lsp/lexical/pull/358 +* Integrated indexing into language server by @scohen in https://github.com/lexical-lsp/lexical/pull/359 +* Fixed index store test failure by @scohen in https://github.com/lexical-lsp/lexical/pull/365 +* Add `current_project/1` helper function by @scottming in https://github.com/lexical-lsp/lexical/pull/360 +* Position refactor by @scohen in https://github.com/lexical-lsp/lexical/pull/364 +* Added logos to project and readme by @scohen in https://github.com/lexical-lsp/lexical/pull/366 +* Aliases was confused by nested non-module blocks by @scohen in https://github.com/lexical-lsp/lexical/pull/368 +* Better typespecs by @scohen in https://github.com/lexical-lsp/lexical/pull/367 +* Add Helix installation instructions by @philipgiuliani in https://github.com/lexical-lsp/lexical/pull/376 +* Generate correct typespec for LSP messages by @zachallaun in https://github.com/lexical-lsp/lexical/pull/380 +* Async indexing by @zachallaun in https://github.com/lexical-lsp/lexical/pull/371 +* Enabled warnings-as-errors on umbrella by @scohen in https://github.com/lexical-lsp/lexical/pull/383 +* Improved hover support: Structs, qualified calls and types, more info for modules by @zachallaun in https://github.com/lexical-lsp/lexical/pull/356 +* Explicitly implement protocol in completion modules by @zachallaun in https://github.com/lexical-lsp/lexical/pull/386 +* Refactor client capability tracking by @zachallaun in https://github.com/lexical-lsp/lexical/pull/385 +* Support for HEEx compilation by @scottming in https://github.com/lexical-lsp/lexical/pull/323 +* Made aliases better handle the __aliases__ special form by @scohen in https://github.com/lexical-lsp/lexical/pull/393 +* Fix the `eex` compiled flaky test by @scottming in https://github.com/lexical-lsp/lexical/pull/394 +* Enhanced ets / removed cub and mnesia backends. 
by @scohen in https://github.com/lexical-lsp/lexical/pull/392
+* Disabled indexing by @scohen in https://github.com/lexical-lsp/lexical/pull/399
+* Fix Field parsing error for zed editor by @scottming in https://github.com/lexical-lsp/lexical/pull/396
+* Fix the struct `KeyError` diagnostics by @scottming in https://github.com/lexical-lsp/lexical/pull/397
+* Always return `Completion.List` with `is_incomplete: true` by @zachallaun in https://github.com/lexical-lsp/lexical/pull/398
+* Detect version manager the same way in all scripts by @zachallaun in https://github.com/lexical-lsp/lexical/pull/390
+* Respond with `nil` instead of an error when formatting fails by @zachallaun in https://github.com/lexical-lsp/lexical/pull/411
+* Fixup README word repeating by @solar05 in https://github.com/lexical-lsp/lexical/pull/414
+* Made display name calculation relocatable by @scohen in https://github.com/lexical-lsp/lexical/pull/415
+* Move `entity` module to `remote_control` app by @scottming in https://github.com/lexical-lsp/lexical/pull/406
+* Reorder the startup order of the children of `Server.Project.Supervisor` by @scottming in https://github.com/lexical-lsp/lexical/pull/407
+* Refactor completions to always use text edits by @zachallaun in https://github.com/lexical-lsp/lexical/pull/409
+* Fix spec for `Lexical.Ast.cursor_path/2` by @zachallaun in https://github.com/lexical-lsp/lexical/pull/418
+* Fix `path_at/2` to allow path to branches if they're innermost by @zachallaun in https://github.com/lexical-lsp/lexical/pull/419
+* Improve completions by @zachallaun in https://github.com/lexical-lsp/lexical/pull/410
+* Improved memory performance while indexing by @scohen in https://github.com/lexical-lsp/lexical/pull/421
+* chore: update Nix definition by @hauleth in https://github.com/lexical-lsp/lexical/pull/417
+* Make the operational behavior of the ancestors of structures and modules more consistent by @scottming in https://github.com/lexical-lsp/lexical/pull/408
+* Refactor shell scripts and add Docker-based integration tests by @zachallaun in https://github.com/lexical-lsp/lexical/pull/395
+
+**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.3.3...v0.4.0
+
+## v0.3.3 `05 September, 2023`
+Fixed Unicode handling.
+
+Unicode was likely broken under the last several releases; Unicode in documents would result in incorrect errors popping up. This has been fixed; it was due to incorrect decoding in the standard input handler.
+
+**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.3.2...v0.3.3
+
+## v0.3.2 `29 August, 2023`
+0.3.2 fixes a bug where packaging would not produce namespaced artifacts if the lexical directory was inside a subdirectory that had one of its dependencies as a path element.
+
+For example, packaging would fail if lexical was in `/path/to/home/language_servers/lexical`.
+
+**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.3.1...v0.3.2
+
+## v0.3.1 `24 August, 2023`
+
+This is a bugfix release. Packaging generated in v0.3.0 would not start in directories that contain spaces, which is the default for VSCode under macOS.
+This release has a new launching mechanism that should allow us to use a lot less bash scripting.
+
+## v0.3.0 `23 August, 2023`
+
+### What's Changed
+
+* Support Struct fields completion when in struct arguments context by @scottming in https://github.com/lexical-lsp/lexical/pull/196
+* Fix: Argument names crashes in light of a literal atom by @scohen in https://github.com/lexical-lsp/lexical/pull/285
+* Add Nix Flake by @hauleth in https://github.com/lexical-lsp/lexical/pull/175
+* ci: Require strict versions from erlef/setup-beam by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/289
+* Refactor: Extracted Build.Project by @scohen in https://github.com/lexical-lsp/lexical/pull/292
+* Fixed code unit / codepoint confusion by @scohen in https://github.com/lexical-lsp/lexical/pull/290
+* Fixed project node naming conflicts by @scohen in https://github.com/lexical-lsp/lexical/pull/294
+* Remove logger for debugging port stdin/stdout by @scottming in https://github.com/lexical-lsp/lexical/pull/298
+* Added support for per-file .eex compilation by @scohen in https://github.com/lexical-lsp/lexical/pull/296
+* Added default case by @scohen in https://github.com/lexical-lsp/lexical/pull/305
+* Config compiler by @scohen in https://github.com/lexical-lsp/lexical/pull/304
+* Namespacing refinements by @scohen in https://github.com/lexical-lsp/lexical/pull/307
+* Update architecture.md with spelling corrections by @axelclark in https://github.com/lexical-lsp/lexical/pull/310
+* Improve the documentation related to `neovim` installation by @scottming in https://github.com/lexical-lsp/lexical/pull/308
+* Handle presence of multiple version managers by @awerment in https://github.com/lexical-lsp/lexical/pull/311
+* Make sure not to choke on non-export prefixed path lines by @andyleclair in https://github.com/lexical-lsp/lexical/pull/312
+* Second attempt to make struct completion more consistent by @scottming in https://github.com/lexical-lsp/lexical/pull/225
+* Added installation instructions for Vim + Vim-LSP by @jHwls in https://github.com/lexical-lsp/lexical/pull/315
+* Reworked Lexical packaging by @scohen in https://github.com/lexical-lsp/lexical/pull/314
+* Development docs by @scohen in https://github.com/lexical-lsp/lexical/pull/316
+* Fix extraneous logging in test by @scohen in https://github.com/lexical-lsp/lexical/pull/317
+* Update paths to start_lexical.sh in installation.md by @edwardsmit in https://github.com/lexical-lsp/lexical/pull/318
+* Fix: Flaky tests by @scohen in https://github.com/lexical-lsp/lexical/pull/320
+* Support for Erlang 26 by @scohen in https://github.com/lexical-lsp/lexical/pull/319
+* Fix typo of package task by @scottming in https://github.com/lexical-lsp/lexical/pull/321
+* Fix VSCode installation instructions by @miXwui in https://github.com/lexical-lsp/lexical/pull/325
+* Fix package task generating empty ZIPs by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/334
+* Removed plugin_runner app by @scohen in https://github.com/lexical-lsp/lexical/pull/327
+* Added development docs suggestions by @scohen in https://github.com/lexical-lsp/lexical/pull/333
+* Added Discord link and build badges by @scohen in https://github.com/lexical-lsp/lexical/pull/335
+* 0.3.0 Release by @scohen in https://github.com/lexical-lsp/lexical/pull/337
+* Context-aware "use" completions by @scohen in https://github.com/lexical-lsp/lexical/pull/336
+
+**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.2.2...v0.3.0
+
+## v0.2.2 `21 July, 2023`
+
+### What's Changed
+
+* fix: Add missing command to get rtx env by @Blond11516 in
https://github.com/lexical-lsp/lexical/pull/281 +* Update Lexical version to 0.2.2 by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/282 + + +**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/v0.2.1...v0.2.2 + +## v0.2.1 `21 July, 2023` +This release bumps versions of our apps, and contains no improvements or fixes. + +## v0.2.0 `21 July, 2023` + +### What's Changed + +* Handled Cancel Notifications by @scohen in https://github.com/lexical-lsp/lexical/pull/157 +* Support work done progress during project compilation by @scottming in https://github.com/lexical-lsp/lexical/pull/135 +* Normalize naming by @scohen in https://github.com/lexical-lsp/lexical/pull/158 +* Addressed deadlocks in Document Store by @scohen in https://github.com/lexical-lsp/lexical/pull/160 +* Fix diagnostic for missing fields due to @enforce_keys by @scottming in https://github.com/lexical-lsp/lexical/pull/162 +* Enable --warnings-as-errors in CI by @scottming in https://github.com/lexical-lsp/lexical/pull/154 +* Added file watching by @scohen in https://github.com/lexical-lsp/lexical/pull/164 +* Fix CreateWorkDoneProgress for VScode and Emacs by @scottming in https://github.com/lexical-lsp/lexical/pull/161 +* Fixed infinite loop in document updates by @scohen in https://github.com/lexical-lsp/lexical/pull/166 +* Alias only returns modules by @scohen in https://github.com/lexical-lsp/lexical/pull/168 +* Added fragment capabilities to document by @scohen in https://github.com/lexical-lsp/lexical/pull/170 +* Fix record missing key's error by @scottming in https://github.com/lexical-lsp/lexical/pull/174 +* Do not create intermediate binaries by @hauleth in https://github.com/lexical-lsp/lexical/pull/176 +* Improved README.md by @scohen in https://github.com/lexical-lsp/lexical/pull/177 +* Removed string-based completion env operations by @scohen in https://github.com/lexical-lsp/lexical/pull/172 +* Fixed code actions / improved code mod api by @scohen in https://github.com/lexical-lsp/lexical/pull/179 +* Remove patch from progress/state_test by @scottming in https://github.com/lexical-lsp/lexical/pull/180 +* Improved struct completion by @scohen in https://github.com/lexical-lsp/lexical/pull/181 +* fix(asdf): change order of installation by @03juan in https://github.com/lexical-lsp/lexical/pull/186 +* Improve completions with default arguments by @scohen in https://github.com/lexical-lsp/lexical/pull/187 +* New project structure / beginning of plugins by @scohen in https://github.com/lexical-lsp/lexical/pull/184 +* Pulled out the name of arguments in pattern match args by @scohen in https://github.com/lexical-lsp/lexical/pull/193 +* Removed initial compile by @scohen in https://github.com/lexical-lsp/lexical/pull/194 +* Fix the parameter issue of Remote Callable in pipeline. 
by @scottming in https://github.com/lexical-lsp/lexical/pull/188
+* Removed wx and et applications by @scohen in https://github.com/lexical-lsp/lexical/pull/201
+* Improve UX when completing struct by @scottming in https://github.com/lexical-lsp/lexical/pull/190
+* Added a check for Credo pipeline initial argument by @scohen in https://github.com/lexical-lsp/lexical/pull/200
+* We now use Lexical.Plugin.Diagnostics by @scohen in https://github.com/lexical-lsp/lexical/pull/197
+* Fix the NAMESPACE=1 release issue by @scottming in https://github.com/lexical-lsp/lexical/pull/203
+* Boost callable completions that are not double underscore/default by @viniciusmuller in https://github.com/lexical-lsp/lexical/pull/195
+* Reduces behaviour_info/1 priority in completions by @viniciusmuller in https://github.com/lexical-lsp/lexical/pull/205
+* Suggest behavior callbacks by @doughsay in https://github.com/lexical-lsp/lexical/pull/206
+* fix: completion context can be null by @hauleth in https://github.com/lexical-lsp/lexical/pull/210
+* Module sorting / Refactor boost by @scohen in https://github.com/lexical-lsp/lexical/pull/212
+* Dependency structs were not being detected by @scohen in https://github.com/lexical-lsp/lexical/pull/213
+* Load project config before compiling by @scohen in https://github.com/lexical-lsp/lexical/pull/215
+* Plugin Architecture by @scohen in https://github.com/lexical-lsp/lexical/pull/211
+* Refactor: Completion.Results are now Completion.Candidates by @scohen in https://github.com/lexical-lsp/lexical/pull/216
+* ci: Tag release workflow by @Blond11516 in https://github.com/lexical-lsp/lexical/pull/221
+* Added versions to plugins by @scohen in https://github.com/lexical-lsp/lexical/pull/219
+* Bring the 1.15 version `Code` and `:elixir_tokenizer` into Lexical by @scottming in https://github.com/lexical-lsp/lexical/pull/217
+* Support map fields completion by @scottming in https://github.com/lexical-lsp/lexical/pull/226
+* Plugin packaging by @scohen in https://github.com/lexical-lsp/lexical/pull/222
+* Support projects having the same directory name as a dependency by @scohen in https://github.com/lexical-lsp/lexical/pull/227
+* Docs: Installation by @scohen in https://github.com/lexical-lsp/lexical/pull/229
+* Fixed plugins for external projects by @scohen in https://github.com/lexical-lsp/lexical/pull/230
+* Add neovim minimal configuration by @scottming in https://github.com/lexical-lsp/lexical/pull/240
+* Fixed failing builds by @scohen in https://github.com/lexical-lsp/lexical/pull/241
+* Fix the issue of project name being too long by @scottming in https://github.com/lexical-lsp/lexical/pull/239
+* Updated to work with older versions of Elixir / Erlang by @scohen in https://github.com/lexical-lsp/lexical/pull/235
+* [issue-178] Snippet translations for macro by @Sleepful in https://github.com/lexical-lsp/lexical/pull/208
+* [issue-178] Fix macro_test by @Sleepful in https://github.com/lexical-lsp/lexical/pull/246
+* Compile warnings by @scohen in https://github.com/lexical-lsp/lexical/pull/250
+* WIP: Alias module by @scohen in https://github.com/lexical-lsp/lexical/pull/236
+* Fixed boundary issue by @scohen in https://github.com/lexical-lsp/lexical/pull/249
+* GitHub Actions improvements by @scohen in https://github.com/lexical-lsp/lexical/pull/245
+* Fixing flaky tests by @scohen in https://github.com/lexical-lsp/lexical/pull/252
+* Aliases can fail by @scohen in https://github.com/lexical-lsp/lexical/pull/251
+* Generate `.gitignore` for `.lexical` project
workspace by @zachallaun in https://github.com/lexical-lsp/lexical/pull/218 +* Rebuild PLT files on projects dep changes by @scohen in https://github.com/lexical-lsp/lexical/pull/253 +* Changed docs to indicate support for 1.13 and erl 24 by @scohen in https://github.com/lexical-lsp/lexical/pull/257 +* Enforced project name validity by @scohen in https://github.com/lexical-lsp/lexical/pull/258 +* Removed double compilation by @scohen in https://github.com/lexical-lsp/lexical/pull/259 +* Completion improvements by @scohen in https://github.com/lexical-lsp/lexical/pull/260 +* The start line can be the end line by @scohen in https://github.com/lexical-lsp/lexical/pull/264 +* Fixed protocol consolidation by @scohen in https://github.com/lexical-lsp/lexical/pull/265 +* Heavy refactor of namespacing by @scohen in https://github.com/lexical-lsp/lexical/pull/266 +* Namespacing fixes / simplifications by @scohen in https://github.com/lexical-lsp/lexical/pull/268 +* Quieted compile warnings in test by @scohen in https://github.com/lexical-lsp/lexical/pull/270 +* Support Elixir 1.15 by @scottming in https://github.com/lexical-lsp/lexical/pull/261 +* Re-enabled multiple version support by @scohen in https://github.com/lexical-lsp/lexical/pull/269 +* Loadconfig needs to be called before deps are compiled by @scohen in https://github.com/lexical-lsp/lexical/pull/275 +* Added default candidate case by @scohen in https://github.com/lexical-lsp/lexical/pull/274 +* Replace with underscore can fail by @scohen in https://github.com/lexical-lsp/lexical/pull/276 +* Preparing for 0.2.0 release by @scohen in https://github.com/lexical-lsp/lexical/pull/278 + +**Full Changelog**: https://github.com/lexical-lsp/lexical/compare/4367692...v0.2.0 diff --git a/README.md b/README.md index 6ef318ada..cf454254c 100644 --- a/README.md +++ b/README.md @@ -66,11 +66,11 @@ faster. Follow the [Detailed Installation Instructions](pages/installation.md) - ``` - mix package - ``` +``` +mix package +``` - Lexical will now be available in `_build/dev/package/lexical` +Lexical will now be available in `_build/dev/package/lexical` If you would like to change the output directory, you can do so with the `--path` option @@ -112,6 +112,7 @@ mix benchmark /benchmarks/.exs ``` ### Logging + When lexical starts up, it creates a `.lexical` directory in the root directory of a project. Inside that directory are two log files, `lexical.log` and `project.log`. The `.lexical.log` log file contains @@ -202,3 +203,8 @@ iex(2)> complete :other, "defmo|" The same kind of support is available when you run `iex -S mix` in the lexical directory, and is helpful for narrowing down issues without disturbing your editor flow. 
+ +### Other resources + +* [Architecture](pages/architecture.md) +* [Glossary](pages/glossary.md) diff --git a/apps/common/.formatter.exs b/apps/common/.formatter.exs index 574a19b44..92005875d 100644 --- a/apps/common/.formatter.exs +++ b/apps/common/.formatter.exs @@ -6,6 +6,15 @@ eventual_assertions = [ refute_eventually: 2 ] +detected_assertions = [ + assert_detected: 1, + assert_detected: 2, + refute_detected: 1, + refute_detected: 2 +] + +assertions = eventual_assertions ++ detected_assertions + [ inputs: [ "{mix,.formatter}.exs", @@ -13,6 +22,6 @@ eventual_assertions = [ "lib/lexical/**/*.{ex,ex}", "lib/mix/**/*.{ex,exs}" ], - locals_without_parens: eventual_assertions, - export: [locals_without_parens: eventual_assertions] + locals_without_parens: assertions, + export: [locals_without_parens: assertions] ] diff --git a/apps/common/lib/elixir/features.ex b/apps/common/lib/elixir/features.ex index 7b4c8d42c..4caca1397 100644 --- a/apps/common/lib/elixir/features.ex +++ b/apps/common/lib/elixir/features.ex @@ -1,13 +1,42 @@ defmodule Elixir.Features do + alias Lexical.VM.Versions + def with_diagnostics? do function_exported?(Code, :with_diagnostics, 1) end - def compile_wont_change_directory? do + def compile_keeps_current_directory? do Version.match?(System.version(), ">= 1.15.0") end - def config_reader? do - Version.match?(System.version(), ">= 1.11.0") + def after_verify? do + Version.match?(System.version(), ">= 1.14.0") + end + + def details_in_context? do + Version.match?(System.version(), ">= 1.16.0") + end + + def span_in_diagnostic? do + Version.match?(System.version(), ">= 1.16.0") + end + + def contains_set_theoretic_types? do + Version.match?(System.version(), ">= 1.17.0") + end + + @doc """ + Whether the `:compressed` ETS table option can be safely used. + + A bug in Erlang/OTP 27.0.0 and 27.0.1 can cause a segfault when + traversing the entire table with something like `:ets.foldl/3` if the + `:compressed` table option is used. The issue was fixed in Erlang 27.1 + + Relevant issue: https://github.com/erlang/otp/issues/8682 + """ + def can_use_compressed_ets_table? 
do + %{erlang: erlang_version} = Versions.to_versions(Versions.current()) + + Version.match?(erlang_version, "< 27.0.0 or >= 27.1.0") end end diff --git a/apps/common/lib/future/code.ex b/apps/common/lib/future/code.ex index c8c697d2a..c14ec8a6c 100644 --- a/apps/common/lib/future/code.ex +++ b/apps/common/lib/future/code.ex @@ -542,7 +542,7 @@ defmodule Future.Code do defp validated_eval_string(string, binding, opts_or_env) do %{line: line, file: file} = env = env_for_eval(opts_or_env) - forms = :elixir.string_to_quoted!(to_charlist(string), line, 1, file, []) + forms = :future_elixir.string_to_quoted!(to_charlist(string), line, 1, file, []) {value, binding, _env} = eval_verify(:eval_forms, [forms, binding, env]) {value, binding} end @@ -566,7 +566,7 @@ defmodule Future.Code do """ @doc since: "1.15.0" - @spec with_diagnostics(keyword(), (() -> result)) :: {result, [diagnostic(:warning | :error)]} + @spec with_diagnostics(keyword(), (-> result)) :: {result, [diagnostic(:warning | :error)]} when result: term() def with_diagnostics(opts \\ [], fun) do value = :erlang.get(:elixir_code_diagnostics) @@ -1230,8 +1230,8 @@ defmodule Future.Code do Process.put(:code_formatter_comments, []) opts = [preserve_comments: &preserve_comments/5] ++ opts - with {:ok, tokens} <- :elixir.string_to_tokens(charlist, line, column, file, opts), - {:ok, forms} <- :elixir.tokens_to_quoted(tokens, file, opts) do + with {:ok, tokens} <- :future_elixir.string_to_tokens(charlist, line, column, file, opts), + {:ok, forms} <- :future_elixir.tokens_to_quoted(tokens, file, opts) do comments = Enum.reverse(Process.get(:code_formatter_comments)) {:ok, forms, comments} end @@ -1258,7 +1258,7 @@ defmodule Future.Code do {forms, comments} {:error, {location, error, token}} -> - :elixir_errors.parse_error( + :future_elixir_errors.parse_error( location, Keyword.get(opts, :file, "nofile"), error, diff --git a/apps/common/lib/future/code/fragment.ex b/apps/common/lib/future/code/fragment.ex index 962ccdea5..2645d135a 100644 --- a/apps/common/lib/future/code/fragment.ex +++ b/apps/common/lib/future/code/fragment.ex @@ -1,4 +1,4 @@ -# Copied from https://github.com/elixir-lang/elixir/blob/b50c5eb031d4ce17cfa21674c69219b6eb170783/lib/elixir/lib/code/fragment.ex +# Copied from https://raw.githubusercontent.com/elixir-lang/elixir/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/lib/code/fragment.ex defmodule Future.Code.Fragment do alias Future.Code, as: Code @@ -34,7 +34,7 @@ defmodule Future.Code.Fragment do :expr iex> Code.Fragment.cursor_context("hello_wor") - {:local_or_var, 'hello_wor'} + {:local_or_var, ~c"hello_wor"} ## Return values @@ -484,15 +484,16 @@ defmodule Future.Code.Fragment do defp operator(rest, count, acc, _call_op?) do case :future_elixir_tokenizer.tokenize(acc, 1, 1, []) do - {:ok, _, _, _, [{:atom, _, _}]} -> + {:ok, _, _, _, [{:atom, _, _}], []} -> {{:unquoted_atom, tl(acc)}, count} - {:ok, _, _, _, [{_, _, op}]} -> + {:ok, _, _, _, [{_, _, op}], []} -> {rest, dot_count} = strip_spaces(rest, count) cond do - Code.Identifier.unary_op(op) == :error and Code.Identifier.binary_op(op) == :error -> - :none + Code.Identifier.unary_op(op) == :error and + Code.Identifier.binary_op(op) == :error -> + {:none, 0} match?([?. 
| rest] when rest == [] or hd(rest) != ?., rest) -> dot(tl(rest), dot_count + 1, acc) @@ -550,7 +551,7 @@ defmodule Future.Code.Fragment do ## Examples iex> Code.Fragment.surround_context("foo", {1, 1}) - %{begin: {1, 1}, context: {:local_or_var, 'foo'}, end: {1, 4}} + %{begin: {1, 1}, context: {:local_or_var, ~c"foo"}, end: {1, 4}} ## Differences to `cursor_context/2` @@ -600,7 +601,8 @@ defmodule Future.Code.Fragment do | {:dot, inside_dot, charlist} | {:module_attribute, charlist} | {:unquoted_atom, charlist} - | {:var, charlist}, + | {:var, charlist} + | :expr, inside_alias: {:local_or_var, charlist} | {:module_attribute, charlist}, @@ -638,7 +640,7 @@ defmodule Future.Code.Fragment do {reversed_pre, post} = adjust_position(reversed_pre, post) case take_identifier(post, []) do - :none -> + {_, [], _} -> maybe_operator(reversed_pre, post, line, opts) {:identifier, reversed_post, rest} -> @@ -646,7 +648,7 @@ defmodule Future.Code.Fragment do reversed = reversed_post ++ reversed_pre case codepoint_cursor_context(reversed, opts) do - {{:struct, acc}, offset} when acc != [] -> + {{:struct, acc}, offset} -> build_surround({:struct, acc}, reversed, line, offset) {{:alias, acc}, offset} -> @@ -751,27 +753,11 @@ defmodule Future.Code.Fragment do do: take_identifier(t, [h | acc]) defp take_identifier(rest, acc) do - {stripped, _} = strip_spaces(rest, 0) - - with [?. | t] <- stripped, + with {[?. | t], _} <- strip_spaces(rest, 0), {[h | _], _} when h in ?A..?Z <- strip_spaces(t, 0) do take_alias(rest, acc) else - # Consider it an identifier if we are at the end of line - # or if we have spaces not followed by . (call) or / (arity) - _ when acc == [] and (rest == [] or (hd(rest) in @space and hd(stripped) not in ~c"/.")) -> - {:identifier, acc, rest} - - # If we are immediately followed by a container, we are still part of the identifier. - # We don't consider << as it _may_ be an operator. - _ when acc == [] and hd(stripped) in ~c"({[" -> - {:identifier, acc, rest} - - _ when acc == [] -> - :none - - _ -> - {:identifier, acc, rest} + _ -> {:identifier, acc, rest} end end @@ -1106,9 +1092,41 @@ defmodule Future.Code.Fragment do @spec container_cursor_to_quoted(List.Chars.t(), keyword()) :: {:ok, Macro.t()} | {:error, {location :: keyword, binary | {binary, binary}, binary}} def container_cursor_to_quoted(fragment, opts \\ []) do - opts = - Keyword.take(opts, [:file, :line, :column, :columns, :token_metadata, :literal_encoder]) + opts = Keyword.take(opts, [:columns, :token_metadata, :literal_encoder]) + opts = [cursor_completion: true, emit_warnings: false] ++ opts + + file = Keyword.get(opts, :file, "nofile") + line = Keyword.get(opts, :line, 1) + column = Keyword.get(opts, :column, 1) + + case :future_elixir_tokenizer.tokenize(to_charlist(fragment), line, column, opts) do + {:ok, line, column, _warnings, rev_tokens, rev_terminators} -> + tokens = :lists.reverse(rev_tokens, rev_terminators) + + case :future_elixir.tokens_to_quoted(tokens, file, opts) do + {:ok, ast} -> + {:ok, ast} + + {:error, error} -> + # In case parsing fails, we give it another shot but handling fn/do/else/catch/rescue/after. 
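+              # The two appended tokens form a synthetic `-> nil` clause body, so
+              # constructs that still expect a clause (`fn`, `do`, `else`, `catch`,
+              # `rescue`, `after`) can parse; the remaining terminator tokens are
+              # shifted right to account for the inserted tokens.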
+ tokens = + :lists.reverse( + rev_tokens, + [{:stab_op, {line, column, nil}, :->}, {nil, {line, column + 2, nil}}] ++ + Enum.map(rev_terminators, fn tuple -> + {line, column, info} = elem(tuple, 1) + put_elem(tuple, 1, {line, column + 5, info}) + end) + ) + + case :future_elixir.tokens_to_quoted(tokens, file, opts) do + {:ok, ast} -> {:ok, ast} + {:error, _} -> {:error, error} + end + end - Code.string_to_quoted(fragment, [cursor_completion: true, warnings: false] ++ opts) + {:error, info, _rest, _warnings, _so_far} -> + {:error, :future_elixir.format_token_error(info)} + end end end diff --git a/apps/common/lib/future/code/indentifier.ex b/apps/common/lib/future/code/indentifier.ex index 7f545de5c..ab4aac3d8 100644 --- a/apps/common/lib/future/code/indentifier.ex +++ b/apps/common/lib/future/code/indentifier.ex @@ -1,4 +1,4 @@ -# Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/lib/code/identifier.ex +# Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/lib/code/identifier.ex defmodule Future.Code.Identifier do @moduledoc false @@ -14,7 +14,7 @@ defmodule Future.Code.Identifier do @spec unary_op(atom) :: {:non_associative, precedence :: pos_integer} | :error def unary_op(op) do cond do - op in [:&] -> {:non_associative, 90} + op in [:&, :...] -> {:non_associative, 90} op in [:!, :^, :not, :+, :-, :"~~~"] -> {:non_associative, 300} op in [:@] -> {:non_associative, 320} true -> :error @@ -45,7 +45,6 @@ defmodule Future.Code.Identifier do op in [:|>, :<<<, :>>>, :<~, :~>, :<<~, :~>>, :<~>, :"<|>"] -> {:left, 160} op in [:in] -> {:left, 170} op in [:"^^^"] -> {:left, 180} - op in [:"//"] -> {:right, 190} op in [:++, :--, :.., :<>, :+++, :---] -> {:right, 200} op in [:+, :-] -> {:left, 210} op in [:*, :/] -> {:left, 220} diff --git a/apps/common/lib/future/code/typespec.ex b/apps/common/lib/future/code/typespec.ex index ae330302e..3b2cb923c 100644 --- a/apps/common/lib/future/code/typespec.ex +++ b/apps/common/lib/future/code/typespec.ex @@ -1,4 +1,4 @@ -# Copied from https://github.com/elixir-lang/elixir/blob/d87aadf8bd280d4ac969a6825637fcbd1e412f81/lib/elixir/lib/code/typespec.ex +# Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/lib/code/typespec.ex defmodule Future.Code.Typespec do @moduledoc false @@ -176,7 +176,8 @@ defmodule Future.Code.Typespec do defp get_module_and_beam(module) when is_atom(module) do with {^module, beam, _filename} <- :code.get_object_code(module), - {:ok, ^module} <- beam |> :beam_lib.info() |> Keyword.fetch(:module) do + info_pairs when is_list(info_pairs) <- :beam_lib.info(beam), + {:ok, ^module} <- Keyword.fetch(info_pairs, :module) do {module, beam} else _ -> :error @@ -420,5 +421,13 @@ defmodule Future.Code.Typespec do :error end - defp meta(anno), do: [line: :erl_anno.line(anno)] + defp meta(anno) do + case :erl_anno.location(anno) do + {line, column} -> + [line: line, column: column] + + line when is_integer(line) -> + [line: line] + end + end end diff --git a/apps/common/lib/future/mix/tasks/format.ex b/apps/common/lib/future/mix/tasks/format.ex new file mode 100644 index 000000000..60664990d --- /dev/null +++ b/apps/common/lib/future/mix/tasks/format.ex @@ -0,0 +1,1047 @@ +defmodule Mix.Tasks.Future.Format do + use Mix.Task + + @shortdoc "Formats the given files/patterns" + + @moduledoc """ + Formats the given files and patterns. 
+ + $ mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}" + + If any of the files is `-`, then the input is read from stdin and the output + is written to stdout. + + ## Formatting options + + The formatter will read a `.formatter.exs` file in the current directory for + formatter configuration. Evaluating this file should return a keyword list. + + Here is an example of a `.formatter.exs` file that works as a starting point: + + [ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] + ] + + Besides the options listed in `Code.format_string!/2`, the `.formatter.exs` + file supports the following options: + + * `:inputs` (a list of paths and patterns) - specifies the default inputs + to be used by this task. For example, `["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]`. + Patterns are expanded with `Path.wildcard/2`. + + * `:plugins` (a list of modules) (since v1.13.0) - specifies a list of + modules to customize how the formatter works. See the "Plugins" section + below for more information. + + * `:subdirectories` (a list of paths and patterns) - specifies subdirectories + that have their own formatting rules. Each subdirectory should have a + `.formatter.exs` that configures how entries in that subdirectory should be + formatted as. Configuration between `.formatter.exs` are not shared nor + inherited. If a `.formatter.exs` lists "lib/app" as a subdirectory, the rules + in `.formatter.exs` won't be available in `lib/app/.formatter.exs`. + Note that the parent `.formatter.exs` must not specify files inside the "lib/app" + subdirectory in its `:inputs` configuration. If this happens, the behaviour of + which formatter configuration will be picked is unspecified. + + * `:import_deps` (a list of dependencies as atoms) - specifies a list + of dependencies whose formatter configuration will be imported. + See the "Importing dependencies configuration" section below for more + information. + + * `:export` (a keyword list) - specifies formatter configuration to be exported. + See the "Importing dependencies configuration" section below. + + ## Task-specific options + + * `--force` - force formatting to happen on all files, instead of + relying on cache. + + * `--check-formatted` - checks that the file is already formatted. + This is useful in pre-commit hooks and CI scripts if you want to + reject contributions with unformatted code. If the check fails, + the formatted contents are not written to disk. Keep in mind + that the formatted output may differ between Elixir versions as + improvements and fixes are applied to the formatter. + + * `--no-exit` - only valid when used with `--check-formatted`. + Pass this if you don't want this Mix task to fail (and return a non-zero exit code), + but still want to check for format errors and print them to the console. + + * `--dry-run` - does not save files after formatting. + + * `--dot-formatter` - path to the file with formatter configuration. + Defaults to `.formatter.exs` if one is available. See the + "Formatting options" section above for more information. + + * `--stdin-filename` - path to the file being formatted on stdin. + This is useful if you are using plugins to support custom filetypes such + as `.heex`. Without passing this flag, it is assumed that the code being + passed via stdin is valid Elixir code. Defaults to "stdin.exs". + + * `--migrate` - enables the `:migrate` option, which should be able to + automatically fix some deprecation warnings but changes the AST. 
+ This should be safe in typical projects, but there is a non-zero risk + of breaking code for meta-programming heavy projects that relied on a + specific AST. We recommend running this task in its separate commit and + reviewing its output before committing. See the "Migration formatting" + section in `Code.format_string!/2` for more information. + + ## When to format code + + We recommend developers to format code directly in their editors, either + automatically when saving a file or via an explicit command or key binding. If + such option is not available in your editor of choice, adding the required + integration is usually a matter of invoking: + + $ cd $project && mix format $file + + where `$file` refers to the current file and `$project` is the root of your + project. + + It is also possible to format code across the whole project by passing a list + of patterns and files to `mix format`, as shown at the top of this task + documentation. This list can also be set in the `.formatter.exs` file under the + `:inputs` key. + + ## Plugins + + It is possible to customize how the formatter behaves. Plugins must implement + the `Mix.Tasks.Format` behaviour. For example, imagine that your project uses + Markdown in two distinct ways: via a custom `~M` sigil and via files with the + `.md` and `.markdown` extensions. A custom plugin would look like this: + + defmodule MixMarkdownFormatter do + @behaviour Mix.Tasks.Format + + def features(_opts) do + [sigils: [:M], extensions: [".md", ".markdown"]] + end + + def format(contents, opts) do + # logic that formats markdown + end + end + + The `opts` passed to `format/2` contains all the formatting options and either: + + * `:sigil` (atom) - the sigil being formatted, e.g. `:M`. + + * `:modifiers` (charlist) - list of sigil modifiers. + + * `:extension` (string) - the extension of the file being formatted, e.g. `".md"`. + + Now any application can use your formatter as follows: + + # .formatter.exs + [ + # Define the desired plugins + plugins: [MixMarkdownFormatter, AnotherMarkdownFormatter], + # Remember to update the inputs list to include the new extensions + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}", "posts/*.{md,markdown}"] + ] + + Notice that, when running the formatter with plugins, your code will be + compiled first. + + In addition, the order by which you input your plugins is the format order. + So, in the above `.formatter.exs`, the `MixMarkdownFormatter` will format + the markdown files and sigils before `AnotherMarkdownFormatter`. + + ## Importing dependencies configuration + + This task supports importing formatter configuration from dependencies. + + A dependency that wants to export formatter configuration needs to have a + `.formatter.exs` file at the root of the project. In this file, the dependency + can list an `:export` option with configuration to export. For now, only one + option is supported under `:export`: `:locals_without_parens` (whose value has + the same shape as the value of the `:locals_without_parens` in `Code.format_string!/2`). + + The functions listed under `:locals_without_parens` in the `:export` option of + a dependency can be imported in a project by listing that dependency in the + `:import_deps` option of the formatter configuration file of the project. + + For example, consider you have a project called `my_app` that depends on another one called `my_dep`. 
+ `my_dep` wants to export some configuration, so `my_dep/.formatter.exs` + would look like this: + + # my_dep/.formatter.exs + [ + # Regular formatter configuration for my_dep + # ... + + export: [ + locals_without_parens: [some_dsl_call: 2, some_dsl_call: 3] + ] + ] + + In order to import configuration, `my_app`'s `.formatter.exs` would look like + this: + + # my_app/.formatter.exs + [ + import_deps: [:my_dep] + ] + + """ + + @switches [ + check_equivalent: :boolean, + check_formatted: :boolean, + no_exit: :boolean, + dot_formatter: :string, + dry_run: :boolean, + stdin_filename: :string, + force: :boolean, + migrate: :boolean + ] + + @manifest_timestamp "format_timestamp" + @manifest_dot_formatter "cached_dot_formatter" + @manifest_vsn 2 + + @newline "\n" + @blank " " + + @separator "|" + @cr "↵" + @line_num_pad @blank + + @gutter [ + del: " -", + eq: " ", + ins: " +", + skip: " " + ] + + @colors [ + del: [text: :red, space: :red_background], + ins: [text: :green, space: :green_background] + ] + + @doc """ + Returns which features this plugin should plug into. + """ + @callback features(Keyword.t()) :: [sigils: [atom()], extensions: [binary()]] + + @doc """ + Receives a string to be formatted with options and returns said string. + """ + @callback format(String.t(), Keyword.t()) :: String.t() + + @impl true + def run(all_args) do + cwd = File.cwd!() + {opts, args} = OptionParser.parse!(all_args, strict: @switches) + {dot_formatter, formatter_opts} = eval_dot_formatter(cwd, opts) + + if opts[:check_equivalent] do + IO.warn("--check-equivalent has been deprecated and has no effect") + end + + if opts[:no_exit] && !opts[:check_formatted] do + Mix.raise("--no-exit can only be used together with --check-formatted") + end + + {formatter_opts_and_subs, _sources} = + eval_deps_and_subdirectories(cwd, dot_formatter, formatter_opts, [dot_formatter], opts) + + formatter_opts_and_subs = load_plugins(formatter_opts_and_subs, opts) + files = expand_args(args, cwd, dot_formatter, formatter_opts_and_subs, opts) + + maybe_cache_timestamps(all_args, files, fn files -> + files + |> Task.async_stream(&format_file(&1, opts), ordered: false, timeout: :infinity) + |> Enum.reduce({[], []}, &collect_status/2) + |> check!(opts) + end) + end + + defp maybe_cache_timestamps([], files, fun) do + if Mix.Project.get() do + # We fetch the time from before we read files so any future + # change to files are still picked up by the formatter + timestamp = System.os_time(:second) + dir = Mix.Project.manifest_path() + manifest_timestamp = Path.join(dir, @manifest_timestamp) + manifest_dot_formatter = Path.join(dir, @manifest_dot_formatter) + last_modified = Mix.Utils.last_modified(manifest_timestamp) + sources = [Mix.Project.config_mtime(), manifest_dot_formatter, ".formatter.exs"] + + files = + if Mix.Utils.stale?(sources, [last_modified]) do + files + else + Enum.filter(files, fn {file, _opts} -> + Mix.Utils.last_modified(file) > last_modified + end) + end + + try do + fun.(files) + after + File.mkdir_p!(dir) + File.touch!(manifest_timestamp, timestamp) + end + else + fun.(files) + end + end + + defp maybe_cache_timestamps([_ | _], files, fun), do: fun.(files) + + defp load_plugins({formatter_opts, subs}, opts) do + plugins = Keyword.get(formatter_opts, :plugins, []) + + if not is_list(plugins) do + Mix.raise("Expected :plugins to return a list of modules, got: #{inspect(plugins)}") + end + + plugins = + if plugins != [] do + Keyword.get(opts, :plugin_loader, &plugin_loader/1).(plugins) + else + [] + end + + for plugin 
<- plugins do + cond do + not Code.ensure_loaded?(plugin) -> + Mix.raise("Formatter plugin #{inspect(plugin)} cannot be found") + + not function_exported?(plugin, :features, 1) -> + Mix.raise("Formatter plugin #{inspect(plugin)} does not define features/1") + + true -> + :ok + end + end + + sigils = + for plugin <- plugins, + sigil <- find_sigils_from_plugins(plugin, formatter_opts), + do: {sigil, plugin} + + sigils = + sigils + |> Enum.group_by(&elem(&1, 0), &elem(&1, 1)) + |> Enum.map(fn {sigil, plugins} -> + {sigil, + fn input, opts -> + Enum.reduce(plugins, input, fn plugin, input -> + plugin.format(input, opts ++ formatter_opts) + end) + end} + end) + + {Keyword.put(formatter_opts, :sigils, sigils), + Enum.map(subs, fn {path, formatter_opts_and_subs} -> + {path, load_plugins(formatter_opts_and_subs, opts)} + end)} + end + + defp plugin_loader(plugins) do + if plugins != [] do + Mix.Task.run("loadpaths", []) + end + + if not Enum.all?(plugins, &Code.ensure_loaded?/1) do + Mix.Task.run("compile", []) + end + + plugins + end + + @doc """ + Returns a formatter function and the formatter options to + be used for the given file. + + The function must be called with the contents of the file + to be formatted. Keep in mind that a function is always + returned, even if it doesn't match any of the inputs + specified in the `formatter.exs`. You can retrieve the + `:inputs` from the returned options, alongside the `:root` + option, to validate if the returned file matches the given + `:root` and `:inputs`. + + ## Options + + * `:deps_paths` (since v1.18.0) - the dependencies path to be used to resolve + `import_deps`. It defaults to `Mix.Project.deps_paths`. + + * `:dot_formatter` - use the given file as the `dot_formatter` + root. If this option is not specified, it uses the default one. + The default one is cached, so use this option only if necessary. + + * `:plugin_loader` (since v1.18.0) - a function that receives a list of plugins, + which may or may not yet be loaded, and ensures all of them are + loaded. It must return a list of plugins, which is recommended + to be the exact same list given as argument. You may choose to + skip plugins, but then it means the code will be partially + formatted (as in the plugins will be skipped). By default, + this function calls `mix loadpaths` and then, if not enough, + `mix compile`. + + * `:root` - use the given root as the current working directory. 
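+
+  A minimal usage sketch (the file path is hypothetical and the default
+  formatter configuration is assumed):
+
+      {formatter, _opts} = Mix.Tasks.Future.Format.formatter_for_file("lib/my_app.ex")
+      formatter.("foo( :bar )")
+      #=> "foo(:bar)\n"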
+ """ + @doc since: "1.13.0" + def formatter_for_file(file, opts \\ []) do + cwd = Keyword.get_lazy(opts, :root, &File.cwd!/0) + {dot_formatter, formatter_opts} = eval_dot_formatter(cwd, opts) + + {formatter_opts_and_subs, _sources} = + eval_deps_and_subdirectories(cwd, dot_formatter, formatter_opts, [dot_formatter], opts) + + formatter_opts_and_subs = load_plugins(formatter_opts_and_subs, opts) + + find_formatter_and_opts_for_file(Path.expand(file, cwd), cwd, formatter_opts_and_subs) + end + + @doc false + @deprecated "Use formatter_for_file/2 instead" + def formatter_opts_for_file(file, opts \\ []) do + {_, formatter_opts} = formatter_for_file(file, opts) + formatter_opts + end + + defp eval_dot_formatter(cwd, opts) do + {dot_formatter, format_opts} = + cond do + dot_formatter = opts[:dot_formatter] -> + {dot_formatter, eval_file_with_keyword_list(dot_formatter)} + + File.regular?(Path.join(cwd, ".formatter.exs")) -> + dot_formatter = Path.join(cwd, ".formatter.exs") + {".formatter.exs", eval_file_with_keyword_list(dot_formatter)} + + true -> + {".formatter.exs", []} + end + + # the --migrate flag overrides settings from the dot formatter + {dot_formatter, Keyword.take(opts, [:migrate]) ++ format_opts} + end + + # This function reads exported configuration from the imported + # dependencies and subdirectories and deals with caching the result + # of reading such configuration in a manifest file. + defp eval_deps_and_subdirectories(cwd, dot_formatter, formatter_opts, sources, opts) do + deps = Keyword.get(formatter_opts, :import_deps, []) + subs = Keyword.get(formatter_opts, :subdirectories, []) + + if not is_list(deps) do + Mix.raise("Expected :import_deps to return a list of dependencies, got: #{inspect(deps)}") + end + + if not is_list(subs) do + Mix.raise("Expected :subdirectories to return a list of directories, got: #{inspect(subs)}") + end + + if deps == [] and subs == [] do + {{formatter_opts, []}, sources} + else + manifest = Path.join(Mix.Project.manifest_path(), @manifest_dot_formatter) + + {{locals_without_parens, subdirectories}, sources} = + maybe_cache_in_manifest(dot_formatter, manifest, fn -> + {subdirectories, sources} = eval_subs_opts(subs, cwd, sources, opts) + {{eval_deps_opts(deps, opts), subdirectories}, sources} + end) + + formatter_opts = + Keyword.update( + formatter_opts, + :locals_without_parens, + locals_without_parens, + &(locals_without_parens ++ &1) + ) + + {{formatter_opts, subdirectories}, sources} + end + end + + defp maybe_cache_in_manifest(dot_formatter, manifest, fun) do + cond do + is_nil(Mix.Project.get()) or dot_formatter != ".formatter.exs" -> fun.() + entry = read_manifest(manifest) -> entry + true -> write_manifest!(manifest, fun.()) + end + end + + defp read_manifest(manifest) do + with {:ok, binary} <- File.read(manifest), + {:ok, {@manifest_vsn, entry, sources}} <- safe_binary_to_term(binary), + expanded_sources = Enum.flat_map(sources, &Path.wildcard(&1, match_dot: true)), + false <- Mix.Utils.stale?([Mix.Project.config_mtime() | expanded_sources], [manifest]) do + {entry, sources} + else + _ -> nil + end + end + + defp safe_binary_to_term(binary) do + {:ok, :erlang.binary_to_term(binary)} + rescue + _ -> :error + end + + defp write_manifest!(manifest, {entry, sources}) do + File.mkdir_p!(Path.dirname(manifest)) + File.write!(manifest, :erlang.term_to_binary({@manifest_vsn, entry, sources})) + {entry, sources} + end + + defp eval_deps_opts([], _opts) do + [] + end + + defp eval_deps_opts(deps, opts) do + deps_paths = opts[:deps_paths] || 
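+      # :deps_paths is an optional override (see the formatter_for_file/2
+      # options); when absent, the current Mix project's dep paths are used.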
Mix.Project.deps_paths() + + for dep <- deps, + dep_path = assert_valid_dep_and_fetch_path(dep, deps_paths), + dep_dot_formatter = Path.join(dep_path, ".formatter.exs"), + File.regular?(dep_dot_formatter), + dep_opts = eval_file_with_keyword_list(dep_dot_formatter), + parenless_call <- dep_opts[:export][:locals_without_parens] || [], + uniq: true, + do: parenless_call + end + + defp eval_subs_opts(subs, cwd, sources, opts) do + {subs, sources} = + Enum.flat_map_reduce(subs, sources, fn sub, sources -> + cwd = Path.expand(sub, cwd) + {Path.wildcard(cwd), [Path.join(cwd, ".formatter.exs") | sources]} + end) + + Enum.flat_map_reduce(subs, sources, fn sub, sources -> + sub_formatter = Path.join(sub, ".formatter.exs") + + if File.exists?(sub_formatter) do + formatter_opts = eval_file_with_keyword_list(sub_formatter) + + {formatter_opts_and_subs, sources} = + eval_deps_and_subdirectories(sub, :in_memory, formatter_opts, sources, opts) + + {[{sub, formatter_opts_and_subs}], sources} + else + {[], sources} + end + end) + end + + defp assert_valid_dep_and_fetch_path(dep, deps_paths) when is_atom(dep) do + with %{^dep => path} <- deps_paths, + true <- File.dir?(path) do + path + else + _ -> + Mix.raise( + "Unknown dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <> + "Make sure the dependency is listed in your mix.exs for environment #{inspect(Mix.env())} " <> + "and you have run \"mix deps.get\"" + ) + end + end + + defp assert_valid_dep_and_fetch_path(dep, _deps_paths) do + Mix.raise("Dependencies in :import_deps should be atoms, got: #{inspect(dep)}") + end + + defp eval_file_with_keyword_list(path) do + {opts, _} = Code.eval_file(path) + + if not Keyword.keyword?(opts) do + Mix.raise("Expected #{inspect(path)} to return a keyword list, got: #{inspect(opts)}") + end + + opts + end + + defp expand_args([], cwd, dot_formatter, formatter_opts_and_subs, _opts) do + if no_entries_in_formatter_opts?(formatter_opts_and_subs) do + Mix.raise( + "Expected one or more files/patterns to be given to mix format " <> + "or for a .formatter.exs file to exist with an :inputs or :subdirectories key" + ) + end + + dot_formatter + |> expand_dot_inputs(cwd, formatter_opts_and_subs, %{}) + |> Enum.map(fn {file, {_dot_formatter, formatter_opts}} -> + {file, find_formatter_for_file(file, formatter_opts)} + end) + end + + defp expand_args(files_and_patterns, cwd, _dot_formatter, {formatter_opts, subs}, opts) do + files = + for file_or_pattern <- files_and_patterns, + file <- stdin_or_wildcard(file_or_pattern), + uniq: true, + do: file + + if files == [] do + Mix.raise( + "Could not find a file to format. The files/patterns given to command line " <> + "did not point to any existing file. 
Got: #{inspect(files_and_patterns)}"
+      )
+    end
+
+    for file <- files do
+      if file == :stdin do
+        stdin_filename = Path.expand(Keyword.get(opts, :stdin_filename, "stdin.exs"), cwd)
+
+        {formatter, _opts} =
+          find_formatter_and_opts_for_file(stdin_filename, cwd, {formatter_opts, subs})
+
+        {file, formatter}
+      else
+        {formatter, _opts} = find_formatter_and_opts_for_file(file, cwd, {formatter_opts, subs})
+        {file, formatter}
+      end
+    end
+  end
+
+  defp expand_dot_inputs(dot_formatter, cwd, {formatter_opts, subs}, acc) do
+    if no_entries_in_formatter_opts?({formatter_opts, subs}) do
+      Mix.raise("Expected :inputs or :subdirectories key in #{dot_formatter}")
+    end
+
+    map =
+      for input <- List.wrap(formatter_opts[:inputs]),
+          file <- Path.wildcard(Path.expand(input, cwd), match_dot: true),
+          do: {file, {dot_formatter, formatter_opts}},
+          into: %{}
+
+    acc =
+      Map.merge(acc, map, fn file, {dot_formatter1, _}, {dot_formatter2, formatter_opts} ->
+        Mix.shell().error(
+          "Both #{dot_formatter1} and #{dot_formatter2} specify the file #{file} in their " <>
+            ":inputs option. To resolve the conflict, the configuration in #{dot_formatter1} " <>
+            "will be ignored. Please change the list of :inputs in one of the formatter files " <>
+            "so only one of them matches #{file}"
+        )
+
+        {dot_formatter2, formatter_opts}
+      end)
+
+    Enum.reduce(subs, acc, fn {sub, formatter_opts_and_subs}, acc ->
+      sub_formatter = Path.join(sub, ".formatter.exs")
+      expand_dot_inputs(sub_formatter, sub, formatter_opts_and_subs, acc)
+    end)
+  end
+
+  defp find_formatter_for_file(file, formatter_opts) do
+    ext = Path.extname(file)
+
+    cond do
+      plugins = find_plugins_for_extension(formatter_opts, ext) ->
+        fn input ->
+          Enum.reduce(plugins, input, fn plugin, input ->
+            plugin.format(input, [extension: ext, file: file] ++ formatter_opts)
+          end)
+        end
+
+      ext in ~w(.ex .exs) ->
+        &elixir_format(&1, [file: file] ++ formatter_opts)
+
+      true ->
+        & &1
+    end
+  end
+
+  defp find_plugins_for_extension(formatter_opts, ext) do
+    plugins = Keyword.get(formatter_opts, :plugins, [])
+
+    plugins =
+      Enum.filter(plugins, fn plugin ->
+        Code.ensure_loaded?(plugin) and function_exported?(plugin, :features, 1) and
+          ext in List.wrap(plugin.features(formatter_opts)[:extensions])
+      end)
+
+    if plugins != [], do: plugins, else: nil
+  end
+
+  defp find_formatter_and_opts_for_file(file, root, formatter_opts_and_subs) do
+    {formatter_opts, root} = recur_formatter_opts_for_file(file, root, formatter_opts_and_subs)
+    {find_formatter_for_file(file, formatter_opts), [root: root] ++ formatter_opts}
+  end
+
+  defp recur_formatter_opts_for_file(file, root, {formatter_opts, subs}) do
+    Enum.find_value(subs, {formatter_opts, root}, fn {sub, formatter_opts_and_subs} ->
+      size = byte_size(sub)
+
+      case file do
+        <<prefix::binary-size(size), dir_separator, _::binary>>
+        when prefix == sub and dir_separator in [?\\, ?/] ->
+          recur_formatter_opts_for_file(file, sub, formatter_opts_and_subs)
+
+        _ ->
+          nil
+      end
+    end)
+  end
+
+  defp no_entries_in_formatter_opts?({formatter_opts, subs}) do
+    is_nil(formatter_opts[:inputs]) and subs == []
+  end
+
+  defp stdin_or_wildcard("-"), do: [:stdin]
+
+  defp stdin_or_wildcard(path),
+    do: path |> Path.expand() |> Path.wildcard(match_dot: true) |> Enum.filter(&File.regular?/1)
+
+  defp elixir_format(content, formatter_opts) do
+    case Code.format_string!(content, formatter_opts) do
+      [] -> ""
+      formatted_content -> IO.iodata_to_binary([formatted_content, ?\n])
+    end
+  end
+
+  defp find_sigils_from_plugins(plugin, formatter_opts) do
+    if Code.ensure_loaded?(plugin) and
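+         # A plugin that is not loaded or does not export features/1 simply
+         # contributes no sigils (the else branch below returns []).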
function_exported?(plugin, :features, 1) do + List.wrap(plugin.features(formatter_opts)[:sigils]) + else + [] + end + end + + defp read_file(:stdin), do: IO.stream() |> Enum.to_list() |> IO.iodata_to_binary() + defp read_file(file), do: File.read!(file) + + defp format_file({file, formatter}, task_opts) do + input = read_file(file) + output = formatter.(input) + check_formatted? = Keyword.get(task_opts, :check_formatted, false) + dry_run? = Keyword.get(task_opts, :dry_run, false) + + cond do + check_formatted? -> + if input == output, do: :ok, else: {:not_formatted, {file, input, output}} + + dry_run? -> + :ok + + true -> + write_or_print(file, input, output) + end + rescue + exception -> + {:exit, file, exception, __STACKTRACE__} + end + + defp write_or_print(file, input, output) do + cond do + file == :stdin -> IO.write(output) + input == output -> :ok + true -> File.write!(file, output) + end + + :ok + end + + defp collect_status({:ok, :ok}, acc), do: acc + + defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_formatted}) do + {[exit | exits], not_formatted} + end + + defp collect_status({:ok, {:not_formatted, file}}, {exits, not_formatted}) do + {exits, [file | not_formatted]} + end + + defp check!({[], []}, _task_opts) do + :ok + end + + defp check!({[{:exit, :stdin, exception, stacktrace} | _], _not_formatted}, _task_opts) do + Mix.shell().error("mix format failed for stdin") + reraise exception, stacktrace + end + + defp check!({[{:exit, file, exception, stacktrace} | _], _not_formatted}, _task_opts) do + Mix.shell().error("mix format failed for file: #{Path.relative_to_cwd(file)}") + reraise exception, stacktrace + end + + defp check!({_exits, [_ | _] = not_formatted}, task_opts) do + no_exit? = Keyword.get(task_opts, :no_exit, false) + + message = """ + The following files are not formatted: + + #{to_diffs(not_formatted)} + """ + + if no_exit? do + Mix.shell().info(message) + else + Mix.raise(""" + mix format failed due to --check-formatted. + #{message} + """) + end + end + + defp to_diffs(files) do + Enum.map_intersperse(files, "\n", fn + {:stdin, unformatted, formatted} -> + [IO.ANSI.reset(), text_diff_format(unformatted, formatted)] + + {file, unformatted, formatted} -> + [ + IO.ANSI.bright(), + IO.ANSI.red(), + file, + "\n", + IO.ANSI.reset(), + "\n", + text_diff_format(unformatted, formatted) + ] + end) + end + + @doc false + @spec text_diff_format(String.t(), String.t()) :: iolist() + def text_diff_format(old, new, opts \\ []) + + def text_diff_format(code, code, _opts), do: [] + + def text_diff_format(old, new, opts) do + opts = Keyword.validate!(opts, after: 2, before: 2, color: IO.ANSI.enabled?(), line: 1) + crs? = String.contains?(old, "\r") || String.contains?(new, "\r") + + old = String.split(old, "\n") + new = String.split(new, "\n") + + max = max(length(new), length(old)) + line_num_digits = max |> Integer.digits() |> length() + opts = Keyword.put(opts, :line_num_digits, line_num_digits) + + {line, opts} = Keyword.pop!(opts, :line) + + old + |> List.myers_difference(new) + |> insert_cr_symbols(crs?) 
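+    # `@cr` ("↵") marks lines whose only difference is a trailing carriage
+    # return, which would otherwise be invisible in the rendered diff.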
+ |> diff_to_iodata({line, line}, opts) + end + + defp diff_to_iodata(diff, line_nums, opts, iodata \\ []) + + defp diff_to_iodata([], _line_nums, _opts, iodata), do: Enum.reverse(iodata) + + defp diff_to_iodata([{:eq, [""]}], _line_nums, _opts, iodata), do: Enum.reverse(iodata) + + defp diff_to_iodata([{:eq, lines}], line_nums, opts, iodata) do + lines_after = Enum.take(lines, opts[:after]) + iodata = lines(iodata, {:eq, lines_after}, line_nums, opts) + + iodata = + case length(lines) > opts[:after] do + false -> iodata + true -> lines(iodata, :skip, opts) + end + + Enum.reverse(iodata) + end + + defp diff_to_iodata([{:eq, lines} | diff], {line, line}, opts, [] = iodata) do + {start, lines_before} = Enum.split(lines, opts[:before] * -1) + + iodata = + case length(lines) > opts[:before] do + false -> iodata + true -> lines(iodata, :skip, opts) + end + + line = line + length(start) + iodata = lines(iodata, {:eq, lines_before}, {line, line}, opts) + + line = line + length(lines_before) + diff_to_iodata(diff, {line, line}, opts, iodata) + end + + defp diff_to_iodata([{:eq, lines} | diff], line_nums, opts, iodata) do + case length(lines) > opts[:after] + opts[:before] do + true -> + {lines1, lines2, lines3} = split(lines, opts[:after], opts[:before] * -1) + + iodata = + iodata + |> lines({:eq, lines1}, line_nums, opts) + |> lines(:skip, opts) + |> lines({:eq, lines3}, add_line_nums(line_nums, length(lines1) + length(lines2)), opts) + + line_nums = add_line_nums(line_nums, length(lines)) + + diff_to_iodata(diff, line_nums, opts, iodata) + + false -> + iodata = lines(iodata, {:eq, lines}, line_nums, opts) + line_nums = add_line_nums(line_nums, length(lines)) + + diff_to_iodata(diff, line_nums, opts, iodata) + end + end + + defp diff_to_iodata([{:del, [del]}, {:ins, [ins]} | diff], line_nums, opts, iodata) do + iodata = lines(iodata, {:chg, del, ins}, line_nums, opts) + diff_to_iodata(diff, add_line_nums(line_nums, 1), opts, iodata) + end + + defp diff_to_iodata([{kind, lines} | diff], line_nums, opts, iodata) do + iodata = lines(iodata, {kind, lines}, line_nums, opts) + line_nums = add_line_nums(line_nums, length(lines), kind) + + diff_to_iodata(diff, line_nums, opts, iodata) + end + + defp split(list, count1, count2) do + {split1, split2} = Enum.split(list, count1) + {split2, split3} = Enum.split(split2, count2) + {split1, split2, split3} + end + + defp lines(iodata, :skip, opts) do + line_num = String.duplicate(@blank, opts[:line_num_digits] * 2 + 1) + [[line_num, @gutter[:skip], @separator, @newline] | iodata] + end + + defp lines(iodata, {:chg, del, ins}, line_nums, opts) do + {del, ins} = line_diff(del, ins, opts) + + [ + [gutter(line_nums, :ins, opts), ins, @newline], + [gutter(line_nums, :del, opts), del, @newline] + | iodata + ] + end + + defp lines(iodata, {kind, lines}, line_nums, opts) do + lines + |> Enum.with_index() + |> Enum.reduce(iodata, fn {line, offset}, iodata -> + line_nums = add_line_nums(line_nums, offset, kind) + [[gutter(line_nums, kind, opts), colorize(line, kind, false, opts), @newline] | iodata] + end) + end + + defp gutter(line_nums, kind, opts) do + [line_num(line_nums, kind, opts), colorize(@gutter[kind], kind, false, opts), @separator] + end + + defp line_num({line_num_old, line_num_new}, :eq, opts) do + old = + line_num_old + |> to_string() + |> String.pad_leading(opts[:line_num_digits], @line_num_pad) + + new = + line_num_new + |> to_string() + |> String.pad_leading(opts[:line_num_digits], @line_num_pad) + + [old, @blank, new] + end + + defp 
line_num({line_num_old, _line_num_new}, :del, opts) do
+    old =
+      line_num_old
+      |> to_string()
+      |> String.pad_leading(opts[:line_num_digits], @line_num_pad)
+
+    new = String.duplicate(@blank, opts[:line_num_digits])
+    [old, @blank, new]
+  end
+
+  defp line_num({_line_num_old, line_num_new}, :ins, opts) do
+    old = String.duplicate(@blank, opts[:line_num_digits])
+
+    new =
+      line_num_new
+      |> to_string()
+      |> String.pad_leading(opts[:line_num_digits], @line_num_pad)
+
+    [old, @blank, new]
+  end
+
+  defp line_diff(del, ins, opts) do
+    diff = String.myers_difference(del, ins)
+
+    Enum.reduce(diff, {[], []}, fn
+      {:eq, str}, {del, ins} -> {[del | str], [ins | str]}
+      {:del, str}, {del, ins} -> {[del | colorize(str, :del, true, opts)], ins}
+      {:ins, str}, {del, ins} -> {del, [ins | colorize(str, :ins, true, opts)]}
+    end)
+  end
+
+  defp colorize(str, kind, space?, opts) do
+    if Keyword.fetch!(opts, :color) && Keyword.has_key?(@colors, kind) do
+      color = Keyword.fetch!(@colors, kind)
+
+      if space? do
+        str
+        |> String.split(~r/[\t\s]+/, include_captures: true)
+        |> Enum.map(fn
+          <<start::binary-size(1), _::binary>> = str when start in ["\t", "\s"] ->
+            IO.ANSI.format([color[:space], str])
+
+          str ->
+            IO.ANSI.format([color[:text], str])
+        end)
+      else
+        IO.ANSI.format([color[:text], str])
+      end
+    else
+      str
+    end
+  end
+
+  defp add_line_nums({line_num_old, line_num_new}, lines, kind \\ :eq) do
+    case kind do
+      :eq -> {line_num_old + lines, line_num_new + lines}
+      :ins -> {line_num_old, line_num_new + lines}
+      :del -> {line_num_old + lines, line_num_new}
+    end
+  end
+
+  defp insert_cr_symbols(diffs, false), do: diffs
+  defp insert_cr_symbols(diffs, true), do: do_insert_cr_symbols(diffs, [])
+
+  defp do_insert_cr_symbols([], acc), do: Enum.reverse(acc)
+
+  defp do_insert_cr_symbols([{:del, del}, {:ins, ins} | rest], acc) do
+    {del, ins} = do_insert_cr_symbols(del, ins, {[], []})
+    do_insert_cr_symbols(rest, [{:ins, ins}, {:del, del} | acc])
+  end
+
+  defp do_insert_cr_symbols([diff | rest], acc) do
+    do_insert_cr_symbols(rest, [diff | acc])
+  end
+
+  defp do_insert_cr_symbols([left | left_rest], [right | right_rest], {left_acc, right_acc}) do
+    {left, right} = insert_cr_symbol(left, right)
+    do_insert_cr_symbols(left_rest, right_rest, {[left | left_acc], [right | right_acc]})
+  end
+
+  defp do_insert_cr_symbols([], right, {left_acc, right_acc}) do
+    left = Enum.reverse(left_acc)
+    right = right_acc |> Enum.reverse() |> Enum.concat(right)
+    {left, right}
+  end
+
+  defp do_insert_cr_symbols(left, [], {left_acc, right_acc}) do
+    left = left_acc |> Enum.reverse() |> Enum.concat(left)
+    right = Enum.reverse(right_acc)
+    {left, right}
+  end
+
+  defp insert_cr_symbol(left, right) do
+    case {String.ends_with?(left, "\r"), String.ends_with?(right, "\r")} do
+      {bool, bool} -> {left, right}
+      {true, false} -> {String.replace(left, "\r", @cr), right}
+      {false, true} -> {left, String.replace(right, "\r", @cr)}
+    end
+  end
+end
diff --git a/apps/common/lib/lexical/ast.ex b/apps/common/lib/lexical/ast.ex
index c1a035d01..cd1db8e57 100644
--- a/apps/common/lib/lexical/ast.ex
+++ b/apps/common/lib/lexical/ast.ex
@@ -1,6 +1,19 @@
 defmodule Lexical.Ast do
   @moduledoc """
-  Utilities for working with syntax trees.
+  Utilities for analyzing Lexical documents as syntax trees.
+
+  ## Analysis
+
+  The preferred way to use this module is by first passing a document to
+  `analyze/1`, which returns a `%Lexical.Ast.Analysis{}` struct that
+  will have already parsed and analyzed a significant portion of the
+  document, thus reducing the cost of successive operations.
+
+  An analysis looks at the entire AST, and thus may fail if the document
+  contains syntax errors that prevent parsing. To perform a partial
+  analysis up to a certain point (usually the cursor position), use
+  `reanalyze_to/2`, which analyzes the document up to the given position
+  and can therefore be used even if later parts of the document contain
+  syntax errors.

   ## Differences from `Code`
@@ -54,7 +67,7 @@ defmodule Lexical.Ast do
   """

   alias Future.Code, as: Code
-  alias Lexical.Ast.Aliases
+  alias Lexical.Ast.Analysis
   alias Lexical.Document
   alias Lexical.Document.Edit
   alias Lexical.Document.Position
@@ -74,7 +87,7 @@ defmodule Lexical.Ast do
           {location :: keyword(), String.t() | {String.t(), String.t()}, String.t()}

   @type patch :: %{
-          optional(:preserver_indentation) => boolean(),
+          optional(:preserve_indentation) => boolean(),
           range: patch_range(),
           change: patch_change()
         }
@@ -88,16 +101,79 @@ defmodule Lexical.Ast do
   @type short_alias :: atom()
   @type alias_segments :: [short_alias]

+  @type comment_metadata :: %{
+          line: pos_integer(),
+          column: pos_integer(),
+          previous_eol_count: non_neg_integer(),
+          next_eol_count: non_neg_integer()
+        }
+
+  @type position :: Position.t() | {Position.line(), Position.character()}
+
+  @doc """
+  Analyzes a document.
+  """
+  @spec analyze(Document.t()) :: Analysis.t()
+  def analyze(%Document{} = document) do
+    document
+    |> from()
+    |> Analysis.new(document)
+  end
+
+  @doc """
+  Reanalyzes a document up to `position` if `analysis` is not valid.
+
+  This can be used to analyze a fragment of an analyzed document up to
+  the cursor position. If the given analysis is already valid, this
+  function returns it unchanged.
+
+  Note that the analysis generated for this may give invalid or incomplete
+  results for positions after the fragment.
+
+  ## Examples
+
+      iex> analysis = Ast.analyze(invalid_document)
+      %Ast.Analysis{valid?: false}
+
+      iex> Ast.reanalyze_to(analysis, cursor_position)
+      %Ast.Analysis{} # may be valid or invalid
+
+  """
+  @spec reanalyze_to(Analysis.t(), position) :: Analysis.t()
+  def reanalyze_to(%Analysis{valid?: false} = analysis, position) do
+    %Position{} = position = normalize_position(position, analysis.document)
+
+    analysis.document
+    |> fragment(position)
+    |> Analysis.new(analysis.document)
+  end
+
+  def reanalyze_to(%Analysis{valid?: true} = analysis, _position) do
+    analysis
+  end
+
   @doc """
   Returns an AST generated from a valid document or string.
   """
-  @spec from(Document.t() | String.t()) :: {:ok, Macro.t()} | {:error, parse_error()}
+  @spec from(Document.t() | Analysis.t() | String.t()) ::
+          {:ok, Macro.t(), comment_metadata()} | {:error, parse_error()}
   def from(%Document{} = document) do
     document
     |> Document.to_string()
     |> from()
   end

+  def from(%Analysis{valid?: true} = analysis) do
+    comments =
+      analysis.comments_by_line
+      |> Map.values()
+      |> Enum.sort_by(& &1.line)
+
+    {:ok, analysis.ast, comments}
+  end
+
+  def from(%Analysis{valid?: false, parse_error: error}), do: error
+
   def from(s) when is_binary(s) do
     do_string_to_quoted(s)
   end
@@ -105,13 +181,13 @@ defmodule Lexical.Ast do
   @doc """
   Returns an AST fragment from the start of the document to the given position.
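+
+  A sketch of typical use (a `document` and cursor `position` are assumed):
+
+      {:ok, quoted} = Ast.fragment(document, position)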
""" - @spec fragment(Document.t(), Position.t()) :: {:ok, Macro.t()} | {:error, parse_error()} - def fragment(%Document{} = document, %Position{} = position) do + @spec fragment(Document.t(), position) :: {:ok, Macro.t()} | {:error, parse_error()} + def fragment(%Document{} = document, position) do # https://github.com/elixir-lang/elixir/issues/12673#issuecomment-1592845875 # Note: because of the above issue: Using `cursor_context` + `container_cursor_to_quoted` # can't deal with some cases like: `alias Foo.Bar, as: AnotherBar`, # so we need to add a new line to make sure we can get the parrent node of the cursor - %{line: line} = position + %{line: line} = normalize_position(position, document) added_new_line_position = Position.new(document, line + 1, 1) fragment = Document.fragment(document, added_new_line_position) @@ -139,51 +215,35 @@ defmodule Lexical.Ast do @doc """ Returns the cursor context of the document at a position. """ - @spec cursor_context(Document.t(), Position.t()) :: + @spec cursor_context(Analysis.t() | Document.t(), position) :: {:ok, cursor_context()} | {:error, :cursor_context} - def cursor_context(%Document{} = document, %Position{} = position) do + def cursor_context(%Analysis{} = analysis, position) do + cursor_context(analysis.document, position) + end + + def cursor_context(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) + document |> Document.fragment(position) |> do_cursor_context() end - @doc """ - Returns the cursor context of the fragment. - """ - @spec cursor_context(String.t()) :: {:ok, cursor_context()} | {:error, :cursor_context} - def cursor_context(s) when is_binary(s) do - do_cursor_context(s) - end - @doc """ Returns the surround context of the document at a position. """ - @spec surround_context( - Document.t() | String.t(), - Position.t() | {Position.line(), Position.character()} - ) :: + @spec surround_context(Analysis.t() | Document.t(), position) :: {:ok, surround_context()} | {:error, :surround_context} - def surround_context(%Document{} = document, %Position{} = position) do - %{line: line, character: column} = position - - document - |> Document.to_string() - |> do_surround_context({line, column}) + def surround_context(%Analysis{} = analysis, position) do + surround_context(analysis.document, position) end - def surround_context(string, %Position{} = position) do - %{line: line, character: column} = position - do_surround_context(string, {line, column}) - end + def surround_context(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) - def surround_context(%Document{} = document, {_line, _column} = pos) do document |> Document.to_string() - |> do_surround_context(pos) - end - - def surround_context(string, {_line, _column} = pos) when is_binary(string) do - do_surround_context(string, pos) + |> do_surround_context(position) end @doc """ @@ -192,16 +252,17 @@ defmodule Lexical.Ast do This function differs from `cursor_path/2` in that it expects a valid AST and the returned path will not contain a `:__cursor__` node. 
""" - @spec path_at(Document.t(), Position.t()) :: + @spec path_at(Analysis.t() | Document.t(), position) :: {:ok, [Macro.t(), ...]} | {:error, :not_found | parse_error()} - @spec path_at(Macro.t(), Position.t()) :: - {:ok, [Macro.t(), ...]} | {:error, :not_found} - def path_at(%Document{} = document, %Position{} = position) do - with {:ok, ast} <- from(document) do + def path_at(%struct{} = document_or_analysis, %Position{} = position) + when struct in [Document, Analysis] do + with {:ok, ast, _} <- from(document_or_analysis) do path_at(ast, position) end end + @spec path_at(Macro.t(), Position.t()) :: + {:ok, [Macro.t(), ...]} | {:error, :not_found} def path_at(ast, %Position{} = position) do path = innermost_path(ast, [], &contains_position?(&1, position)) @@ -219,16 +280,13 @@ defmodule Lexical.Ast do fragment as opposed to a full AST and the call never fails, though it may return an empty list. """ - @spec cursor_path( - Document.t(), - Position.t() | {Position.line(), Position.character()} - ) :: - [Macro.t()] - def cursor_path(%Document{} = doc, {line, character}) do - cursor_path(doc, Position.new(doc, line, character)) + @spec cursor_path(Analysis.t() | Document.t(), position) :: [Macro.t()] + def cursor_path(%Analysis{} = analysis, position) do + cursor_path(analysis.document, position) end - def cursor_path(%Document{} = document, %Position{} = position) do + def cursor_path(%Document{} = document, position) do + %Position{} = position = normalize_position(position, document) document_fragment = Document.fragment(document, position) case do_container_cursor_to_quoted(document_fragment) do @@ -242,62 +300,12 @@ defmodule Lexical.Ast do end end - @doc """ - Traverses the given ast until the given end position. - """ - def prewalk_until( - ast, - acc, - prewalk_fn, - %Position{} = start_position, - %Position{} = end_position - ) do - range = Range.new(start_position, end_position) - - {_, acc} = - ast - |> Zipper.zip() - |> Zipper.traverse_while(acc, fn zipper, acc -> - # We can have a cursor at the end of the document, and due - # to how elixir's AST traversal handles `end` statements (it doesn't), - # we will never receive a callback where we match the end block. Adding - # a cursor node will allow us to place cursors after the document has ended - # and things will still work. - zipper = maybe_insert_cursor(zipper, end_position) - - case Zipper.node(zipper) do - {_, _, _} = element -> - current_line = Sourceror.get_line(element) - current_column = Sourceror.get_column(element) - - cond do - match?({:__cursor__, _, _}, element) -> - new_acc = prewalk_fn.(element, acc) - {:halt, zipper, new_acc} - - within_range?({current_line, current_column}, range) -> - new_acc = prewalk_fn.(element, acc) - {:cont, zipper, new_acc} - - true -> - {:halt, zipper, acc} - end - - element -> - new_acc = prewalk_fn.(element, acc) - {:cont, zipper, new_acc} - end - end) - - acc - end - @doc """ Returns a zipper for the document AST focused at the given position. """ @spec zipper_at(Document.t(), Position.t()) :: {:ok, Zipper.t()} | {:error, parse_error()} def zipper_at(%Document{} = document, %Position{} = position) do - with {:ok, ast} <- from(document) do + with {:ok, ast, _} <- from(document) do zipper_at_position(ast, position) end end @@ -309,21 +317,9 @@ defmodule Lexical.Ast do def contains_position?(ast, %Position{} = position) do case Sourceror.get_range(ast) do %{start: start_pos, end: end_pos} -> - on_same_line? 
= start_pos[:line] == end_pos[:line] and position.line == start_pos[:line] - - cond do - on_same_line? -> - position.character >= start_pos[:column] and position.character < end_pos[:column] - - position.line == start_pos[:line] -> - position.character >= start_pos[:column] - - position.line == end_pos[:line] -> - position.character < end_pos[:column] - - true -> - position.line > start_pos[:line] and position.line < end_pos[:line] - end + start_pos = {start_pos[:line], start_pos[:column]} + end_pos = {end_pos[:line], end_pos[:column]} + within?(position, start_pos, end_pos) nil -> false @@ -391,131 +387,106 @@ defmodule Lexical.Ast do end end - @doc """ - Expands an alias in the context of the document at a given position. - - When we refer to a module, it's usually a short name, often aliased or - in a nested module. This function finds the full name of the module at - a cursor position. - - For example, if we have: - - ```elixir - defmodule Project do - defmodule Issue do - defstruct [:message] - end - - def message(%Issue{|} = issue) do # cursor marked as `|` - end - end - ``` - - Then the expanded module is `Project.Issue`. - - Another example: - - ```elixir - defmodule Project do - defmodule Issue do - defstruct [:message] - end - end - - defmodule MyModule do - alias Project, as: MyProject - - def message(%MyProject.Issue{|} = issue) do - end - end - ``` - - Then the the expanded module is still `Project.Issue`. - - If no aliases can be found, the given alias is returned unmodified. - """ - @spec expand_aliases( - alias_segments() | module(), - Document.t(), - Position.t() | {Position.line(), Position.character()} - ) :: - {:ok, module()} | :error - def expand_aliases(module_or_segments, %Document{} = document, %Position{} = position) do - with {:ok, quoted} <- fragment(document, position) do - expand_aliases(module_or_segments, document, quoted, position) - end + # Expands aliases given the rules in the special form + # https://hexdocs.pm/elixir/1.13.4/Kernel.SpecialForms.html#__aliases__/1 + def reify_alias(current_module, [:"Elixir" | _] = reified) do + {:ok, [current_module | reified]} end - def expand_aliases(module_or_segments, %Document{} = document, {line, column}) do - expand_aliases(module_or_segments, document, Position.new(document, line, column)) + def reify_alias(current_module, [:__MODULE__ | rest]) do + {:ok, [current_module | rest]} end - @spec expand_aliases(alias_segments() | module(), Document.t(), Macro.t(), Position.t()) :: - {:ok, module()} | :error - def expand_aliases(module, %Document{} = document, quoted_document, %Position{} = position) - when is_atom(module) and not is_nil(module) do - module - |> Module.split() - |> Enum.map(&String.to_atom/1) - |> expand_aliases(document, quoted_document, position) + def reify_alias(current_module, [atom | _rest] = reified) when is_atom(atom) do + {:ok, [current_module | reified]} end - def expand_aliases(segments, %Document{} = document, quoted_document, %Position{} = position) - when is_list(segments) do - with {:ok, aliases_mapping} <- Aliases.at(document, quoted_document, position), - {:ok, resolved} <- resolve_alias(segments, aliases_mapping) do - {:ok, Module.concat(resolved)} - else - _ -> - if Enum.all?(segments, &is_atom/1) do - {:ok, Module.concat(segments)} - else - :error - end + def reify_alias(current_module, [unreified | rest]) do + env = %Macro.Env{module: current_module} + + case Macro.expand(unreified, env) do + module when is_atom(module) -> {:ok, [module | rest]} + _ -> :error end end - def 
expand_aliases(empty, _, _, _) when empty in [nil, []] do
-    Logger.warning("Aliases are #{inspect(empty)}, can't expand them")
-    :error
+  def reify_alias(current_module, []) do
+    {:ok, List.wrap(current_module)}
   end

-  # Expands aliases given the rules in the special form
-  # https://hexdocs.pm/elixir/1.13.4/Kernel.SpecialForms.html#__aliases__/1
-  def reify_alias(current_module, [:"Elixir" | _] = reified) do
-    [current_module | reified]
-  end
+  @doc """
+  Walks a quoted expression with an accumulator, applying `fun` to each
+  var or pinned var.

-  def reify_alias(current_module, [:__MODULE__ | rest]) do
-    [current_module | rest]
-  end
+  Returns a tuple where the first element is the potentially modified
+  expression and the second is the accumulator.
+  """
+  # Adapted from `ExUnit.Assertions.collect_vars_from_pattern/1`,
+  # licensed under Apache License 2.0:
+  # https://github.com/elixir-lang/elixir/blob/1e914b04b46125b3b9b251b64ee04380e523afc4/lib/ex_unit/lib/ex_unit/assertions.ex#L657
+  @spec prewalk_vars(Macro.t(), acc, (Macro.t(), acc -> {Macro.t(), acc})) :: {Macro.t(), acc}
+        when acc: term()
+  def prewalk_vars(ast, acc, fun) do
+    {ast, {acc, _}} =
+      Macro.prewalk(ast, {acc, false}, fn
+        # <<x :: binary>>
+        #   ^^^^^^^^^^^
+        {:"::", meta, [left, right]}, {acc, _prev_pinned?} ->
+          {right, acc} = prewalk_vars_in_binary(right, acc, fun)
+          {{:"::", meta, [left, right]}, {acc, false}}
+
+        # skip vars inside quote or @
+        {skip, _, [_]} = node, {acc, _prev_pinned?} when skip in [:@, :quote] ->
+          {node, {acc, false}}
+
+        # skip _
+        {:_, _, context} = node, {acc, _prev_pinned?} when is_atom(context) ->
+          {node, {acc, false}}
+
+        # ^pinned
+        # emit the pinned var and set prev_pinned? so the var isn't emitted
+        # again immediately after
+        {:^, _, [{name, _, context}]} = pinned, {acc, _prev_pinned?}
+        when is_atom(name) and is_atom(context) ->
+          {pinned, acc} = fun.(pinned, acc)
+          {pinned, {acc, true}}
+
+        # var
+        {name, _, context} = var, {acc, false} when is_atom(name) and is_atom(context) ->
+          {var, acc} = fun.(var, acc)
+          {var, {acc, false}}
+
+        # skip everything else
+        node, {acc, _prev_pinned?} ->
+          {node, {acc, false}}
+      end)

-  def reify_alias(current_module, [atom | _rest] = reified) when is_atom(atom) do
-    [current_module | reified]
+    {ast, acc}
   end

-  def reify_alias(current_module, [unreified | rest]) do
-    env = %Macro.Env{module: current_module}
-    reified = Macro.expand(unreified, env)
+  defp prewalk_vars_in_binary(right, acc, fun) do
+    Macro.prewalk(right, acc, fn
+      {mode, mode_meta, [{name, _, context} = var]}, acc
+      when is_atom(mode) and is_atom(name) and is_atom(context) ->
+        {var, acc} = fun.(var, acc)
+        {{mode, mode_meta, [var]}, acc}

-    [reified | rest]
+      node, acc ->
+        {node, acc}
+    end)
   end

-  # private
-  defp resolve_alias([first | _] = segments, aliases_mapping) when is_tuple(first) do
-    with {:ok, current_module} <- Map.fetch(aliases_mapping, :__MODULE__) do
-      {:ok, reify_alias(current_module, segments)}
-    end
+  @doc """
+  Returns whether a var with `name` and `context` is in `vars`.
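+
+  For example (a bare AST var with a `nil` context):
+
+      vars = [{:foo, [line: 1], nil}]
+      has_var?(vars, :foo, nil)
+      #=> true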
+ """ + def has_var?(vars, name, context) do + Enum.any?(vars, &match?({^name, _, ^context}, &1)) end - defp resolve_alias([first | rest], aliases_mapping) do - with {:ok, resolved} <- Map.fetch(aliases_mapping, first) do - {:ok, [resolved | rest]} - end - end + # private defp do_string_to_quoted(string) when is_binary(string) do - Code.string_to_quoted(string, + Code.string_to_quoted_with_comments(string, literal_encoder: &{:ok, {:__block__, &2, [&1]}}, token_metadata: true, columns: true, @@ -539,13 +510,33 @@ defmodule Lexical.Ast do end end - defp do_surround_context(fragment, {line, column}) when is_binary(fragment) do - case Code.Fragment.surround_context(fragment, {line, column}) do - :none -> {:error, :surround_context} + defp do_surround_context(fragment, %Position{} = position) when is_binary(fragment) do + pos = {position.line, position.character} + + case Code.Fragment.surround_context(fragment, pos) do + :none -> do_surround_context_again(fragment, pos) context -> {:ok, context} end end + defp do_surround_context_again(_fragment, {_line, 1} = _position) do + {:error, :surround_context} + end + + defp do_surround_context_again(fragment, {line, column} = position) do + case Code.Fragment.surround_context(fragment, {line, column - 1}) do + :none -> + {:error, :surround_context} + + context -> + if context.end == position do + {:ok, context} + else + {:error, :surround_context} + end + end + end + defp patch_to_range(document, start_pos, end_pos) do with {:ok, start_pos} <- patch_to_position(document, start_pos), {:ok, end_pos} <- patch_to_position(document, end_pos) do @@ -600,7 +591,7 @@ defmodule Lexical.Ast do fn %Zipper{node: node} = zipper, {last_position, acc} -> current_position = node_position(node, last_position) - if within_range?(current_position, range) do + if within?(current_position, range.start, range.end) do {zipper, new_acc} = fun.(zipper, acc) {:cont, zipper, {current_position, new_acc}} @@ -614,27 +605,15 @@ defmodule Lexical.Ast do end end - defp within_range?({current_line, current_column}, %Range{} = range) do - start_pos = %Position{} = range.start - end_pos = %Position{} = range.end - - cond do - current_line == start_pos.line -> - current_column >= start_pos.character - - current_line == end_pos.line -> - current_column <= end_pos.character - - true -> - current_line >= start_pos.line and current_line <= end_pos.line - end + defp within?(pos, start_pos, end_pos) do + Position.compare(pos, start_pos) in [:gt, :eq] and + Position.compare(pos, end_pos) in [:lt, :eq] end defp at_or_after?(node, %Position{} = position) do - line = get_line(node, 0) - column = get_column(node, 0) + node_position = node_position(node, {0, 0}) - line > position.line or (line == position.line and column >= position.character) + Position.compare(node_position, position) in [:gt, :eq] end defp one_line_range(%Document{} = document, line_number) do @@ -679,22 +658,6 @@ defmodule Lexical.Ast do default end - defp maybe_insert_cursor(zipper, %Position{} = position) do - case Zipper.next(zipper) do - nil -> - cursor = {:__cursor__, [line: position.line, column: position.character], nil} - - if zipper == Zipper.top(zipper) do - Zipper.insert_child(zipper, cursor) - else - Zipper.insert_right(zipper, cursor) - end - - _ -> - zipper - end - end - # Similar to `Future.Macro.path/3`, but returns the path to the innermost # node for which `fun` returns truthy instead of the path to the first node # that returns such. 
@@ -759,4 +722,10 @@ defmodule Lexical.Ast do defp innermost_path_list([arg | args], acc, fun) do innermost_path(arg, acc, fun) || innermost_path_list(args, acc, fun) end + + defp normalize_position(%Position{} = position, _document), do: position + + defp normalize_position({line, character}, %Document{} = document) do + Position.new(document, line, character) + end end diff --git a/apps/common/lib/lexical/ast/aliases.ex b/apps/common/lib/lexical/ast/aliases.ex deleted file mode 100644 index e05184bf4..000000000 --- a/apps/common/lib/lexical/ast/aliases.ex +++ /dev/null @@ -1,318 +0,0 @@ -defmodule Lexical.Ast.Aliases do - defmodule Alias do - defstruct [:from, :to] - - def new(from, to) do - %__MODULE__{from: from, to: to} - end - end - - defmodule Scope do - defstruct [:end_position, :current_module, :aliases, :on_exit] - - def new(end_position, current_module, on_exit \\ &Function.identity/1) do - %__MODULE__{ - aliases: %{}, - current_module: current_module, - end_position: end_position, - on_exit: on_exit - } - end - - def global do - new({:infinity, :infinity}, nil) - end - - def ended?(%__MODULE__{end_position: {:infinity, :infinity}}, _) do - false - end - - def ended?(%__MODULE__{} = scope, {line, column}) do - {end_line, end_column} = scope.end_position - - if line == end_line do - column >= end_column - else - line > end_line - end - end - - def put_alias(%__MODULE__{} = scope, _, :skip) do - scope - end - - def put_alias(%__MODULE__{} = scope, from, to) do - [first | rest] = from - - # This allows a pre-existing alias to define another alias like - # alias Foo.Bar.Baz - # alias Baz.Quux - from = - case scope.aliases do - %{^first => to_alias} -> Module.split(to_alias.from) ++ rest - _ -> from - end - - new_alias = Alias.new(ensure_alias(scope, from), ensure_alias(scope, to)) - %__MODULE__{scope | aliases: Map.put(scope.aliases, new_alias.to, new_alias)} - end - - defp ensure_alias(%__MODULE__{} = scope, [:__MODULE__ | rest]) do - Module.concat([scope.current_module | rest]) - end - - defp ensure_alias(%__MODULE__{}, alias_list) when is_list(alias_list) do - Module.concat(alias_list) - end - - defp ensure_alias(%__MODULE__{}, alias_atom) when is_atom(alias_atom) do - alias_atom - end - end - - defmodule Reducer do - alias Lexical.Ast - defstruct scopes: [] - - def new do - %__MODULE__{scopes: [Scope.global()]} - end - - def update(%__MODULE__{} = reducer, elem) do - reducer - |> maybe_pop_scope(elem) - |> apply_ast(elem) - end - - def current_module(%__MODULE__{} = reducer) do - current_scope(reducer).current_module - end - - def aliases(%__MODULE__{} = reducer) do - reducer.scopes - |> Enum.reverse() - |> Enum.flat_map(&Map.to_list(&1.aliases)) - |> Map.new(fn {k, %Alias{} = scope_alias} -> {k, scope_alias.from} end) - |> Map.put(:__MODULE__, current_module(reducer)) - end - - # defmodule MyModule do - defp apply_ast( - %__MODULE__{} = reducer, - {:defmodule, metadata, [{:__aliases__, _, module_name}, _block]} - ) do - module_alias = - case current_module(reducer) do - nil -> - module_name - - current_module -> - Ast.reify_alias(current_module, module_name) - end - - current_module_alias = - case module_name do - [current] -> current - _ -> :skip - end - - reducer - |> push_scope(metadata, module_alias, &put_alias(&1, module_alias, current_module_alias)) - |> put_alias(module_alias, current_module_alias) - end - - # A simple alias: alias Foo.Bar - defp apply_ast(%__MODULE__{} = reducer, {:alias, _metadata, [{:__aliases__, _, from}]}) do - to = List.last(from) - 
put_alias(reducer, normalize_from(from), to) - end - - # An alias with a specified name: alias Foo.Bar, as: FooBar - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _metadata, - [{:__aliases__, _, from}, [{{:__block__, _, [:as]}, {:__aliases__, _, [to]}}]]} - ) do - put_alias(reducer, normalize_from(from), to) - end - - # A multiple alias: alias Foo.Bar.{First, Second, Third.Fourth} - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _, [{{:., _, [{:__aliases__, _, from_alias}, :{}]}, _, destinations}]} - ) do - from_alias = normalize_from(from_alias) - apply_multiple_aliases(reducer, from_alias, destinations) - end - - # An alias for __MODULE__: alias __MODULE__ - - defp apply_ast(%__MODULE__{} = reducer, {:alias, _, [{:__MODULE__, _, _}]}) do - from_alias = reducer |> current_module() |> Module.split() |> Enum.map(&String.to_atom/1) - to = List.last(from_alias) - put_alias(reducer, from_alias, to) - end - - # A muliple alias starting with __MODULE__: alias __MODULE__.{First, Second} - defp apply_ast( - %__MODULE__{} = reducer, - {:alias, _, [{{:., _, [{:__MODULE__, _, _}, :{}]}, _, destinations}]} - ) do - from_alias = [:__MODULE__] - apply_multiple_aliases(reducer, from_alias, destinations) - end - - # This clause will match anything that has a do block, and will push a new scope. - # This will match functions and any block-like macro DSLs people implement - defp apply_ast(%__MODULE__{} = reducer, {_definition, metadata, _body}) do - if Keyword.has_key?(metadata, :end) do - push_scope(reducer, metadata, current_module(reducer)) - else - reducer - end - end - - defp apply_ast(%__MODULE__{} = reducer, _elem) do - reducer - end - - defp apply_multiple_aliases(%__MODULE__{} = reducer, from_alias, destinations) do - Enum.reduce(destinations, reducer, fn - {:__aliases__, _, to_alias}, reducer -> - from = - case from_alias do - [:__MODULE__ | rest] -> - [:__MODULE__ | rest ++ to_alias] - - from -> - from ++ to_alias - end - - to = List.last(from) - put_alias(reducer, from, to) - - {:__cursor__, _, _}, reducer -> - reducer - end) - end - - defp put_alias(%__MODULE__{} = reducer, _, :skip) do - reducer - end - - defp put_alias(%__MODULE__{} = reducer, from, to) do - scope = - reducer - |> current_scope() - |> Scope.put_alias(from, to) - - replace_current_scope(reducer, scope) - end - - defp current_scope(%__MODULE__{scopes: [current | _]}) do - current - end - - defp replace_current_scope(%__MODULE__{scopes: [_ | rest]} = reducer, scope) do - %__MODULE__{reducer | scopes: [scope | rest]} - end - - defp ensure_alias(%__MODULE__{} = reducer, [{:__MODULE__, _, _}, rest]) do - reducer - |> current_module() - |> Module.concat(rest) - end - - defp ensure_alias(%__MODULE__{}, alias_list) when is_list(alias_list) do - Module.concat(alias_list) - end - - defp ensure_alias(%__MODULE__{}, alias_atom) when is_atom(alias_atom) do - alias_atom - end - - defp push_scope( - %__MODULE__{} = reducer, - metadata, - current_module, - on_exit \\ &Function.identity/1 - ) do - end_position = {get_in(metadata, [:end, :line]), get_in(metadata, [:end, :column])} - current_module = ensure_alias(reducer, current_module) - new_scopes = [Scope.new(end_position, current_module, on_exit) | reducer.scopes] - - %__MODULE__{reducer | scopes: new_scopes} - end - - defp maybe_pop_scope(%__MODULE__{} = reducer, {_, metadata, _} = elem) do - with {:ok, current_line} <- Keyword.fetch(metadata, :line), - {:ok, current_column} <- Keyword.fetch(metadata, :column), - [current_scope | scopes] <- reducer.scopes, - true <- 
Scope.ended?(current_scope, {current_line, current_column}) do - popped_reducer = current_scope.on_exit.(%__MODULE__{reducer | scopes: scopes}) - maybe_pop_scope(popped_reducer, elem) - else - _ -> - reducer - end - end - - defp maybe_pop_scope(%__MODULE__{} = reducer, _) do - reducer - end - - defp normalize_from([{:__MODULE__, _, _} | rest]) do - [:__MODULE__ | rest] - end - - defp normalize_from(from) do - from - end - end - - @moduledoc """ - Support for resolving module aliases. - """ - - alias Lexical.Ast - alias Lexical.Document - alias Lexical.Document.Position - - @doc """ - Returns the aliases available in the document at a given position. - - May return aliases even in the event of syntax errors. - """ - @spec at(Document.t(), Position.t() | {Position.line(), Position.character()}) :: - {:ok, %{Ast.short_alias() => module()}} | {:error, Ast.parse_error()} - def at(%Document{} = doc, {line, character}) do - at(doc, Position.new(doc, line, character)) - end - - def at(%Document{} = document, %Position{} = position) do - with {:ok, quoted} <- Ast.fragment(document, position) do - at(document, quoted, position) - end - end - - @spec at(Document.t(), Macro.t(), Position.t() | {Position.line(), Position.character()}) :: - {:ok, %{Ast.short_alias() => module()}} - def at(%Document{} = document, quoted_document, {line, character}) do - at(document, quoted_document, Position.new(document, line, character)) - end - - def at(%Document{} = document, quoted_document, %Position{} = position) do - start_position = Position.new(document, 0, 0) - - aliases = - quoted_document - |> Ast.prewalk_until(Reducer.new(), &collect/2, start_position, position) - |> Reducer.aliases() - - {:ok, aliases} - end - - defp collect(elem, %Reducer{} = reducer) do - Reducer.update(reducer, elem) - end -end diff --git a/apps/common/lib/lexical/ast/analysis.ex b/apps/common/lib/lexical/ast/analysis.ex new file mode 100644 index 000000000..9d87a043f --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis.ex @@ -0,0 +1,539 @@ +defmodule Lexical.Ast.Analysis do + @moduledoc """ + A data structure representing an analyzed AST. + + See `Lexical.Ast.analyze/1`. + """ + + alias Lexical.Ast.Analysis.Alias + alias Lexical.Ast.Analysis.Import + alias Lexical.Ast.Analysis.Require + alias Lexical.Ast.Analysis.Scope + alias Lexical.Ast.Analysis.State + alias Lexical.Ast.Analysis.Use + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.Identifier + alias Sourceror.Zipper + + defstruct [:ast, :document, :parse_error, scopes: [], comments_by_line: %{}, valid?: true] + + @type t :: %__MODULE__{} + @scope_id :_scope_id + + @block_keywords [:do, :else, :rescue, :catch, :after] + @clauses [:->] + + @doc false + def new(parse_result, document) + + def new({:ok, ast}, %Document{} = document) do + new({:ok, ast, []}, document) + end + + def new({:ok, ast, comments}, %Document{} = document) do + scopes = traverse(ast, document) + comments_by_line = Map.new(comments, fn comment -> {comment.line, comment} end) + + %__MODULE__{ + ast: ast, + document: document, + scopes: scopes, + comments_by_line: comments_by_line + } + end + + def new(error, document) do + %__MODULE__{ + document: document, + parse_error: error, + valid?: false + } + end + + @doc """ + Returns the scopes for the given position, sorted by nearest proximity. 
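  For example (a hedged sketch, assuming `analysis` describes a module
  containing a function and `position` is a cursor inside that function):

      [%Scope{} = innermost | _rest] = Analysis.scopes_at(analysis, position)
      # `innermost` is the function's scope (the nearest enclosing range);
      # the :global scope, which matches every position, sorts last.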
+ """ + def scopes_at(%__MODULE__{scopes: scopes}, %Position{} = position) do + scopes + |> Enum.filter(fn %Scope{range: range} = scope -> + scope.id == :global or Range.contains?(range, position) + end) + |> Enum.sort_by( + fn + %Scope{id: :global} -> 0 + %Scope{range: range} -> {range.start.line, range.start.character} + end, + :desc + ) + end + + @doc false + def scope_id({_, meta, _}) when is_list(meta) do + Keyword.get(meta, @scope_id) + end + + def scope_id({left, right}) do + {scope_id(left), scope_id(right)} + end + + def scope_id(list) when is_list(list) do + Enum.map(list, &scope_id/1) + end + + def scope_id(_) do + nil + end + + def commented?(%__MODULE__{} = analysis, %Position{} = position) do + case Map.fetch(analysis.comments_by_line, position.line) do + {:ok, comment} -> position.character > comment[:column] + _ -> false + end + end + + @doc """ + Returns the scope of the nearest enclosing module of the given function. + + If there is no enclosing module scope, the global scope is returned + """ + @spec module_scope(t(), Range.t()) :: Scope.t() + def module_scope(%__MODULE__{} = analysis, %Range{} = range) do + enclosing_scopes = + analysis + |> scopes_at(range.start) + |> enclosing_scopes(range) + + first_scope = List.first(enclosing_scopes) + + Enum.reduce_while(enclosing_scopes, first_scope, fn + %Scope{module: same} = current, %Scope{module: same} -> + {:cont, current} + + _, current -> + {:halt, current} + end) + end + + defp enclosing_scopes(scopes, range) do + Enum.filter(scopes, fn scope -> + Range.contains?(scope.range, range.start) + end) + end + + defp traverse(quoted, %Document{} = document) do + quoted = preprocess(quoted) + + {_, state} = + Macro.traverse( + quoted, + State.new(document), + fn quoted, state -> + case analyze_node(quoted, state) do + {new_quoted, new_state} -> + {new_quoted, new_state} + + new_state -> + {quoted, new_state} + end + end, + fn quoted, state -> + case {scope_id(quoted), State.current_scope(state)} do + {id, %Scope{id: id}} -> + {quoted, State.pop_scope(state)} + + _ -> + {quoted, state} + end + end + ) + + unless length(state.scopes) == 1 do + raise RuntimeError, + "invariant not met, :scopes should only contain the global scope: #{inspect(state)}" + end + + state + # pop the final, global state + |> State.pop_scope() + |> Map.fetch!(:visited) + |> Map.reject(fn {_id, scope} -> Scope.empty?(scope) end) + |> correct_ranges(quoted, document) + |> Map.values() + end + + defp preprocess(quoted) do + Macro.prewalk(quoted, &with_scope_id/1) + end + + defp correct_ranges(scopes, quoted, document) do + {_zipper, scopes} = + quoted + |> Zipper.zip() + |> Zipper.traverse(scopes, fn %Zipper{node: node} = zipper, scopes -> + id = scope_id(node) + + if scope = scopes[id] do + {zipper, Map.put(scopes, id, maybe_correct_range(scope, zipper, document))} + else + {zipper, scopes} + end + end) + + scopes + end + + # extend range for block pairs to either the beginning of their next + # sibling or, if they are the last element, the end of their parent + defp maybe_correct_range(scope, %Zipper{node: {_, _}} = zipper, %Document{} = document) do + with %Zipper{node: sibling} <- Zipper.right(zipper), + %{start: sibling_start} <- Sourceror.get_range(sibling) do + new_end = Position.new(document, sibling_start[:line], sibling_start[:column]) + put_in(scope.range.end, new_end) + else + _ -> + # we go up twice to get to the real parent because ast pairs + # are always in a list + %Zipper{node: parent} = zipper |> Zipper.up() |> Zipper.up() + + case 
Sourceror.get_range(parent) do + %{end: parent_end} -> + new_end = Position.new(document, parent_end[:line], parent_end[:column]) + put_in(scope.range.end, new_end) + + _ -> + scope + end + end + end + + defp maybe_correct_range(scope, _zipper, _document) do + scope + end + + # add a unique ID to 3-element tuples + defp with_scope_id({_, _, _} = quoted) do + id = Identifier.next_global!() + Macro.update_meta(quoted, &Keyword.put(&1, @scope_id, id)) + end + + defp with_scope_id(quoted) do + quoted + end + + @skip :skipped? + + defp skip_leading_do({_, meta, _} = root_ast) do + # Marks the first do block after the passed in node. This is because + # that do block doesn't have accurate ending information, and if we build + # a scope around it, it won't end properly, which will cause information + # contained in scopes to leak out of them. + + case Keyword.fetch(meta, :do) do + {:ok, [line: line, column: column]} -> + Macro.prewalk(root_ast, fn + {:__block__, _meta, [:do]} = block_ast -> + case Sourceror.get_start_position(block_ast) do + [line: ^line, column: ^column] -> + skip(block_ast) + + _ -> + block_ast + end + + other -> + other + end) + + _ -> + root_ast + end + end + + defp skip({_, _, _} = quoted) do + Macro.update_meta(quoted, &Keyword.put(&1, @skip, true)) + end + + defp skipped?({_, meta, _}) when is_list(meta) do + skipped?(meta) + end + + defp skipped?(meta) when is_list(meta) do + Keyword.get(meta, @skip, false) + end + + defp skipped?(_), do: false + + @module_defining_forms [:defmodule, :defprotocol] + # defmodule Foo do or defprotocol MyProtocol do + defp analyze_node({form, _meta, [{:__aliases__, _, segments} | _]} = quoted, state) + when form in @module_defining_forms do + module = + case State.current_module(state) do + [] -> segments + current_module -> reify_alias(current_module, segments) + end + + current_module_alias = Alias.implicit(state.document, quoted, module, :__MODULE__) + + new_state = + state + # implicit alias belongs to the current scope + |> maybe_push_implicit_alias(segments, state.document, quoted) + # new __MODULE__ alias belongs to the new scope + |> State.push_scope_for(quoted, module) + |> State.push_alias(current_module_alias) + + {skip_leading_do(quoted), new_state} + end + + # defimpl Foo, for: SomeProtocol do + defp analyze_node( + {:defimpl, _meta, + [ + {:__aliases__, _, protocol_segments}, + [{_for_keyword, {:__aliases__, _, for_segments}}] | _ + ]} = quoted, + state + ) do + expanded_for = expand_alias(for_segments, state) + module = expand_alias(protocol_segments ++ expanded_for, state) + current_module_alias = Alias.implicit(state.document, quoted, module, :__MODULE__) + for_alias = Alias.implicit(state.document, quoted, expanded_for, :"@for") + protocol_alias = Alias.implicit(state.document, quoted, protocol_segments, :"@protocol") + + new_state = + state + |> State.push_scope_for(quoted, module) + |> State.push_alias(current_module_alias) + |> State.push_alias(for_alias) + |> State.push_alias(protocol_alias) + + {skip_leading_do(quoted), new_state} + end + + # alias Foo.{Bar, Baz, Buzz.Qux} + defp analyze_node( + {:alias, _meta, [{{:., _, [aliases, :{}]}, _, aliases_nodes}]} = quoted, + state + ) do + base_segments = expand_alias(aliases, state) + + Enum.reduce(aliases_nodes, state, fn {:__aliases__, _, segments}, state -> + alias = + Alias.explicit(state.document, quoted, base_segments ++ segments, List.last(segments)) + + State.push_alias(state, alias) + end) + end + + # alias Foo + # alias Foo.Bar + # alias __MODULE__.Foo + defp 
analyze_node({:alias, _meta, [aliases]} = quoted, state) do + case expand_alias(aliases, state) do + [_ | _] = segments -> + alias = Alias.explicit(state.document, quoted, segments, List.last(segments)) + State.push_alias(state, alias) + + [] -> + state + end + end + + # alias Foo, as: Bar + defp analyze_node({:alias, meta, [aliases, options]} = quoted, state) do + with {:ok, alias_as} <- fetch_alias_as(options), + [_ | _] = segments <- expand_alias(aliases, state) do + alias = Alias.explicit(state.document, quoted, segments, alias_as) + State.push_alias(state, alias) + else + _ -> + analyze_node({:alias, meta, [aliases]}, state) + end + end + + # import with selector import MyModule, only: :functions + defp analyze_node( + {:import, _meta, [{:__aliases__, _aliases, module}, selector]} = quoted, + state + ) do + State.push_import(state, Import.new(state.document, quoted, module, selector)) + end + + # wholesale import import MyModule + defp analyze_node({:import, _meta, [{:__aliases__, _aliases, module}]} = quoted, state) do + State.push_import(state, Import.new(state.document, quoted, module)) + end + + # require MyModule, as: Alias + defp analyze_node({:require, _meta, [{:__aliases__, _, module}, options]} = quoted, state) do + case fetch_alias_as(options) do + {:ok, as_module} -> + State.push_require(state, Require.new(state.document, quoted, module, as_module)) + + :error -> + state + end + end + + # require MyModule + defp analyze_node( + {:require, _meta, [{:__aliases__, _, module}]} = quoted, + state + ) do + State.push_require(state, Require.new(state.document, quoted, module)) + end + + # use MyModule + defp analyze_node( + {:use, _meta, [{:__aliases__, _, module} | opts]} = use, + state + ) do + State.push_use(state, Use.new(state.document, use, module, opts)) + end + + # stab clauses: -> + defp analyze_node({clause, _, _} = quoted, state) when clause in @clauses do + maybe_push_scope_for(state, quoted) + end + + # blocks: do, else, etc. + defp analyze_node({{:__block__, meta, [block]}, _} = quoted, state) + when block in @block_keywords do + if skipped?(meta) do + state + else + maybe_push_scope_for(state, quoted) + end + end + + # catch-all + defp analyze_node(_quoted, state) do + state + end + + defp maybe_push_implicit_alias(%State{} = state, [first_segment | _], document, quoted) + when is_atom(first_segment) do + segments = + case State.current_module(state) do + # the head element of top-level modules can be aliased, so we + # must expand them + [] -> + expand_alias([first_segment], state) + + # if we have a current module, we prefix the first segment with it + current_module -> + current_module ++ [first_segment] + end + + implicit_alias = Alias.implicit(document, quoted, segments, first_segment) + State.push_alias(state, implicit_alias) + end + + # don't create an implicit alias if the module is defined using complex forms: + # defmodule __MODULE__.Foo do + # defmodule unquote(...) 
do + defp maybe_push_implicit_alias(%State{} = state, [non_atom | _], _, _) + when not is_atom(non_atom) do + state + end + + defp expand_alias({:__MODULE__, _, nil}, state) do + State.current_module(state) + end + + defp expand_alias({:__aliases__, _, segments}, state) do + expand_alias(segments, state) + end + + defp expand_alias([{:__MODULE__, _, nil} | segments], state) do + State.current_module(state) ++ segments + end + + defp expand_alias([first | rest], state) do + alias_map = state |> State.current_scope() |> Scope.alias_map() + + case alias_map do + %{^first => existing_alias} -> + existing_alias.module ++ rest + + _ -> + [first | rest] + end + end + + defp expand_alias(quoted, state) do + reify_alias(State.current_module(state), List.wrap(quoted)) + end + + # Expands aliases given the rules in the special form + # https://hexdocs.pm/elixir/1.13.4/Kernel.SpecialForms.html#__aliases__/1 + + # When the head element is the atom :"Elixir", no expansion happens + defp reify_alias(_, [:"Elixir" | _] = reified) do + reified + end + + # Without a current module, we can't expand a non-atom head element + defp reify_alias([], [non_atom | rest]) when not is_atom(non_atom) do + rest + end + + # With no current module and an atom head, no expansion occurs + defp reify_alias([], [atom | _] = reified) when is_atom(atom) do + reified + end + + # Expand current module + defp reify_alias(current_module, [{:__MODULE__, _, nil} | rest]) do + current_module ++ rest + end + + # With a current module and an atom head, the alias is nested in the + # current module + defp reify_alias(current_module, [atom | _rest] = reified) when is_atom(atom) do + current_module ++ reified + end + + # In other cases, attempt to expand the unreified head element + defp reify_alias(current_module, [unreified | rest]) do + module = Module.concat(current_module) + env = %Macro.Env{module: module} + reified = Macro.expand(unreified, env) + + if is_atom(reified) do + reified_segments = reified |> Module.split() |> Enum.map(&String.to_atom/1) + reified_segments ++ rest + else + rest + end + end + + defp fetch_alias_as(options) when is_list(options) do + alias_as = + Enum.find_value(options, fn + {{:__block__, _, [:as]}, {:__aliases__, _, [alias_as]}} -> alias_as + _ -> nil + end) + + case alias_as do + nil -> :error + _ -> {:ok, alias_as} + end + end + + # When the `as` section is incomplete, like: `alias Foo, a` + defp fetch_alias_as(_) do + :error + end + + defp maybe_push_scope_for(%State{} = state, ast) do + if skipped?(ast) do + state + else + State.maybe_push_scope_for(state, ast) + end + end +end diff --git a/apps/common/lib/lexical/ast/analysis/alias.ex b/apps/common/lib/lexical/ast/analysis/alias.ex new file mode 100644 index 000000000..4e9ecf620 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/alias.ex @@ -0,0 +1,52 @@ +defmodule Lexical.Ast.Analysis.Alias do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + + defstruct [:module, :as, :range, explicit?: true] + + @type t :: %__MODULE__{ + module: [atom], + as: module(), + range: Range.t() | nil + } + + def explicit(%Document{} = document, ast, module, as) when is_list(module) do + range = range_for_ast(document, ast, module, as) + %__MODULE__{module: module, as: as, range: range} + end + + def implicit(%Document{} = document, ast, module, as) when is_list(module) do + range = implicit_range(document, ast) + %__MODULE__{module: module, as: as, range: range, explicit?: false} + end + + def 
to_module(%__MODULE__{} = alias) do + Module.concat(alias.module) + end + + @implicit_aliases [:__MODULE__, :"@for", :"@protocol"] + defp range_for_ast(document, ast, _alias, as) when as in @implicit_aliases do + implicit_range(document, ast) + end + + defp range_for_ast(document, ast, _alias, _as) do + # All other kinds of aliases defined with the `alias` special form + Ast.Range.get(ast, document) + end + + defp implicit_range(%Document{} = document, ast) do + # There are kinds of aliases that are automatically generated by elixir + # such as __MODULE__, these don't really have any code that defines them, + with [line: line, column: _] <- Sourceror.get_start_position(ast), + {:ok, line_text} <- Document.fetch_text_at(document, line) do + line_length = String.length(line_text) + alias_start = Position.new(document, line, line_length) + Range.new(alias_start, alias_start) + else + _ -> + nil + end + end +end diff --git a/apps/common/lib/lexical/ast/analysis/import.ex b/apps/common/lib/lexical/ast/analysis/import.ex new file mode 100644 index 000000000..1be02a549 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/import.ex @@ -0,0 +1,92 @@ +defmodule Lexical.Ast.Analysis.Import do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Range + + defstruct module: nil, selector: :all, range: nil, explicit?: true + + @type function_name :: atom() + @type function_arity :: {function_name(), arity()} + @type selector :: + :functions + | :macros + | :sigils + | [only: [function_arity()]] + | [except: [function_arity()]] + @type t :: %{ + module: module(), + selector: selector(), + line: non_neg_integer() + } + def new(%Document{} = document, ast, module) do + %__MODULE__{module: module, range: Ast.Range.get(ast, document)} + end + + def new(%Document{} = document, ast, module, selector) do + %__MODULE__{ + module: module, + selector: expand_selector(selector), + range: Ast.Range.get(ast, document) + } + end + + def implicit(%Range{} = range, module) do + %__MODULE__{module: module, range: range, explicit?: false} + end + + defp expand_selector(selectors) when is_list(selectors) do + selectors = + Enum.reduce(selectors, [], fn + {{:__block__, _, [type]}, {:__block__, _, [selector]}}, acc + when type in [:only, :except] -> + expanded = + case selector do + :functions -> + :functions + + :macros -> + :macros + + :sigils -> + :sigils + + keyword when is_list(keyword) -> + keyword + |> Enum.reduce([], &expand_function_keywords/2) + |> Enum.reverse() + + _ -> + # they're likely in the middle of typing in something, and have produced an + # invalid import + [] + end + + [{type, expanded} | acc] + + _, acc -> + acc + end) + + if selectors == [] do + :all + else + selectors + end + end + + # If the selectors is not valid, like: `import SomeModule, o `, we default to :all + defp expand_selector(_) do + :all + end + + defp expand_function_keywords( + {{:__block__, _, [function_name]}, {:__block__, _, [arity]}}, + acc + ) + when is_atom(function_name) and is_number(arity) do + [{function_name, arity} | acc] + end + + defp expand_function_keywords(_ignored, acc), + do: acc +end diff --git a/apps/common/lib/lexical/ast/analysis/require.ex b/apps/common/lib/lexical/ast/analysis/require.ex new file mode 100644 index 000000000..190cbe7dd --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/require.ex @@ -0,0 +1,10 @@ +defmodule Lexical.Ast.Analysis.Require do + alias Lexical.Ast + alias Lexical.Document + defstruct [:module, :as, :range] + + def new(%Document{} = document, ast, module, as 
\\ nil) when is_list(module) do + range = Ast.Range.get(ast, document) + %__MODULE__{module: module, as: as || module, range: range} + end +end diff --git a/apps/common/lib/lexical/ast/analysis/scope.ex b/apps/common/lib/lexical/ast/analysis/scope.ex new file mode 100644 index 000000000..26f9ebd28 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/scope.ex @@ -0,0 +1,86 @@ +defmodule Lexical.Ast.Analysis.Scope do + alias Lexical.Ast.Analysis.Alias + alias Lexical.Document.Position + alias Lexical.Document.Range + + defstruct [ + :id, + :range, + module: [], + aliases: [], + imports: [], + requires: [], + uses: [] + ] + + @type import_mfa :: {module(), atom(), non_neg_integer()} + @type scope_position :: Position.t() | Position.line() | :end + + @type t :: %__MODULE__{ + id: any(), + range: Range.t(), + module: [atom()], + aliases: [Alias.t()], + imports: [import_mfa()] + } + + def new(%__MODULE__{} = parent_scope, id, %Range{} = range, module \\ []) do + uses = + if module == parent_scope.module do + # if we're still in the same module, we have the same uses + parent_scope.uses + else + [] + end + + %__MODULE__{ + id: id, + aliases: parent_scope.aliases, + imports: parent_scope.imports, + requires: parent_scope.requires, + module: module, + range: range, + uses: uses + } + end + + def global(%Range{} = range) do + %__MODULE__{id: :global, range: range} + end + + @spec alias_map(t(), scope_position()) :: %{module() => t()} + def alias_map(%__MODULE__{} = scope, position \\ :end) do + scope.aliases + # sorting by line ensures that aliases on later lines + # override aliases on earlier lines + |> Enum.sort_by(& &1.range.start.line) + |> Enum.take_while(fn %Alias{range: alias_range} -> + case position do + %Position{} = pos -> + pos.line >= alias_range.start.line + + line when is_integer(line) -> + line >= alias_range.start.line + + :end -> + true + end + end) + |> Map.new(&{&1.as, &1}) + end + + def fetch_alias_with_prefix(%__MODULE__{} = scope, prefix) do + case Enum.find(scope.aliases, fn %Alias{} = alias -> alias.as == prefix end) do + %Alias{} = existing -> {:ok, existing} + _ -> :error + end + end + + def empty?(%__MODULE__{id: :global}), do: false + def empty?(%__MODULE__{aliases: [], imports: []}), do: true + def empty?(%__MODULE__{}), do: false + + def end_line(%__MODULE__{} = scope, :end), do: scope.range.end.line + def end_line(_, %Position{} = position), do: position.line + def end_line(_, line) when is_integer(line), do: line +end diff --git a/apps/common/lib/lexical/ast/analysis/state.ex b/apps/common/lib/lexical/ast/analysis/state.ex new file mode 100644 index 000000000..49f315bf8 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/state.ex @@ -0,0 +1,180 @@ +defmodule Lexical.Ast.Analysis.State do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Alias + alias Lexical.Ast.Analysis.Import + alias Lexical.Ast.Analysis.Require + alias Lexical.Ast.Analysis.Scope + alias Lexical.Ast.Analysis.Use + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + + defstruct [:document, scopes: [], visited: %{}] + + def new(%Document{} = document) do + state = %__MODULE__{document: document} + + scope = + document + |> global_range() + |> Scope.global() + + push_scope(state, scope) + end + + def current_scope(%__MODULE__{scopes: [scope | _]}), do: scope + + def current_module(%__MODULE__{} = state) do + current_scope(state).module + end + + def push_scope(%__MODULE__{} = state, %Scope{} = scope) do + Map.update!(state, :scopes, &[scope | 
&1]) + end + + def push_scope(%__MODULE__{} = state, id, %Range{} = range, module) when is_list(module) do + scope = + state + |> current_scope() + |> Scope.new(id, range, module) + + push_scope(state, scope) + end + + def push_scope_for(%__MODULE__{} = state, quoted, %Range{} = range, module) do + module = module || current_module(state) + + id = Analysis.scope_id(quoted) + push_scope(state, id, range, module) + end + + def push_scope_for(%__MODULE__{} = state, quoted, module) do + range = get_range(quoted, state.document) + push_scope_for(state, quoted, range, module) + end + + def maybe_push_scope_for(%__MODULE__{} = state, quoted) do + case get_range(quoted, state.document) do + %Range{} = range -> + push_scope_for(state, quoted, range, nil) + + nil -> + state + end + end + + def pop_scope(%__MODULE__{scopes: [scope | rest]} = state) do + %__MODULE__{state | scopes: rest, visited: Map.put(state.visited, scope.id, scope)} + end + + def push_alias(%__MODULE__{} = state, %Alias{} = alias) do + update_current_scope(state, fn %Scope{} = scope -> + [prefix | rest] = alias.module + + alias = + case Scope.fetch_alias_with_prefix(scope, prefix) do + {:ok, %Alias{} = existing_alias} -> + %Alias{alias | module: existing_alias.module ++ rest} + + :error -> + alias + end + + Map.update!(scope, :aliases, &[alias | &1]) + end) + end + + def push_import(%__MODULE__{} = state, %Import{} = import) do + update_current_scope(state, fn %Scope{} = scope -> + Map.update!(scope, :imports, &[import | &1]) + end) + end + + def push_require(%__MODULE__{} = state, %Require{} = require) do + update_current_scope(state, fn %Scope{} = scope -> + Map.update!(scope, :requires, &[require | &1]) + end) + end + + def push_use(%__MODULE__{} = state, %Use{} = use) do + update_current_scope(state, fn %Scope{} = scope -> + Map.update!(scope, :uses, &[use | &1]) + end) + end + + defp update_current_scope(%__MODULE__{} = state, fun) do + update_in(state, [Access.key(:scopes), Access.at!(0)], fn %Scope{} = scope -> + fun.(scope) + end) + end + + # if there is no code after a stab operator, then the end position + # it gives us can be in the middle of the line, as it's derived from + # the start of some entity on the last line. 
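  # For example (a hedged illustration), given
  #
  #     fn x ->
  #       x + 1
  #     end
  #
  # the end position reported for the `->` clause can land within `x + 1` on
  # the clause's final line rather than after the whole block.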
So we increment the line + # by one, and that should be the end of the stab block + defp get_range({:->, _, _} = quoted, %Document{} = document) do + start_pos = get_start_position(quoted) + + case Sourceror.get_end_position(quoted, line: -1, column: -1) do + [line: -1, column: -1] -> + nil + + [line: line, column: 1] -> + Range.new( + Position.new(document, start_pos[:line], start_pos[:column]), + Position.new(document, line + 1, 1) + ) + + [line: line, column: _] -> + Range.new( + Position.new(document, start_pos[:line], start_pos[:column]), + Position.new(document, line + 1, 1) + ) + end + end + + defp get_range(quoted, %Document{} = document) do + start_pos = get_start_position(quoted) + + case Sourceror.get_end_position(quoted, line: -1, column: -1) do + [line: -1, column: -1] -> + nil + + [line: end_line, column: end_column] -> + Range.new( + Position.new(document, start_pos[:line], start_pos[:column]), + Position.new(document, end_line, end_column) + ) + end + end + + defp global_range(%Document{} = document) do + num_lines = Document.size(document) + + Range.new( + Position.new(document, 1, 1), + Position.new(document, num_lines + 1, 1) + ) + end + + defp get_start_position({_, metadata, _} = ast) do + case Keyword.fetch(metadata, :do) do + {:ok, [line: line, column: column]} -> + # add 2 to position us after the do keyword + [line: line, column: column + 2] + + _ -> + Sourceror.get_start_position(ast) + end + end + + defp get_start_position({block_meta, _rest}) do + case Sourceror.get_start_position(block_meta) do + [line: line, column: column] -> + [line: line, column: column + 2] + + other -> + other + end + end +end diff --git a/apps/common/lib/lexical/ast/analysis/use.ex b/apps/common/lib/lexical/ast/analysis/use.ex new file mode 100644 index 000000000..e98c10452 --- /dev/null +++ b/apps/common/lib/lexical/ast/analysis/use.ex @@ -0,0 +1,10 @@ +defmodule Lexical.Ast.Analysis.Use do + alias Lexical.Ast + alias Lexical.Document + defstruct [:module, :range, :opts] + + def new(%Document{} = document, ast, module, opts) do + range = Ast.Range.get(ast, document) + %__MODULE__{range: range, module: module, opts: opts} + end +end diff --git a/apps/common/lib/lexical/ast/detection.ex b/apps/common/lib/lexical/ast/detection.ex new file mode 100644 index 000000000..b248b9287 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection.ex @@ -0,0 +1,109 @@ +defmodule Lexical.Ast.Detection do + @moduledoc """ + A behavior for context detection + + A context recognizer can recognize the type of code at a current position. + It is useful for identifying the "part of speech" of a position. + + Note: a given context might be detected by more than one module. 
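  A hedged sketch of a detector built on this behaviour (the module name and
  the always-false logic are illustrative only):

      defmodule MyApp.Detection.Never do
        alias Lexical.Ast.Analysis
        alias Lexical.Document.Position

        use Lexical.Ast.Detection

        @impl Lexical.Ast.Detection
        def detected?(%Analysis{} = _analysis, %Position{} = _position), do: false
      end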
+ """ + + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Document.Position + alias Lexical.Document.Range + + @doc """ + Returns true if the given position is detected by the current module + """ + @callback detected?(Analysis.t(), Position.t()) :: boolean() + + defmacro __using__(_) do + quote do + @behaviour unquote(__MODULE__) + import unquote(__MODULE__) + end + end + + def ancestor_is_def?(%Analysis{} = analysis, %Position{} = position) do + analysis + |> Ast.cursor_path(position) + |> Enum.any?(fn + {:def, _, _} -> + true + + {:defp, _, _} -> + true + + _ -> + false + end) + end + + @type_keys [:type, :typep, :opaque] + def ancestor_is_type?(%Analysis{} = analysis, %Position{} = position) do + ancestor_is_attribute?(analysis, position, @type_keys) + end + + def ancestor_is_spec?(%Analysis{} = analysis, %Position{} = position) do + ancestor_is_attribute?(analysis, position, :spec) + end + + def ancestor_is_attribute?(%Analysis{} = analysis, %Position{} = position, attr_name \\ nil) do + analysis + |> Ast.cursor_path(position) + |> Enum.any?(fn + {:@, metadata, [{found_name, _, _}]} -> + # single line attribute + attribute_names_match?(attr_name, found_name) and cursor_in_range?(position, metadata) + + {:__block__, _, [{:@, metadata, [{found_name, _, _}]}, _]} -> + # multi-line attribute + attribute_names_match?(attr_name, found_name) and cursor_in_range?(position, metadata) + + _ -> + false + end) + end + + def fetch_range(ast) do + fetch_range(ast, 0, 0) + end + + def fetch_range(ast, start_offset, end_offset) do + case Sourceror.get_range(ast) do + %{start: [line: start_line, column: start_col], end: [line: end_line, column: end_col]} -> + range = + Range.new( + %Position{line: start_line, character: start_col + start_offset}, + %Position{line: end_line, character: end_col + end_offset} + ) + + {:ok, range} + + nil -> + :error + end + end + + defp cursor_in_range?(position, metadata) do + expression_end_line = get_in(metadata, [:end_of_expression, :line]) + expression_end_column = get_in(metadata, [:end_of_expression, :column]) + cursor_line = position.line + cursor_column = position.character + + if cursor_line == expression_end_line do + expression_end_column > cursor_column + else + cursor_line < expression_end_line + end + end + + defp attribute_names_match?(expected_names, actual_name) + when is_list(expected_names), + do: actual_name in expected_names + + defp attribute_names_match?(nil, _), do: true + defp attribute_names_match?(same, same), do: true + defp attribute_names_match?(_, _), do: false +end diff --git a/apps/common/lib/lexical/ast/detection/alias.ex b/apps/common/lib/lexical/ast/detection/alias.ex new file mode 100644 index 000000000..72cac9ef6 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/alias.ex @@ -0,0 +1,64 @@ +defmodule Lexical.Ast.Detection.Alias do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens + alias Lexical.Document.Position + + use Detection + + @doc """ + Recognizes an alias at the current position + + Aliases are complicated, especially if we're trying to find out if we're in + them from the current cursor position moving backwards. + I'll try to describe the state machine below. + First off, if we're outside of a } on the current line, we cannot be in an alias, so that + halts with false. + Similarly an alias on the current line is also simple, we just backtrack until we see the alias identifier. 
+ However, if we're on the current line, and see an EOL, we set that as our accumulator, then we get + to the previous line, we see if it ends in a comma. If not, we can't be in an alias. If it does, we keep + backtracking until we hit the alias keyword. + So basically, if we hit an EOL, and the previous token isn't an open curly or a comma, we stop, otherwise + we backtrack until we hit the alias keyword + """ + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + result = + analysis.document + |> Tokens.prefix_stream(position) + |> Stream.with_index() + |> Enum.reduce_while(false, fn + {{:curly, :"{", _}, _index}, :eol -> + {:cont, false} + + {{:comma, _, _}, _index}, :eol -> + {:cont, false} + + {{:eol, _, _}, _index}, _acc -> + {:cont, :eol} + + {{_, _, _}, _}, :eol -> + {:halt, false} + + {{:curly, :"}", _}, _index}, _ -> + {:halt, false} + + {{:identifier, ~c"alias", _}, 0}, _ -> + # there is nothing after the alias directive, so we're not + # inside the context *yet* + + {:halt, false} + + {{:identifier, ~c"alias", _}, _index}, _ -> + {:halt, true} + + _, _ -> + {:cont, false} + end) + + case result do + b when is_boolean(b) -> b + :eol -> false + end + end +end diff --git a/apps/common/lib/lexical/ast/detection/bitstring.ex b/apps/common/lib/lexical/ast/detection/bitstring.ex new file mode 100644 index 000000000..a03bbef3f --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/bitstring.ex @@ -0,0 +1,26 @@ +defmodule Lexical.Ast.Detection.Bitstring do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens + alias Lexical.Document + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + document = analysis.document + Document.fragment(document, Position.new(document, position.line, 1), position) + + document + |> Tokens.prefix_stream(position) + |> Enum.reduce_while( + false, + fn + {:operator, :">>", _}, _ -> {:halt, false} + {:operator, :"<<", _}, _ -> {:halt, true} + _, _ -> {:cont, false} + end + ) + end +end diff --git a/apps/common/lib/lexical/ast/detection/comment.ex b/apps/common/lib/lexical/ast/detection/comment.ex new file mode 100644 index 000000000..743d1cb18 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/comment.ex @@ -0,0 +1,8 @@ +defmodule Lexical.Ast.Detection.Comment do + alias Lexical.Ast.Analysis + alias Lexical.Document.Position + + def detected?(%Analysis{} = analysis, %Position{} = position) do + Analysis.commented?(analysis, position) + end +end diff --git a/apps/common/lib/lexical/ast/detection/directive.ex b/apps/common/lib/lexical/ast/detection/directive.ex new file mode 100644 index 000000000..76cde1736 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/directive.ex @@ -0,0 +1,21 @@ +defmodule Lexical.Ast.Detection.Directive do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Tokens + alias Lexical.Document.Position + + def detected?(%Analysis{} = analysis, %Position{} = position, directive_type) do + analysis.document + |> Tokens.prefix_stream(position) + |> Enum.to_list() + |> Enum.reduce_while(false, fn + {:identifier, ^directive_type, _}, _ -> + {:halt, true} + + {:eol, _, _}, _ -> + {:halt, false} + + _, _ -> + {:cont, false} + end) + end +end diff --git a/apps/common/lib/lexical/ast/detection/function_capture.ex b/apps/common/lib/lexical/ast/detection/function_capture.ex new file mode 100644 index 000000000..25ef50063 --- /dev/null +++ 
b/apps/common/lib/lexical/ast/detection/function_capture.ex @@ -0,0 +1,32 @@ +defmodule Lexical.Ast.Detection.FunctionCapture do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + analysis.document + |> Tokens.prefix_stream(position) + |> Enum.reduce_while(false, fn + {:paren, :")", _}, _ -> + {:halt, false} + + {:operator, :&, _}, _ -> + {:halt, true} + + {:int, _, _} = maybe_arity, _ -> + {:cont, maybe_arity} + + {:operator, :/, _}, {:int, _, _} -> + # if we encounter a trailing / in the prefix, the + # function capture is complete, and we're not inside it + {:halt, false} + + _, _ -> + {:cont, false} + end) + end +end diff --git a/apps/common/lib/lexical/ast/detection/import.ex b/apps/common/lib/lexical/ast/detection/import.ex new file mode 100644 index 000000000..181500027 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/import.ex @@ -0,0 +1,13 @@ +defmodule Lexical.Ast.Detection.Import do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Detection.Directive + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + Directive.detected?(analysis, position, ~c"import") + end +end diff --git a/apps/common/lib/lexical/ast/detection/module_attribute.ex b/apps/common/lib/lexical/ast/detection/module_attribute.ex new file mode 100644 index 000000000..32c3e0bf9 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/module_attribute.ex @@ -0,0 +1,16 @@ +defmodule Lexical.Ast.Detection.ModuleAttribute do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + ancestor_is_attribute?(analysis, position) + end + + def detected?(%Analysis{} = analysis, %Position{} = position, name) do + ancestor_is_attribute?(analysis, position, name) + end +end diff --git a/apps/common/lib/lexical/ast/detection/pipe.ex b/apps/common/lib/lexical/ast/detection/pipe.ex new file mode 100644 index 000000000..f24f674ba --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/pipe.ex @@ -0,0 +1,34 @@ +defmodule Lexical.Ast.Detection.Pipe do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + analysis.document + |> Tokens.prefix_stream(position) + |> Enum.to_list() + |> Enum.reduce_while(false, fn + {:identifier, _, _}, _ -> + {:cont, false} + + {:operator, :., _}, _ -> + {:cont, false} + + {:alias, _, _}, _ -> + {:cont, false} + + {:arrow_op, nil, _}, _ -> + {:halt, true} + + {:atom, _, _}, _ -> + {:cont, false} + + _, _acc -> + {:halt, false} + end) + end +end diff --git a/apps/common/lib/lexical/ast/detection/require.ex b/apps/common/lib/lexical/ast/detection/require.ex new file mode 100644 index 000000000..fa07d47ce --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/require.ex @@ -0,0 +1,13 @@ +defmodule Lexical.Ast.Detection.Require do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Detection.Directive + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + Directive.detected?(analysis, 
position, ~c"require") + end +end diff --git a/apps/common/lib/lexical/ast/detection/spec.ex b/apps/common/lib/lexical/ast/detection/spec.ex new file mode 100644 index 000000000..20309e8ae --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/spec.ex @@ -0,0 +1,12 @@ +defmodule Lexical.Ast.Detection.Spec do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + ancestor_is_spec?(analysis, position) + end +end diff --git a/apps/common/lib/lexical/ast/detection/string.ex b/apps/common/lib/lexical/ast/detection/string.ex new file mode 100644 index 000000000..abfb51337 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/string.ex @@ -0,0 +1,109 @@ +defmodule Lexical.Ast.Detection.String do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + alias Lexical.Document.Position + alias Lexical.Document.Range + + use Detection + + @string_sigils [ + :sigil_s, + :sigil_S + ] + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + case Ast.path_at(analysis, position) do + {:ok, path} -> + detect_string(path, position) + + _ -> + false + end + end + + defp detect_string(paths, %Position{} = position) do + {_, detected?} = + Macro.postwalk(paths, false, fn + ast, false -> + detected? = do_detect(ast, position) + {ast, detected?} + + ast, true -> + {ast, true} + end) + + detected? + end + + # a string literal + defp do_detect({:__block__, _, [literal]} = ast, %Position{} = position) + when is_binary(literal) do + case fetch_range(ast, 0, -1) do + {:ok, range} -> Range.contains?(range, position) + :error -> false + end + end + + # a possible string with interpolation + defp do_detect({:<<>>, meta, _} = ast, %Position{} = position) do + # this might also be a binary match / construction + Keyword.has_key?(meta, :delimiter) and detect_interpolation(ast, position) + end + + # String sigils + defp do_detect({sigil, _, _} = ast, %Position{} = position) + when sigil in @string_sigils do + case fetch_range(ast, 0, 0) do + {:ok, range} -> Range.contains?(range, position) + _ -> false + end + end + + defp do_detect(_, _), + do: false + + # a string with interpolation + defp detect_interpolation( + {:<<>>, meta, interpolations} = ast, + %Position{} = position + ) do + delimiter_length = + meta + |> Keyword.get(:delimiter, "\"") + |> String.length() + + with {:ok, string_range} <- fetch_range(ast, delimiter_length, -1), + {:ok, interpolation_ranges} <- collect_interpolation_ranges(interpolations) do + Range.contains?(string_range, position) and + not Enum.any?(interpolation_ranges, &Range.contains?(&1, position)) + else + _ -> + false + end + end + + defp collect_interpolation_ranges(interpolations) do + {_, result} = + Macro.prewalk(interpolations, {:ok, []}, fn + ast, :error -> + {ast, :error} + + {:"::", _, _} = interpolation, {:ok, acc} -> + case fetch_range(interpolation, 1, -1) do + {:ok, range} -> + {interpolation, {:ok, [range | acc]}} + + :error -> + {interpolation, :error} + end + + ast, acc -> + {ast, acc} + end) + + result + end +end diff --git a/apps/common/lib/lexical/ast/detection/struct_field_key.ex b/apps/common/lib/lexical/ast/detection/struct_field_key.ex new file mode 100644 index 000000000..31562c5f9 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/struct_field_key.ex @@ -0,0 +1,21 @@ +defmodule Lexical.Ast.Detection.StructFieldKey do + 
alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + cursor_path = Ast.cursor_path(analysis, position) + + match?( + # in the key position, the cursor will always be followed by the + # map node because, in any other case, there will minimally be a + # 2-element key-value tuple containing the cursor + [{:__cursor__, _, _}, {:%{}, _, _}, {:%, _, _} | _], + cursor_path + ) + end +end diff --git a/apps/common/lib/lexical/ast/detection/struct_field_value.ex b/apps/common/lib/lexical/ast/detection/struct_field_value.ex new file mode 100644 index 000000000..b7562f0a5 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/struct_field_value.ex @@ -0,0 +1,11 @@ +defmodule Lexical.Ast.Detection.StructFieldValue do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection.StructFieldKey + alias Lexical.Ast.Detection.StructFields + alias Lexical.Document.Position + + def detected?(%Analysis{} = analysis, %Position{} = position) do + StructFields.detected?(analysis, position) and + not StructFieldKey.detected?(analysis, position) + end +end diff --git a/apps/common/lib/lexical/ast/detection/struct_fields.ex b/apps/common/lib/lexical/ast/detection/struct_fields.ex new file mode 100644 index 000000000..5239e9a61 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/struct_fields.ex @@ -0,0 +1,15 @@ +defmodule Lexical.Ast.Detection.StructFields do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + analysis.document + |> Ast.cursor_path(position) + |> Enum.any?(&match?({:%, _, _}, &1)) + end +end diff --git a/apps/common/lib/lexical/ast/detection/struct_reference.ex b/apps/common/lib/lexical/ast/detection/struct_reference.ex new file mode 100644 index 000000000..2b5b86daf --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/struct_reference.ex @@ -0,0 +1,43 @@ +defmodule Lexical.Ast.Detection.StructReference do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + case Ast.cursor_context(analysis, position) do + {:ok, {:struct, []}} -> + false + + {:ok, {:struct, _}} -> + true + + {:ok, {:local_or_var, [?_ | _rest] = possible_module_struct}} -> + # a reference to `%__MODULE`, often in a function head, as in + # def foo(%__) + + starts_with_percent? = + analysis.document + |> Tokens.prefix_stream(position) + |> Enum.take(2) + |> Enum.any?(fn + {:percent, :%, _} -> true + _ -> false + end) + + starts_with_percent? 
and possible_dunder_module(possible_module_struct) and + (ancestor_is_def?(analysis, position) or ancestor_is_type?(analysis, position)) + + _ -> + false + end + end + + def possible_dunder_module(charlist) do + String.starts_with?("__MODULE__", to_string(charlist)) + end +end diff --git a/apps/common/lib/lexical/ast/detection/type.ex b/apps/common/lib/lexical/ast/detection/type.ex new file mode 100644 index 000000000..9dc9a55a8 --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/type.ex @@ -0,0 +1,12 @@ +defmodule Lexical.Ast.Detection.Type do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + ancestor_is_type?(analysis, position) + end +end diff --git a/apps/common/lib/lexical/ast/detection/use.ex b/apps/common/lib/lexical/ast/detection/use.ex new file mode 100644 index 000000000..2115dcf8f --- /dev/null +++ b/apps/common/lib/lexical/ast/detection/use.ex @@ -0,0 +1,13 @@ +defmodule Lexical.Ast.Detection.Use do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Detection + alias Lexical.Ast.Detection.Directive + alias Lexical.Document.Position + + use Detection + + @impl Detection + def detected?(%Analysis{} = analysis, %Position{} = position) do + Directive.detected?(analysis, position, ~c"use") + end +end diff --git a/apps/common/lib/lexical/ast/env.ex b/apps/common/lib/lexical/ast/env.ex index 26e6827c3..bd36c956d 100644 --- a/apps/common/lib/lexical/ast/env.ex +++ b/apps/common/lib/lexical/ast/env.ex @@ -1,55 +1,96 @@ defmodule Lexical.Ast.Env do @moduledoc """ Representation of the environment at a given position in a document. - - This module implements the `Lexical.Ast.Environment` behaviour. 
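  A hedged usage sketch, assuming `project`, `analysis` (as produced by
  `Lexical.Ast.analyze/1`), and a cursor `position` are in hand:

      alias Lexical.Ast.Env

      {:ok, env} = Env.new(project, analysis, position)
      Env.in_context?(env, :pipe)                     # precomputed context
      Env.in_context?(env, {:module_attribute, :doc}) # checked on demand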
""" - alias Future.Code, as: Code alias Lexical.Ast - alias Lexical.Ast.Environment + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Scope + alias Lexical.Ast.Detection + alias Lexical.Ast.Tokens alias Lexical.Document alias Lexical.Document.Position alias Lexical.Project defstruct [ :project, + :analysis, :document, :line, :prefix, :suffix, :position, - :zero_based_character + :position_module, + :zero_based_character, + :detected_contexts ] @type t :: %__MODULE__{ - project: Lexical.Project.t(), - document: Lexical.Document.t(), + project: Project.t(), + analysis: Analysis.t(), + document: Document.t(), + line: String.t(), prefix: String.t(), suffix: String.t(), - position: Lexical.Document.Position.t(), - zero_based_character: non_neg_integer() + position: Position.t(), + position_module: String.t(), + zero_based_character: non_neg_integer(), + detected_contexts: %{atom() => boolean()} } - @behaviour Environment - def new(%Project{} = project, %Document{} = document, %Position{} = cursor_position) do + @type token_value :: String.t() | charlist() | atom() + @type lexer_token :: {atom, token_value, {line :: pos_integer(), col :: pos_integer()}} + @type token_count :: pos_integer | :all + + @type context_type :: + :pipe + | :alias + | :struct_reference + | :struct_fields + | :struct_field_key + | :struct_field_value + | :function_capture + | :bitstring + | :comment + | :string + | :use + | :impl + | :spec + | :type + + def new(%Project{} = project, %Analysis{} = analysis, %Position{} = cursor_position) do zero_based_character = cursor_position.character - 1 - case Document.fetch_text_at(document, cursor_position.line) do + case Document.fetch_text_at(analysis.document, cursor_position.line) do {:ok, line} -> prefix = String.slice(line, 0, zero_based_character) - suffix = String.slice(line, zero_based_character..-1) + suffix = String.slice(line, zero_based_character..-1//1) + + analysis = Ast.reanalyze_to(analysis, cursor_position) + + cursor_module = + case Analysis.scopes_at(analysis, cursor_position) do + [%Scope{module: local_module} | _] -> + Enum.join(local_module, ".") + + [] -> + "" + end env = %__MODULE__{ - document: document, + analysis: analysis, + document: analysis.document, line: line, position: cursor_position, + position_module: cursor_module, prefix: prefix, project: project, suffix: suffix, zero_based_character: zero_based_character } + env = detect_contexts(env) + {:ok, env} _ -> @@ -57,367 +98,119 @@ defmodule Lexical.Ast.Env do end end - @impl Environment + @spec prefix_tokens(t, token_count) :: [lexer_token] def prefix_tokens(%__MODULE__{} = env, count \\ :all) do + stream = Tokens.prefix_stream(env.document, env.position) + case count do :all -> - prefix_token_stream(env) + stream count when is_integer(count) -> - env - |> prefix_token_stream() - |> Enum.take(count) + Enum.take(stream, count) end end - @impl Environment + @detectors %{ + :alias => Detection.Alias, + :behaviour => {Detection.ModuleAttribute, [:behaviour]}, + :bitstring => Detection.Bitstring, + :callback => {Detection.ModuleAttribute, [:callback]}, + :comment => Detection.Comment, + :doc => {Detection.ModuleAttribute, [:doc]}, + :function_capture => Detection.FunctionCapture, + :impl => {Detection.ModuleAttribute, [:impl]}, + :import => Detection.Import, + :macrocallback => {Detection.ModuleAttribute, [:macrocallback]}, + :moduledoc => {Detection.ModuleAttribute, [:moduledoc]}, + :pipe => Detection.Pipe, + :require => Detection.Require, + :spec => Detection.Spec, + :string => 
Detection.String, + :struct_field_key => Detection.StructFieldKey, + :struct_field_value => Detection.StructFieldValue, + :struct_fields => Detection.StructFields, + :struct_reference => Detection.StructReference, + :type => Detection.Type, + :use => Detection.Use + } + + def detect_contexts(%__MODULE__{} = env) do + detected_contexts = + Map.new(@detectors, fn + {context_name, {detector, extra_args}} -> + {context_name, apply(detector, :detected?, [env.analysis, env.position | extra_args])} + + {context_name, detector} -> + {context_name, detector.detected?(env.analysis, env.position)} + end) + + %__MODULE__{env | detected_contexts: detected_contexts} + end + + @spec in_context?(t, context_type) :: boolean() + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity def in_context?(%__MODULE__{} = env, context_type) do - do_in_context?(env, context_type) + analysis = env.analysis + position = env.position + + case context_type do + {:module_attribute, name} -> + Detection.ModuleAttribute.detected?(analysis, position, name) + + context_type -> + Map.get(env.detected_contexts, context_type) + end end - @impl Environment + @spec empty?(String.t()) :: boolean() def empty?("") do true end - @impl Environment def empty?(string) when is_binary(string) do String.trim(string) == "" end - defp do_in_context?(env, :function_capture) do - env - |> prefix_token_stream() - |> Enum.reduce_while(false, fn - {:paren, :")", _}, _ -> - {:halt, false} - - {:operator, :&, _}, _ -> - {:halt, true} - - {:int, _, _} = maybe_arity, _ -> - {:cont, maybe_arity} - - {:operator, :/, _}, {:int, _, _} -> - # if we encounter a trailing / in the prefix, the - # function capture is complete, and we're not inside it - {:halt, false} - - _, _ -> - {:cont, false} - end) - end - - defp do_in_context?(env, :struct_reference) do - case cursor_context(env) do - {:ok, _line, {:struct, _}} -> - true - - {:ok, _line, {:local_or_var, [?_ | _rest]}} -> - # a reference to `%__MODULE`, often in a function head, as in - # def foo(%__) - - starts_with_percent? = - env - |> prefix_tokens(2) - |> Enum.any?(fn - {:percent, :%, _} -> true - _ -> false - end) - - starts_with_percent? 
and (ancestor_is_def?(env) or ancestor_is_type?(env)) - - _ -> - false - end - end - - defp do_in_context?(env, :struct_fields) do - env.document - |> Ast.cursor_path(env.position) - |> Enum.any?(&match?({:%, _, _}, &1)) - end - - defp do_in_context?(env, :struct_field_key) do - cursor_path = Ast.cursor_path(env.document, env.position) - - match?( - # in the key position, the cursor will always be followed by the - # map node because, in any other case, there will minimally be a - # 2-element key-value tuple containing the cursor - [{:__cursor__, _, _}, {:%{}, _, _}, {:%, _, _} | _], - cursor_path - ) - end - - defp do_in_context?(env, :struct_field_value) do - do_in_context?(env, :struct_fields) and not do_in_context?(env, :struct_field_key) - end - - defp do_in_context?(env, :pipe) do - env - |> prefix_token_stream() - |> Enum.reduce_while(false, fn - {:identifier, _, _}, _ -> - {:cont, false} - - {:operator, :., _}, _ -> - {:cont, false} - - {:alias, _, _}, _ -> - {:cont, false} - - {:arrow_op, nil, _}, _ -> - {:halt, true} - - _x, _acc -> - {:halt, false} - end) - end - - defp do_in_context?(env, :bitstring) do - env - |> prefix_tokens(:all) - |> Enum.reduce_while( - false, - fn - {:operator, :">>", _}, _ -> {:halt, false} - {:operator, :"<<", _}, _ -> {:halt, true} - _, _ -> {:cont, false} - end - ) - end - - defp do_in_context?(env, :alias) do - # Aliases are complicated, especially if we're trying to find out if we're in - # them from the current cursor position moving backwards. - # I'll try to describe the state machine below. - # First off, if we're outside of a } on the current line, we cannot be in an alias, so that - # halts with false. - # Similarly an alias on the current line is also simple, we just backtrack until we see the alias identifier. - # However, if we're on the current line, and see an EOL, we set that as our accumulator, then we get - # to the previous line, we see if it ends in a comma. If not, we can't be in an alias. If it does, we keep - # backtracking until we hit the alias keyword. 
- # So basically, if we hit an EOL, and the previous token isn't an open curly or a comma, we stop, otherwise - # we backtrack until we hit the alias keyword - - env - |> prefix_token_stream() - |> Stream.with_index() - |> Enum.reduce_while(false, fn - {{:curly, :"{", _}, _index}, :eol -> - {:cont, false} - - {{:comma, _, _}, _index}, :eol -> - {:cont, false} - - {{:eol, _, _}, _index}, _acc -> - {:cont, :eol} - - {{_, _, _}, _}, :eol -> - {:halt, false} - - {{:curly, :"}", _}, _index}, _ -> - {:halt, false} - - {{:identifier, ~c"alias", _}, 0}, _ -> - # there is nothing after the alias directive, so we're not - # inside the context *yet* - {:halt, false} - - {{:identifier, ~c"alias", _}, _index}, _ -> - {:halt, true} - - _, _ -> - {:cont, false} - end) - end - - defp do_in_context?(env, :import) do - in_directive?(env, ~c"import") - end - - defp do_in_context?(env, :use) do - in_directive?(env, ~c"use") - end - - defp do_in_context?(env, :require) do - in_directive?(env, ~c"require") - end - - defp in_directive?(%__MODULE__{} = env, context_name) do - env - |> prefix_token_stream() - |> Enum.reduce_while(false, fn - {:identifier, ^context_name, _}, _ -> - {:halt, true} - - {:eol, _, _}, _ -> - {:halt, false} - - _, _ -> - {:cont, false} - end) - end - - defp cursor_context(%__MODULE__{} = env) do - with {:ok, line} <- Document.fetch_text_at(env.document, env.position.line) do - fragment = String.slice(line, 0..(env.zero_based_character - 1)) - {:ok, line, Code.Fragment.cursor_context(fragment)} - end - end - - # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity - defp normalize_token(token) do - case token do - :eol -> - {:eol, ~c"\n", []} - - {:bin_string, context, [string_value]} -> - {:string, string_value, to_position(context)} - - {:bin_string, context, interpolated} -> - {:interpolated_string, interpolated, to_position(context)} - - {:capture_op, context, value} -> - {:operator, value, to_position(context)} - - {:dual_op, context, value} -> - {:operator, value, to_position(context)} - - {:type_op, context, _value} -> - {:operator, :"::", to_position(context)} - - {:mult_op, context, operator} -> - {:operator, operator, to_position(context)} - - {:in_op, context, _} -> - {:operator, :in, to_position(context)} - - {:operator, context, value} -> - {:operator, value, to_position(context)} - - {:sigil, {line, column, _}, sigil_char, _sigil_context, _, _opts, delim} -> - # NOTE: should we need to return context too? - {:sigil, [sigil_char], {line, column}, delim} - - {type, {line, column, nil}, value} when is_list(value) -> - {normalize_type(type), value, {line, column}} - - {type, {line, column, token_value}, _} -> - {normalize_type(type), token_value, {line, column}} - - {type, context, value} when is_atom(value) -> - {normalize_type(type), value, to_position(context)} - - {operator, context} -> - {map_operator(operator), operator, to_position(context)} - end + @doc """ + Returns the position of the next non-whitespace token on a line after `env.position`. + """ + @spec next_significant_position(t) :: {:ok, Position.t()} | :error + def next_significant_position(%__MODULE__{} = env) do + find_significant_position(env.document, env.position.line + 1, 1) end - defp to_position({line, column, _}) do - {line, column} + @doc """ + Returns the position of the next non-whitespace token on a line before `env.position`. 
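  A hedged sketch of both helpers against a document reading

      1 | def foo do
      2 |
      3 |     :ok
      4 | end

  with `env.position` on line 2: `next_significant_position/1` returns
  `{:ok, %Position{line: 3, character: 5}}`, skipping the blank line, and
  `prev_significant_position/1` returns `{:ok, %Position{line: 1, character: 1}}`.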
+ """ + @spec prev_significant_position(t) :: {:ok, Position.t()} | :error + def prev_significant_position(%__MODULE__{} = env) do + find_significant_position(env.document, env.position.line - 1, -1) end - defp map_operator(:"("), do: :paren - defp map_operator(:")"), do: :paren - defp map_operator(:"{"), do: :curly - defp map_operator(:"}"), do: :curly - defp map_operator(:","), do: :comma - defp map_operator(:%{}), do: :map_new - defp map_operator(:%), do: :percent - defp map_operator(_), do: :operator - - defp normalize_type(:flt), do: :float - defp normalize_type(:bin_string), do: :string - defp normalize_type(type), do: type - - defp prefix_token_stream(%__MODULE__{} = env) do - init_function = fn -> - {env, ~c"", env.position.line} - end - - next_function = fn - {env, _, 0} -> - {:halt, env} - - {env, current_context, line_number} -> - case find_and_tokenize(env, line_number, current_context) do - {:ok, tokens, new_context} -> - prev_line_number = line_number - 1 - - tokens = - if prev_line_number > 0 do - tokens ++ [:eol] - else - tokens - end + defp find_significant_position(%Document{} = document, line, inc_by) do + case Document.fetch_text_at(document, line) do + {:ok, text} -> + case fetch_leading_whitespace_count(text) do + {:ok, count} -> + {:ok, Position.new(document, line, count + 1)} - {tokens, {env, new_context, prev_line_number}} - - :stop -> - {:halt, env} + :error -> + find_significant_position(document, line + inc_by, inc_by) end - end - - finalize_function = fn _ -> :ok end - - init_function - |> Stream.resource(next_function, finalize_function) - |> Stream.map(&normalize_token/1) - end - - defp find_and_tokenize(%__MODULE__{position: %{line: line_number}} = env, line_number, context) do - tokenize(env.prefix, line_number, context) - end - - defp find_and_tokenize(%__MODULE__{} = env, line_number, context) do - case Document.fetch_text_at(env.document, line_number) do - {:ok, line_text} -> - tokenize(line_text, line_number, context) :error -> - :stop + :error end end - defp tokenize(line_text, line_number, context) do - line_charlist = String.to_charlist(line_text) - current_context = line_charlist ++ context - - case :future_elixir_tokenizer.tokenize(current_context, line_number, 1, []) do - {:ok, _, _, _, tokens} -> - {:ok, Enum.reverse(tokens), ~c""} + defp fetch_leading_whitespace_count(string, count \\ 0) - {:error, {_, _, ~c"unexpected token: ", _}, _, _, _} -> - {:ok, [], ~c"\n" ++ current_context} - - {:error, _, _, _, tokens} -> - {:ok, tokens, ~c""} - end + defp fetch_leading_whitespace_count(<<" ", rest::binary>>, count) do + fetch_leading_whitespace_count(rest, count + 1) end - defp ancestor_is_def?(env) do - env.document - |> Ast.cursor_path(env.position) - |> Enum.any?(fn - {:def, _, _} -> - true - - {:defp, _, _} -> - true - - _ -> - false - end) - end - - defp ancestor_is_type?(env) do - env.document - |> Ast.cursor_path(env.position) - |> Enum.any?(fn - {:type, _, _} -> true - _ -> false - end) - end + defp fetch_leading_whitespace_count(<<>>, _count), do: :error + defp fetch_leading_whitespace_count(<<"\n" <> _::binary>>, _count), do: :error + defp fetch_leading_whitespace_count(<<_non_whitespace::binary>>, count), do: {:ok, count} end diff --git a/apps/common/lib/lexical/ast/environment.ex b/apps/common/lib/lexical/ast/environment.ex deleted file mode 100644 index 173841578..000000000 --- a/apps/common/lib/lexical/ast/environment.ex +++ /dev/null @@ -1,24 +0,0 @@ -defmodule Lexical.Ast.Environment do - @type t :: any - - @type maybe_binary :: binary 
| nil
-  @type token_value :: String.t() | charlist
-  @type lexer_token :: {atom, token_value}
-  @type token_count :: pos_integer | :all
-
-  @type context_type ::
-          :pipe
-          | :alias
-          | :struct_reference
-          | :struct_fields
-          | :struct_field_key
-          | :struct_field_value
-          | :function_capture
-          | :bitstring
-
-  @callback in_context?(t, context_type) :: boolean
-
-  @callback empty?(maybe_binary) :: boolean
-  @callback prefix_tokens(t) :: [lexer_token]
-  @callback prefix_tokens(t, token_count) :: [lexer_token]
-end
diff --git a/apps/common/lib/lexical/ast/module.ex b/apps/common/lib/lexical/ast/module.ex
index 20948809a..ea424f22d 100644
--- a/apps/common/lib/lexical/ast/module.ex
+++ b/apps/common/lib/lexical/ast/module.ex
@@ -28,4 +28,72 @@ defmodule Lexical.Ast.Module do
     |> inspect()
     |> name()
   end
+
+  @doc """
+  The local module name is the last part of a module name.
+
+  ## Examples:
+      iex> local_name('Lexical.Ast.Module')
+      "Module"
+  """
+  def local_name(entity) when is_list(entity) do
+    entity
+    |> to_string()
+    |> local_name()
+  end
+
+  def local_name(entity) when is_binary(entity) do
+    entity
+    |> String.split(".")
+    |> List.last()
+  end
+
+  @doc """
+  Splits a module into its parts, and also handles Erlang modules.
+
+  Module.split will explode violently when called on an Erlang module. This
+  implementation will tell you which kind of module it has split, and return the
+  pieces. You can also use the options to determine if the pieces are returned as
+  strings or atoms.
+
+  Options:
+  `as`: :atoms or :binaries. Default is :binaries. Determines what type the elements
+  of the returned list are.
+
+  Returns:
+  A tuple where the first element is either `:elixir` or `:erlang`, which tells you
+  the kind of module that has been split. The second element is a list of the
+  module's components. Note: Erlang modules will only ever have a single component.
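+
+  ## Examples (illustrative; derived from the implementation below):
+
+      iex> safe_split(Lexical.Document)
+      {:elixir, ["Lexical", "Document"]}
+
+      iex> safe_split(Lexical.Document, as: :atoms)
+      {:elixir, [:Lexical, :Document]}
+
+      iex> safe_split(:erlang)
+      {:erlang, ["erlang"]}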
+ """ + @type split_opt :: {:as, :binaries | :atoms} + @type split_opts :: [split_opt()] + @type split_return :: {:elixir | :erlang, [String.t()] | [atom()]} + + @spec safe_split(module()) :: split_return() + @spec safe_split(module(), split_opts()) :: split_return() + def safe_split(module, opts \\ []) + + def safe_split(module, opts) when is_atom(module) do + string_name = Atom.to_string(module) + + {type, split_module} = + case String.split(string_name, ".") do + ["Elixir" | rest] -> + {:elixir, rest} + + [_erlang_module] = module -> + {:erlang, module} + end + + split_module = + case Keyword.get(opts, :as, :binaries) do + :binaries -> + split_module + + :atoms -> + Enum.map(split_module, &String.to_atom/1) + end + + {type, split_module} + end end diff --git a/apps/common/lib/lexical/ast/range.ex b/apps/common/lib/lexical/ast/range.ex new file mode 100644 index 000000000..cb3afd715 --- /dev/null +++ b/apps/common/lib/lexical/ast/range.ex @@ -0,0 +1,48 @@ +defmodule Lexical.Ast.Range do + @moduledoc """ + Utilities for extracting ranges from ast nodes + """ + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + + @spec fetch(Macro.t(), Document.t()) :: {:ok, Range.t()} | :error + def fetch(ast, %Document{} = document) do + case Sourceror.get_range(ast) do + %{start: start_pos, end: end_pos} -> + [line: start_line, column: start_column] = start_pos + [line: end_line, column: end_column] = end_pos + + range = + Range.new( + Position.new(document, start_line, start_column), + Position.new(document, end_line, end_column) + ) + + {:ok, range} + + _ -> + :error + end + end + + @spec fetch!(Macro.t(), Document.t()) :: Range.t() + def fetch!(ast, %Document{} = document) do + case fetch(ast, document) do + {:ok, range} -> + range + + :error -> + raise ArgumentError, + message: "Could not get a range for #{inspect(ast)} in #{document.path}" + end + end + + @spec get(Macro.t(), Document.t()) :: Range.t() | nil + def get(ast, %Document{} = document) do + case fetch(ast, document) do + {:ok, range} -> range + :error -> nil + end + end +end diff --git a/apps/common/lib/lexical/ast/tokens.ex b/apps/common/lib/lexical/ast/tokens.ex new file mode 100644 index 000000000..f7bf30266 --- /dev/null +++ b/apps/common/lib/lexical/ast/tokens.ex @@ -0,0 +1,169 @@ +defmodule Lexical.Ast.Tokens do + alias Lexical.Document + alias Lexical.Document.Position + + @doc """ + Returns a stream of tokens starting at the given position and working backwards through + the document. 
+ """ + def prefix_stream(%Document{} = document, %Position{} = position) do + init_function = fn -> + {~c"", position.line} + end + + next_function = fn + {_, 0} -> + {:halt, []} + + {current_context, line_number} -> + case find_and_tokenize(document, position, line_number, current_context) do + {:ok, tokens, new_context} -> + prev_line_number = line_number - 1 + + tokens = + if prev_line_number > 0 do + tokens ++ [:eol] + else + tokens + end + + {tokens, {new_context, prev_line_number}} + + :stop -> + {:halt, []} + end + end + + finalize_function = fn _ -> :ok end + + init_function + |> Stream.resource(next_function, finalize_function) + |> Stream.map(&normalize_token/1) + end + + defp find_and_tokenize( + %Document{} = document, + %Position{line: line_number} = position, + line_number, + context + ) do + document + |> prefix(position) + |> tokenize(line_number, context) + end + + defp find_and_tokenize(%Document{} = document, %Position{}, line_number, context) do + case Document.fetch_text_at(document, line_number) do + {:ok, line_text} -> + tokenize(line_text, line_number, context) + + :error -> + :stop + end + end + + defp tokenize(line_text, line_number, context) do + line_charlist = String.to_charlist(line_text) + current_context = line_charlist ++ context + + case :future_elixir_tokenizer.tokenize(current_context, line_number, 1, []) do + {:ok, _, _, _, tokens, _} -> + {:ok, tokens, ~c""} + + {:error, {_, ~c"unexpected token: ", _}, _, _, _} -> + {:ok, [], ~c"\n" ++ current_context} + + {:error, _, _, _, tokens} -> + {:ok, tokens, ~c""} + end + end + + defp prefix(%Document{} = document, %Position{} = position) do + zero_based_character = position.character - 1 + {:ok, line_text} = Document.fetch_text_at(document, position.line) + String.slice(line_text, 0, zero_based_character) + end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp normalize_token(token) do + case token do + :eol -> + {:eol, ~c"\n", []} + + {:bin_string, context, [string_value]} -> + {:string, string_value, to_position(context)} + + {:bin_string, context, interpolated} -> + {:interpolated_string, interpolation_ranges(interpolated), to_position(context)} + + {:capture_op, context, value} -> + {:operator, value, to_position(context)} + + {:dual_op, context, value} -> + {:operator, value, to_position(context)} + + {:type_op, context, _value} -> + {:operator, :"::", to_position(context)} + + {:mult_op, context, operator} -> + {:operator, operator, to_position(context)} + + {:in_op, context, _} -> + {:operator, :in, to_position(context)} + + {:operator, context, value} -> + {:operator, value, to_position(context)} + + {:sigil, {line, column, _}, sigil_char, _sigil_context, _, _opts, delim} -> + # NOTE: should we need to return context too? 
+ {:sigil, [sigil_char], {line, column}, delim} + + {type, {line, column, token_value}, _} -> + {normalize_type(type), token_value, {line, column}} + + {type, context, value} when is_atom(value) -> + {normalize_type(type), value, to_position(context)} + + {operator, context} -> + {map_operator(operator), operator, to_position(context)} + end + end + + defp to_position({line, column, _}) do + {line, column} + end + + defp map_operator(:"("), do: :paren + defp map_operator(:")"), do: :paren + defp map_operator(:"{"), do: :curly + defp map_operator(:"}"), do: :curly + defp map_operator(:","), do: :comma + defp map_operator(:%{}), do: :map_new + defp map_operator(:%), do: :percent + defp map_operator(_), do: :operator + + defp normalize_type(:flt), do: :float + defp normalize_type(:bin_string), do: :string + defp normalize_type(type), do: type + + defp interpolation_ranges(interpolations) do + {_, ranges} = + Enum.reduce(interpolations, {{1, 1}, []}, fn + literal, {{line, column}, acc} when is_binary(literal) -> + end_pos = {line, column + String.length(literal)} + range = {{line, column}, end_pos} + {end_pos, [{:literal, literal, range} | acc]} + + {_, {end_line, end_column, _}, interp}, {_, acc} -> + start_pos = get_start_pos(interp) + range = {start_pos, {end_line, end_column}} + {{end_line, end_column}, [{:interpolation, interp, range} | acc]} + end) + + Enum.reverse(ranges) + end + + defp get_start_pos([{_, {start_line, start_column, _}, _} | _]) do + {start_line, start_column} + end +end diff --git a/apps/common/lib/lexical/code_unit.ex b/apps/common/lib/lexical/code_unit.ex index c311bb409..63ef7ba55 100644 --- a/apps/common/lib/lexical/code_unit.ex +++ b/apps/common/lib/lexical/code_unit.ex @@ -1,63 +1,40 @@ defmodule Lexical.CodeUnit do @moduledoc """ - Code unit and offset conversions - - The LSP protocol speaks in positions, which defines where something happens in a document. - Positions have a start and an end, which are defined as code unit _offsets_ from the beginning - of a line. this module helps to convert between utf8, which most of the world speaks - natively, and utf16, which has been forced upon us by microsoft. - - Converting between offsets and code units is 0(n), and allocations only happen if a - multi-byte character is detected, at which point, only that character is allocated. - This exploits the fact that most source code consists of ascii characters, with at best, - sporadic multi-byte characters in it. Thus, the vast majority of documents will not require - any allocations at all. - """ - @type utf8_code_unit :: non_neg_integer() - @type utf16_code_unit :: non_neg_integer() - @type utf8_offset :: non_neg_integer() - @type utf16_offset :: non_neg_integer() + Code unit and offset conversions. - @type error :: {:error, :misaligned} | {:error, :out_of_bounds} + LSP positions are encoded as UTF-16 code unit offsets from the beginning of a line, + while positions in Elixir are UTF-8 character positions (graphemes). This module + deals with converting between the two. + """ - # public + @type utf8_character_position :: non_neg_integer() + @type utf8_code_unit_offset :: non_neg_integer() + @type utf16_code_unit_offset :: non_neg_integer() - @doc """ - Converts a utf8 character offset into a utf16 character offset. This implementation - clamps the maximum size of an offset so that any initial character position can be - passed in and the offset returned will reflect the end of the line. 
- """ - @spec utf16_offset(String.t(), utf8_offset()) :: utf16_offset() - def utf16_offset(binary, character_position) do - do_utf16_offset(binary, character_position, 0) - end + @type error :: {:error, :misaligned} | {:error, :out_of_bounds} @doc """ - Converts a utf16 character offset into a utf8 character offset. This implementation - clamps the maximum size of an offset so that any initial character position can be - passed in and the offset returned will reflect the end of the line. + Converts a 0-based UTF-8 character position to a UTF-16 code unit offset. """ - @spec utf8_offset(String.t(), utf16_offset()) :: utf8_offset() - def utf8_offset(binary, character_position) do - do_utf8_offset(binary, character_position, 1) + @spec utf8_position_to_utf16_offset(String.t(), utf8_character_position()) :: + utf16_code_unit_offset() + def utf8_position_to_utf16_offset(binary, character_position) do + binary + |> String.slice(0, character_position) + |> :unicode.characters_to_binary(:utf8, :utf16) + |> byte_size() + |> div(2) end @doc """ - Converts a utf16 position into a corresponding utf8 position + Converts a 0-based UTF-16 code unit offset to a UTF-8 code unit offset. """ - @spec to_utf8(String.t(), utf16_code_unit()) :: {:ok, utf8_code_unit()} | error - def to_utf8(binary, utf16_unit) do + @spec utf16_offset_to_utf8_offset(String.t(), utf16_code_unit_offset()) :: + {:ok, utf8_code_unit_offset()} | error + def utf16_offset_to_utf8_offset(binary, utf16_unit) do do_to_utf8(binary, utf16_unit, 1) end - @doc """ - Converts a utf8 position into a corresponding utf16 position - """ - @spec to_utf16(String.t(), utf8_code_unit()) :: {:ok, utf16_code_unit()} | error - def to_utf16(binary, utf16_unit) do - do_to_utf16(binary, utf16_unit, 0) - end - @doc """ Counts the number of utf16 code units in the binary """ @@ -99,70 +76,8 @@ defmodule Lexical.CodeUnit do do_count_utf8(rest, count + increment) end - defp do_utf16_offset(_, 0, offset) do - offset - end - - defp do_utf16_offset(<<>>, _, offset) do - # this clause pegs the offset at the end of the string - # no matter the character index - offset - end - - defp do_utf16_offset(<>, remaining, offset) when c < 128 do - do_utf16_offset(rest, remaining - 1, offset + 1) - end - - defp do_utf16_offset(<>, remaining, offset) do - increment = code_unit_size(c, :utf16) - do_utf16_offset(rest, remaining - 1, offset + increment) - end - - defp do_to_utf16(_, 0, utf16_unit) do - {:ok, utf16_unit} - end - - defp do_to_utf16(_, utf8_unit, _) when utf8_unit < 0 do - {:error, :misaligned} - end - - defp do_to_utf16(<<>>, _remaining, _utf16_unit) do - {:error, :out_of_bounds} - end - - defp do_to_utf16(<>, utf8_unit, utf16_unit) when c < 128 do - do_to_utf16(rest, utf8_unit - 1, utf16_unit + 1) - end - - defp do_to_utf16(<>, utf8_unit, utf16_unit) do - increment = code_unit_size(c, :utf16) - decrement = code_unit_size(c, :utf8) - - do_to_utf16(rest, utf8_unit - decrement, utf16_unit + increment) - end - # UTF-8 - defp do_utf8_offset(_, 0, offset) do - offset - end - - defp do_utf8_offset(<<>>, _, offset) do - # this clause pegs the offset at the end of the string - # no matter the character index - offset - end - - defp do_utf8_offset(<>, remaining, offset) when c < 128 do - do_utf8_offset(rest, remaining - 1, offset + 1) - end - - defp do_utf8_offset(<>, remaining, offset) do - increment = code_unit_size(c, :utf8) - decrement = code_unit_size(c, :utf16) - do_utf8_offset(rest, remaining - decrement, offset + increment) - end - defp do_to_utf8(_, 0, utf8_unit) do 
{:ok, utf8_unit} end diff --git a/apps/common/lib/lexical/completion/builder.ex b/apps/common/lib/lexical/completion/builder.ex deleted file mode 100644 index 10a02ba44..000000000 --- a/apps/common/lib/lexical/completion/builder.ex +++ /dev/null @@ -1,98 +0,0 @@ -defmodule Lexical.Completion.Builder do - alias Lexical.Ast.Environment - - @type insert_text_format :: :plain_text | :snippet - - @type completion_item_kind :: - :text - | :method - | :function - | :constructor - | :field - | :variable - | :class - | :interface - | :module - | :property - | :unit - | :value - | :enum - | :keyword - | :snippet - | :color - | :file - | :reference - | :folder - | :enum_member - | :constant - | :struct - | :event - | :operator - | :type_parameter - - @type completion_item_tag :: :deprecated - - @type item_opt :: - {:deprecated, boolean} - | {:detail, String.t()} - | {:documentation, String.t()} - | {:filter_text, String.t()} - | {:insert_text, String.t()} - | {:kind, completion_item_kind} - | {:label, String.t()} - | {:preselect, boolean()} - | {:sort_text, String.t()} - | {:tags, [completion_item_tag]} - - @type item_opts :: [item_opt] - - @type maybe_string :: String.t() | nil - - @opaque translated_item :: %{ - __struct__: module(), - detail: maybe_string(), - documentation: maybe_string(), - filter_text: maybe_string(), - insert_text: String.t(), - kind: completion_item_kind(), - label: String.t(), - preselect: boolean | nil, - sort_text: maybe_string(), - tags: [completion_item_tag] | nil - } - - @type t :: module() - - @type result :: translated_item | :skip - - @type line_range :: {start_character :: pos_integer, end_character :: pos_integer} - - @callback snippet(Environment.t(), String.t()) :: translated_item() - @callback snippet(Environment.t(), String.t(), item_opts) :: translated_item() - - @callback plain_text(Environment.t(), String.t()) :: translated_item() - @callback plain_text(Environment.t(), String.t(), item_opts) :: translated_item() - - @callback text_edit(Environment.t(), String.t(), line_range) :: translated_item() - @callback text_edit(Environment.t(), String.t(), line_range, item_opts) :: translated_item() - - @callback text_edit_snippet(Environment.t(), String.t(), line_range) :: translated_item() - @callback text_edit_snippet(Environment.t(), String.t(), line_range, item_opts) :: - translated_item() - - @callback fallback(any, any) :: any - - @doc """ - Boosts a translated item. - - Provides the ability to boost the relevance of an item above its peers. - The boost is hierarchical, and split into a local boost and a global boost. - Use the local boost to increase (or decrease) the prominence of individual functions or modules relative to another - item of the same type. For example, you can use the local boost to increase the prominence of test functions inside of test files. - Use the global boost to boost a certain kind of item above other kinds. For example, modules are sorted - above functions, so they carry a global boost of 2, which will put them above functions, which have no global boost. 
- """ - @callback boost(translated_item, local_boost :: 0..9, global_boost :: 0..9) :: translated_item - @callback boost(translated_item, local_bost :: 0..9) :: translated_item - @callback boost(translated_item) :: translated_item -end diff --git a/apps/common/lib/lexical/completion/translatable.ex b/apps/common/lib/lexical/completion/translatable.ex deleted file mode 100644 index a4d792f71..000000000 --- a/apps/common/lib/lexical/completion/translatable.ex +++ /dev/null @@ -1,16 +0,0 @@ -defprotocol Lexical.Completion.Translatable do - alias Lexical.Ast.Environment - alias Lexical.Completion.Builder - - @type t :: any() - - @fallback_to_any true - @spec translate(t(), Builder.t(), Environment.t()) :: Builder.result() - def translate(item, builder, env) -end - -defimpl Lexical.Completion.Translatable, for: Any do - def translate(_any, _builder, _environment) do - :skip - end -end diff --git a/apps/common/lib/lexical/identifier.ex b/apps/common/lib/lexical/identifier.ex new file mode 100644 index 000000000..a953458dc --- /dev/null +++ b/apps/common/lib/lexical/identifier.ex @@ -0,0 +1,27 @@ +defmodule Lexical.Identifier do + @doc """ + Returns the next globally unique identifier. + Raises a MatchError if this cannot be computed. + """ + def next_global! do + {:ok, next_id} = Snowflake.next_id() + next_id + end + + def to_unix(id) do + Snowflake.Util.real_timestamp_of_id(id) + end + + def to_datetime(id) do + id + |> to_unix() + |> DateTime.from_unix!(:millisecond) + end + + def to_erl(id) do + %DateTime{year: year, month: month, day: day, hour: hour, minute: minute, second: second} = + to_datetime(id) + + {{year, month, day}, {hour, minute, second}} + end +end diff --git a/apps/common/lib/lexical/vm/versions.ex b/apps/common/lib/lexical/vm/versions.ex index 32e305280..ef66576c5 100644 --- a/apps/common/lib/lexical/vm/versions.ex +++ b/apps/common/lib/lexical/vm/versions.ex @@ -33,7 +33,7 @@ defmodule Lexical.VM.Versions do This function uses the code server to find `.elixir` and `.erlang` files in the code path. Each of these files represent the version of the runtime the artifact was compiled with. 
""" - @spec compiled() :: {:ok, t} | {:error, atom()} + @spec compiled() :: {:ok, t} | {:error, term()} def compiled do with {:ok, elixir_path} <- code_find_file(version_file(:elixir)), {:ok, erlang_path} <- code_find_file(version_file(:erlang)), @@ -64,7 +64,7 @@ defmodule Lexical.VM.Versions do |> Path.dirname() |> compatible?() - :error -> + _ -> false end end @@ -196,20 +196,16 @@ defmodule Lexical.VM.Versions do Enum.join(normalized, ".") end - require Logger - defp code_find_file(file_name) when is_binary(file_name) do file_name |> String.to_charlist() |> code_find_file() end - defp code_find_file(file_name) do - Logger.info("file name is #{file_name}") - + defp code_find_file(file_name) when is_list(file_name) do case :code.where_is_file(file_name) do :non_existing -> - :error + {:error, {:file_missing, file_name}} path -> {:ok, List.to_string(path)} diff --git a/apps/common/mix.exs b/apps/common/mix.exs index 56901857a..7eea23ad9 100644 --- a/apps/common/mix.exs +++ b/apps/common/mix.exs @@ -4,7 +4,7 @@ defmodule Common.MixProject do def project do [ app: :common, - version: "0.3.0", + version: "0.5.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", @@ -12,7 +12,8 @@ defmodule Common.MixProject do elixir: "~> 1.13", elixirc_paths: elixirc_paths(Mix.env()), start_permanent: Mix.env() == :prod, - deps: deps() + deps: deps(), + compilers: [:yecc] ++ Mix.compilers() ] end @@ -33,7 +34,9 @@ defmodule Common.MixProject do defp deps do [ {:lexical_shared, path: "../../projects/lexical_shared"}, - {:sourceror, "~> 0.14.0"}, + {:lexical_test, path: "../../projects/lexical_test", only: :test}, + {:snowflake, "~> 1.0"}, + {:sourceror, "~> 1.4"}, {:stream_data, "~> 0.6", only: [:test], runtime: false}, {:patch, "~> 0.12", only: [:test], optional: true, runtime: false} ] diff --git a/apps/common/src/future_elixir.erl b/apps/common/src/future_elixir.erl index 3ad5f5ba4..4580963f3 100644 --- a/apps/common/src/future_elixir.erl +++ b/apps/common/src/future_elixir.erl @@ -1,19 +1,19 @@ -%% Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/src/elixir.erl -%% And I changed string_to_tokens/5 to use the future_elixir_tokenizer module -%% and tokens_to_quoted/3 need to use the future_elixir_parser module +%% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir.erl %% Main entry point for Elixir functions. All of those functions are %% private to the Elixir compiler and reserved to be used by Elixir only. -module(future_elixir). -behaviour(application). --export([start_cli/0, start/0, start_iex/0]). +-export([start_cli/0, start/0]). -export([start/2, stop/1, config_change/3]). -export([ string_to_tokens/5, tokens_to_quoted/3, 'string_to_quoted!'/5, env_for_eval/1, quoted_to_erl/2, eval_forms/3, eval_quoted/3, - eval_quoted/4 + eval_quoted/4, eval_local_handler/2, eval_external_handler/3, + format_token_error/1 ]). -include("future_elixir.hrl"). -define(system, 'Elixir.System'). +-define(elixir_eval_env, {elixir, eval_env}). 
%% Top level types %% TODO: Remove char_list type on v2.0 @@ -67,7 +67,7 @@ start(_Type, _Args) -> Tokenizer = case code:ensure_loaded('Elixir.String.Tokenizer') of {module, Mod} -> Mod; - _ -> elixir_tokenizer + _ -> future_elixir_tokenizer end, URIConfig = [ @@ -92,7 +92,7 @@ start(_Type, _Args) -> {ignore_already_consolidated, false}, {ignore_module_conflict, false}, {on_undefined_variable, raise}, - {parser_options, []}, + {parser_options, [{columns, true}]}, {debug_info, true}, {warnings_as_errors, false}, {relative_paths, true}, @@ -141,10 +141,10 @@ preload_common_modules() -> parse_otp_release() -> %% Whenever we change this check, we should also change Makefile. case string:to_integer(erlang:system_info(otp_release)) of - {Num, _} when Num >= 24 -> + {Num, _} when Num >= 25 -> Num; _ -> - io:format(standard_error, "ERROR! Unsupported Erlang/OTP version, expected Erlang/OTP 24+~n", []), + io:format(standard_error, "ERROR! Unsupported Erlang/OTP version, expected Erlang/OTP 25+~n", []), erlang:halt(1) end. @@ -178,6 +178,19 @@ check_file_encoding(Encoding) -> end. %% Boot and process given options. Invoked by Elixir's script. +%% TODO: Delete prim_tty branches on Erlang/OTP 26. + +start() -> + case code:ensure_loaded(prim_tty) of + {module, _} -> + user_drv:start(#{initial_shell => iex:shell()}); + {error, _} -> + case init:get_argument(elixir_root) of + {ok, [[Root]]} -> code:add_patha(Root ++ "/iex/ebin"); + _ -> ok + end, + 'Elixir.IEx.CLI':deprecated() + end. start_cli() -> {ok, _} = application:ensure_all_started(elixir), @@ -192,32 +205,7 @@ start_cli() -> {error, _} -> ok end, - 'Elixir.Kernel.CLI':main(init:get_plain_arguments()), - elixir_config:booted(). - -%% TODO: Delete prim_tty branches and -user on Erlang/OTP 26. -start() -> - case code:ensure_loaded(prim_tty) of - {module, _} -> - user_drv:start(#{initial_shell => noshell}); - {error, _} -> - case init:get_argument(elixir_root) of - {ok, [[Root]]} -> code:add_patha(Root ++ "/iex/ebin"); - _ -> ok - end, - 'Elixir.IEx.CLI':deprecated() - end. -start_iex() -> - case code:ensure_loaded(prim_tty) of - {module, _} -> - spawn(fun() -> - elixir_config:wait_until_booted(), - (shell:whereis() =:= undefined) andalso 'Elixir.IEx':cli() - end); - - {error, _} -> - ok - end. + 'Elixir.Kernel.CLI':main(init:get_plain_arguments()). %% EVAL HOOKS @@ -358,8 +346,27 @@ eval_forms(Tree, Binding, OrigE, Opts) -> _ -> [Erl] end, - ExternalHandler = eval_external_handler(NewE), - {value, Value, NewBinding} = erl_eval:exprs(Exprs, ErlBinding, none, ExternalHandler), + %% We use remote names so eval works across Elixir versions. + LocalHandler = {value, fun ?MODULE:eval_local_handler/2}, + ExternalHandler = {value, fun ?MODULE:eval_external_handler/3}, + + {value, Value, NewBinding} = + try + %% ?elixir_eval_env is used by the external handler. + %% + %% The reason why we use the process dictionary to pass the environment + %% is because we want to avoid passing closures to erl_eval, as that + %% would effectively tie the eval code to the Elixir version and it is + %% best if it depends solely on Erlang/OTP. + %% + %% The downside is that functions that escape the eval context will no + %% longer have the original environment they came from. 
+ erlang:put(?elixir_eval_env, NewE), + erl_eval:exprs(Exprs, ErlBinding, LocalHandler, ExternalHandler) + after + erlang:erase(?elixir_eval_env) + end, + PruneBefore = if Prune -> length(Binding); true -> -1 end, {DumpedBinding, DumpedVars} = @@ -368,54 +375,63 @@ eval_forms(Tree, Binding, OrigE, Opts) -> {Value, DumpedBinding, NewE#{versioned_vars := DumpedVars}} end. -%% TODO: Remove conditional once we require Erlang/OTP 25+. --if(?OTP_RELEASE >= 25). -eval_external_handler(Env) -> - Fun = fun(Ann, FunOrModFun, Args) -> - try - case FunOrModFun of - {Mod, Fun} -> apply(Mod, Fun, Args); - Fun -> apply(Fun, Args) - end - catch - Kind:Reason:Stacktrace -> - %% Take everything up to the Elixir module - Pruned = - lists:takewhile(fun - ({elixir,_,_,_}) -> false; - (_) -> true - end, Stacktrace), - - Caller = - lists:dropwhile(fun - ({elixir,_,_,_}) -> false; - (_) -> true - end, Stacktrace), - - %% Now we prune any shared code path from erl_eval - {current_stacktrace, Current} = - erlang:process_info(self(), current_stacktrace), - - %% We need to make sure that we don't generate more - %% frames than supported. So we do our best to drop - %% from the Caller, but if the caller has no frames, - %% we need to drop from Pruned. - {DroppedCaller, ToDrop} = - case Caller of - [] -> {[], true}; - _ -> {lists:droplast(Caller), false} - end, - - Reversed = drop_common(lists:reverse(Current), lists:reverse(Pruned), ToDrop), - File = elixir_utils:characters_to_list(?key(Env, file)), - Location = [{file, File}, {line, erl_anno:line(Ann)}], - - %% Add file+line information at the bottom - Custom = lists:reverse([{elixir_eval, '__FILE__', 1, Location} | Reversed], DroppedCaller), - erlang:raise(Kind, Reason, Custom) +eval_local_handler(FunName, Args) -> + {current_stacktrace, Stack} = erlang:process_info(self(), current_stacktrace), + Opts = [{module, nil}, {function, FunName}, {arity, length(Args)}, {reason, 'undefined local'}], + Exception = 'Elixir.UndefinedFunctionError':exception(Opts), + erlang:raise(error, Exception, Stack). + +eval_external_handler(Ann, FunOrModFun, Args) -> + try + case FunOrModFun of + {Mod, Fun} -> apply(Mod, Fun, Args); + Fun -> apply(Fun, Args) end - end, - {value, Fun}. + catch + Kind:Reason:Stacktrace -> + %% Take everything up to the Elixir module + Pruned = + lists:takewhile(fun + ({elixir,_,_,_}) -> false; + (_) -> true + end, Stacktrace), + + Caller = + lists:dropwhile(fun + ({elixir,_,_,_}) -> false; + (_) -> true + end, Stacktrace), + + %% Now we prune any shared code path from erl_eval + {current_stacktrace, Current} = + erlang:process_info(self(), current_stacktrace), + + %% We need to make sure that we don't generate more + %% frames than supported. So we do our best to drop + %% from the Caller, but if the caller has no frames, + %% we need to drop from Pruned. + {DroppedCaller, ToDrop} = + case Caller of + [] -> {[], true}; + _ -> {lists:droplast(Caller), false} + end, + + Reversed = drop_common(lists:reverse(Current), lists:reverse(Pruned), ToDrop), + + %% Add file+line information at the bottom + Bottom = + case erlang:get(?elixir_eval_env) of + #{file := File} -> + [{elixir_eval, '__FILE__', 1, + [{file, elixir_utils:characters_to_list(File)}, {line, erl_anno:line(Ann)}]}]; + + _ -> + [] + end, + + Custom = lists:reverse(Bottom ++ Reversed, DroppedCaller), + erlang:raise(Kind, Reason, Custom) + end. %% We need to check if we have dropped any frames. 
%% If we have not dropped frames, then we need to drop one @@ -427,10 +443,6 @@ drop_common([_ | T1], T2, ToDrop) -> drop_common(T1, T2, ToDrop); drop_common([], [{?MODULE, _, _, _} | T2], _ToDrop) -> T2; drop_common([], [_ | T2], true) -> T2; drop_common([], T2, _) -> T2. --else. -eval_external_handler(_Env) -> - none. --endif. %% Converts a quoted expression to Erlang abstract format @@ -449,20 +461,21 @@ quoted_to_erl(Quoted, ErlS, ExS, Env) -> string_to_tokens(String, StartLine, StartColumn, File, Opts) when is_integer(StartLine), is_binary(File) -> case future_elixir_tokenizer:tokenize(String, StartLine, StartColumn, Opts) of - {ok, _Line, _Column, [], Tokens} -> - {ok, Tokens}; - {ok, _Line, _Column, Warnings, Tokens} -> - (lists:keyfind(warnings, 1, Opts) /= {warnings, false}) andalso - [elixir_errors:erl_warn(L, File, M) || {L, M} <- lists:reverse(Warnings)], - {ok, Tokens}; - {error, {Line, Column, {ErrorPrefix, ErrorSuffix}, Token}, _Rest, _Warnings, _SoFar} -> - Location = [{line, Line}, {column, Column}], - {error, {Location, {to_binary(ErrorPrefix), to_binary(ErrorSuffix)}, to_binary(Token)}}; - {error, {Line, Column, Error, Token}, _Rest, _Warnings, _SoFar} -> - Location = [{line, Line}, {column, Column}], - {error, {Location, to_binary(Error), to_binary(Token)}} + {ok, _Line, _Column, [], Tokens, Terminators} -> + {ok, lists:reverse(Tokens, Terminators)}; + {ok, _Line, _Column, Warnings, Tokens, Terminators} -> + (lists:keyfind(emit_warnings, 1, Opts) /= {emit_warnings, false}) andalso + [future_elixir_errors:erl_warn(L, File, M) || {L, M} <- lists:reverse(Warnings)], + {ok, lists:reverse(Tokens, Terminators)}; + {error, Info, _Rest, _Warnings, _SoFar} -> + {error, format_token_error(Info)} end. +format_token_error({Location, {ErrorPrefix, ErrorSuffix}, Token}) -> + {Location, {to_binary(ErrorPrefix), to_binary(ErrorSuffix)}, to_binary(Token)}; +format_token_error({Location, Error, Token}) -> + {Location, to_binary(Error), to_binary(Token)}. + tokens_to_quoted(Tokens, WarningFile, Opts) -> handle_parsing_opts(WarningFile, Opts), @@ -501,10 +514,10 @@ parser_location(Meta) -> {ok, Forms} -> Forms; {error, {Meta, Error, Token}} -> - elixir_errors:parse_error(Meta, File, Error, Token, {String, StartLine, StartColumn}) + future_elixir_errors:parse_error(Meta, File, Error, Token, {String, StartLine, StartColumn}) end; {error, {Meta, Error, Token}} -> - elixir_errors:parse_error(Meta, File, Error, Token, {String, StartLine, StartColumn}) + future_elixir_errors:parse_error(Meta, File, Error, Token, {String, StartLine, StartColumn}) end. to_binary(List) when is_list(List) -> elixir_utils:characters_to_binary(List); @@ -512,8 +525,8 @@ to_binary(Atom) when is_atom(Atom) -> atom_to_binary(Atom). handle_parsing_opts(File, Opts) -> WarningFile = - case lists:keyfind(warnings, 1, Opts) of - {warnings, false} -> nil; + case lists:keyfind(emit_warnings, 1, Opts) of + {emit_warnings, false} -> nil; _ -> File end, LiteralEncoder = diff --git a/apps/common/src/future_elixir.hrl b/apps/common/src/future_elixir.hrl index 9c15eff72..66635295a 100644 --- a/apps/common/src/future_elixir.hrl +++ b/apps/common/src/future_elixir.hrl @@ -1,4 +1,4 @@ -%% Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/src/elixir.hrl +%% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir.hrl -define(key(M, K), maps:get(K, M)). -define(ann(Meta), elixir_erl:get_ann(Meta)). 
-define(line(Meta), elixir_utils:get_line(Meta)). @@ -9,9 +9,10 @@ -record(elixir_ex, { caller=false, %% stores if __CALLER__ is allowed %% TODO: Remove warn and everywhere it is set in v2.0 - prematch=warn, %% {Read, Counter, {bitsize, Original} | none} | warn | raise | pin + prematch=raise, %% {Read, Counter, {bitsize, Original} | none} | warn | raise | pin stacktrace=false, %% stores if __STACKTRACE__ is allowed unused={#{}, 0}, %% a map of unused vars and a version counter for vars + runtime_modules=[], %% a list of modules defined in functions (runtime) vars={#{}, false} %% a tuple with maps of read and optional write current vars }). @@ -36,7 +37,7 @@ identifier_tokenizer=elixir_tokenizer, ascii_identifiers_only=true, indentation=0, + column=1, mismatch_hints=[], - warn_on_unnecessary_quotes=true, warnings=[] }). diff --git a/apps/common/src/future_elixir_errors.erl b/apps/common/src/future_elixir_errors.erl new file mode 100644 index 000000000..af0aadc79 --- /dev/null +++ b/apps/common/src/future_elixir_errors.erl @@ -0,0 +1,526 @@ +%% Copied from https://raw.githubusercontent.com/elixir-lang/elixir/31f5f126a6df0ceb4fe93d01545255380b74f4e8/lib/elixir/src/elixir_errors.erl +%% A bunch of helpers to help to deal with errors in Elixir source code. +%% This is not exposed in the Elixir language. +%% +%% Note that this is also called by the Erlang backend, so we also support +%% the line number to be none (as it may happen in some erlang errors). +-module(future_elixir_errors). +-export([compile_error/1, compile_error/3, parse_error/5]). +-export([function_error/4, module_error/4, file_error/4]). +-export([format_snippet/6]). +-export([erl_warn/3, file_warn/4]). +-export([prefix/1]). +-export([print_diagnostics/1, print_diagnostic/2, emit_diagnostic/6]). +-export([print_warning/3]). +-include("future_elixir.hrl"). +-type location() :: non_neg_integer() | {non_neg_integer(), non_neg_integer()}. + +%% Diagnostic API + +%% TODO: Remove me on Elixir v2.0. +%% Called by deprecated Kernel.ParallelCompiler.print_warning. +print_warning(Position, File, Message) -> + Output = format_snippet(warning, Position, File, Message, nil, #{}), + io:put_chars(standard_error, [Output, $\n, $\n]). + +read_snippet(nil, _Position) -> + nil; +read_snippet(<<"nofile">>, _Position) -> + nil; +read_snippet(File, Position) -> + LineNumber = extract_line(Position), + get_file_line(File, LineNumber). + +get_file_line(File, LineNumber) when is_integer(LineNumber), LineNumber > 0 -> + case file:open(File, [read, binary]) of + {ok, IoDevice} -> + Line = traverse_file_line(IoDevice, LineNumber), + ok = file:close(IoDevice), + Line; + {error, _} -> + nil + end; +get_file_line(_, _) -> nil. + +traverse_file_line(IoDevice, 1) -> + case file:read_line(IoDevice) of + {ok, Line} -> binary:replace(Line, <<"\n">>, <<>>); + _ -> nil + end; +traverse_file_line(IoDevice, N) -> + file:read_line(IoDevice), + traverse_file_line(IoDevice, N - 1). + +%% Used by Module.ParallelChecker. 
+print_diagnostics([Diagnostic | Others]) -> + #{file := File, position := Position, message := Message} = Diagnostic, + Snippet = read_snippet(File, Position), + Formatted = format_snippet(warning, Position, File, Message, Snippet, Diagnostic), + LineNumber = extract_line(Position), + LineDigits = get_line_number_digits(LineNumber, 1), + Padding = case Snippet of + nil -> 0; + _ -> max(4, LineDigits + 2) + end, + Locations = [["\n", n_spaces(Padding), "└─ ", 'Elixir.Exception':format_stacktrace_entry(ES)] || #{stacktrace := [ES]} <- Others], + io:put_chars(standard_error, [Formatted, Locations, $\n, $\n]). + +print_diagnostic(#{severity := S, message := M, position := P, file := F} = Diagnostic, ReadSnippet) -> + Snippet = + case ReadSnippet of + true -> read_snippet(F, P); + false -> nil + end, + + Output = format_snippet(S, P, F, M, Snippet, Diagnostic), + + MaybeStack = + case (F /= nil) orelse elixir_config:is_bootstrap() of + true -> []; + false -> [["\n ", 'Elixir.Exception':format_stacktrace_entry(E)] || E <- ?key(Diagnostic, stacktrace)] + end, + + io:put_chars(standard_error, [Output, MaybeStack, $\n, $\n]), + Diagnostic. + +emit_diagnostic(Severity, Position, File, Message, Stacktrace, Options) -> + ReadSnippet = proplists:get_value(read_snippet, Options, false), + + Span = case lists:keyfind(span, 1, Options) of + {span, {EndLine, EndCol}} -> {EndLine, EndCol}; + _ -> nil + end, + + Diagnostic = #{ + severity => Severity, + source => File, + file => File, + position => Position, + message => unicode:characters_to_binary(Message), + stacktrace => Stacktrace, + span => Span + }, + + case get(elixir_code_diagnostics) of + undefined -> + case get(elixir_compiler_info) of + undefined -> print_diagnostic(Diagnostic, ReadSnippet); + {CompilerPid, _} -> CompilerPid ! {diagnostic, Diagnostic, ReadSnippet} + end; + + {Tail, true} -> + put(elixir_code_diagnostics, {[print_diagnostic(Diagnostic, ReadSnippet) | Tail], true}); + + {Tail, false} -> + put(elixir_code_diagnostics, {[Diagnostic | Tail], false}) + end, + + ok. + +extract_line({L, _}) -> L; +extract_line(L) -> L. + +extract_column({_, C}) -> C; +extract_column(_) -> nil. 
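For orientation, emit_diagnostic/6 above builds a plain map. Seen from the Elixir side, a diagnostic flowing through these helpers is shaped roughly as follows (field values are illustrative):

    %{
      severity: :warning,
      source: "lib/foo.ex",
      file: "lib/foo.ex",
      # position is either a bare line or a {line, column} tuple;
      # extract_line/1 and extract_column/1 above unpack both forms
      position: {4, 3},
      message: "variable \"bar\" is unused",
      stacktrace: [],
      # span is nil or an {end_line, end_column} tuple
      span: nil
    }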
+ +%% Format snippets +%% "Snippet" here refers to the source code line where the diagnostic/error occurred + +format_snippet(Severity, _Position, nil, Message, nil, _Diagnostic) -> + Formatted = [prefix(Severity), " ", Message], + unicode:characters_to_binary(Formatted); + +format_snippet(Severity, Position, File, Message, nil, Diagnostic) -> + Location = location_format(Position, File, maps:get(stacktrace, Diagnostic, [])), + + Formatted = io_lib:format( + "~ts ~ts\n" + "└─ ~ts", + [prefix(Severity), Message, Location] + ), + + unicode:characters_to_binary(Formatted); + +format_snippet(Severity, Position, File, Message, Snippet, Diagnostic) -> + Column = extract_column(Position), + LineNumber = extract_line(Position), + LineDigits = get_line_number_digits(LineNumber, 1), + Spacing = n_spaces(max(2, LineDigits) + 1), + LineNumberSpacing = if LineDigits =:= 1 -> 1; true -> 0 end, + {FormattedLine, ColumnsTrimmed} = format_line(Snippet), + Location = location_format(Position, File, maps:get(stacktrace, Diagnostic, [])), + MessageDetail = format_detail(Diagnostic, Message), + + Highlight = + case Column of + nil -> + highlight_below_line(FormattedLine, Severity); + _ -> + Length = calculate_span_length({LineNumber, Column}, Diagnostic), + highlight_at_position(Column - ColumnsTrimmed, Severity, Length) + end, + + Formatted = io_lib:format( + " ~ts~ts ~ts\n" + " ~ts│\n" + " ~ts~p │ ~ts\n" + " ~ts│ ~ts\n" + " ~ts│\n" + " ~ts└─ ~ts", + [ + Spacing, prefix(Severity), format_message(MessageDetail, LineDigits, 2 + LineNumberSpacing), + Spacing, + n_spaces(LineNumberSpacing), LineNumber, FormattedLine, + Spacing, Highlight, + Spacing, + Spacing, Location + ]), + + unicode:characters_to_binary(Formatted). + +format_detail(#{details := #{typing_traces := _}}, Message) -> [Message | "\ntyping violation found at:"]; +format_detail(_, Message) -> Message. + +calculate_span_length({StartLine, StartCol}, #{span := {StartLine, EndCol}}) -> EndCol - StartCol; +calculate_span_length({StartLine, _}, #{span := {EndLine, _}}) when EndLine > StartLine -> 1; +calculate_span_length({_, _}, #{}) -> 1. + +format_line(Line) -> + case trim_line(Line, 0) of + {Trimmed, SpacesMatched} when SpacesMatched >= 27 -> + ColumnsTrimmed = SpacesMatched - 22, + {["...", n_spaces(19), Trimmed], ColumnsTrimmed}; + + {_, _} -> + {Line, 0} + end. + +trim_line(<<$\s, Rest/binary>>, Count) -> trim_line(Rest, Count + 1); +trim_line(<<$\t, Rest/binary>>, Count) -> trim_line(Rest, Count + 8); +trim_line(Rest, Count) -> {Rest, Count}. + +format_message(Message, NDigits, PaddingSize) -> + Padding = list_to_binary([$\n, n_spaces(NDigits + PaddingSize)]), + Bin = unicode:characters_to_binary(Message), + pad_line(binary:split(Bin, <<"\n">>, [global]), Padding). + +pad_line([Last], _Padding) -> [Last]; +pad_line([First, <<"">> | Rest], Padding) -> [First, "\n" | pad_line([<<"">> | Rest], Padding)]; +pad_line([First | Rest], Padding) -> [First, Padding | pad_line(Rest, Padding)]. + +highlight_at_position(Column, Severity, Length) -> + Spacing = n_spaces(max(Column - 1, 0)), + case Severity of + warning -> highlight([Spacing, lists:duplicate(Length, $~)], warning); + error -> highlight([Spacing, lists:duplicate(Length, $^)], error) + end. 
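Assembled, the snippet template in format_snippet/6 renders a column-aware diagnostic along these lines (an approximation for a warning at line 4, column 3 with no span; the gutter widths come from the line-number digit count, and the highlight is a single ~ because calculate_span_length/2 returns 1 when no span is recorded):

    warning: variable "bar" is unused
    │
  4 │   bar = compute()
    │   ~
    │
    └─ lib/foo.ex:4:3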
+ +highlight_below_line(Line, Severity) -> + % Don't highlight leading whitespaces in line + {_, SpacesMatched} = trim_line(Line, 0), + + Length = string:length(Line), + Highlight = case Severity of + warning -> highlight(lists:duplicate(Length - SpacesMatched, $~), warning); + error -> highlight(lists:duplicate(Length - SpacesMatched, $^), error) + end, + + [n_spaces(SpacesMatched), Highlight]. + +get_line_number_digits(Number, Acc) when Number < 10 -> Acc; +get_line_number_digits(Number, Acc) -> + get_line_number_digits(Number div 10, Acc + 1). + +n_spaces(N) -> lists:duplicate(N, " "). + +%% Compilation error/warn handling. + +%% Low-level warning, should be used only from Erlang passes. +-spec erl_warn(location() | none, unicode:chardata(), unicode:chardata()) -> ok. +erl_warn(none, File, Warning) -> + erl_warn(0, File, Warning); +erl_warn(Location, File, Warning) when is_binary(File) -> + emit_diagnostic(warning, Location, File, Warning, [], [{read_snippet, true}]). + +-spec file_warn(list(), binary() | #{file := binary(), _ => _}, module(), any()) -> ok. +file_warn(Meta, File, Module, Desc) when is_list(Meta), is_binary(File) -> + file_warn(Meta, #{file => File}, Module, Desc); +file_warn(Meta, E, Module, Desc) when is_list(Meta) -> + % Skip warnings during bootstrap, they will be reported during recompilation + case elixir_config:is_bootstrap() of + true -> ok; + false -> + {EnvPosition, EnvFile, EnvStacktrace} = env_format(Meta, E), + Message = Module:format_error(Desc), + emit_diagnostic(warning, EnvPosition, EnvFile, Message, EnvStacktrace, [{read_snippet, true} | Meta]) + end. + +-spec file_error(list(), binary() | #{file := binary(), _ => _}, module(), any()) -> no_return(). +file_error(Meta, File, Module, Desc) when is_list(Meta), is_binary(File) -> + file_error(Meta, #{file => File}, Module, Desc); +file_error(Meta, Env, Module, Desc) when is_list(Meta) -> + print_error(Meta, Env, Module, Desc), + compile_error(Env). + +%% A module error is one where it can continue if there is a module +%% being compiled. If there is no module, it is a regular file_error. +-spec module_error(list(), #{file := binary(), module => module() | nil, _ => _}, module(), any()) -> ok. +module_error(Meta, #{module := EnvModule} = Env, Module, Desc) when EnvModule /= nil -> + print_error(Meta, Env, Module, Desc), + case elixir_module:taint(EnvModule) of + true -> ok; + false -> compile_error(Env) + end; +module_error(Meta, Env, Module, Desc) -> + file_error(Meta, Env, Module, Desc). + +%% A function error is one where it can continue if there is a function +%% being compiled. If there is no function, it is falls back to file_error. +-spec function_error(list(), #{file := binary(), function => {term(), term()} | nil, _ => _}, module(), any()) -> ok. +function_error(Meta, #{function := {_, _}} = Env, Module, Desc) -> + module_error(Meta, Env, Module, Desc); +function_error(Meta, Env, Module, Desc) -> + file_error(Meta, Env, Module, Desc). + +print_error(Meta, Env, Module, Desc) -> + {EnvPosition, EnvFile, EnvStacktrace} = env_format(Meta, Env), + Message = Module:format_error(Desc), + emit_diagnostic(error, EnvPosition, EnvFile, Message, EnvStacktrace, [{read_snippet, true} | Meta]), + ok. + +%% Compilation error. + +-spec compile_error(#{file := binary(), _ => _}) -> no_return(). +%% We check for the lexical tracker because pry() inside a module +%% will have the environment but not a tracker. 
+compile_error(#{module := Module, file := File, lexical_tracker := LT}) when Module /= nil, LT /= nil -> + Inspected = elixir_aliases:inspect(Module), + Message = io_lib:format("cannot compile module ~ts (errors have been logged)", [Inspected]), + compile_error([], File, Message); +compile_error(#{file := File}) -> + compile_error([], File, "cannot compile file (errors have been logged)"). + +-spec compile_error(list(), binary(), binary() | unicode:charlist()) -> no_return(). +compile_error(Meta, File, Message) when is_binary(Message) -> + {File, Position} = meta_location(Meta, File), + raise('Elixir.CompileError', Message, [{file, File} | Position]); +compile_error(Meta, File, Message) when is_list(Message) -> + {File, Position} = meta_location(Meta, File), + raise('Elixir.CompileError', elixir_utils:characters_to_binary(Message), [{file, File} | Position]). + +%% Tokenization parsing/errors. + +-spec parse_error(elixir:keyword(), binary() | {binary(), binary()}, + binary(), binary(), {unicode:charlist(), integer(), integer()}) -> no_return(). +parse_error(Location, File, Error, <<>>, Input) -> + Message = case Error of + <<"syntax error before: ">> -> <<"syntax error: expression is incomplete">>; + _ -> <> + end, + + raise_snippet(Location, File, Input, 'Elixir.TokenMissingError', Message); + +%% Show a nicer message for end of line +parse_error(Location, File, <<"syntax error before: ">>, <<"eol">>, Input) -> + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', + <<"unexpectedly reached end of line. The current expression is invalid or incomplete">>); + +%% Show a nicer message for keywords pt1 (Erlang keywords show up wrapped in single quotes) +parse_error(Location, File, <<"syntax error before: ">>, Keyword, Input) + when Keyword == <<"'not'">>; + Keyword == <<"'and'">>; + Keyword == <<"'or'">>; + Keyword == <<"'when'">>; + Keyword == <<"'after'">>; + Keyword == <<"'catch'">>; + Keyword == <<"'end'">> -> + raise_reserved(Location, File, Input, binary_part(Keyword, 1, byte_size(Keyword) - 2)); + +%% Show a nicer message for keywords pt2 (Elixir keywords show up as is) +parse_error(Location, File, <<"syntax error before: ">>, Keyword, Input) + when Keyword == <<"fn">>; + Keyword == <<"else">>; + Keyword == <<"rescue">>; + Keyword == <<"true">>; + Keyword == <<"false">>; + Keyword == <<"nil">>; + Keyword == <<"in">> -> + raise_reserved(Location, File, Input, Keyword); + +%% Produce a human-readable message for errors before a sigil +parse_error(Location, File, <<"syntax error before: ">>, <<"{sigil,", _Rest/binary>> = Full, Input) -> + {ok, {sigil, _, Atom, [Content | _], _, _, _}} = parse_erl_term(Full), + Content2 = case is_binary(Content) of + true -> Content; + false -> <<>> + end, + + % :static_atoms_encoder might encode :sigil_ atoms as arbitrary terms + MaybeSigil = case is_atom(Atom) of + true -> case atom_to_binary(Atom) of + <<"sigil_", Chars/binary>> -> <<"\~", Chars/binary, " ">>; + _ -> <<>> + end; + false -> <<>> + end, + + Message = <<"syntax error before: sigil ", MaybeSigil/binary, "starting with content '", Content2/binary, "'">>, + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', Message); + +%% Binaries (and interpolation) are wrapped in [<<...>>] +parse_error(Location, File, Error, <<"[", _/binary>> = Full, Input) when is_binary(Error) -> + Term = case parse_erl_term(Full) of + {ok, [H | _]} when is_binary(H) -> <<$", H/binary, $">>; + _ -> <<$">> + end, + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', <>); + +%% Given a string prefix 
and suffix to insert the token inside the error message rather than append it +parse_error(Location, File, {ErrorPrefix, ErrorSuffix}, Token, Input) when is_binary(ErrorPrefix), is_binary(ErrorSuffix), is_binary(Token) -> + Message = <>, + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', Message); + +%% Misplaced char tokens (for example, {char, _, 97}) are translated by Erlang into +%% the char literal (i.e., the token in the previous example becomes $a), +%% because {char, _, _} is a valid Erlang token for an Erlang char literal. We +%% want to represent that token as ?a in the error, according to the Elixir +%% syntax. +parse_error(Location, File, <<"syntax error before: ">>, <<$$, Char/binary>>, Input) -> + Message = <<"syntax error before: ?", Char/binary>>, + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', Message); + +%% Everything else is fine as is +parse_error(Location, File, Error, Token, Input) when is_binary(Error), is_binary(Token) -> + Message = <>, + case lists:keytake(error_type, 1, Location) of + {value, {error_type, mismatched_delimiter}, Loc} -> + raise_snippet(Loc, File, Input, 'Elixir.MismatchedDelimiterError', Message); + _ -> + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', Message) + end. + +parse_erl_term(Term) -> + case erl_scan:string(binary_to_list(Term)) of + {ok, Tokens, _} -> + case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of + {ok, Parsed} -> {ok, Parsed}; + _ -> error + end; + _ -> error + end. + +raise_reserved(Location, File, Input, Keyword) -> + raise_snippet(Location, File, Input, 'Elixir.SyntaxError', + <<"syntax error before: ", Keyword/binary, ". \"", Keyword/binary, "\" is a " + "reserved word in Elixir and therefore its usage is limited. For instance, " + "it can't be used as a variable or be defined nor invoked as a regular function">>). + +raise_snippet(Location, File, Input, Kind, Message) when is_binary(File) -> + Snippet = cut_snippet(Location, Input), + raise(Kind, Message, [{file, File}, {snippet, Snippet} | Location]). + +cut_snippet(Location, Input) -> + case lists:keyfind(column, 1, Location) of + {column, _} -> + {line, Line} = lists:keyfind(line, 1, Location), + + case lists:keyfind(end_line, 1, Location) of + {end_line, EndLine} -> + cut_snippet(Input, Line, EndLine - Line + 1); + + false -> + Snippet = cut_snippet(Input, Line, 1), + case string:trim(Snippet, leading) of + <<>> -> nil; + _ -> Snippet + end + end; + + false -> + nil + end. + +cut_snippet({InputString, StartLine, StartColumn}, Line, Span) -> + %% In case the code is indented, we need to add the indentation back + %% for the snippets to match the reported columns. + Indent = binary:copy(<<" ">>, StartColumn - 1), + Lines = string:split(InputString, "\n", all), + [Head | Tail] = lists:nthtail(Line - StartLine, Lines), + IndentedTail = indent_n(Tail, Span - 1, <<"\n", Indent/binary>>), + elixir_utils:characters_to_binary([Indent, Head, IndentedTail]). + +indent_n([], _Count, _Indent) -> []; +indent_n(_Lines, 0, _Indent) -> []; +indent_n([H | T], Count, Indent) -> [Indent, H | indent_n(T, Count - 1, Indent)]. + +%% Helpers + +prefix(warning) -> highlight(<<"warning:">>, warning); +prefix(error) -> highlight(<<"error:">>, error); +prefix(hint) -> highlight(<<"hint:">>, hint). + +highlight(Message, Severity) -> + case {Severity, application:get_env(elixir, ansi_enabled, false)} of + {warning, true} -> yellow(Message); + {error, true} -> red(Message); + {hint, true} -> blue(Message); + _ -> Message + end. 
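The first parse_error/5 clause above is the source of the familiar incomplete-expression error. Through the public API it surfaces roughly like this (illustrative; exact formatting varies by Elixir version, output abridged):

    iex> Code.string_to_quoted!("1 +")
    ** (TokenMissingError) token missing on nofile:1:4:
        error: syntax error: expression is incomplete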
+ +yellow(Msg) -> ["\e[33m", Msg, "\e[0m"]. +blue(Msg) -> ["\e[34m", Msg, "\e[0m"]. +red(Msg) -> ["\e[31m", Msg, "\e[0m"]. + +env_format(Meta, #{file := EnvFile} = E) -> + {File, Position} = meta_location(Meta, EnvFile), + Line = ?line(Position), + + Stacktrace = + case E of + #{function := {Name, Arity}, module := Module} -> + [{Module, Name, Arity, [{file, elixir_utils:relative_to_cwd(File)} | Position ]}]; + #{module := Module} when Module /= nil -> + [{Module, '__MODULE__', 0, [{file, elixir_utils:relative_to_cwd(File)} | Position]}]; + #{} -> + [] + end, + + case lists:keyfind(column, 1, Position) of + {column, Column} -> {{Line, Column}, File, Stacktrace}; + _ -> {Line, File, Stacktrace} + end. + +%% We prefer the stacktrace, if available, as it also contains module/function. +location_format(_Position, _File, [E | _]) -> + 'Elixir.Exception':format_stacktrace_entry(E); +location_format(Position, File, []) -> + file_format(Position, File). + +file_format({0, _Column}, File) -> + elixir_utils:relative_to_cwd(File); +file_format({Line, nil}, File) -> + file_format(Line, File); +file_format({Line, Column}, File) -> + io_lib:format("~ts:~w:~w", [elixir_utils:relative_to_cwd(File), Line, Column]); +file_format(0, File) -> + elixir_utils:relative_to_cwd(File); +file_format(Line, File) -> + io_lib:format("~ts:~w", [elixir_utils:relative_to_cwd(File), Line]). + +meta_location(Meta, File) -> + case elixir_utils:meta_keep(Meta) of + {F, L} -> {F, [{line, L}]}; + nil -> {File, maybe_add_col([{line, ?line(Meta)}], Meta)} + end. + +maybe_add_col(Position, Meta) -> + case lists:keyfind(column, 1, Meta) of + {column, Col} when is_integer(Col) -> [{column, Col} | Position]; + false -> Position + end. + +raise(Kind, Message, Opts) when is_binary(Message) -> + Stacktrace = try throw(ok) catch _:_:Stack -> Stack end, + Exception = Kind:exception([{description, Message} | Opts]), + erlang:raise(error, Exception, tl(Stacktrace)). + diff --git a/apps/common/src/future_elixir_interpolation.erl b/apps/common/src/future_elixir_interpolation.erl index 61e2b765d..1f62eb6f4 100644 --- a/apps/common/src/future_elixir_interpolation.erl +++ b/apps/common/src/future_elixir_interpolation.erl @@ -1,3 +1,4 @@ +% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir_interpolation.erl % Handle string and string-like interpolations. -module(future_elixir_interpolation). 
-export([extract/6, unescape_string/1, unescape_string/2, @@ -33,7 +34,17 @@ extract([$\n | Rest], Buffer, Output, Line, _Column, Scope, Interpol, Last) -> extract_nl(Rest, [$\n | Buffer], Output, Line, Scope, Interpol, Last); extract([$\\, Last | Rest], Buffer, Output, Line, Column, Scope, Interpol, Last) -> - extract(Rest, [Last | Buffer], Output, Line, Column+2, Scope, Interpol, Last); + NewScope = + %% TODO: Remove this on Elixir v2.0 + case Interpol of + true -> + Scope; + false -> + Msg = "using \\~ts to escape the closing of an uppercase sigil is deprecated, please use another delimiter or a lowercase sigil instead", + prepend_warning(Line, Column, io_lib:format(Msg, [[Last]]), Scope) + end, + + extract(Rest, [Last | Buffer], Output, Line, Column+2, NewScope, Interpol, Last); extract([$\\, Last, Last, Last | Rest], Buffer, Output, Line, Column, Scope, Interpol, [Last, Last, Last] = All) -> extract(Rest, [Last, Last, Last | Buffer], Output, Line, Column+4, Scope, Interpol, All); @@ -44,17 +55,19 @@ extract([$\\, $#, ${ | Rest], Buffer, Output, Line, Column, Scope, true, Last) - extract([$#, ${ | Rest], Buffer, Output, Line, Column, Scope, true, Last) -> Output1 = build_string(Buffer, Output), case future_elixir_tokenizer:tokenize(Rest, Line, Column + 2, Scope#elixir_tokenizer{terminators=[]}) of - {error, {EndLine, EndColumn, _, "}"}, [$} | NewRest], Warnings, Tokens} -> + {error, {Location, _, "}"}, [$} | NewRest], Warnings, Tokens} -> NewScope = Scope#elixir_tokenizer{warnings=Warnings}, + {line, EndLine} = lists:keyfind(line, 1, Location), + {column, EndColumn} = lists:keyfind(column, 1, Location), Output2 = build_interpol(Line, Column, EndLine, EndColumn, lists:reverse(Tokens), Output1), extract(NewRest, [], Output2, EndLine, EndColumn + 1, NewScope, true, Last); {error, Reason, _, _, _} -> {error, Reason}; - {ok, EndLine, EndColumn, Warnings, Tokens} when Scope#elixir_tokenizer.cursor_completion /= false -> + {ok, EndLine, EndColumn, Warnings, Tokens, Terminators} when Scope#elixir_tokenizer.cursor_completion /= false -> NewScope = Scope#elixir_tokenizer{warnings=Warnings, cursor_completion=noprune}, - Output2 = build_interpol(Line, Column, EndLine, EndColumn, Tokens, Output1), + Output2 = build_interpol(Line, Column, EndLine, EndColumn, lists:reverse(Tokens, Terminators), Output1), extract([], [], Output2, EndLine, EndColumn, NewScope, true, Last); - {ok, _, _, _, _} -> + {ok, _, _, _, _, _} -> {error, {string, Line, Column, "missing interpolation terminator: \"}\"", []}} end; @@ -76,7 +89,7 @@ extract_char(Rest, Buffer, Output, Line, Column, Scope, Interpol, Last) -> Token = io_lib:format("\\u~4.16.0B", [Char]), Pre = "invalid bidirectional formatting character in string: ", Pos = io_lib:format(". If you want to use such character, use it in its escaped ~ts form instead", [Token]), - {error, {Line, Column, {Pre, Pos}, Token}}; + {error, {?LOC(Line, Column), {Pre, Pos}, Token}}; [Char | NewRest] -> extract(NewRest, [Char | Buffer], Output, Line, Column + 1, Scope, Interpol, Last); @@ -95,7 +108,7 @@ extract_nl(Rest, Buffer, Output, Line, Scope, Interpol, [H,H,H] = Last) -> extract(NewRest, NewBuffer, Output, Line + 1, Column, Scope, Interpol, Last) end; extract_nl(Rest, Buffer, Output, Line, Scope, Interpol, Last) -> - extract(Rest, Buffer, Output, Line + 1, 1, Scope, Interpol, Last). + extract(Rest, Buffer, Output, Line + 1, Scope#elixir_tokenizer.column, Scope, Interpol, Last). 
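+%% [editor's note -- not part of the vendored source] extract/8 locates the
+%% end of a "#{...}" interpolation by recursively tokenizing the remainder
+%% with an empty terminator stack: the first unmatched "}" surfaces as an
+%% error whose remaining input starts with $}, which is exactly the closer
+%% of the interpolation. For example, in
+%%
+%%   "Hello #{String.upcase(name)}!"
+%%                               ^ inner tokenizer stops here
+%%
+%% everything between "#{" and the marked brace becomes the interpolated
+%% token list. A clean {ok, ...} from the inner tokenizer (no unmatched "}")
+%% is therefore the error case: "missing interpolation terminator".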
strip_horizontal_space([H | T], Buffer, Counter) when H =:= $\s; H =:= $\t -> strip_horizontal_space(T, [H | Buffer], Counter + 1); @@ -276,4 +289,7 @@ build_string([], Output) -> Output; build_string(Buffer, Output) -> [lists:reverse(Buffer) | Output]. build_interpol(Line, Column, EndLine, EndColumn, Buffer, Output) -> - [{{Line, Column, nil}, {EndLine, EndColumn, nil}, Buffer} | Output]. \ No newline at end of file + [{{Line, Column, nil}, {EndLine, EndColumn, nil}, Buffer} | Output]. + +prepend_warning(Line, Column, Msg, #elixir_tokenizer{warnings=Warnings} = Scope) -> + Scope#elixir_tokenizer{warnings = [{{Line, Column}, Msg} | Warnings]}. diff --git a/apps/common/src/future_elixir_parser.yrl b/apps/common/src/future_elixir_parser.yrl index 30752db50..804fcb3a9 100644 --- a/apps/common/src/future_elixir_parser.yrl +++ b/apps/common/src/future_elixir_parser.yrl @@ -1,9 +1,9 @@ -%% Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/src/elixir_parser.yrl +%% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir_parser.yrl Nonterminals grammar expr_list expr container_expr block_expr access_expr no_parens_expr no_parens_zero_expr no_parens_one_expr no_parens_one_ambig_expr - bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr + bracket_expr bracket_at_expr bracket_arg matched_expr unmatched_expr sub_matched_expr unmatched_op_expr matched_op_expr no_parens_op_expr no_parens_many_expr comp_op_eol at_op_eol unary_op_eol and_op_eol or_op_eol capture_op_eol dual_op_eol mult_op_eol power_op_eol concat_op_eol xor_op_eol pipe_op_eol @@ -13,8 +13,8 @@ Nonterminals list list_args open_bracket close_bracket tuple open_curly close_curly bitstring open_bit close_bit - map map_op map_close map_args struct_expr struct_op - assoc_op_eol assoc_expr assoc_base assoc_update assoc_update_kw assoc + map map_op map_base_expr map_close map_args + assoc_op_eol assoc_expr assoc_base assoc assoc_update assoc_update_kw container_args_base container_args call_args_parens_expr call_args_parens_base call_args_parens parens_call call_args_no_parens_one call_args_no_parens_ambig call_args_no_parens_expr @@ -24,7 +24,7 @@ Nonterminals kw_eol kw_base kw_data kw_call call_args_no_parens_kw_expr call_args_no_parens_kw dot_op dot_alias dot_bracket_identifier dot_call_identifier dot_identifier dot_op_identifier dot_do_identifier dot_paren_identifier - do_block fn_eoe do_eoe end_eoe block_eoe block_item block_list + do_block fn_eoe do_eoe block_eoe block_item block_list . Terminals @@ -33,7 +33,7 @@ Terminals fn 'end' alias atom atom_quoted atom_safe atom_unsafe bin_string list_string sigil bin_heredoc list_heredoc - comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op + comp_op at_op unary_op and_op or_op arrow_op match_op in_op in_match_op ellipsis_op type_op dual_op mult_op power_op concat_op range_op xor_op pipe_op stab_op when_op capture_int capture_op assoc_op rel_op ternary_op dot_call_op 'true' 'false' 'nil' 'do' eol ';' ',' '.' @@ -66,6 +66,7 @@ Right 60 type_op_eol. %% :: Right 70 pipe_op_eol. %% | Right 80 assoc_op_eol. %% => Nonassoc 90 capture_op_eol. %% & +Nonassoc 90 ellipsis_op. %% ... Right 100 match_op_eol. %% = Left 120 or_op_eol. %% ||, |||, or Left 130 and_op_eol. %% &&, &&&, and @@ -91,8 +92,8 @@ Nonassoc 330 dot_identifier. grammar -> eoe : {'__block__', meta_from_token('$1'), []}. grammar -> expr_list : build_block(reverse('$1')). 
grammar -> eoe expr_list : build_block(reverse('$2')). -grammar -> expr_list eoe : build_block(reverse('$1')). -grammar -> eoe expr_list eoe : build_block(reverse('$2')). +grammar -> expr_list eoe : build_block(reverse(annotate_eoe('$2', '$1'))). +grammar -> eoe expr_list eoe : build_block(reverse(annotate_eoe('$3', '$2'))). grammar -> '$empty' : {'__block__', [], []}. % Note expressions are on reverse order @@ -146,10 +147,9 @@ matched_expr -> matched_expr matched_op_expr : build_op('$1', '$2'). matched_expr -> unary_op_eol matched_expr : build_unary_op('$1', '$2'). matched_expr -> at_op_eol matched_expr : build_unary_op('$1', '$2'). matched_expr -> capture_op_eol matched_expr : build_unary_op('$1', '$2'). +matched_expr -> ellipsis_op matched_expr : build_unary_op('$1', '$2'). matched_expr -> no_parens_one_expr : '$1'. -matched_expr -> no_parens_zero_expr : '$1'. -matched_expr -> access_expr : '$1'. -matched_expr -> access_expr kw_identifier : error_invalid_kw_identifier('$2'). +matched_expr -> sub_matched_expr : '$1'. unmatched_expr -> matched_expr unmatched_op_expr : build_op('$1', '$2'). unmatched_expr -> unmatched_expr matched_op_expr : build_op('$1', '$2'). @@ -158,12 +158,14 @@ unmatched_expr -> unmatched_expr no_parens_op_expr : warn_no_parens_after_do_op( unmatched_expr -> unary_op_eol expr : build_unary_op('$1', '$2'). unmatched_expr -> at_op_eol expr : build_unary_op('$1', '$2'). unmatched_expr -> capture_op_eol expr : build_unary_op('$1', '$2'). +unmatched_expr -> ellipsis_op expr : build_unary_op('$1', '$2'). unmatched_expr -> block_expr : '$1'. no_parens_expr -> matched_expr no_parens_op_expr : build_op('$1', '$2'). no_parens_expr -> unary_op_eol no_parens_expr : build_unary_op('$1', '$2'). no_parens_expr -> at_op_eol no_parens_expr : build_unary_op('$1', '$2'). no_parens_expr -> capture_op_eol no_parens_expr : build_unary_op('$1', '$2'). +no_parens_expr -> ellipsis_op no_parens_expr : build_unary_op('$1', '$2'). no_parens_expr -> no_parens_one_ambig_expr : '$1'. no_parens_expr -> no_parens_many_expr : '$1'. @@ -191,6 +193,10 @@ matched_op_expr -> pipe_op_eol matched_expr : {'$1', '$2'}. matched_op_expr -> comp_op_eol matched_expr : {'$1', '$2'}. matched_op_expr -> rel_op_eol matched_expr : {'$1', '$2'}. matched_op_expr -> arrow_op_eol matched_expr : {'$1', '$2'}. + +%% We warn exclusively for |> and friends because they are used +%% in other languages with lower precedence than function application, +%% which can be the source of confusion. matched_op_expr -> arrow_op_eol no_parens_one_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. unmatched_op_expr -> match_op_eol unmatched_expr : {'$1', '$2'}. @@ -229,9 +235,7 @@ no_parens_op_expr -> when_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> pipe_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> comp_op_eol no_parens_expr : {'$1', '$2'}. no_parens_op_expr -> rel_op_eol no_parens_expr : {'$1', '$2'}. -no_parens_op_expr -> arrow_op_eol no_parens_expr : {'$1', '$2'}. -no_parens_op_expr -> arrow_op_eol no_parens_one_ambig_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. -no_parens_op_expr -> arrow_op_eol no_parens_many_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. +no_parens_op_expr -> arrow_op_eol no_parens_expr : warn_pipe('$1', '$2'), {'$1', '$2'}. %% Allow when (and only when) with keywords no_parens_op_expr -> when_op_eol call_args_no_parens_kw : {'$1', '$2'}. 
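+%% [editor's note -- not part of the vendored grammar] the arrow_op rules for
+%% no-parens calls were collapsed into the single warn_pipe/2 rule above, so
+%% piping into any no-parens call now warns. Illustratively (Elixir syntax):
+%%
+%%   [1, 2, 3] |> Enum.take 2
+%%
+%% still parses as [1, 2, 3] |> Enum.take(2), but consistently emits the
+%% "parentheses are required when piping into a function call" style warning
+%% rather than only warning for some argument shapes.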
@@ -244,8 +248,14 @@ no_parens_many_expr -> dot_identifier call_args_no_parens_many_strict : build_no no_parens_one_expr -> dot_op_identifier call_args_no_parens_one : build_no_parens('$1', '$2'). no_parens_one_expr -> dot_identifier call_args_no_parens_one : build_no_parens('$1', '$2'). -no_parens_zero_expr -> dot_do_identifier : build_no_parens('$1', nil). -no_parens_zero_expr -> dot_identifier : build_no_parens('$1', nil). +no_parens_zero_expr -> dot_do_identifier : build_identifier('$1'). +no_parens_zero_expr -> dot_identifier : build_identifier('$1'). + +sub_matched_expr -> no_parens_zero_expr : '$1'. +sub_matched_expr -> range_op : build_nullary_op('$1'). +sub_matched_expr -> ellipsis_op : build_nullary_op('$1'). +sub_matched_expr -> access_expr : '$1'. +sub_matched_expr -> access_expr kw_identifier : error_invalid_kw_identifier('$2'). %% From this point on, we just have constructs that can be %% used with the access syntax. Note that (dot_)identifier @@ -254,11 +264,9 @@ no_parens_zero_expr -> dot_identifier : build_no_parens('$1', nil). access_expr -> bracket_at_expr : '$1'. access_expr -> bracket_expr : '$1'. access_expr -> capture_int int : build_unary_op('$1', number_value('$2')). -access_expr -> fn_eoe stab end_eoe : build_fn('$1', '$2', '$3'). -access_expr -> open_paren stab close_paren : build_paren_stab('$1', '$2', '$3'). -access_expr -> open_paren stab ';' close_paren : build_paren_stab('$1', '$2', '$4'). -access_expr -> open_paren ';' stab ';' close_paren : build_paren_stab('$1', '$3', '$5'). -access_expr -> open_paren ';' stab close_paren : build_paren_stab('$1', '$3', '$4'). +access_expr -> fn_eoe stab_eoe 'end' : build_fn('$1', '$2', '$3'). +access_expr -> open_paren stab_eoe ')' : build_paren_stab('$1', '$2', '$3'). +access_expr -> open_paren ';' stab_eoe ')' : build_paren_stab('$1', '$3', '$4'). access_expr -> open_paren ';' close_paren : build_paren_stab('$1', [], '$3'). access_expr -> empty_paren : warn_empty_paren('$1'), {'__block__', [], []}. access_expr -> int : handle_number(number_value('$1'), '$1', ?exprs('$1')). @@ -282,7 +290,6 @@ access_expr -> atom_safe : build_quoted_atom('$1', true, delimiter(<<$">>)). access_expr -> atom_unsafe : build_quoted_atom('$1', false, delimiter(<<$">>)). access_expr -> dot_alias : '$1'. access_expr -> parens_call : '$1'. -access_expr -> range_op : build_nullary_op('$1'). %% Also used by maps and structs parens_call -> dot_call_identifier call_args_parens : build_parens('$1', '$2', {[], []}). @@ -293,19 +300,19 @@ bracket_arg -> open_bracket container_expr close_bracket : build_access_arg('$1' bracket_arg -> open_bracket container_expr ',' close_bracket : build_access_arg('$1', '$2', '$4'). bracket_arg -> open_bracket container_expr ',' container_args close_bracket : error_too_many_access_syntax('$3'). -bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_no_parens('$1', nil), '$2'). +bracket_expr -> dot_bracket_identifier bracket_arg : build_access(build_identifier('$1'), meta_with_from_brackets('$2')). bracket_expr -> access_expr bracket_arg : build_access('$1', meta_with_from_brackets('$2')). bracket_at_expr -> at_op_eol dot_bracket_identifier bracket_arg : - build_access(build_unary_op('$1', build_no_parens('$2', nil)), '$3'). + build_access(build_unary_op('$1', build_identifier('$2')), meta_with_from_brackets('$3')). bracket_at_expr -> at_op_eol access_expr bracket_arg : - build_access(build_unary_op('$1', '$2'), '$3'). + build_access(build_unary_op('$1', '$2'), meta_with_from_brackets('$3')). 
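+%% [editor's note -- not part of the vendored grammar] sub_matched_expr
+%% factors the "atomic" forms (bare zero-arity identifiers, access
+%% expressions, and the nullary .. and ... operators) out of matched_expr so
+%% that map_base_expr further down can reuse them for struct names and map
+%% update targets. It is also what lets the range operators stand alone
+%% (Elixir syntax, for illustration):
+%%
+%%   row = m[.., 1]       # `..` as a nullary "whole range" value
+%%   @type t :: [...]     # `...` in nullary position inside a typespec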
%% Blocks do_block -> do_eoe 'end' : {do_end_meta('$1', '$2'), [[{handle_literal(do, '$1'), {'__block__', [], []}}]]}. -do_block -> do_eoe stab end_eoe : +do_block -> do_eoe stab_eoe 'end' : {do_end_meta('$1', '$3'), [[{handle_literal(do, '$1'), build_stab('$2')}]]}. do_block -> do_eoe block_list 'end' : {do_end_meta('$1', '$3'), [[{handle_literal(do, '$1'), {'__block__', [], []}} | '$2']]}. @@ -322,9 +329,6 @@ fn_eoe -> 'fn' eoe : next_is_eol('$1', '$2'). do_eoe -> 'do' : '$1'. do_eoe -> 'do' eoe : '$1'. -end_eoe -> 'end' : '$1'. -end_eoe -> eoe 'end' : '$2'. - block_eoe -> block_identifier : '$1'. block_eoe -> block_identifier eoe : '$1'. @@ -332,7 +336,7 @@ stab -> stab_expr : ['$1']. stab -> stab eoe stab_expr : ['$3' | annotate_eoe('$2', '$1')]. stab_eoe -> stab : '$1'. -stab_eoe -> stab eoe : '$1'. +stab_eoe -> stab eoe : annotate_eoe('$2', '$1'). stab_expr -> expr : '$1'. @@ -599,6 +603,11 @@ bitstring -> open_bit container_args close_bit : build_bit('$1', '$2', '$3'). % Map and structs +map_base_expr -> sub_matched_expr : '$1'. +map_base_expr -> at_op_eol map_base_expr : build_unary_op('$1', '$2'). +map_base_expr -> unary_op_eol map_base_expr : build_unary_op('$1', '$2'). +map_base_expr -> ellipsis_op map_base_expr : build_unary_op('$1', '$2'). + assoc_op_eol -> assoc_op : '$1'. assoc_op_eol -> assoc_op eol : '$1'. @@ -606,9 +615,7 @@ assoc_expr -> matched_expr assoc_op_eol matched_expr : {'$1', '$3'}. assoc_expr -> unmatched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}. assoc_expr -> matched_expr assoc_op_eol unmatched_expr : {'$1', '$3'}. assoc_expr -> unmatched_expr assoc_op_eol matched_expr : {'$1', '$3'}. -assoc_expr -> dot_identifier : build_identifier('$1', nil). -assoc_expr -> no_parens_one_expr : '$1'. -assoc_expr -> parens_call : '$1'. +assoc_expr -> map_base_expr : '$1'. assoc_update -> matched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}. assoc_update -> unmatched_expr pipe_op_eol assoc_expr : {'$2', '$1', ['$3']}. @@ -636,18 +643,9 @@ map_args -> open_curly assoc_update ',' close_curly : build_map_update('$1', '$2 map_args -> open_curly assoc_update ',' map_close : build_map_update('$1', '$2', element(2, '$4'), element(1, '$4')). map_args -> open_curly assoc_update_kw close_curly : build_map_update('$1', '$2', '$3', []). -struct_op -> '%' : '$1'. -struct_expr -> atom : handle_literal(?exprs('$1'), '$1', []). -struct_expr -> atom_quoted : handle_literal(?exprs('$1'), '$1', delimiter(<<$">>)). -struct_expr -> dot_alias : '$1'. -struct_expr -> dot_identifier : build_identifier('$1', nil). -struct_expr -> at_op_eol struct_expr : build_unary_op('$1', '$2'). -struct_expr -> unary_op_eol struct_expr : build_unary_op('$1', '$2'). -struct_expr -> parens_call : '$1'. - -map -> map_op map_args : '$2'. -map -> struct_op struct_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}. -map -> struct_op struct_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}. +map -> map_op map_args : adjust_map_column('$2'). +map -> '%' map_base_expr map_args : {'%', meta_from_token('$1'), ['$2', '$3']}. +map -> '%' map_base_expr eol map_args : {'%', meta_from_token('$1'), ['$2', '$4']}. Erlang code. @@ -779,14 +777,27 @@ build_map_update(Left, {Pipe, Struct, Map}, Right, Extra) -> Op = build_op(Struct, Pipe, append_non_empty(Map, Extra)), {'%{}', newlines_pair(Left, Right) ++ meta_from_token(Left), [Op]}. 
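+%% [editor's note -- not part of the vendored source] build_map_update/4
+%% nests the update under a pipe operator inside the %{} node, matching the
+%% quoted form on the Elixir side:
+%%
+%%   quote do: %{map | a: 1}
+%%   #=> {:%{}, [], [{:|, [], [{:map, [], Elixir}, [a: 1]]}]}
+%%
+%% (Elixir notation for illustration; at this stage the parser builds the
+%% equivalent Erlang terms {'%{}', Meta, [...]}.)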
+adjust_map_column(Map) -> + case ?columns() of + true -> + {'%{}', Meta, Pairs} = Map, + UpdatedMeta = [{Key, if Key =:= column -> Value - 1; true -> Value end} || + {Key, Value} <- Meta], + {'%{}', UpdatedMeta, Pairs}; + false -> + Map + end. + %% Blocks -build_block([{unquote_splicing, _, [_]}]=Exprs) -> - {'__block__', [], Exprs}; -build_block([Expr]) -> +build_block(Exprs) -> build_block(Exprs, []). + +build_block([{unquote_splicing, _, [_]}]=Exprs, Meta) -> + {'__block__', Meta, Exprs}; +build_block([Expr], _Meta) -> Expr; -build_block(Exprs) -> - {'__block__', [], Exprs}. +build_block(Exprs, Meta) -> + {'__block__', Meta, Exprs}. %% Newlines @@ -820,7 +831,7 @@ annotate_eoe(Token, Stack) -> {{_, Location}, [{'->', StabMeta, [StabArgs, {Left, Meta, Right}]} | Rest]} when is_list(Meta) -> [{'->', StabMeta, [StabArgs, {Left, [{end_of_expression, end_of_expression(Location)} | Meta], Right}]} | Rest]; - {{_, Location}, [{Left, Meta, Right} | Rest]} when is_list(Meta) -> + {{_, Location}, [{Left, Meta, Right} | Rest]} when is_list(Meta), Left =/= '->' -> [{Left, [{end_of_expression, end_of_expression(Location)} | Meta], Right} | Rest]; _ -> @@ -886,37 +897,35 @@ build_nested_parens(Dot, Args1, {Args2Meta, Args2}, {BlockMeta, Block}) -> {Identifier, Meta, append_non_empty(Args2, Block)}. build_parens(Expr, {ArgsMeta, Args}, {BlockMeta, Block}) -> - {BuiltExpr, BuiltMeta, BuiltArgs} = build_identifier(Expr, append_non_empty(Args, Block)), + {BuiltExpr, BuiltMeta, BuiltArgs} = build_call(Expr, append_non_empty(Args, Block)), {BuiltExpr, BlockMeta ++ ArgsMeta ++ BuiltMeta, BuiltArgs}. build_no_parens_do_block(Expr, Args, {BlockMeta, Block}) -> - {BuiltExpr, BuiltMeta, BuiltArgs} = build_no_parens(Expr, Args ++ Block), + {BuiltExpr, BuiltMeta, BuiltArgs} = build_call(Expr, Args ++ Block), {BuiltExpr, BlockMeta ++ BuiltMeta, BuiltArgs}. build_no_parens(Expr, Args) -> - build_identifier(Expr, Args). + build_call(Expr, Args). -build_identifier({'.', Meta, IdentifierLocation, DotArgs}, nil) -> +build_identifier({'.', Meta, IdentifierLocation, DotArgs}) -> {{'.', Meta, DotArgs}, [{no_parens, true} | IdentifierLocation], []}; -build_identifier({'.', Meta, IdentifierLocation, DotArgs}, Args) -> - {{'.', Meta, DotArgs}, IdentifierLocation, Args}; - -build_identifier({'.', Meta, _} = Dot, nil) -> +build_identifier({'.', Meta, _} = Dot) -> {Dot, [{no_parens, true} | Meta], []}; -build_identifier({'.', Meta, _} = Dot, Args) -> +build_identifier({_, Location, Identifier}) -> + {Identifier, meta_from_location(Location), nil}. + +build_call({'.', Meta, IdentifierLocation, DotArgs}, Args) -> + {{'.', Meta, DotArgs}, IdentifierLocation, Args}; + +build_call({'.', Meta, _} = Dot, Args) -> {Dot, Meta, Args}; -build_identifier({op_identifier, Location, Identifier}, [Arg]) -> +build_call({op_identifier, Location, Identifier}, [Arg]) -> {Identifier, [{ambiguous_op, nil} | meta_from_location(Location)], [Arg]}; -%% TODO: Either remove ... or make it an operator on v2.0 -build_identifier({_, {Line, Column, _} = Location, '...'}, Args) when is_list(Args) -> - warn({Line, Column}, "... is no longer supported as a function call and it must receive no arguments"), - {'...', meta_from_location(Location), Args}; - -build_identifier({_, Location, Identifier}, Args) -> +build_call({_, Location, Identifier}, Args) -> {Identifier, meta_from_location(Location), Args}. 
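+%% [editor's note -- not part of the vendored source] the old
+%% build_identifier/2 was split in two: build_identifier/1 handles bare,
+%% no-parens forms and produces nil as the third AST element, while
+%% build_call/2 always carries a proper argument list. That preserves the
+%% Elixir AST convention in which
+%%
+%%   {foo, Meta, nil}   %% bare identifier / variable, e.g. `foo`
+%%   {foo, Meta, []}    %% explicit zero-arity call, e.g. `foo()`
+%%
+%% are distinct shapes, so later passes can tell `foo` apart from `foo()`.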
%% Fn @@ -940,11 +949,11 @@ build_access(Expr, {List, Meta}) -> %% Interpolation aware -build_sigil({sigil, Location, Sigil, Parts, Modifiers, Indentation, Delimiter}) -> +build_sigil({sigil, Location, Atom, Parts, Modifiers, Indentation, Delimiter}) -> Meta = meta_from_location(Location), MetaWithDelimiter = [{delimiter, Delimiter} | Meta], MetaWithIndentation = meta_with_indentation(Meta, Indentation), - {list_to_atom("sigil_" ++ Sigil), + {Atom, MetaWithDelimiter, [{'<<>>', MetaWithIndentation, string_parts(Parts)}, Modifiers]}. @@ -1020,7 +1029,7 @@ charlist_part({Begin, End, Tokens}) -> true -> [{closing, meta_from_location(End)} | Meta]; false -> Meta end, - {{'.', Meta, ['Elixir.Kernel', to_string]}, MetaWithExtra, [Form]}. + {{'.', Meta, ['Elixir.Kernel', to_string]}, [{from_interpolation, true} | MetaWithExtra], [Form]}. string_parts(Parts) -> [string_part(Part) || Part <- Parts]. @@ -1031,10 +1040,10 @@ string_part({Begin, End, Tokens}) -> Meta = meta_from_location(Begin), MetaWithExtra = case ?token_metadata() of - true -> [{closing, meta_from_location(End)} | meta_from_location(Begin)]; - false -> meta_from_location(Begin) + true -> [{closing, meta_from_location(End)} | Meta]; + false -> Meta end, - {'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, MetaWithExtra, [Form]}, {binary, Meta, nil}]}. + {'::', Meta, [{{'.', Meta, ['Elixir.Kernel', to_string]}, [{from_interpolation, true} | MetaWithExtra], [Form]}, {binary, Meta, nil}]}. string_tokens_parse(Tokens) -> case parse(Tokens) of @@ -1066,11 +1075,9 @@ build_stab(Stab) -> build_paren_stab(_Before, [{Op, _, [_]}]=Exprs, _After) when ?rearrange_uop(Op) -> {'__block__', [], Exprs}; build_paren_stab(Before, Stab, After) -> - case build_stab(Stab) of - {'__block__', Meta, Block} -> - {'__block__', Meta ++ meta_from_token_with_closing(Before, After), Block}; - Other -> - Other + case check_stab(Stab, none) of + block -> build_block(reverse(Stab), meta_from_token_with_closing(Before, After)); + stab -> handle_literal(collect_stab(Stab, [], []), Before, newlines_pair(Before, After)) end. collect_stab([{'->', Meta, [Left, Right]} | T], Exprs, Stabs) -> @@ -1273,5 +1280,5 @@ warn_empty_stab_clause({stab_op, {Line, Column, _}, '->'}) -> warn(LineColumn, Message) -> case get(elixir_parser_warning_file) of nil -> ok; - File -> elixir_errors:erl_warn(LineColumn, File, Message) + File -> future_elixir_errors:erl_warn(LineColumn, File, Message) end. diff --git a/apps/common/src/future_elixir_tokenizer.erl b/apps/common/src/future_elixir_tokenizer.erl index c278eb03f..0ace9a69d 100644 --- a/apps/common/src/future_elixir_tokenizer.erl +++ b/apps/common/src/future_elixir_tokenizer.erl @@ -1,9 +1,9 @@ -%% Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/src/elixir_tokenizer.erl +%% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir_tokenizer.erl %% I changed module name and includes -module(future_elixir_tokenizer). -include("future_elixir.hrl"). -include("future_elixir_tokenizer.hrl"). --export([tokenize/1, tokenize/3, tokenize/4, invalid_do_error/1]). +-export([tokenize/1, tokenize/3, tokenize/4, invalid_do_error/1, terminator/1]). -define(at_op(T), T =:= $@). @@ -97,6 +97,9 @@ -define(pipe_op(T), T =:= $|). +-define(ellipsis_op3(T1, T2, T3), + T1 =:= $., T2 =:= $., T3 =:= $.). 
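+%% [editor's note -- not part of the vendored source] ?ellipsis_op3 turns
+%% "..." into a first-class operator token (ellipsis_op) instead of the old
+%% special-cased call identifier. The grammar then accepts it in nullary and
+%% unary positions, which is how typespec forms such as (Elixir syntax):
+%%
+%%   @type t :: [...]
+%%   @spec apply((... -> any()), [term()]) :: any()
+%%
+%% tokenize and parse without the removed ad-hoc clauses further down.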
+ %% Deprecated operators -define(unary_op3(T1, T2, T3), @@ -125,11 +128,9 @@ tokenize(String, Line, Column, Opts) -> Acc#elixir_tokenizer{preserve_comments=PreserveComments}; ({unescape, Unescape}, Acc) when is_boolean(Unescape) -> Acc#elixir_tokenizer{unescape=Unescape}; - ({warn_on_unnecessary_quotes, Unnecessary}, Acc) when is_boolean(Unnecessary) -> - Acc#elixir_tokenizer{warn_on_unnecessary_quotes=Unnecessary}; (_, Acc) -> Acc - end, #elixir_tokenizer{identifier_tokenizer=IdentifierTokenizer}, Opts), + end, #elixir_tokenizer{identifier_tokenizer=IdentifierTokenizer, column=Column}, Opts), tokenize(String, Line, Column, Scope, []). @@ -139,30 +140,38 @@ tokenize(String, Line, Opts) -> tokenize([], Line, Column, #elixir_tokenizer{cursor_completion=Cursor} = Scope, Tokens) when Cursor /= false -> #elixir_tokenizer{ascii_identifiers_only=Ascii, terminators=Terminators, warnings=Warnings} = Scope, - {CursorColumn, CursorTerminators, CursorTokens} = + {CursorColumn, CursorTerminators, AccTokens} = add_cursor(Line, Column, Cursor, Terminators, Tokens), AllWarnings = maybe_unicode_lint_warnings(Ascii, Tokens, Warnings), - AccTokens = cursor_complete(Line, CursorColumn, CursorTerminators, CursorTokens), - {ok, Line, Column, AllWarnings, AccTokens}; + {AccTerminators, _AccColumn} = cursor_complete(Line, CursorColumn, CursorTerminators), + {ok, Line, CursorColumn, AllWarnings, AccTokens, AccTerminators}; -tokenize([], EndLine, Column, #elixir_tokenizer{terminators=[{Start, StartLine, _} | _]} = Scope, Tokens) -> +tokenize([], EndLine, EndColumn, #elixir_tokenizer{terminators=[{Start, {StartLine, StartColumn, _}, _} | _]} = Scope, Tokens) -> End = terminator(Start), Hint = missing_terminator_hint(Start, End, Scope), - Message = "missing terminator: ~ts (for \"~ts\" starting at line ~B)", - Formatted = io_lib:format(Message, [End, Start, StartLine]), - error({EndLine, Column, [Formatted, Hint], []}, [], Scope, Tokens); + Message = "missing terminator: ~ts", + Formatted = io_lib:format(Message, [End]), + Meta = [ + {opening_delimiter, Start}, + {expected_delimiter, End}, + {line, StartLine}, + {column, StartColumn}, + {end_line, EndLine}, + {end_column, EndColumn} + ], + error({Meta, [Formatted, Hint], []}, [], Scope, Tokens); tokenize([], Line, Column, #elixir_tokenizer{} = Scope, Tokens) -> #elixir_tokenizer{ascii_identifiers_only=Ascii, warnings=Warnings} = Scope, AllWarnings = maybe_unicode_lint_warnings(Ascii, Tokens, Warnings), - {ok, Line, Column, AllWarnings, lists:reverse(Tokens)}; + {ok, Line, Column, AllWarnings, Tokens, []}; % VC merge conflict tokenize(("<<<<<<<" ++ _) = Original, Line, 1, Scope, Tokens) -> FirstLine = lists:takewhile(fun(C) -> C =/= $\n andalso C =/= $\r end, Original), - Reason = {Line, 1, "found an unexpected version control marker, please resolve the conflicts: ", FirstLine}, + Reason = {?LOC(Line, 1), "found an unexpected version control marker, please resolve the conflicts: ", FirstLine}, error(Reason, Original, Scope, Tokens); % Base integers @@ -203,7 +212,7 @@ tokenize([$~, H | _T] = Original, Line, Column, Scope, Tokens) when ?is_upcase(H % We tokenize char literals (?a) as {char, _, CharInt} instead of {number, _, % CharInt}. This is exactly what Erlang does with Erlang char literals % ($a). 
This means we'll have to adjust the error message for char literals in -% elixir_errors.erl as by default {char, _, _} tokens are "hijacked" by Erlang +% future_elixir_errors.erl as by default {char, _, _} tokens are "hijacked" by Erlang % and printed with Erlang syntax ($a) in the parser's error messages. tokenize([$?, $\\, H | T], Line, Column, Scope, Tokens) -> @@ -248,26 +257,26 @@ tokenize([$?, Char | T], Line, Column, Scope, Tokens) -> tokenize("\"\"\"" ++ T, Line, Column, Scope, Tokens) -> handle_heredocs(T, Line, Column, $", Scope, Tokens); -%% TODO: Deprecate single-quoted in Elixir v1.17 +%% TODO: Remove me in Elixir v2.0 tokenize("'''" ++ T, Line, Column, Scope, Tokens) -> - handle_heredocs(T, Line, Column, $', Scope, Tokens); + NewScope = prepend_warning(Line, Column, "single-quoted string represent charlists. Use ~c''' if you indeed want a charlist or use \"\"\" instead", Scope), + handle_heredocs(T, Line, Column, $', NewScope, Tokens); % Strings tokenize([$" | T], Line, Column, Scope, Tokens) -> handle_strings(T, Line, Column + 1, $", Scope, Tokens); -%% TODO: Deprecate single-quoted in Elixir v1.17 +%% TODO: Remove me in Elixir v2.0 tokenize([$' | T], Line, Column, Scope, Tokens) -> - handle_strings(T, Line, Column + 1, $', Scope, Tokens); + NewScope = prepend_warning(Line, Column, "single-quoted strings represent charlists. Use ~c\"\" if you indeed want a charlist or use \"\" instead", Scope), + handle_strings(T, Line, Column + 1, $', NewScope, Tokens); % Operator atoms tokenize(".:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> tokenize(Rest, Line, Column + 2, Scope, [{kw_identifier, {Line, Column, nil}, '.'} | Tokens]); -tokenize("...:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> - tokenize(Rest, Line, Column + 4, Scope, [{kw_identifier, {Line, Column, nil}, '...'} | Tokens]); tokenize("<<>>:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> tokenize(Rest, Line, Column + 5, Scope, [{kw_identifier, {Line, Column, nil}, '<<>>'} | Tokens]); tokenize("%{}:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> @@ -281,8 +290,6 @@ tokenize("{}:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> tokenize("..//:" ++ Rest, Line, Column, Scope, Tokens) when ?is_space(hd(Rest)) -> tokenize(Rest, Line, Column + 5, Scope, [{kw_identifier, {Line, Column, nil}, '..//'} | Tokens]); -tokenize(":..." ++ Rest, Line, Column, Scope, Tokens) -> - tokenize(Rest, Line, Column + 4, Scope, [{atom, {Line, Column, nil}, '...'} | Tokens]); tokenize(":<<>>" ++ Rest, Line, Column, Scope, Tokens) -> tokenize(Rest, Line, Column + 5, Scope, [{atom, {Line, Column, nil}, '<<>>'} | Tokens]); tokenize(":%{}" ++ Rest, Line, Column, Scope, Tokens) -> @@ -297,7 +304,7 @@ tokenize(":..//" ++ Rest, Line, Column, Scope, Tokens) -> % ## Three Token Operators tokenize([$:, T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?unary_op3(T1, T2, T3); ?comp_op3(T1, T2, T3); ?and_op3(T1, T2, T3); ?or_op3(T1, T2, T3); - ?arrow_op3(T1, T2, T3); ?xor_op3(T1, T2, T3); ?concat_op3(T1, T2, T3) -> + ?arrow_op3(T1, T2, T3); ?xor_op3(T1, T2, T3); ?concat_op3(T1, T2, T3); ?ellipsis_op3(T1, T2, T3) -> Token = {atom, {Line, Column, nil}, list_to_atom([T1, T2, T3])}, tokenize(Rest, Line, Column + 4, Scope, [Token | Tokens]); @@ -325,13 +332,6 @@ tokenize([$:, T | Rest], Line, Column, Scope, Tokens) when % ## Stand-alone tokens -%% TODO: Consider either making ... as nullary operator (same as ..) -%% or deprecating it. 
In Elixir itself it is only used in typespecs. -tokenize("..." ++ Rest, Line, Column, Scope, Tokens) -> - NewScope = maybe_warn_too_many_of_same_char("...", Rest, Line, Column, Scope), - Token = check_call_identifier(Line, Column, "...", '...', Rest), - tokenize(Rest, Line, Column + 3, NewScope, [Token | Tokens]); - tokenize("=>" ++ Rest, Line, Column, Scope, Tokens) -> Token = {assoc_op, {Line, Column, previous_was_eol(Tokens)}, '=>'}, tokenize(Rest, Line, Column + 2, Scope, add_token_with_eol(Token, Tokens)); @@ -351,6 +351,9 @@ tokenize("..//" ++ Rest = String, Line, Column, Scope, Tokens) -> tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?unary_op3(T1, T2, T3) -> handle_unary_op(Rest, Line, Column, unary_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); +tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?ellipsis_op3(T1, T2, T3) -> + handle_unary_op(Rest, Line, Column, ellipsis_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); + tokenize([T1, T2, T3 | Rest], Line, Column, Scope, Tokens) when ?comp_op3(T1, T2, T3) -> handle_op(Rest, Line, Column, comp_op, 3, list_to_atom([T1, T2, T3]), Scope, Tokens); @@ -392,7 +395,7 @@ tokenize([${ | Rest], Line, Column, Scope, [{'%', _} | _] = Tokens) -> "If you want to define a map, write %{...}, with no spaces.\n" "If you want to define a struct, write %StructName{...}.\n\n" "Syntax error before: ", - error({Line, Column, Message, [${]}, Rest, Scope, Tokens); + error({?LOC(Line, Column), Message, [${]}, Rest, Scope, Tokens); tokenize([T | Rest], Line, Column, Scope, Tokens) when T =:= $(; T =:= ${; T =:= $[ -> Token = {list_to_atom([T]), {Line, Column, nil}}, @@ -528,7 +531,7 @@ tokenize([$:, H | T] = Original, Line, Column, Scope, Tokens) when ?is_quote(H) {error, Reason} -> Message = " (for atom starting at line ~B)", - interpolation_error(Reason, Original, Scope, Tokens, Message, [Line]) + interpolation_error(Reason, Original, Scope, Tokens, Message, [Line], Line, Column + 1, [H], [H]) end; tokenize([$: | String] = Original, Line, Column, Scope, Tokens) -> @@ -555,7 +558,7 @@ tokenize([$: | String] = Original, Line, Column, Scope, Tokens) -> tokenize([H | T], Line, Column, Scope, Tokens) when ?is_digit(H) -> case tokenize_number(T, [H], 1, false) of {error, Reason, Original} -> - error({Line, Column, Reason, Original}, T, Scope, Tokens); + error({?LOC(Line, Column), Reason, Original}, T, Scope, Tokens); {[I | Rest], Number, Original, _Length} when ?is_upcase(I); ?is_downcase(I); I == $_ -> if Number == 0, (I =:= $x) orelse (I =:= $o) orelse (I =:= $b), Rest == [], @@ -572,7 +575,7 @@ tokenize([H | T], Line, Column, Scope, Tokens) when ?is_digit(H) -> [[I], Original] ), - error({Line, Column, Msg, [I]}, T, Scope, Tokens) + error({?LOC(Line, Column), Msg, [I]}, T, Scope, Tokens) end; {Rest, Number, Original, Length} when is_integer(Number) -> Token = {int, {Line, Column, Number}, Original}, @@ -597,13 +600,13 @@ tokenize(";" ++ Rest, Line, Column, Scope, [Top | _] = Tokens) when element(1, T tokenize(Rest, Line, Column + 1, Scope, [{';', {Line, Column, 0}} | Tokens]); tokenize("\\" = Original, Line, Column, Scope, Tokens) -> - error({Line, Column, "invalid escape \\ at end of file", []}, Original, Scope, Tokens); + error({?LOC(Line, Column), "invalid escape \\ at end of file", []}, Original, Scope, Tokens); tokenize("\\\n" = Original, Line, Column, Scope, Tokens) -> - error({Line, Column, "invalid escape \\ at end of file", []}, Original, Scope, Tokens); + error({?LOC(Line, Column), "invalid escape \\ at end of 
file", []}, Original, Scope, Tokens); tokenize("\\\r\n" = Original, Line, Column, Scope, Tokens) -> - error({Line, Column, "invalid escape \\ at end of file", []}, Original, Scope, Tokens); + error({?LOC(Line, Column), "invalid escape \\ at end of file", []}, Original, Scope, Tokens); tokenize("\\\n" ++ Rest, Line, _Column, Scope, Tokens) -> tokenize_eol(Rest, Line, Scope, Tokens); @@ -620,11 +623,11 @@ tokenize("\r\n" ++ Rest, Line, Column, Scope, Tokens) -> % Others tokenize([$%, $( | Rest], Line, Column, Scope, Tokens) -> - Reason = {Line, Column, "expected %{ to define a map, got: ", [$%, $(]}, + Reason = {?LOC(Line, Column), "expected %{ to define a map, got: ", [$%, $(]}, error(Reason, Rest, Scope, Tokens); tokenize([$%, $[ | Rest], Line, Column, Scope, Tokens) -> - Reason = {Line, Column, "expected %{ to define a map, got: ", [$%, $[]}, + Reason = {?LOC(Line, Column), "expected %{ to define a map, got: ", [$%, $[]}, error(Reason, Rest, Scope, Tokens); tokenize([$%, ${ | T], Line, Column, Scope, Tokens) -> @@ -651,15 +654,15 @@ tokenize(String, Line, Column, OriginalScope, Tokens) -> [$: | T] when hd(T) =/= $: -> AtomName = atom_to_list(Atom) ++ [$:], - Reason = {Line, Column, "keyword argument must be followed by space after: ", AtomName}, + Reason = {?LOC(Line, Column), "keyword argument must be followed by space after: ", AtomName}, error(Reason, String, Scope, Tokens); _ when HasAt -> - Reason = {Line, Column, invalid_character_error(Kind, $@), atom_to_list(Atom)}, + Reason = {?LOC(Line, Column), invalid_character_error(Kind, $@), atom_to_list(Atom)}, error(Reason, String, Scope, Tokens); _ when Atom == '__aliases__'; Atom == '__block__' -> - error({Line, Column, "reserved token: ", atom_to_list(Atom)}, Rest, Scope, Tokens); + error({?LOC(Line, Column), "reserved token: ", atom_to_list(Atom)}, Rest, Scope, Tokens); _ when Kind == alias -> tokenize_alias(Rest, Line, Column, Unencoded, Atom, Length, Ascii, Special, Scope, Tokens); @@ -704,12 +707,12 @@ unexpected_token([T | Rest], Line, Column, Scope, Tokens) -> false -> io_lib:format("\"~ts\" (column ~p, code point U+~4.16.0B)", [[T], Column, T]) end, - error({Line, Column, "unexpected token: ", Message}, Rest, Scope, Tokens). + error({?LOC(Line, Column), "unexpected token: ", Message}, Rest, Scope, Tokens). tokenize_eol(Rest, Line, Scope, Tokens) -> - {StrippedRest, Indentation} = strip_horizontal_space(Rest, 0), - IndentedScope = Scope#elixir_tokenizer{indentation=Indentation}, - tokenize(StrippedRest, Line + 1, Indentation + 1, IndentedScope, Tokens). + {StrippedRest, Column} = strip_horizontal_space(Rest, Scope#elixir_tokenizer.column), + IndentedScope = Scope#elixir_tokenizer{indentation=Column-1}, + tokenize(StrippedRest, Line + 1, Column, IndentedScope, Tokens). 
strip_horizontal_space([H | T], Counter) when ?is_horizontal_space(H) -> strip_horizontal_space(T, Counter + 1); @@ -725,12 +728,12 @@ tokenize_dot(T, Line, Column, DotInfo, Scope, Tokens) -> {Rest, Comment} -> preserve_comments(Line, Column, Tokens, Comment, Rest, Scope), - tokenize_dot(Rest, Line, 1, DotInfo, Scope, Tokens) + tokenize_dot(Rest, Line, Scope#elixir_tokenizer.column, DotInfo, Scope, Tokens) end; {"\r\n" ++ Rest, _} -> - tokenize_dot(Rest, Line + 1, 1, DotInfo, Scope, Tokens); + tokenize_dot(Rest, Line + 1, Scope#elixir_tokenizer.column, DotInfo, Scope, Tokens); {"\n" ++ Rest, _} -> - tokenize_dot(Rest, Line + 1, 1, DotInfo, Scope, Tokens); + tokenize_dot(Rest, Line + 1, Scope#elixir_tokenizer.column, DotInfo, Scope, Tokens); {Rest, Length} -> handle_dot([$. | Rest], Line, Column + Length, DotInfo, Scope, Tokens) end. @@ -769,7 +772,7 @@ handle_heredocs(T, Line, Column, H, Scope, Tokens) -> handle_strings(T, Line, Column, H, Scope, Tokens) -> case future_elixir_interpolation:extract(Line, Column, Scope, true, T, H) of {error, Reason} -> - interpolation_error(Reason, [H | T], Scope, Tokens, " (for string starting at line ~B)", [Line]); + interpolation_error(Reason, [H | T], Scope, Tokens, " (for string starting at line ~B)", [Line], Line, Column-1, [H], [H]); {NewLine, NewColumn, Parts, [$: | Rest], InterScope} when ?is_space(hd(Rest)) -> NewScope = case is_unnecessary_quote(Parts, InterScope) of @@ -795,7 +798,7 @@ handle_strings(T, Line, Column, H, Scope, Tokens) -> Token = {kw_identifier, {Line, Column - 1, nil}, Atom}, tokenize(Rest, NewLine, NewColumn + 1, NewScope, [Token | Tokens]); {error, Reason} -> - {error, Reason, Rest, Tokens} + error(Reason, Rest, NewScope, Tokens) end; {ok, Unescaped} -> @@ -878,7 +881,7 @@ handle_dot([$., T1, T2, T3 | Rest], Line, Column, DotInfo, Scope, Tokens) when handle_dot([$., T1, T2 | Rest], Line, Column, DotInfo, Scope, Tokens) when ?comp_op2(T1, T2); ?rel_op2(T1, T2); ?and_op(T1, T2); ?or_op(T1, T2); ?arrow_op(T1, T2); ?in_match_op(T1, T2); ?concat_op(T1, T2); ?power_op(T1, T2); - ?type_op(T1, T2); ?range_op(T1, T2) -> + ?type_op(T1, T2) -> handle_call_identifier(Rest, Line, Column, DotInfo, 2, [T1, T2], Scope, Tokens); % ## Single Token Operators @@ -920,9 +923,9 @@ handle_dot([$., H | T] = Original, Line, Column, DotInfo, Scope, Tokens) when ?i end; {_NewLine, _NewColumn, _Parts, Rest, NewScope} -> Message = "interpolation is not allowed when calling function/macro. Found interpolation in a call starting with: ", - error({Line, Column, Message, [H]}, Rest, NewScope, Tokens); + error({?LOC(Line, Column), Message, [H]}, Rest, NewScope, Tokens); {error, Reason} -> - interpolation_error(Reason, Original, Scope, Tokens, " (for function name starting at line ~B)", [Line]) + interpolation_error(Reason, Original, Scope, Tokens, " (for function name starting at line ~B)", [Line], Line, Column, [H], [H]) end; handle_dot([$. | Rest], Line, Column, DotInfo, Scope, Tokens) -> @@ -935,17 +938,18 @@ handle_call_identifier(Rest, Line, Column, DotInfo, Length, UnencodedOp, Scope, tokenize(Rest, Line, Column + Length, Scope, [Token | TokensSoFar]). 
% ## Ambiguous unary/binary operators tokens -handle_space_sensitive_tokens([Sign, NotMarker | T], Line, Column, Scope, [{Identifier, _, _} = H | Tokens]) when - ?dual_op(Sign), - not(?is_space(NotMarker)), - NotMarker =/= $(, NotMarker =/= $[, NotMarker =/= $<, NotMarker =/= ${, %% containers - NotMarker =/= $%, NotMarker =/= $+, NotMarker =/= $-, NotMarker =/= $/, NotMarker =/= $>, %% operators - NotMarker =/= $:, %% keywords - Identifier == identifier -> +% Keywords are not ambiguous operators +handle_space_sensitive_tokens([Sign, $:, Space | _] = String, Line, Column, Scope, Tokens) when ?dual_op(Sign), ?is_space(Space) -> + tokenize(String, Line, Column, Scope, Tokens); + +% But everything else, except other operators, are +handle_space_sensitive_tokens([Sign, NotMarker | T], Line, Column, Scope, [{identifier, _, _} = H | Tokens]) when + ?dual_op(Sign), not(?is_space(NotMarker)), NotMarker =/= Sign, NotMarker =/= $/, NotMarker =/= $> -> Rest = [NotMarker | T], DualOpToken = {dual_op, {Line, Column, nil}, list_to_atom([Sign])}, tokenize(Rest, Line, Column + 1, Scope, [DualOpToken, setelement(1, H, op_identifier) | Tokens]); +% Handle cursor completion handle_space_sensitive_tokens([], Line, Column, #elixir_tokenizer{cursor_completion=Cursor} = Scope, [{identifier, Info, Identifier} | Tokens]) when Cursor /= false -> @@ -965,19 +969,18 @@ eol(_Line, _Column, [{eol, {Line, Column, Count}} | Tokens]) -> eol(Line, Column, Tokens) -> [{eol, {Line, Column, 1}} | Tokens]. -is_unnecessary_quote([Part], #elixir_tokenizer{warn_on_unnecessary_quotes=true} = Scope) when is_list(Part) -> +is_unnecessary_quote([Part], Scope) when is_list(Part) -> case (Scope#elixir_tokenizer.identifier_tokenizer):tokenize(Part) of {identifier, _, [], _, true, Special} -> not lists:member(at, Special); _ -> false end; - is_unnecessary_quote(_Parts, _Scope) -> false. 
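+%% [editor's note -- not part of the vendored source] this is the classic
+%% `foo -1` ambiguity. With the rewritten clauses (Elixir syntax):
+%%
+%%   foo - 1    %% space on both sides: a plain dual_op (subtraction)
+%%   foo -1     %% `foo` is re-tagged op_identifier, `-` stays dual_op; the
+%%              %% {ambiguous_op, nil} metadata lets later passes pick
+%%              %% foo(-1) or foo - 1 depending on whether foo is a variable
+%%   foo -: 1   %% new first clause: `-:` opens a keyword key (as in [-: 1]),
+%%              %% so it is never treated as an ambiguous operator
+%%
+%% The rewrite also replaces the long NotMarker blocklist with "anything but
+%% another operator character" (the sign itself, $/ and $>).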
unsafe_to_atom(Part, Line, Column, #elixir_tokenizer{}) when is_binary(Part) andalso byte_size(Part) > 255; is_list(Part) andalso length(Part) > 255 -> - {error, {Line, Column, "atom length must be less than system limit: ", elixir_utils:characters_to_list(Part)}}; + {error, {?LOC(Line, Column), "atom length must be less than system limit: ", elixir_utils:characters_to_list(Part)}}; unsafe_to_atom(Part, Line, Column, #elixir_tokenizer{static_atoms_encoder=StaticAtomsEncoder}) when is_function(StaticAtomsEncoder) -> Value = elixir_utils:characters_to_binary(Part), @@ -985,13 +988,13 @@ unsafe_to_atom(Part, Line, Column, #elixir_tokenizer{static_atoms_encoder=Static {ok, Term} -> {ok, Term}; {error, Reason} when is_binary(Reason) -> - {error, {Line, Column, elixir_utils:characters_to_list(Reason) ++ ": ", elixir_utils:characters_to_list(Part)}} + {error, {?LOC(Line, Column), elixir_utils:characters_to_list(Reason) ++ ": ", elixir_utils:characters_to_list(Part)}} end; unsafe_to_atom(Binary, Line, Column, #elixir_tokenizer{existing_atoms_only=true}) when is_binary(Binary) -> try {ok, binary_to_existing_atom(Binary, utf8)} catch - error:badarg -> {error, {Line, Column, "unsafe atom does not exist: ", elixir_utils:characters_to_list(Binary)}} + error:badarg -> {error, {?LOC(Line, Column), "unsafe atom does not exist: ", elixir_utils:characters_to_list(Binary)}} end; unsafe_to_atom(Binary, _Line, _Column, #elixir_tokenizer{}) when is_binary(Binary) -> {ok, binary_to_atom(Binary, utf8)}; @@ -999,7 +1002,7 @@ unsafe_to_atom(List, Line, Column, #elixir_tokenizer{existing_atoms_only=true}) try {ok, list_to_existing_atom(List)} catch - error:badarg -> {error, {Line, Column, "unsafe atom does not exist: ", List}} + error:badarg -> {error, {?LOC(Line, Column), "unsafe atom does not exist: ", List}} end; unsafe_to_atom(List, _Line, _Column, #elixir_tokenizer{}) when is_list(List) -> {ok, list_to_atom(List)}. @@ -1028,12 +1031,12 @@ extract_heredoc_with_interpolation(Line, Column, Scope, Interpol, T, H) -> {ok, NewLine, NewColumn, tokens_to_binary(Parts2), Rest, NewScope}; {error, Reason} -> - {error, interpolation_format(Reason, " (for heredoc starting at line ~B)", [Line])} + {error, interpolation_format(Reason, " (for heredoc starting at line ~B)", [Line], Line, Column, [H, H, H], [H, H, H])} end; error -> Message = "heredoc allows only whitespace characters followed by a new line after opening ", - {error, {Line, Column + 3, io_lib:format(Message, []), [H, H, H]}} + {error, {?LOC(Line, Column + 3), io_lib:format(Message, []), [H, H, H]}} end. extract_heredoc_header("\r\n" ++ Rest) -> @@ -1094,7 +1097,7 @@ unescape_tokens(Tokens, Line, Column, #elixir_tokenizer{unescape=true}) -> {ok, Result}; {error, Message, Token} -> - {error, {Line, Column, Message ++ ". Syntax error after: ", Token}} + {error, {?LOC(Line, Column), Message ++ ". Syntax error after: ", Token}} end; unescape_tokens(Tokens, _Line, _Column, #elixir_tokenizer{unescape=false}) -> {ok, tokens_to_binary(Tokens)}. @@ -1191,7 +1194,7 @@ tokenize_comment([], Acc) -> error_comment(H, Comment, Line, Column, Scope, Tokens) -> Token = io_lib:format("\\u~4.16.0B", [H]), - Reason = {Line, Column, "invalid bidirectional formatting character in comment: ", Token}, + Reason = {?LOC(Line, Column), "invalid bidirectional formatting character in comment: ", Token}, error(Reason, Comment, Scope, Tokens). 
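+%% [editor's note -- not part of the vendored source] unsafe_to_atom/4 above
+%% is the single funnel for tokenizer-created atoms, protecting the BEAM atom
+%% table from hostile input. Sketch of the three failure modes (locations now
+%% rendered via ?LOC as proplists):
+%%
+%%   names longer than 255 characters   -> "atom length must be less than ..."
+%%   existing_atoms_only=true + unknown -> "unsafe atom does not exist: ..."
+%%   a static_atoms_encoder returning {error, Reason} -> Reason is reported
+%%
+%% Everything else falls through to plain binary_to_atom/list_to_atom.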
preserve_comments(Line, Column, Tokens, Comment, Rest, Scope) -> @@ -1244,10 +1247,10 @@ tokenize_identifier(String, Line, Column, Scope, MaybeKeyword) -> no_suggestion -> %% we append a pointer to more info if we aren't appending a suggestion MoreInfo = "\nSee https://hexdocs.pm/elixir/unicode-syntax.html for more information.", - {error, {Line, Column, {Prefix, Suffix ++ MoreInfo}, Wrong}}; + {error, {?LOC(Line, Column), {Prefix, Suffix ++ MoreInfo}, Wrong}}; - {_, {Line, WrongColumn, _, SuggestionMessage}} = _SuggestionError -> - {error, {Line, WrongColumn, {Prefix, Suffix ++ SuggestionMessage}, Wrong}} + {_, {Location, _, SuggestionMessage}} = _SuggestionError -> + {error, {Location, {Prefix, Suffix ++ SuggestionMessage}, Wrong}} end; {error, {unexpected_token, Wrong}} -> @@ -1281,7 +1284,7 @@ suggest_simpler_unexpected_token_in_error(Wrong, Line, WrongColumn, Scope) -> "You could write the above in a similar way that is accepted by Elixir:", Simpler, "See https://hexdocs.pm/elixir/unicode-syntax.html for more information."), - {error, {Line, WrongColumn, "unexpected token: ", Message}}; + {error, {?LOC(Line, WrongColumn), "unexpected token: ", Message}}; _other -> no_suggestion end; @@ -1291,7 +1294,7 @@ suggest_simpler_unexpected_token_in_error(Wrong, Line, WrongColumn, Scope) -> "You could write the above in a compatible format that is accepted by Elixir:", NFKC, "See https://hexdocs.pm/elixir/unicode-syntax.html for more information."), - {error, {Line, WrongColumn, "unexpected token: ", Message}} + {error, {?LOC(Line, WrongColumn), "unexpected token: ", Message}} end. suggest_change(Intro, WrongForm, Hint, HintedForm, Ending) -> @@ -1313,7 +1316,7 @@ tokenize_alias(Rest, Line, Column, Unencoded, Atom, Length, Ascii, Special, Scop if not Ascii or (Special /= []) -> Invalid = hd([C || C <- Unencoded, (C < $A) or (C > 127)]), - Reason = {Line, Column, invalid_character_error("alias (only ASCII characters, without punctuation, are allowed)", Invalid), Unencoded}, + Reason = {?LOC(Line, Column), invalid_character_error("alias (only ASCII characters, without punctuation, are allowed)", Invalid), Unencoded}, error(Reason, Unencoded ++ Rest, Scope, Tokens); true -> @@ -1341,12 +1344,20 @@ previous_was_eol(_) -> nil. %% Error handling -interpolation_error(Reason, Rest, Scope, Tokens, Extension, Args) -> - error(interpolation_format(Reason, Extension, Args), Rest, Scope, Tokens). - -interpolation_format({string, Line, Column, Message, Token}, Extension, Args) -> - {Line, Column, [Message, io_lib:format(Extension, Args)], Token}; -interpolation_format({_, _, _, _} = Reason, _Extension, _Args) -> +interpolation_error(Reason, Rest, Scope, Tokens, Extension, Args, Line, Column, Opening, Closing) -> + error(interpolation_format(Reason, Extension, Args, Line, Column, Opening, Closing), Rest, Scope, Tokens). + +interpolation_format({string, EndLine, EndColumn, Message, Token}, Extension, Args, Line, Column, Opening, Closing) -> + Meta = [ + {opening_delimiter, list_to_atom(Opening)}, + {expected_delimiter, list_to_atom(Closing)}, + {line, Line}, + {column, Column}, + {end_line, EndLine}, + {end_column, EndColumn} + ], + {Meta, [Message, io_lib:format(Extension, Args)], Token}; +interpolation_format({_, _, _} = Reason, _Extension, _Args, _Line, _Column, _Opening, _Closing) -> Reason. 
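+%% [editor's note -- not part of the vendored source] unterminated strings,
+%% sigils and heredocs now report both delimiters plus the full span. For an
+%% input like "\"abc" the reason Meta is, illustratively:
+%%
+%%   [{opening_delimiter, '"'}, {expected_delimiter, '"'},
+%%    {line, 1}, {column, 1}, {end_line, 1}, {end_column, 5}]
+%%
+%% future_elixir_errors:parse_error/5 keeps this Meta intact, which is what
+%% drives the snippet rendering and the MismatchedDelimiterError on the
+%% Elixir side.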
%% Terminators @@ -1365,7 +1376,7 @@ handle_terminator(Rest, _, _, Scope, {'(', {Line, Column, _}}, [{alias, _, Alias [Alias] ), - error({Line, Column, Reason, ["("]}, atom_to_list(Alias) ++ [$( | Rest], Scope, Tokens); + error({?LOC(Line, Column), Reason, ["("]}, atom_to_list(Alias) ++ [$( | Rest], Scope, Tokens); handle_terminator(Rest, Line, Column, #elixir_tokenizer{terminators=none} = Scope, Token, Tokens) -> tokenize(Rest, Line, Column, Scope, [Token | Tokens]); handle_terminator(Rest, Line, Column, Scope, Token, Tokens) -> @@ -1378,12 +1389,12 @@ handle_terminator(Rest, Line, Column, Scope, Token, Tokens) -> tokenize(Rest, Line, Column, New, [Token | Tokens]) end. -check_terminator({Start, {Line, _, _}}, Terminators, Scope) +check_terminator({Start, Meta}, Terminators, Scope) when Start == '('; Start == '['; Start == '{'; Start == '<<' -> Indentation = Scope#elixir_tokenizer.indentation, - {ok, Scope#elixir_tokenizer{terminators=[{Start, Line, Indentation} | Terminators]}}; + {ok, Scope#elixir_tokenizer{terminators=[{Start, Meta, Indentation} | Terminators]}}; -check_terminator({Start, {Line, _, _}}, Terminators, Scope) when Start == 'fn'; Start == 'do' -> +check_terminator({Start, Meta}, Terminators, Scope) when Start == 'fn'; Start == 'do' -> Indentation = Scope#elixir_tokenizer.indentation, NewScope = @@ -1396,7 +1407,7 @@ check_terminator({Start, {Line, _, _}}, Terminators, Scope) when Start == 'fn'; Scope end, - {ok, NewScope#elixir_tokenizer{terminators=[{Start, Line, Indentation} | Terminators]}}; + {ok, NewScope#elixir_tokenizer{terminators=[{Start, Meta, Indentation} | Terminators]}}; check_terminator({'end', {EndLine, _, _}}, [{'do', _, Indentation} | Terminators], Scope) -> NewScope = @@ -1412,36 +1423,41 @@ check_terminator({'end', {EndLine, _, _}}, [{'do', _, Indentation} | Terminators {ok, NewScope#elixir_tokenizer{terminators=Terminators}}; -check_terminator({End, {EndLine, EndColumn, _}}, [{Start, StartLine, _} | Terminators], Scope) +check_terminator({End, {EndLine, EndColumn, _}}, [{Start, {StartLine, StartColumn, _}, _} | Terminators], Scope) when End == 'end'; End == ')'; End == ']'; End == '}'; End == '>>' -> case terminator(Start) of End -> {ok, Scope#elixir_tokenizer{terminators=Terminators}}; ExpectedEnd -> - Suffix = - io_lib:format( - "\n\n HINT: the \"~ts\" on line ~B is missing terminator \"~ts\"\n", - [Start, StartLine, ExpectedEnd] - ), - {error, {EndLine, EndColumn, {unexpected_token_or_reserved(End), Suffix}, [atom_to_list(End)]}} + Meta = [ + {line, StartLine}, + {column, StartColumn}, + {end_line, EndLine}, + {end_column, EndColumn}, + {error_type, mismatched_delimiter}, + {opening_delimiter, Start}, + {closing_delimiter, End}, + {expected_delimiter, ExpectedEnd} + ], + {error, {Meta, unexpected_token_or_reserved(End), [atom_to_list(End)]}} end; check_terminator({'end', {Line, Column, _}}, [], #elixir_tokenizer{mismatch_hints=Hints}) -> Suffix = case lists:keyfind('end', 1, Hints) of {'end', HintLine, _Identation} -> - io_lib:format("\n\n HINT: the \"end\" on line ~B may not have a matching \"do\" " - "defined before it (based on indentation)\n", [HintLine]); + io_lib:format("\n~ts the \"end\" on line ~B may not have a matching \"do\" " + "defined before it (based on indentation)", [future_elixir_errors:prefix(hint), HintLine]); false -> "" end, - {error, {Line, Column, {"unexpected reserved word: ", Suffix}, "end"}}; + {error, {?LOC(Line, Column), {"unexpected reserved word: ", Suffix}, "end"}}; check_terminator({End, {Line, Column, _}}, [], 
_Scope) when End == ')'; End == ']'; End == '}'; End == '>>' -> - {error, {Line, Column, "unexpected token: ", atom_to_list(End)}}; + {error, {?LOC(Line, Column), "unexpected token: ", atom_to_list(End)}}; check_terminator(_, _, Scope) -> {ok, Scope}. @@ -1451,9 +1467,9 @@ unexpected_token_or_reserved(_) -> "unexpected token: ". missing_terminator_hint(Start, End, #elixir_tokenizer{mismatch_hints=Hints}) -> case lists:keyfind(Start, 1, Hints) of - {Start, HintLine, _} -> - io_lib:format("\n\n HINT: it looks like the \"~ts\" on line ~B does not have a matching \"~ts\"\n", - [Start, HintLine, End]); + {Start, {HintLine, _, _}, _} -> + io_lib:format("\n~ts it looks like the \"~ts\" on line ~B does not have a matching \"~ts\"", + [future_elixir_errors:prefix(hint), Start, HintLine, End]); false -> "" end. @@ -1505,7 +1521,7 @@ tokenize_keyword(terminator, Rest, Line, Column, Atom, Length, Scope, Tokens) -> {ok, [Check | T]} -> handle_terminator(Rest, Line, Column + Length, Scope, Check, T); {error, Message, Token} -> - error({Line, Column, Message, Token}, Token ++ Rest, Scope, Tokens) + error({?LOC(Line, Column), Message, Token}, Token ++ Rest, Scope, Tokens) end; tokenize_keyword(token, Rest, Line, Column, Atom, Length, Scope, Tokens) -> @@ -1540,7 +1556,7 @@ tokenize_sigil([$~ | T], Line, Column, Scope, Tokens) -> tokenize_sigil_contents(Rest, Name, NewLine, NewColumn, NewScope, NewTokens); {error, Message, Token} -> - Reason = {Line, Column, Message, Token}, + Reason = {?LOC(Line, Column), Message, Token}, error(Reason, T, Scope, Tokens) end. @@ -1563,12 +1579,8 @@ tokenize_sigil_contents([H, H, H | T] = Original, [S | _] = SigilName, Line, Col when ?is_quote(H) -> case extract_heredoc_with_interpolation(Line, Column, Scope, ?is_downcase(S), T, H) of {ok, NewLine, NewColumn, Parts, Rest, NewScope} -> - {Final, Modifiers} = collect_modifiers(Rest, []), Indentation = NewColumn - 4, - TokenColumn = Column - 1 - length(SigilName), - Token = {sigil, {Line, TokenColumn, nil}, SigilName, Parts, Modifiers, Indentation, <>}, - NewColumnWithModifiers = NewColumn + length(Modifiers), - tokenize(Final, NewLine, NewColumnWithModifiers, NewScope, [Token | Tokens]); + add_sigil_token(SigilName, Line, Column, NewLine, NewColumn, Parts, Rest, NewScope, Tokens, Indentation, <>); {error, Reason} -> error(Reason, [$~] ++ SigilName ++ Original, Scope, Tokens) @@ -1578,17 +1590,13 @@ tokenize_sigil_contents([H | T] = Original, [S | _] = SigilName, Line, Column, S when ?is_sigil(H) -> case future_elixir_interpolation:extract(Line, Column + 1, Scope, ?is_downcase(S), T, sigil_terminator(H)) of {NewLine, NewColumn, Parts, Rest, NewScope} -> - {Final, Modifiers} = collect_modifiers(Rest, []), Indentation = nil, - TokenColumn = Column - 1 - length(SigilName), - Token = {sigil, {Line, TokenColumn, nil}, SigilName, tokens_to_binary(Parts), Modifiers, Indentation, <>}, - NewColumnWithModifiers = NewColumn + length(Modifiers), - tokenize(Final, NewLine, NewColumnWithModifiers, NewScope, [Token | Tokens]); + add_sigil_token(SigilName, Line, Column, NewLine, NewColumn, tokens_to_binary(Parts), Rest, NewScope, Tokens, Indentation, <>); {error, Reason} -> Sigil = [$~, S, H], Message = " (for sigil ~ts starting at line ~B)", - interpolation_error(Reason, [$~] ++ SigilName ++ Original, Scope, Tokens, Message, [Sigil, Line]) + interpolation_error(Reason, [$~] ++ SigilName ++ Original, Scope, Tokens, Message, [Sigil, Line], Line, Column, [H], [sigil_terminator(H)]) end; tokenize_sigil_contents([H | _] = Original, SigilName, 
Line, Column, Scope, Tokens) -> @@ -1597,12 +1605,30 @@ tokenize_sigil_contents([H | _] = Original, SigilName, Line, Column, Scope, Toke "//, ||, \"\", '', (), [], {}, <>", Message = io_lib:format(MessageString, [[H], Column, H]), ErrorColumn = Column - 1 - length(SigilName), - error({Line, ErrorColumn, "invalid sigil delimiter: ", Message}, [$~] ++ SigilName ++ Original, Scope, Tokens); + error({?LOC(Line, ErrorColumn), "invalid sigil delimiter: ", Message}, [$~] ++ SigilName ++ Original, Scope, Tokens); % Incomplete sigil. tokenize_sigil_contents([], _SigilName, Line, Column, Scope, Tokens) -> tokenize([], Line, Column, Scope, Tokens). +add_sigil_token(SigilName, Line, Column, NewLine, NewColumn, Parts, Rest, Scope, Tokens, Indentation, Delimiter) -> + TokenColumn = Column - 1 - length(SigilName), + MaybeEncoded = case SigilName of + % Single-letter sigils present no risk of atom exhaustion (limited possibilities) + [_Char] -> {ok, list_to_atom("sigil_" ++ SigilName)}; + _ -> unsafe_to_atom("sigil_" ++ SigilName, Line, TokenColumn, Scope) + end, + case MaybeEncoded of + {ok, Atom} -> + {Final, Modifiers} = collect_modifiers(Rest, []), + Token = {sigil, {Line, TokenColumn, nil}, Atom, Parts, Modifiers, Indentation, Delimiter}, + NewColumnWithModifiers = NewColumn + length(Modifiers), + tokenize(Final, NewLine, NewColumnWithModifiers, Scope, [Token | Tokens]); + + {error, Reason} -> + error(Reason, Rest, Scope, Tokens) + end. + %% Fail early on invalid do syntax. For example, after %% most keywords, after comma and so on. tokenize_keyword_terminator(DoLine, DoColumn, do, [{identifier, {Line, Column, Meta}, Atom} | T]) -> @@ -1659,12 +1685,10 @@ invalid_do_with_fn_error(Prefix) -> % TODO: Turn into an error on v2.0 maybe_warn_too_many_of_same_char([T | _] = Token, [T | _] = _Rest, Line, Column, Scope) -> - Warning = - case T of - $. -> "please use parens around \"...\" instead"; - _ -> io_lib:format("please use a space between \"~ts\" and the next \"~ts\"", [Token, [T]]) - end, - Message = io_lib:format("found \"~ts\" followed by \"~ts\", ~ts", [Token, [T], Warning]), + Message = io_lib:format( + "found \"~ts\" followed by \"~ts\", please use a space between \"~ts\" and the next \"~ts\"", + [Token, [T], Token, [T]] + ), prepend_warning(Line, Column, Message, Scope); maybe_warn_too_many_of_same_char(_Token, _Rest, _Line, _Column, Scope) -> Scope. @@ -1706,19 +1730,15 @@ error(Reason, Rest, #elixir_tokenizer{warnings=Warnings}, Tokens) -> %% Cursor handling -cursor_complete(Line, Column, Terminators, Tokens) -> - {AccTokens, _} = - lists:foldl( - fun({Start, _, _}, {NewTokens, NewColumn}) -> - End = terminator(Start), - AccTokens = [{End, {Line, NewColumn, nil}} | NewTokens], - AccColumn = NewColumn + length(erlang:atom_to_list(End)), - {AccTokens, AccColumn} - end, - {Tokens, Column}, - Terminators - ), - lists:reverse(AccTokens). +cursor_complete(Line, Column, Terminators) -> + lists:mapfoldl( + fun({Start, _, _}, AccColumn) -> + End = terminator(Start), + {{End, {Line, AccColumn, nil}}, AccColumn + length(erlang:atom_to_list(End))} + end, + Column, + Terminators + ). 
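+%% [editor's note -- not part of the vendored source] cursor completion
+%% synthesizes the missing closers so mid-edit code still tokenizes. After
+%% the rewrite, cursor_complete/3 is a plain mapfoldl over the open
+%% terminator stack, threading the column forward. With ['(', 'fn'] open
+%% (innermost first) it yields, illustratively:
+%%
+%%   {[{')', {Line, Col, nil}}, {'end', {Line, Col + 1, nil}}], Col + 4}
+%%
+%% i.e. ")" at the cursor column and "end" immediately after it.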
add_cursor(_Line, Column, noprune, Terminators, Tokens) -> {Column, Terminators, Tokens}; @@ -1761,8 +1781,6 @@ prune_tokens([{'{', _} | Tokens], ['}' | Opener], Terminators) -> prune_tokens([{'<<', _} | Tokens], ['>>' | Opener], Terminators) -> prune_tokens(Tokens, Opener, Terminators); %%% Handle anonymous functions -prune_tokens(Tokens, [], [{'fn', _, _} | Terminators]) -> - prune_tokens(drop_including(Tokens, 'fn'), [], Terminators); prune_tokens([{'(', _}, {capture_op, _, _} | Tokens], [], [{'(', _, _} | Terminators]) -> prune_tokens(Tokens, [], Terminators); %%% or it is time to stop... @@ -1772,6 +1790,8 @@ prune_tokens([{'eol', _} | _] = Tokens, [], Terminators) -> {Tokens, Terminators}; prune_tokens([{',', _} | _] = Tokens, [], Terminators) -> {Tokens, Terminators}; +prune_tokens([{'fn', _} | _] = Tokens, [], Terminators) -> + {Tokens, Terminators}; prune_tokens([{'do', _} | _] = Tokens, [], Terminators) -> {Tokens, Terminators}; prune_tokens([{'(', _} | _] = Tokens, [], Terminators) -> @@ -1798,14 +1818,10 @@ prune_tokens([{OpType, _, _} | _] = Tokens, [], Terminators) OpType =:= in_match_op; OpType =:= type_op; OpType =:= dual_op; OpType =:= mult_op; OpType =:= power_op; OpType =:= concat_op; OpType =:= range_op; OpType =:= xor_op; OpType =:= pipe_op; OpType =:= stab_op; OpType =:= when_op; OpType =:= assoc_op; - OpType =:= rel_op; OpType =:= ternary_op; OpType =:= capture_op -> + OpType =:= rel_op; OpType =:= ternary_op; OpType =:= capture_op; OpType =:= ellipsis_op -> {Tokens, Terminators}; %%% or we traverse until the end. prune_tokens([_ | Tokens], Opener, Terminators) -> prune_tokens(Tokens, Opener, Terminators); prune_tokens([], [], Terminators) -> {[], Terminators}. - -drop_including([{Token, _} | Tokens], Token) -> Tokens; -drop_including([_ | Tokens], Token) -> drop_including(Tokens, Token); -drop_including([], _Token) -> []. diff --git a/apps/common/src/future_elixir_tokenizer.hrl b/apps/common/src/future_elixir_tokenizer.hrl index 84a37922f..00caf1830 100644 --- a/apps/common/src/future_elixir_tokenizer.hrl +++ b/apps/common/src/future_elixir_tokenizer.hrl @@ -1,4 +1,4 @@ -%% Copied from https://github.com/elixir-lang/elixir/blob/bacea2cef6323d0ede4222f36ddcedd82cb514e4/lib/elixir/src/elixir_tokenizer.hrl +%% Copied from https://github.com/elixir-lang/elixir/blob/d7ea2fa2e4e5de1990297be19495fc93740b2e8b/lib/elixir/src/elixir_tokenizer.hrl %% Numbers -define(is_hex(S), (?is_digit(S) orelse (S >= $A andalso S =< $F) orelse (S >= $a andalso S =< $f))). -define(is_bin(S), (S >= $0 andalso S =< $1)). @@ -13,6 +13,7 @@ -define(is_quote(S), (S =:= $" orelse S =:= $')). -define(is_sigil(S), (S =:= $/ orelse S =:= $< orelse S =:= $" orelse S =:= $' orelse S =:= $[ orelse S =:= $( orelse S =:= ${ orelse S =:= $|)). +-define(LOC(Line, Column), [{line, Line}, {column, Column}]). %% Spaces -define(is_horizontal_space(S), (S =:= $\s orelse S =:= $\t)). @@ -30,3 +31,4 @@ C =:= 16#2068; C =:= 16#202C; C =:= 16#2069). 
+ diff --git a/apps/common/test/lexical/ast/aliases_test.exs b/apps/common/test/lexical/ast/aliases_test.exs deleted file mode 100644 index 4dff32b3e..000000000 --- a/apps/common/test/lexical/ast/aliases_test.exs +++ /dev/null @@ -1,397 +0,0 @@ -defmodule Lexical.Ast.AliasesTest do - alias Lexical.Ast.Aliases - - import Lexical.Test.CursorSupport - import Lexical.Test.CodeSigil - - use ExUnit.Case - - def aliases_at_cursor(text) do - {position, document} = pop_cursor(text, as: :document) - Aliases.at(document, position) - end - - describe "top level aliases" do - test "a useless alias" do - {:ok, aliases} = - ~q[ - alias Foo - | - ] - |> aliases_at_cursor() - - assert aliases[:Foo] == Foo - end - - test "an alias outside of a module" do - {:ok, aliases} = - ~q[ - alias Foo.Bar.Baz - defmodule Parent do - | - end - ] - |> aliases_at_cursor() - - assert aliases[:Baz] == Foo.Bar.Baz - end - - test "an alias inside the body of a module" do - {:ok, aliases} = - ~q[ - defmodule Basic do - alias Foo.Bar - | - end - ] - |> aliases_at_cursor() - - assert aliases == %{__MODULE__: Basic, Bar: Foo.Bar, Basic: Basic} - end - - test "an alias using as" do - {:ok, aliases} = - ~q[ - defmodule TopLevel do - alias Foo.Bar, as: FooBar - | - end - ] - |> aliases_at_cursor() - - assert aliases[:__MODULE__] == TopLevel - assert aliases[:FooBar] == Foo.Bar - end - - test "multiple aliases off of single alias" do - {:ok, aliases} = - ~q[ - defmodule TopLevel do - alias Foo.{First, Second, Third.Fourth} - | - end - ] - |> aliases_at_cursor() - - assert aliases[:First] == Foo.First - assert aliases[:Second] == Foo.Second - assert aliases[:Fourth] == Foo.Third.Fourth - end - - test "multiple aliases off of nested alias" do - {:ok, aliases} = - ~q[ - defmodule TopLevel do - alias Foo.Bar.{First, Second, Third.Fourth} - | - end - ] - |> aliases_at_cursor() - - assert aliases[:First] == Foo.Bar.First - assert aliases[:Second] == Foo.Bar.Second - assert aliases[:Fourth] == Foo.Bar.Third.Fourth - end - - test "aliasing __MODULE__" do - {:ok, aliases} = - ~q[ - defmodule Something.Is.Nested do - alias __MODULE__| - - end - ] - |> aliases_at_cursor() - - assert aliases[:Nested] == Something.Is.Nested - end - - test "multiple aliases leading by current module" do - {:ok, aliases} = - ~q[ - defmodule TopLevel do - alias __MODULE__.{First, Second} - | - end - ] - |> aliases_at_cursor() - - assert aliases[:First] == TopLevel.First - assert aliases[:Second] == TopLevel.Second - end - - test "multiple aliases leading by current module's child" do - {:ok, aliases} = - ~q[ - defmodule TopLevel do - alias __MODULE__.Child.{First, Second} - | - end - ] - |> aliases_at_cursor() - - assert aliases[:First] == TopLevel.Child.First - assert aliases[:Second] == TopLevel.Child.Second - end - - test "aliases expanding other aliases" do - {:ok, aliases} = - ~q[ - alias Foo.Bar.Baz - alias Baz.Quux| - ] - |> aliases_at_cursor() - - assert aliases[:Baz] == Foo.Bar.Baz - assert aliases[:Quux] == Foo.Bar.Baz.Quux - end - - test "aliases expanding current module" do - {:ok, aliases} = ~q[ - defmodule TopLevel do - alias __MODULE__.Foo| - end - ] |> aliases_at_cursor() - - assert aliases[:Foo] == TopLevel.Foo - end - - test "aliases expanding current module using as" do - {:ok, aliases} = ~q[ - defmodule TopLevel do - alias __MODULE__.Foo|, as: OtherAlias - end - ] |> aliases_at_cursor() - - assert aliases[:OtherAlias] == TopLevel.Foo - end - - test "allows overrides" do - {:ok, aliases} = - ~q[ - alias Foo.Bar.Baz - alias Other.Baz - ] - |> 
aliases_at_cursor() - - assert aliases[:Baz] == Other.Baz - end - end - - describe "nested modules" do - test "no aliases are defined for modules with dots" do - {:ok, aliases} = - ~q[ - defmodule GrandParent.Parent.Child do - | - end - ] - |> aliases_at_cursor() - - refute Map.has_key?(aliases, :Child) - end - - test "with children get their parents name" do - {:ok, aliases} = - ~q[ - defmodule Grandparent.Parent do - defmodule Child do - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:Child] == Grandparent.Parent.Child - assert aliases[:__MODULE__] == Grandparent.Parent.Child - end - - test "with a child that has an explicit parent" do - {:ok, aliases} = - ~q[ - defmodule Parent do - defmodule __MODULE__.Child do - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:__MODULE__] == Parent.Child - end - end - - describe "alias scopes" do - test "aliases are removed when leaving a module" do - {:ok, aliases} = - ~q[ - defmodule Basic do - alias Foo.Bar - end| - ] - |> aliases_at_cursor() - - assert aliases == %{Basic: Basic, __MODULE__: nil} - end - - test "aliases inside of nested modules" do - {:ok, aliases} = - ~q[ - defmodule Parent do - alias Foo.Grandparent - - defmodule Child do - alias Foo.Something - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:Grandparent] == Foo.Grandparent - assert aliases[:Something] == Foo.Something - assert aliases[:__MODULE__] == Parent.Child - assert aliases[:Child] == Parent.Child - end - - test "multiple nested module are aliased after definition" do - {:ok, aliases} = - ~q[ - defmodule Parent do - alias Foo.Grandparent - - defmodule Child do - alias Foo.Something - end - - defmodule AnotherChild do - alias Foo.Something - end - | - end - ] - |> aliases_at_cursor() - - assert aliases[:AnotherChild] == Parent.AnotherChild - assert aliases[:Child] == Parent.Child - end - - test "an alias defined in a named function" do - {:ok, aliases} = - ~q[ - defmodule Parent do - def fun do - alias Foo.Parent - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:Parent] == Foo.Parent - end - - test "an alias defined in a named function doesn't leak" do - {:ok, aliases} = - ~q[ - defmodule Parent do - def fun do - alias Foo.Parent - end| - end - ] - |> aliases_at_cursor() - - assert aliases[:Parent] == Parent - end - - test "an alias defined in a private named function" do - {:ok, aliases} = - ~q[ - defmodule Parent do - defp fun do - alias Foo.Parent - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:Parent] == Foo.Parent - end - - test "an alias defined in a private named function doesn't leak" do - {:ok, aliases} = - ~q[ - defmodule Parent do - defp fun do - alias Foo.InFun - end| - end - ] - |> aliases_at_cursor() - - refute aliases[:InFun] - end - - test "an alias defined in a DSL" do - {:ok, aliases} = - ~q[ - defmodule Parent do - my_dsl do - alias Foo.Parent - | - end - end - ] - |> aliases_at_cursor() - - assert aliases[:Parent] == Foo.Parent - end - - test "an alias defined in a DSL does not leak" do - {:ok, aliases} = - ~q[ - defmodule Parent do - my_dsl do - alias Foo.InDSL - end - | - end - ] - |> aliases_at_cursor() - - refute aliases[InDsl] - end - - test "sibling modules with nested blocks" do - {:ok, aliases} = - ~q[ - defmodule First do - defstuff do - field :x - end - end - - defmodule Second do - defstuff do - field :y - end - end - | - ] - |> aliases_at_cursor() - - assert aliases[:First] == First - assert aliases[:Second] == Second - end - - # Note: it looks like 
Code.container_cursor_to_quoted doesn't work with - # anonymous functions - @tag :skip - test "an alias defined in a anonymous function" - - @tag :skip - test "an alias defined in a anonymous function doesn't leak" - end -end diff --git a/apps/common/test/lexical/ast/detection/alias_test.exs b/apps/common/test/lexical/ast/detection/alias_test.exs new file mode 100644 index 000000000..43c93ccfd --- /dev/null +++ b/apps/common/test/lexical/ast/detection/alias_test.exs @@ -0,0 +1,7 @@ +defmodule Lexical.Ast.Detection.AliasTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Alias, + assertions: [[:alias, :*]] +end diff --git a/apps/common/test/lexical/ast/detection/bitstring_test.exs b/apps/common/test/lexical/ast/detection/bitstring_test.exs new file mode 100644 index 000000000..4ac1ee531 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/bitstring_test.exs @@ -0,0 +1,6 @@ +defmodule Lexical.Ast.Detection.BitstringTest do + use Lexical.Test.DetectionCase, + for: Lexical.Ast.Detection.Bitstring, + assertions: [[:bitstring, :*]], + variations: [:match, :function_arguments, :function_body] +end diff --git a/apps/common/test/lexical/ast/detection/comment_test.exs b/apps/common/test/lexical/ast/detection/comment_test.exs new file mode 100644 index 000000000..acc11b366 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/comment_test.exs @@ -0,0 +1,7 @@ +defmodule Lexical.Ast.Detection.CommentTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Comment, + assertions: [[:comment, :*]] +end diff --git a/apps/common/test/lexical/ast/detection/function_capture_test.exs b/apps/common/test/lexical/ast/detection/function_capture_test.exs new file mode 100644 index 000000000..5031c9369 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/function_capture_test.exs @@ -0,0 +1,32 @@ +defmodule Lexical.Ast.Detection.FunctionCaptureTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.FunctionCapture, + assertions: [[:function_capture, :*]], + variations: [:match, :function_body] + + test "is detected if the capture is inside an unformatted function call" do + assert_detected ~q[list = Enum.map(1..10,&«Enum»)] + end + + test "is detected if the capture is inside a function call after the dot" do + assert_detected ~q[list = Enum.map(1..10, &«Enum.f»)] + end + + test "is detected if the capture is in the body of a for" do + assert_detected ~q[for x <- Enum.map(1..10, &«String.»)] + end + + test "is not detected if the capture is inside an unformatted function call" do + refute_detected ~q[list = Enum.map(1..10,Enum)] + end + + test "is not detected if the capture is inside a function call after the dot" do + refute_detected ~q[list = Enum.map(1..10, Enum.f)] + end + + test "is not detected if the capture is in the body of a for" do + refute_detected ~q[for x <- Enum.map(1..10, String.)] + end +end diff --git a/apps/common/test/lexical/ast/detection/import_test.exs b/apps/common/test/lexical/ast/detection/import_test.exs new file mode 100644 index 000000000..ad578d258 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/import_test.exs @@ -0,0 +1,16 @@ +defmodule Lexical.Ast.Detection.ImportTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Import, + assertions: [[:import, :*]] + + test "works on multiple lines" do + assert_detected ~q( + import« Some.Module, only: »[ + foo: 3, + bar: 6 + ] + ) + end +end diff --git
a/apps/common/test/lexical/ast/detection/module_attribute_test.exs b/apps/common/test/lexical/ast/detection/module_attribute_test.exs new file mode 100644 index 000000000..68c6249b4 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/module_attribute_test.exs @@ -0,0 +1,17 @@ +defmodule Lexical.Ast.Detection.ModuleAttributeTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.ModuleAttribute, + assertions: [ + [:module_attribute, :*], + [:callbacks, :*] + ], + skip: [ + [:doc, :*], + [:module_doc, :*], + [:spec, :*], + [:type, :*] + ], + variations: [:module] +end diff --git a/apps/common/test/lexical/ast/detection/pipe_test.exs b/apps/common/test/lexical/ast/detection/pipe_test.exs new file mode 100644 index 000000000..ef0c259a2 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/pipe_test.exs @@ -0,0 +1,13 @@ +defmodule Lexical.Ast.Detection.PipeTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Pipe, + assertions: [[:pipe, :*]], + variations: [:function_arguments], + skip: [[:module_attribute, :multi_line_pipe]] + + test "is false if there is no pipe in the string" do + refute_detected ~q[Enum.foo] + end +end diff --git a/apps/common/test/lexical/ast/detection/require_test.exs b/apps/common/test/lexical/ast/detection/require_test.exs new file mode 100644 index 000000000..f57d81d97 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/require_test.exs @@ -0,0 +1,7 @@ +defmodule Lexical.Ast.Detection.RequireTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Require, + assertions: [[:require, :*]] +end diff --git a/apps/common/test/lexical/ast/detection/spec_test.exs b/apps/common/test/lexical/ast/detection/spec_test.exs new file mode 100644 index 000000000..3222a0866 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/spec_test.exs @@ -0,0 +1,7 @@ +defmodule Lexical.Ast.Detection.SpecTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Spec, + assertions: [[:spec, :*]] +end diff --git a/apps/common/test/lexical/ast/detection/string_test.exs b/apps/common/test/lexical/ast/detection/string_test.exs new file mode 100644 index 000000000..4ffedd337 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/string_test.exs @@ -0,0 +1,25 @@ +defmodule Lexical.Ast.Detection.StringTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.String, + assertions: [[:strings, :*]], + # we skip other tests that have strings in them + skip: [ + [:doc, :*], + [:keyword, :single_line], + [:keyword, :multi_line], + [:module_doc, :*] + ], + variations: [ + :function_arguments, + :function_body, + :function_call, + :match, + :module + ] + + test "is detected if a string is a keyword value" do + assert_detected ~q/def func(string: "v«alue»", atom: :value2, int: 6, float: 2.0, list: [1, 2], tuple: {3, 4}) do/ + end +end diff --git a/apps/common/test/lexical/ast/detection/struct_field_key_test.exs b/apps/common/test/lexical/ast/detection/struct_field_key_test.exs new file mode 100644 index 000000000..7267d184c --- /dev/null +++ b/apps/common/test/lexical/ast/detection/struct_field_key_test.exs @@ -0,0 +1,17 @@ +defmodule Lexical.Ast.Detection.StructFieldKeyTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.StructFieldKey, + assertions: [[:struct_field_key, :*]], + skip: [ + [:struct_fields, :*], + [:struct_reference, :*], + [:struct_field_value, :*] + ],
variations: [:module] + + test "is detected if a key is partially typed" do + assert_detected ~q[%User{«fo»}] + end +end diff --git a/apps/common/test/lexical/ast/detection/struct_field_value_test.exs b/apps/common/test/lexical/ast/detection/struct_field_value_test.exs new file mode 100644 index 000000000..bfff2d6e1 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/struct_field_value_test.exs @@ -0,0 +1,25 @@ +defmodule Lexical.Ast.Detection.StructFieldValueTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.StructFieldValue, + assertions: [[:struct_field_value, :*]], + skip: [ + [:struct_fields, :*], + [:struct_reference, :*], + [:struct_field_key, :*] + ], + variations: [:module] + + test "is detected directly after the colon" do + assert_detected ~q[%User{foo: «»}] + end + + test "is not detected if the cursor is in a key position of a multi-line definition" do + assert_detected ~q[ + %User{ + foo: «1,» + } + ] + end +end diff --git a/apps/common/test/lexical/ast/detection/struct_fields_test.exs b/apps/common/test/lexical/ast/detection/struct_fields_test.exs new file mode 100644 index 000000000..a3aa2f6b6 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/struct_fields_test.exs @@ -0,0 +1,33 @@ +defmodule Lexical.Ast.Detection.StructFieldsTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.StructFields, + assertions: [[:struct_fields, :*]], + variations: [:match, :function_body, :function_arguments, :module], + skip: [ + [:struct_reference, :*], + [:struct_field_value, :*], + [:struct_field_key, :*] + ] + + test "is true if the cursor is in current module arguments" do + assert_detected ~q[%__MODULE__{«»}] + end + + test "is true even if the value of a struct key is a tuple" do + assert_detected ~q[%User{«favorite_numbers: {3}»}] + end + + test "is true even if the cursor is at a nested struct" do + assert_detected ~q[%User{«address: %Address{}»] + end + + test "is detected if it spans multiple lines" do + assert_detected ~q[ + %User{ + «name: "John", + »} + ] + end +end diff --git a/apps/common/test/lexical/ast/detection/struct_reference_test.exs b/apps/common/test/lexical/ast/detection/struct_reference_test.exs new file mode 100644 index 000000000..426ffb019 --- /dev/null +++ b/apps/common/test/lexical/ast/detection/struct_reference_test.exs @@ -0,0 +1,45 @@ +defmodule Lexical.Ast.Detection.StructReferenceTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.StructReference, + assertions: [[:struct_reference, :*]], + skip: [[:struct_fields, :*], [:struct_field_value, :*], [:struct_field_key, :*]], + variations: [:match, :function_arguments] + + test "is detected if a module reference starts in function arguments" do + assert_detected ~q[def my_function(%_«»)] + end + + test "is detected if a module reference starts in a t type spec" do + assert_detected ~q[@type t :: %_«»] + end + + test "is detected if the reference is for %__MOD in a function definition" do + assert_detected ~q[def my_fn(%_«_MOD»] + end + + test "is detected if the reference is on the right side of a match" do + assert_detected ~q[foo = %U«se»] + end + + test "is detected if the reference is on the left side of a match" do + assert_detected ~q[ %U«se» = foo] + end + + test "is detected if the reference is for %__" do + assert_detected ~q[%__] + end + + test "is not detected if the reference is for %__MOC in a function definition" do + refute_detected ~q[def my_fn(%__MOC)] + end + +
test "is detected if module reference starts with %" do + assert_detected ~q[def something(my_thing, %S«truct»{})] + end + + test "is not detected if a module reference lacks a %" do + refute_detected ~q[def my_function(__)] + end +end diff --git a/apps/common/test/lexical/ast/detection/type_test.exs b/apps/common/test/lexical/ast/detection/type_test.exs new file mode 100644 index 000000000..1d40ec4fe --- /dev/null +++ b/apps/common/test/lexical/ast/detection/type_test.exs @@ -0,0 +1,18 @@ +defmodule Lexical.Ast.Detection.TypeTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Type, + assertions: [[:type, :*]] + + test "is not detected if you're in a variable named type" do + refute_detected ~q[type = 3] + end + + test "is not detected right after the type ends" do + refute_detected ~q[ + @type« my_type :: atom» + + ] + end +end diff --git a/apps/common/test/lexical/ast/detection/use_test.exs b/apps/common/test/lexical/ast/detection/use_test.exs new file mode 100644 index 000000000..4dd0ec3de --- /dev/null +++ b/apps/common/test/lexical/ast/detection/use_test.exs @@ -0,0 +1,7 @@ +defmodule Lexical.Ast.Detection.UseTest do + alias Lexical.Ast.Detection + + use Lexical.Test.DetectionCase, + for: Detection.Use, + assertions: [[:use, :*]] +end diff --git a/apps/common/test/lexical/ast/env_test.exs b/apps/common/test/lexical/ast/env_test.exs index 1553b2d59..f88ef9473 100644 --- a/apps/common/test/lexical/ast/env_test.exs +++ b/apps/common/test/lexical/ast/env_test.exs @@ -1,15 +1,18 @@ defmodule Lexical.Ast.EnvTest do use ExUnit.Case, async: true + alias Lexical.Ast + import Lexical.Ast.Env - import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport import Lexical.Test.Fixtures - def new_env(text) do + def new_env(text, opts \\ []) do + opts = Keyword.merge([as: :document], opts) project = project() - {position, document} = pop_cursor(text, as: :document) - {:ok, env} = new(project, document, position) + {position, document} = pop_cursor(text, opts) + analysis = Ast.analyze(document) + {:ok, env} = new(project, analysis, position) env end @@ -44,7 +47,12 @@ defmodule Lexical.Ast.EnvTest do |> new_env() |> prefix_tokens(1) - assert [{:interpolated_string, ["hello" | _], _}] = tokens + assert [{:interpolated_string, interpolations, {1, 1}}] = tokens + + assert interpolations == [ + {:literal, "hello", {{1, 1}, {1, 6}}}, + {:interpolation, [{:identifier, {1, 9, ~c"a"}, :a}], {{1, 9}, {1, 10}}} + ] end test "works with maps with atom keys" do @@ -162,612 +170,67 @@ defmodule Lexical.Ast.EnvTest do end end - describe "in_context?(env, :bitstring)" do - test "is true if the reference starts in a bitstring at the start of a line" do - env = new_env("<<|") - assert in_context?(env, :bitstring) - end - - test "is true if the reference starts in a bitstring with matches" do - env = new_env("<>|") - refute in_context?(env, :bitstring) - - env = new_env("<> = |") - refute in_context?(env, :bitstring) + describe "in_context?/2" do + test "can detect module attributes" do + env = new_env("@my_attr 3") - env = new_env("<> = str|") - refute in_context?(env, :bitstring) + assert in_context?(env, {:module_attribute, :my_attr}) + refute in_context?(env, {:module_attribute, :other}) end - test "is false if in a function capture" do - env = new_env("&MyModule.fun|") - refute in_context?(env, :bitstring) - end + test "can detect behaviours" do + env = new_env("@behaviour Modul|e") - test "is false if in an alias" do - env = new_env("alias MyModule.Othe|") - refute 
in_context?(env, :bitstring) + assert in_context?(env, :behaviour) end - test "is false if in an import" do - env = new_env("import MyModule.Othe|") - refute in_context?(env, :bitstring) - end + test "can detect behaviour implementations" do + env = new_env("@impl GenServer|") - test "is false if in a require" do - env = new_env("require MyModule.Othe|") - refute in_context?(env, :bitstring) + assert in_context?(env, :impl) end - test "is false if in a use" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :bitstring) - end + test "can detect docstrings " do + env = new_env("@doc false|") + assert in_context?(env, :doc) - test "is false if in a pipe" do - env = new_env("|> in_|") - refute in_context?(env, :bitstring) - end - end + env = new_env(~S[@doc "hi"]) + assert in_context?(env, :doc) - describe "in_context?(env, :struct_fields)" do - def wrap_with_module(text) do - """ - defmodule MyModule do - #{text} - end - """ + env = new_env(~S[@doc """ + Multi - line + """| + ]) + assert in_context?(env, :doc) end - def wrap_with_function(text) do - """ - def func do - #{text} - end - """ - end - - def wrap_with_function_arguments(text) do - """ - def func(#{text}) do - end - """ - end - - test "is true if the cursor is directly after the opening curly" do - env = "%User{|}" |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true when the struct is in the function variable" do - env = "%User{|}" |> wrap_with_function() |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true when the struct is in the function arguments" do - env = "%User{|}" |> wrap_with_function_arguments() |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true if the cursor is after the field name" do - env = "%User{name: |}" |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true if the cursor is after the field value" do - env = "%User{name: \"John\"|}" |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true if the cursor starts in the middle of the struct" do - env = "%User{name: \"John\", |}" |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is false if the cursor is after the closing curly" do - env = "%User{}|" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_fields) - end - - test "is true if the cursor is in current module arguments" do - env = "%__MODULE__{|}" |> wrap_with_function() |> wrap_with_module() |> new_env() - assert in_context?(env, :struct_fields) - end - - test "is true if the struct alias spans multiple lines" do - source = ~q[ - %User{ - name: "John", - | - } - ] - env = new_env(source) - assert in_context?(env, :struct_fields) - end - - test "is true even if the value of a struct key is a tuple" do - env = new_env("%User{favorite_numbers: {3}|") - assert in_context?(env, :struct_fields) - end - - test "is true even if the cursor is at a nested struct" do - env = new_env("%User{address: %Address{}|") - assert in_context?(env, :struct_fields) - end - - test "is false if the cursor is in a map" do - env = "%{|field: value}" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_fields) - end - end - - describe "in_context?(env, :struct_field_value)" do - test "is true if the cursor is after a value character" do - env = new_env("%User{foo: 1|}") - assert in_context?(env, 
:struct_field_value) - end - - test "is true if the cursor is after a colon" do - env = new_env("%User{foo: |}") - assert in_context?(env, :struct_field_value) - end - - test "is false if the cursor is in a multiple lines key positon" do - source = ~q[ - %User{ - foo: 1, - | - } - ] - - env = new_env(source) - refute in_context?(env, :struct_field_value) - end - - test "is false in static keywords" do - env = "[foo: |]" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_value) - end - - test "is false when is in static keywords and starts with a character" do - env = "[foo: :a|]" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_value) - end + test "can detect moduledocs " do + env = new_env("@moduledoc false|") + assert in_context?(env, :moduledoc) - test "is false in map field value position" do - env = "%{foo: |}" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_value) - end - end - - describe "in_context?(env, :struct_field_key)" do - test "is true if the cursor is after the struct opening" do - env = new_env("%User{|}") - assert in_context?(env, :struct_field_key) - end - - test "is true if a key is partially typed" do - env = new_env("%User{fo|}") - assert in_context?(env, :struct_field_key) - end - - test "is true if after a comma" do - env = new_env("%User{foo: 1, |}") - assert in_context?(env, :struct_field_key) - end - - test "is true if after a comma on another line" do - source = ~q[ - %User{ - foo: 1, - | - } - ] - - env = new_env(source) - assert in_context?(env, :struct_field_key) - end - - test "is false in static keywords" do - env = "[fo|]" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_key) - end - - test "is false in static keywords nested in a struct" do - env = "%User{foo: [fo|]}" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_key) - end - - test "is false in map field key position" do - env = "%{|}" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_key) - end - - test "is false in map field key position nested in a struct" do - env = "%User{foo: %{|}}" |> wrap_with_module() |> new_env() - refute in_context?(env, :struct_field_key) - end - end - - describe "in_context?(env, :struct_reference)" do - test "is true if the reference starts on the beginning of the line" do - env = new_env("%User|") - assert in_context?(env, :struct_reference) - end - - test "is true if the reference starts in function arguments" do - env = new_env("def my_function(%Use|)") - assert in_context?(env, :struct_reference) - end - - test "is true if a module reference starts in function arguments" do - env = new_env("def my_function(%_|)") - assert in_context?(env, :struct_reference) - end - - test "is ture if a module reference start in a t type spec" do - env = new_env("@type t :: %_|") - assert in_context?(env, :struct_reference) - end - - test "is false if module reference not starts with %" do - env = new_env("def something(my_thing|, %Struct{})") - refute in_context?(env, :struct_reference) - end - - test "is true if the reference is for %__MOD in a function definition " do - env = new_env("def my_fn(%__MOD") - assert in_context?(env, :struct_reference) - end - - test "is false if the reference is for %__MOC in a function definition" do - env = new_env("def my_fn(%__MOC)") - refute in_context?(env, :struct_reference) - end - - test "is false if a module reference lacks a %" do - env = new_env("def my_function(__|)") - refute 
in_context?(env, :struct_reference) - end - - test "is true if the reference is on the right side of a match" do - env = new_env("foo = %Use|") - assert in_context?(env, :struct_reference) - end - - test "is true if the reference is on the left side of a match" do - env = new_env(" %Use| = foo") - assert in_context?(env, :struct_reference) - end - - test "is true if the reference is for %__} " do - env = new_env("%__") - assert in_context?(env, :struct_reference) - end - - test "is false if in a function capture" do - env = new_env("&MyModule.fun|") - refute in_context?(env, :struct_reference) - end - - test "is false if in an alias" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :struct_reference) - end - - test "is false if in an import" do - env = new_env("import MyModule.Othe|") - refute in_context?(env, :struct_reference) - end - - test "is false if in a require" do - env = new_env("require MyModule.Othe|") - refute in_context?(env, :struct_reference) - end - - test "is false if in a use" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :struct_reference) - end - - test "is false if in a bitstring" do - env = new_env("<< foo::in|") - refute in_context?(env, :struct_reference) - end - end - - describe "in_context?(env, :function_capture)" do - test "is true for arity one local functions" do - env = new_env("&is_map|") - assert in_context?(env, :function_capture) - end - - test "is true for arity two local functions with a variable" do - env = new_env("&is_map_key(&1, l|)") - assert in_context?(env, :function_capture) - end - - test "is true if the capture starts at the beginning of the line" do - env = new_env("&Enum") - assert in_context?(env, :function_capture) - end - - test "is true if the capture is inside a function call" do - env = new_env("list = Enum.map(1..10, &Enum|)") - assert in_context?(env, :function_capture) - end - - test "is true if the capture is inside an unformatted function call" do - env = new_env("list = Enum.map(1..10,&Enum|)") - assert in_context?(env, :function_capture) - end - - test "is true if the capture is inside a function call after the dot" do - env = new_env("list = Enum.map(1..10, &Enum.f|)") - assert in_context?(env, :function_capture) - end - - test "is true if the capture is in the body of a for" do - env = new_env("for x <- Enum.map(1..10, &String.|)") - assert in_context?(env, :function_capture) - end - - test "is false if the position is after a capture with no arguments" do - env = new_env("&something/1|") - refute in_context?(env, :function_capture) - end - - test "is false if the position is after a capture with arguments" do - env = new_env("&captured(&1, :foo)|") - refute in_context?(env, :function_capture) - end - - test "is false if the capture starts at the beginning of the line" do - env = new_env("Enum|") - refute in_context?(env, :function_capture) - end - - test "is false if the capture is inside a function call" do - env = new_env("list = Enum.map(1..10, Enum|)") - refute in_context?(env, :function_capture) - end - - test "is false if the capture is inside an unformatted function call" do - env = new_env("list = Enum.map(1..10,Enum|)") - refute in_context?(env, :function_capture) - end - - test "is false if the capture is inside a function call after the dot" do - env = new_env("list = Enum.map(1..10, Enum.f|)") - refute in_context?(env, :function_capture) - end - - test "is false if the capture is in the body of a for" do - env = new_env("for x <- Enum.map(1..10, String.|)") - refute 
in_context?(env, :function_capture) - end - - test "is false if in an alias" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :function_capture) - end - - test "is false if in an import" do - env = new_env("import MyModule.Othe|") - refute in_context?(env, :function_capture) - end - - test "is false if in a require" do - env = new_env("require MyModule.Othe|") - refute in_context?(env, :function_capture) - end - - test "is false if in a use" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :function_capture) - end - - test "is false if in a bitstring" do - env = new_env("<< foo::in|") - refute in_context?(env, :function_capture) - end - - test "is false if in a pipe" do - env = new_env("|> MyThing.|") - refute in_context?(env, :function_capture) - end - end - - describe "in_context?(env, :pipe)" do - test "is true if the pipe is on the start of the line" do - env = new_env("|> foo|()") - assert in_context?(env, :pipe) - end - - test "is true if the pipe is in a function call" do - env = new_env("foo( a |> b |> c|)") - assert in_context?(env, :pipe) - end - - test "is true if the pipe is in a remote function call" do - env = new_env("[] |> Enum.|") - assert in_context?(env, :pipe) - end - - test "is false if the pipe is in a function call and the cursor is outside it" do - env = new_env("foo( a |> b |> c)|") - refute in_context?(env, :pipe) - end - - test "is false if there is no pipe in the string" do - env = new_env("Enum.|foo") - refute in_context?(env, :pipe) - end - - test "is false if in a function capture" do - env = new_env("&MyModule.fun|") - refute in_context?(env, :pipe) - end - - test "is false if in an alias" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :pipe) - end - - test "is false if in an import" do - env = new_env("import MyModule.Othe|") - refute in_context?(env, :pipe) - end - - test "is false if in a require" do - env = new_env("require MyModule.Othe|") - refute in_context?(env, :pipe) - end - - test "is false if in a use" do - env = new_env("alias MyModule.Othe|") - refute in_context?(env, :pipe) - end - - test "is false if in a bitstring" do - env = new_env("<< foo::in|") - refute in_context?(env, :pipe) - end - end - - describe "in_context?(env, :alias)" do - test "should be true if this is a single alias" do - env = new_env("alias MyThing.Other") - assert in_context?(env, :alias) - end - - test "should be true if this is an alias using as" do - env = new_env("alias MyThing.Other, as: AnotherThing") - assert in_context?(env, :alias) - end - - test "should be true if this is a multiple alias on one line" do - env = new_env("alias MyThing.{Foo, Bar, Ba|}") - assert in_context?(env, :alias) - end - - test "should be true if this is a multiple alias on multiple lines" do - env = - ~q[ - alias Foo.{ - Bar, - Baz| - } - ]t - |> new_env() - - assert in_context?(env, :alias) - end - - test "should be false if the statement is not an alias" do - env = new_env("x = %{foo: 3}|") - refute in_context?(env, :alias) - - env = new_env("x = %{foo: 3|}") - refute in_context?(env, :alias) - end - - test "should be false if this is after a multiple alias on one line" do - env = new_env("alias MyThing.{Foo, Bar, Baz}|") - refute in_context?(env, :alias) - end - - test "should be false if this is after a multiple alias on multiple lines" do - env = - ~q[ - alias Foo.{ - Bar, - Baz - }| - ]t - |> new_env() - - refute in_context?(env, :alias) - end - - test "should be false if this is after a multiple alias on multiple lines 
(second form)" do - env = - ~q[ - alias Foo.{ Bar, - Baz - }| - ]t - |> new_env() - - refute in_context?(env, :alias) - end - - test "should be false if this is after a multiple alias on multiple lines (third form)" do - env = - ~q[ - alias Foo.{ Bar, Baz - }| - ]t - |> new_env() - - refute in_context?(env, :alias) - end - - test "is false if there is nothing after the alias call" do - env = new_env("alias|") - refute in_context?(env, :alias) - end - - test "is false if the alias is on another line" do - env = - ~q[ - alias Something.Else - Macro.| - ]t - |> new_env() - - refute in_context?(env, :alias) - end - - test "is false if in a function capture" do - env = new_env("&MyModule.fun|") - refute in_context?(env, :alias) - end - - test "is false if in an import" do - env = new_env("import MyModule.Othe|") - refute in_context?(env, :alias) - end + env = new_env(~S[@moduledoc "hi"]) + assert in_context?(env, :moduledoc) - test "is false if in a require" do - env = new_env("require MyModule.Othe|") - refute in_context?(env, :alias) + env = new_env(~S[@moduledoc """ + Multi - line + """| + ]) + assert in_context?(env, :moduledoc) end - test "is false if in a use" do - env = new_env("use MyModule.Othe|") - refute in_context?(env, :alias) + test "can detect callbacks" do + env = new_env("@callback do_stuff|(integer(), map()) :: any()") + assert in_context?(env, :callback) end - test "is false if in a bitstring" do - env = new_env("<< foo::in|") - refute in_context?(env, :alias) + test "can detect macro callbacks" do + env = new_env("@macrocallback write_code(integer(), map(|)) :: any()") + assert in_context?(env, :macrocallback) end - test "is false if in a pipe" do - env = new_env("|> MyThing.|") - refute in_context?(env, :alias) + test "can detect strings" do + env = new_env(~s/var = "in |a string"/) + assert in_context?(env, :string) end end end diff --git a/apps/common/test/lexical/ast/module_test.exs b/apps/common/test/lexical/ast/module_test.exs new file mode 100644 index 000000000..7930b6466 --- /dev/null +++ b/apps/common/test/lexical/ast/module_test.exs @@ -0,0 +1,26 @@ +defmodule Lexical.Ast.ModuleTest do + import Lexical.Ast.Module + use ExUnit.Case, async: true + + describe "safe_split/2" do + test "splits elixir modules into binaries by default" do + assert {:elixir, ~w(Lexical Document Store)} == safe_split(Lexical.Document.Store) + end + + test "splits elixir modules into binaries" do + assert {:elixir, ~w(Lexical Document Store)} == + safe_split(Lexical.Document.Store, as: :binaries) + end + + test "splits elixir modules into atoms" do + assert {:elixir, ~w(Lexical Document Store)a} == + safe_split(Lexical.Document.Store, as: :atoms) + end + + test "splits erlang modules" do + assert {:erlang, ["ets"]} = safe_split(:ets) + assert {:erlang, ["ets"]} = safe_split(:ets, as: :binaries) + assert {:erlang, [:ets]} = safe_split(:ets, as: :atoms) + end + end +end diff --git a/apps/common/test/lexical/ast/tokens_test.exs b/apps/common/test/lexical/ast/tokens_test.exs new file mode 100644 index 000000000..837b8fcb4 --- /dev/null +++ b/apps/common/test/lexical/ast/tokens_test.exs @@ -0,0 +1,51 @@ +defmodule Lexical.Ast.TokensTest do + alias Lexical.Ast.Tokens + + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + + use ExUnit.Case, async: true + + describe "prefix_stream/2" do + test "works as intended" do + text = ~q[ + defmodule Foo do + def bar do + | + end + end + ] + + {position, document} = pop_cursor(text, as: :document) + + tokens = Tokens.prefix_stream(document, 
position) + + assert Enum.to_list(tokens) == [ + {:eol, ~c"\n", []}, + {:operator, :do, {2, 11}}, + {:do_identifier, ~c"bar", {2, 7}}, + {:identifier, ~c"def", {2, 3}}, + {:eol, ~c"\n", []}, + {:operator, :do, {1, 15}}, + {:alias, ~c"Foo", {1, 11}}, + {:identifier, ~c"defmodule", {1, 1}} + ] + end + + test "returns nothing when cursor is at start" do + text = ~q[ + |defmodule Foo do + def bar do + :bar + end + end + ] + + {position, document} = pop_cursor(text, as: :document) + + tokens = Tokens.prefix_stream(document, position) + + assert Enum.to_list(tokens) == [] + end + end +end diff --git a/apps/common/test/lexical/ast_test.exs b/apps/common/test/lexical/ast_test.exs index c66fb526e..76fa7c854 100644 --- a/apps/common/test/lexical/ast_test.exs +++ b/apps/common/test/lexical/ast_test.exs @@ -1,5 +1,6 @@ defmodule Lexical.AstTest do alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Position alias Sourceror.Zipper @@ -45,7 +46,7 @@ defmodule Lexical.AstTest do test "returns [] when can't parse the AST" do text = ~q[ - foo(bar do baz, bat| + foo(bar do baz, [bat| ] path = cursor_path(text) @@ -59,6 +60,20 @@ defmodule Lexical.AstTest do Ast.path_at(document, position) end + defp end_location({_, metadata, _}), do: end_location(metadata) + + defp end_location(metadata) when is_list(metadata) do + case metadata do + [line: line, column: column] -> + {line, column} + + metadata -> + [end_line: line, end_column: column] = Keyword.take(metadata, [:end_line, :end_column]) + + {line, column} + end + end + test "returns an error if the cursor cannot be found in any node" do code = ~q[ | @@ -74,7 +89,8 @@ defmodule Lexical.AstTest do defmodule |Foo do ] - assert {:error, {[line: 2, column: 1], "missing terminator: end" <> _, ""}} = path_at(code) + assert {:error, {metadata, "missing terminator: end" <> _, ""}} = path_at(code) + assert end_location(metadata) == {2, 1} end test "returns a path to the innermost leaf at position" do @@ -195,7 +211,7 @@ defmodule Lexical.AstTest do setup do {range, code} = pop_range(~q| [ - «single_line_call(1, 2, 3») + «single_line_call(1, 2, 3)» ] |) @@ -231,7 +247,7 @@ defmodule Lexical.AstTest do [ «multi_line_call( 1, 2, 3 - ») + )» ] |) @@ -275,27 +291,57 @@ defmodule Lexical.AstTest do end end - describe "expand_aliases/4" do - test "works with __MODULE__ aliases" do - {position, document} = - ~q[ - defmodule Parent do - defmodule __MODULE__.Child do - | - end - end - ] - |> pop_cursor(as: :document) + describe "analyze/1" do + test "creates an analysis from a document with valid ast" do + code = ~q[ + defmodule Valid do + end + ] + + assert %Analysis{} = analysis = analyze(code) + assert {:defmodule, _, _} = analysis.ast + end + + test "creates an analysis from a document with invalid ast" do + code = ~q[ + defmodule Invalid do + ] + + assert %Analysis{} = analysis = analyze(code) + refute analysis.ast + assert {:error, _} = analysis.parse_error + end + + test "creates an analysis from a document with incomplete `as` section" do + code = ~q[ + defmodule Invalid do + alias Foo, a + end + ] - assert {:ok, Parent.Child} = - Ast.expand_aliases([quote(do: __MODULE__), nil], document, position) + assert %Analysis{} = analysis = analyze(code) + assert {:defmodule, _, _} = analysis.ast + end + + test "handles incomplete imports" do + code = ~q[ + defmodule Invalid do + import Other, only: :m + end + ] + assert %Analysis{} = analyze(code) end end defp ast(s) do case Ast.from(s) do - {:ok, {:__block__, _, [node]}} -> node - 
{:ok, node} -> node + {:ok, {:__block__, _, [node]}, _comments} -> node + {:ok, node, _comments} -> node end end + + defp analyze(code) when is_binary(code) do + document = Document.new("file:///file.ex", code, 0) + Ast.analyze(document) + end end diff --git a/apps/common/test/lexical/code_unit_test.exs b/apps/common/test/lexical/code_unit_test.exs index cc883e613..25166f863 100644 --- a/apps/common/test/lexical/code_unit_test.exs +++ b/apps/common/test/lexical/code_unit_test.exs @@ -2,212 +2,76 @@ defmodule Lexical.CodeUnitTest do alias Lexical.CodeUnit use ExUnit.Case - use ExUnitProperties - import CodeUnit - - describe "utf8 offsets" do - test "handles single-byte characters" do - s = "do" - assert 1 == utf8_offset(s, 0) - assert 2 == utf8_offset(s, 1) - assert 3 == utf8_offset(s, 2) - assert 3 == utf8_offset(s, 3) - assert 3 == utf8_offset(s, 4) - end - - test "caps offsets at the end of the string and beyond" do - line = "🎸" - - # reminder, the offsets below are utf-16 - # character code unit offsets, which differ - # from utf8's, and can have gaps. - - assert 5 == utf8_offset(line, 1) - assert 5 == utf8_offset(line, 2) - assert 5 == utf8_offset(line, 3) - assert 5 == utf8_offset(line, 4) - end - - test "handles multi-byte characters properly" do - line = "b🎸abc" - # reminder, the offsets below are utf-16 - # character code unit offsets, which differ - # from utf8's, and can have gaps. - - assert 1 == utf8_offset(line, 0) - assert 2 == utf8_offset(line, 1) - assert 6 == utf8_offset(line, 3) - assert 7 == utf8_offset(line, 4) - assert 8 == utf8_offset(line, 5) - assert 9 == utf8_offset(line, 6) - assert 9 == utf8_offset(line, 7) - end - end + import CodeUnit - describe "utf16_offset/2" do + describe "utf8_position_to_utf16_offset/2" do test "handles single-byte characters" do s = "do" - assert 0 == utf16_offset(s, 0) - assert 1 == utf16_offset(s, 1) - assert 2 == utf16_offset(s, 2) - assert 2 == utf16_offset(s, 3) - assert 2 == utf16_offset(s, 4) + assert 0 == utf8_position_to_utf16_offset(s, 0) + assert 1 == utf8_position_to_utf16_offset(s, 1) + assert 2 == utf8_position_to_utf16_offset(s, 2) + assert 2 == utf8_position_to_utf16_offset(s, 3) + assert 2 == utf8_position_to_utf16_offset(s, 4) end test "caps offsets at the end of the string and beyond" do line = "🎸" - assert 2 == utf16_offset(line, 1) - assert 2 == utf16_offset(line, 2) - assert 2 == utf16_offset(line, 3) - assert 2 == utf16_offset(line, 4) + assert 2 == utf8_position_to_utf16_offset(line, 1) + assert 2 == utf8_position_to_utf16_offset(line, 2) + assert 2 == utf8_position_to_utf16_offset(line, 3) + assert 2 == utf8_position_to_utf16_offset(line, 4) end test "handles multi-byte characters properly" do # guitar is 2 code units in utf16 but 4 in utf8 line = "b🎸abc" - assert 0 == utf16_offset(line, 0) - assert 1 == utf16_offset(line, 1) - assert 3 == utf16_offset(line, 2) - assert 4 == utf16_offset(line, 3) - assert 5 == utf16_offset(line, 4) - assert 6 == utf16_offset(line, 5) - assert 6 == utf16_offset(line, 6) + assert 0 == utf8_position_to_utf16_offset(line, 0) + assert 1 == utf8_position_to_utf16_offset(line, 1) + assert 3 == utf8_position_to_utf16_offset(line, 2) + assert 4 == utf8_position_to_utf16_offset(line, 3) + assert 5 == utf8_position_to_utf16_offset(line, 4) + assert 6 == utf8_position_to_utf16_offset(line, 5) + assert 6 == utf8_position_to_utf16_offset(line, 6) end end - describe "converting to utf8" do - test "bounds are respected" do - assert {:error, :out_of_bounds} = to_utf16("h", 2) - end - + describe 
"utf16_offset_to_utf8_offset" do test "with a multi-byte character" do line = "🏳️‍🌈" code_unit_count = count_utf8_code_units(line) - assert to_utf8(line, 0) == {:ok, 1} - assert to_utf8(line, 1) == {:error, :misaligned} - assert to_utf8(line, 2) == {:ok, 5} - assert to_utf8(line, 3) == {:ok, 8} - assert to_utf8(line, 4) == {:ok, 11} - assert to_utf8(line, 5) == {:error, :misaligned} - assert to_utf8(line, 6) == {:ok, code_unit_count + 1} + assert utf16_offset_to_utf8_offset(line, 0) == {:ok, 1} + assert utf16_offset_to_utf8_offset(line, 1) == {:error, :misaligned} + assert utf16_offset_to_utf8_offset(line, 2) == {:ok, 5} + assert utf16_offset_to_utf8_offset(line, 3) == {:ok, 8} + assert utf16_offset_to_utf8_offset(line, 4) == {:ok, 11} + assert utf16_offset_to_utf8_offset(line, 5) == {:error, :misaligned} + assert utf16_offset_to_utf8_offset(line, 6) == {:ok, code_unit_count + 1} end test "after a unicode character" do line = " {\"🎸\", \"ok\"}" - assert to_utf8(line, 0) == {:ok, 1} - assert to_utf8(line, 1) == {:ok, 2} - assert to_utf8(line, 4) == {:ok, 5} - assert to_utf8(line, 5) == {:ok, 6} - assert to_utf8(line, 6) == {:ok, 7} - assert to_utf8(line, 7) == {:error, :misaligned} + assert utf16_offset_to_utf8_offset(line, 0) == {:ok, 1} + assert utf16_offset_to_utf8_offset(line, 1) == {:ok, 2} + assert utf16_offset_to_utf8_offset(line, 4) == {:ok, 5} + assert utf16_offset_to_utf8_offset(line, 5) == {:ok, 6} + assert utf16_offset_to_utf8_offset(line, 6) == {:ok, 7} + assert utf16_offset_to_utf8_offset(line, 7) == {:error, :misaligned} # after the guitar character - assert to_utf8(line, 8) == {:ok, 11} - assert to_utf8(line, 9) == {:ok, 12} - assert to_utf8(line, 10) == {:ok, 13} - assert to_utf8(line, 11) == {:ok, 14} - assert to_utf8(line, 12) == {:ok, 15} - assert to_utf8(line, 13) == {:ok, 16} - assert to_utf8(line, 17) == {:ok, 20} + assert utf16_offset_to_utf8_offset(line, 8) == {:ok, 11} + assert utf16_offset_to_utf8_offset(line, 9) == {:ok, 12} + assert utf16_offset_to_utf8_offset(line, 10) == {:ok, 13} + assert utf16_offset_to_utf8_offset(line, 11) == {:ok, 14} + assert utf16_offset_to_utf8_offset(line, 12) == {:ok, 15} + assert utf16_offset_to_utf8_offset(line, 13) == {:ok, 16} + assert utf16_offset_to_utf8_offset(line, 17) == {:ok, 20} end end - describe "converting to utf16" do - test "respects bounds" do - assert {:error, :out_of_bounds} = to_utf16("h", 2) - end - - test "with a multi-byte character" do - line = "🏳️‍🌈" - - code_unit_count = count_utf16_code_units(line) - utf8_code_unit_count = count_utf8_code_units(line) - - assert to_utf16(line, 0) == {:ok, 0} - assert to_utf16(line, 1) == {:error, :misaligned} - assert to_utf16(line, 2) == {:error, :misaligned} - assert to_utf16(line, 3) == {:error, :misaligned} - assert to_utf16(line, 4) == {:ok, 2} - assert to_utf16(line, utf8_code_unit_count - 1) == {:error, :misaligned} - assert to_utf16(line, utf8_code_unit_count) == {:ok, code_unit_count} - end - - test "after a multi-byte character" do - line = " {\"🎸\", \"ok\"}" - - utf16_code_unit_count = count_utf16_code_units(line) - utf8_code_unit_count = count_utf8_code_units(line) - - # before, the character, there is no difference between utf8 and utf16 - for index <- 0..5 do - assert to_utf16(line, index) == {:ok, index} - end - - assert to_utf16(line, 6) == {:ok, 6} - assert to_utf16(line, 7) == {:error, :misaligned} - assert to_utf16(line, 8) == {:error, :misaligned} - assert to_utf16(line, 9) == {:error, :misaligned} - - for index <- 10..19 do - assert to_utf16(line, index) 
== {:ok, index - 2} - end - - assert to_utf16(line, utf8_code_unit_count - 1) == {:ok, utf16_code_unit_count - 1} - end - end - - property "to_utf8 and to_utf16 are inverses of each other" do - check all(s <- filter(string(:printable), &utf8?/1)) do - utf8_code_unit_count = count_utf8_code_units(s) - utf16_unit_count = count_utf16_code_units(s) - - assert {:ok, utf16_unit} = to_utf16(s, utf8_code_unit_count) - assert utf16_unit == utf16_unit_count - - assert {:ok, utf8_unit} = to_utf8(s, utf16_unit) - # adding 1 here because our utf8 conversion is one-based - assert utf8_unit == utf8_code_unit_count + 1 - end - end - - property "to_utf16 and to_utf8 are inverses" do - check all(s <- filter(string(:printable), &utf8?/1)) do - utf16_code_unit_count = count_utf16_code_units(s) - utf8_code_unit_count = count_utf8_code_units(s) - - assert {:ok, utf8_code_unit} = to_utf8(s, utf16_code_unit_count) - # adding 1 here because our utf8 conversion is one-based - assert utf8_code_unit == utf8_code_unit_count + 1 - - # subtracting 1 here because our utf8 conversion is one-based - assert {:ok, utf16_unit} = to_utf16(s, utf8_code_unit - 1) - assert utf16_unit == utf16_code_unit_count - end - end - - defp count_utf16_code_units(utf8_string) do - utf8_string - |> :unicode.characters_to_binary(:utf8, :utf16) - |> byte_size() - |> div(2) - end - defp count_utf8_code_units(utf8_string) do byte_size(utf8_string) end - - defp utf8?(<<_::utf8>>) do - true - end - - defp utf8?(<<_::utf8, rest::binary>>) do - utf8?(rest) - end - - defp utf8?(_) do - false - end end diff --git a/apps/common/test/support/lexical/test/detection_case.ex b/apps/common/test/support/lexical/test/detection_case.ex new file mode 100644 index 000000000..a825b5985 --- /dev/null +++ b/apps/common/test/support/lexical/test/detection_case.ex @@ -0,0 +1,337 @@ +defmodule Lexical.Test.DetectionCase do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Tokens + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.Test.DetectionCase.Suite + alias Lexical.Test.Variations + + import Lexical.Test.RangeSupport + import ExUnit.Assertions + use ExUnit.CaseTemplate + + using(args) do + context = Keyword.fetch!(args, :for) + assertion_query = Keyword.fetch!(args, :assertions) + variations = Keyword.get(args, :variations, []) ++ [:nothing] + # sometimes one type of thing can be found in another (for example, a struct reference is + # found in a struct field case), so we don't want to build a refutation + # for that thing. `skip` allows us to remove certain cases from refutes. 
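+    # For example (option values below are borrowed from the struct_fields
+    # detection test and are shown purely for illustration), a test module that
+    # asserts the struct_fields cases but skips the overlapping
+    # struct_reference entries can declare:
+    #
+    #     use Lexical.Test.DetectionCase,
+    #       for: Lexical.Ast.Detection.StructFields,
+    #       assertions: [[:struct_fields, :*]],
+    #       skip: [[:struct_reference, :*]]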
+ skip = Keyword.get(args, :skip, []) + + {assertions, refutes} = Enum.split_with(Suite.get(), &matches?(&1, assertion_query)) + + if Enum.empty?(assertions) do + query = + assertion_query + |> Macro.expand(__CALLER__) + |> Macro.to_string() + + flunk("No assertions matched the query #{query}") + end + + refutes = Enum.reject(refutes, &matches?(&1, skip)) + assertions = build_assertion_cases(context, assertions, variations) + refutations = build_refutation_cases(context, refutes, variations) + + quote location: :keep do + @context unquote(context) + alias Lexical.Test.DetectionCase + + import Lexical.Test.CodeSigil + + import unquote(__MODULE__), + only: [ + assert_detected: 2, + refute_detected: 2 + ] + + unquote_splicing(assertions) + unquote_splicing(refutations) + + def assert_detected(code) do + assert_detected @context, code + end + + def refute_detected(code) do + refute_detected @context, code + end + end + end + + def refute_detected(context, code) do + document = Document.new("file:///file.ex", code, 1) + analysis = Ast.analyze(document) + + for position <- position_stream(document) do + try do + refute context.detected?(analysis, position) + rescue + e in ExUnit.AssertionError -> + flunk(error_for(document, position, context, e)) + end + end + end + + def assert_detected(context, code) do + {ranges, code} = pop_all_ranges(code) + + document = Document.new("file:///file.ex", code, 1) + analysis = Ast.analyze(document) + assert_contexts_in_range(analysis, context, ranges) + end + + defp includes?(%Range{} = range, %Position{} = position) do + cond do + range.start.line == position.line and range.end.line == position.line -> + position.character >= range.start.character and + position.character <= range.end.character + + range.start.line == position.line -> + position.character >= range.start.character + + range.end.line == position.line -> + position.character <= range.end.character + + true -> + position.line > range.start.line and position.line < range.end.line + end + end + + defp matches?({type, _}, assertions) do + Enum.any?(assertions, &wildcard_matches?(&1, type)) + end + + defp wildcard_matches?(wildcard, type) do + wildcard + |> Enum.zip(type) + |> Enum.reduce_while(true, fn + {same, same}, _ -> + {:cont, true} + + {:*, _}, _ -> + {:halt, true} + + {_, _}, _ -> + {:halt, false} + end) + end + + defp build_assertion_cases(context, assertions, variations) do + for {type, test} <- assertions, + variation <- variations do + build_assertion_variation(context, type, variation, test) + end + end + + defp build_assertion_variation(context, type, variation, test) do + assertion_text = Variations.wrap_with(variation, test) + test_name = type_to_name(type, variation) + + quote generated: true do + test unquote(test_name) do + assert_detected unquote(context), unquote(assertion_text) + end + end + end + + defp assert_contexts_in_range(%Analysis{} = analysis, context, ranges) do + positions_by_range = + analysis.document + |> position_stream() + |> Enum.group_by(fn position -> Enum.find(ranges, &includes?(&1, position)) end) + + for {range, positions} <- positions_by_range, + position <- positions do + try do + if range do + assert context.detected?(analysis, position) + else + refute context.detected?(analysis, position) + end + rescue + e in ExUnit.AssertionError -> + analysis.document + |> error_for(position, context, e) + |> ExUnit.Assertions.flunk() + end + end + end + + defp build_refutation_cases(context, assertions, variations) do + for {type, test} <- assertions, + variation 
<- variations do + build_refutation_variation(context, type, variation, test) + end + end + + defp build_refutation_variation(context, type, variation, test) do + {_range, refutation_text} = + variation + |> Variations.wrap_with(test) + |> pop_range() + + test_name = type_to_name(type, variation) + + quote generated: true do + test unquote(test_name) do + refute_detected unquote(context), unquote(refutation_text) + end + end + end + + defp error_for(%Document{} = doc, %Position{} = pos, context, assertion_error) do + message = message_for_assertion_type(assertion_error, context, pos) + + test_text = + doc + |> insert_cursor(pos) + |> Document.to_string() + + [ + IO.ANSI.red(), + message, + IO.ANSI.reset(), + "\n", + "document:", + "\n\n", + test_text, + "\n\n" + ] + |> IO.ANSI.format() + |> IO.iodata_to_binary() + end + + defp message_for_assertion_type(%ExUnit.AssertionError{} = error, context, position) do + context = context |> Module.split() |> List.last() + + case assertion_type(error.expr) do + :assert -> + "The cursor at {#{position.line}, #{position.character}} should have been detected as a #{context}, but it wasn't." + + :refute -> + "The cursor at {#{position.line}, #{position.character}} was detected as #{context}, but it shouldn't have been" + end + end + + defp assertion_type({type, _, _}) do + case Atom.to_string(type) do + "assert" <> _ -> :assert + _ -> :refute + end + end + + defp insert_cursor(%Document{} = document, %Position{} = position) do + cursor = + [ + IO.ANSI.bright(), + IO.ANSI.light_red(), + "|", + IO.ANSI.reset() + ] + |> IO.ANSI.format() + |> IO.iodata_to_binary() + + range = Range.new(position, position) + edit = Document.Edit.new(cursor, range) + {:ok, document} = Document.apply_content_changes(document, document.version + 1, [edit]) + + document + end + + defp type_to_name(type, variation) do + words = fn atom -> + atom + |> Atom.to_string() + |> String.split("_") + |> Enum.join(" ") + end + + base_name = Enum.map_join(type, ", ", words) + + variation = + if variation == :nothing do + "" + else + "(inside #{words.(variation)})" + end + + "#{base_name} #{variation}" + end + + def position_stream(%Document{} = document) do + line_count = Document.size(document) + + init_fn = fn -> + 1 + end + + next_fn = fn + line_number when line_number <= line_count -> + case Document.fetch_text_at(document, line_number) do + {:ok, line_text} -> + token_positions = + document + |> Tokens.prefix_stream( + Position.new(document, line_number, String.length(line_text) + 1) + ) + |> Stream.filter(fn + {_token, _, {^line_number, _character}} -> + true + + _ -> + false + end) + |> Enum.reduce( + [], + fn + {:string, contents, position}, acc -> + string_literal_positions(document, contents, position) ++ acc + + {:interpolated_string, interpolations, _}, acc -> + interpolation_positions(document, interpolations) ++ acc + + {_token_type, _token, {_line, character}}, acc -> + pos = Position.new(document, line_number, character) + [pos | acc] + end + ) + + {token_positions, line_number + 1} + end + + _ -> + {:halt, :ok} + end + + finalize = fn _ -> :ok end + + Stream.resource(init_fn, next_fn, finalize) + end + + defp string_literal_positions(%Document{} = document, string_contents, {line, column}) do + # add two for the quotes + string_length = String.length(string_contents) + 2 + before_pos = Position.new(document, line, column + 1) + after_pos = Position.new(document, line, column + string_length) + + [after_pos, before_pos] + end + + defp interpolation_positions(%Document{} = 
document, interpolations) do
+    interpolations
+    |> Enum.flat_map(fn {_, _, {{start_line, start_col}, {_end_line, end_col}}} ->
+      in_between_positions =
+        Enum.map(start_col..end_col, fn column ->
+          Position.new(document, start_line, column)
+        end)
+
+      start_pos = Position.new(document, start_line, start_col)
+
+      [start_pos | in_between_positions]
+    end)
+    |> Enum.uniq()
+  end
+end
diff --git a/apps/common/test/support/lexical/test/detection_case/suite.ex b/apps/common/test/support/lexical/test/detection_case/suite.ex
new file mode 100644
index 000000000..269de3b62
--- /dev/null
+++ b/apps/common/test/support/lexical/test/detection_case/suite.ex
@@ -0,0 +1,309 @@
+defmodule Lexical.Test.DetectionCase.Suite do
+  @moduledoc """
+  Defines a test suite for the detection case tests.
+  """
+  import Lexical.Test.CodeSigil
+
+  @doc """
+  Returns a list of tuples where:
+
+  The first element is the path of the suite. Test cases can select and
+  skip parts of the suite based on this path.
+
+  The second element is the code, defined via the code sigil. The code can contain
+  multiple ranges, defined with the `«` and `»` characters. Each range marks the
+  area of the code that is expected to be detected by the recognizer named by the
+  first element of the tuple.
+  """
+
+  def suite do
+    [
+      alias: [
+        single: ~q(alias F«oo»),
+        multiple: ~q(
+        alias M«yModule.{
+          First,
+          Second,
+          Third»
+        }
+        ),
+        as: ~q[alias M«yModule.Submodule, as: Alias»],
+        # Note: we need the token after the alias for the test, since
+        # we can't place a range on an empty space
+        multiple_on_one_line: ~q[alias F«oo.{Bar, Baz, Quux»};3 ]
+      ],
+      bitstring: [
+        one_line: ~q[<<«foo::integer, bar::binary»>>],
+        multi_line: ~q[
+        <<«foo::integer,
+        bar::binary-size(6)
+        »>>
+        ]
+      ],
+      callbacks: [
+        callback: [
+          zero_arg: "@«callback my_cb() :: boolean()»",
+          one_arg: "@«callback my_cb(foo :: integer) :: String.t()»",
+          multiple_args: "@«callback my_cb(foo :: integer, bar:: String.t()) :: [pos_integer()]»",
+          multiple_line: """
+          @«callback my_cb(
+            foo :: String.t(),
+            bar :: pos_integer()) :: pos_integer()»
+          """
+        ],
+        macrocallback: [
+          zero_arg: "@«macrocallback my_cb() :: boolean()»",
+          one_arg: "@«macrocallback my_cb(foo :: integer) :: String.t()»",
+          multiple_args:
+            "@«macrocallback my_cb(foo :: integer, bar:: String.t()) :: [pos_integer()]»",
+          multiple_line: """
+          @«macrocallback my_cb(
+            foo :: String.t(),
+            bar :: pos_integer()) :: pos_integer()»
+          """
+        ]
+      ],
+      comment: [
+        start_of_line: "«# IO.puts»",
+        end_of_line: "IO.puts(thing) «# IO.puts»"
+      ],
+      doc: [
+        empty: ~S[@«doc ""»],
+        false: "@«doc false»",
+        single_line: ~S[@«doc "this is my doc»"],
+        multi_line: ~S[@«doc """
+        This is the doc
+        """»
+        ]
+      ],
+      function_capture: [
+        local_arity: ~q[&«my_fun/1»],
+        local_argument: ~q[&«my_fun(arg, &1)»],
+        remote_arity: ~q[&«Remote.my_fun/1»],
+        remote_argument: ~q[&«Remote.my_fun(arg, &1)»]
+      ],
+      import: [
+        single: ~q(import« MyModule»),
+        chain: ~q(import« MyModule.SubModule»),
+        only: [
+          single_line: ~q(import« OtherModule, only: [something: 3, other_thing: 2]»),
+          multi_line: ~q(import« OtherModule, only: »[
+            something: 3,
+            other_thing: 2
+          ])
+        ],
+        except: [
+          single_line: ~q(import« OtherModule, except: [something: 3, other_thing: 2]»),
+          multi_line: ~q(import« OtherModule, except: »[
+            something: 3,
+            other_thing: 2
+          ])
+        ]
+      ],
+      keyword: [
+        single_line:
+          ~q(«string: "value", atom: :value2, int: 6, float: 2.0, list: [1, 2], tuple: {3, 4}»),
+        multi_line: ~q(
+        [«
+        string: "value",
+        atom: :value2,
+        int: 6,
+        float: 2.0,
+        list: [1, 2],
+        tuple: {3, 4}
+        »])
+      ],
+      map: [
+        single_line:
+          ~q(%{«string: "value", atom: :value2, int: 6, float: 2.0, list: [1, 2], tuple: {3, 4}}»)
+      ],
+      module_doc: [
+        empty: ~S[@«moduledoc ""»],
+        false: "@«moduledoc false»",
+        single_line: ~S[@«moduledoc "this is my moduledoc»"],
+        multi_line: ~S[@«moduledoc """
+        This is the moduledoc
+        """»
+        ]
+      ],
+      module_attribute: [
+        single_line: "@«attr 45»",
+        multi_line_pipe: """
+        @«attr other_thing»
+        |> «Enum.shuffle()»
+        |> «Enum.max()»
+        """,
+        multi_line_list: """
+        @«attrs [»
+        «:foo»,
+        «:bar»,
+        «:baz»
+        ]
+        """
+      ],
+      pipe: [
+        one_line: ~q[foo |> «bar»() |> «RemoteCall.fun»() |> «:remote_erlang.call»()],
+        multi_line: ~q[
+        document
+        |> «local_call»()
+        |> «RemoteModule.call»()
+        |> «:remote_erlang.call»()
+        ]
+      ],
+      require: [
+        single: ~q(require« MyModule»),
+        chain: ~q(require« MyModule.Submodule»)
+      ],
+      spec: [
+        simple_function: ~q{@spec« function_name(String.t) :: any()»},
+        multi_line: ~q{
+        @spec «on_multiple_lines :: integer()»
+        | «String.t()»
+        | «something()»
+        },
+        or_type: ~q{@spec« my_func() :: :yours | :mine | :the_truth»}
+      ],
+      struct_field_key: [
+        simple: ~q[%User{«foo:» 3,« bar:» 8}]
+      ],
+      struct_field_value: [
+        single_line: ~q[%User{field_name:« 3», next_name:« :atom»}]
+      ],
+      struct_fields: [
+        one_line: [
+          empty: ~q[%User{«»}],
+          simple: ~q[%User{«field_name: 3, other_name: 9»}]
+        ],
+        multi_line: [
+          simple: ~q[
+          %Struct{«
+            amount: 1,
+            kind: :tomatoes»
+          }
+          ]
+        ]
+      ],
+      strings: [
+        literal: ~q["«this is a string»"],
+        interpolation: [
+          variable: ~S["«before »#{interp}« after»"],
+          math: ~S["«before »#{3 + 1}« after»"],
+          function_call: ~S["«before »#{my_fun(arg)}« after»"],
+          multiple: ~S["«before »#{first}« middle »#{second}« after»"]
+        ],
+        heredocs: [
+          simple: ~S[
+          """
+          «This is in the heredoc
+          It's multiple lines»
+          """
+          ],
+          interpolation: ~S[
+          """
+          «This is the heredoc
+          #{something} is interpolated»
+          """
+          ]
+        ],
+        sigils: [
+          s: [
+            single_line: ~S[
+            ~s/«this is a string»/
+            ],
+            multi_line: ~S[
+            ~s/«
+            this is a string
+            that spans
+            many lines
+            »/
+            ]
+          ],
+          S: [
+            single_line: ~S[
+            ~S/«this is a string»/
+            ],
+            multi_line: ~S[
+            ~S/«
+            this is a string
+            that spans
+            many lines
+            »/
+            ]
+          ]
+        ]
+      ],
+      struct_reference: [
+        single_line: ~q(%U«ser»{#{keys_and_values()}}),
+        nested: ~q[%U«ser»{account: %A«ccount»{#{keys_and_values()}}}]
+      ],
+      type: [
+        private: ~q[@typep« my_type :: integer()»],
+        opaque: ~q[@opaque« opaque :: integer()»],
+        single_line: [
+          simple: ~q[@type« my_type :: integer()»],
+          composite: ~q[@type« my_type :: :foo | :bar | :baz»]
+        ],
+        multi_line: [
+          composite: ~q[
+          @type« multi_line ::»
+          «integer()»
+          |« String.t()»
+          |« Something.t()»
+          ]
+        ]
+      ],
+      use: [
+        simple: ~q(use« SomeModule»),
+        params: [
+          single_line: ~q(use« SomeModule, param: 3, other_param: :doggy»),
+          multi_line: ~q(
+          use «SomeModule, »[
+            with: :features,
+            that_are: :great
+          ]
+          )
+        ]
+      ]
+    ]
+  end
+
+  def get do
+    flatten(suite())
+  end
+
+  defp flatten(keyword) do
+    keyword
+    |> do_flatten([], [])
+    |> Enum.map(fn {path, value} -> {Enum.reverse(path), value} end)
+  end
+
+  defp do_flatten(keyword, prefix, acc) do
+    Enum.reduce(keyword, acc, fn
+      {k, v}, acc when is_list(v) ->
+        do_flatten(v, [k | prefix], acc)
+
+      {k, v}, acc ->
+        [{[k | prefix], v} | acc]
+    end)
+  end
+
+  defp list_literal do
+    ~q([:atom, 1, 2.0, "string", %{a: 1, b: 2}, [1, 2, 3], {1, 2}])
+  end
+
+  defp map_literal do
+    ~q(%{foo: 3, bar: 6})
+  end
+
+  defp keyword_literal do
+    ~q([key_1: 3, key_2: #{list_literal()}])
+  end
+
+  defp tuple_literal do
+    ~q({:atom, 1, 2.0, "string", %{a: 1, b: 2}, [1, 2, 3], {1, 2}})
+  end
+
+  defp keys_and_values do
+    ~q(string: "value", atom: :value2, int: 6, float: 2.0, keyword: #{keyword_literal()}, map: #{map_literal()}, list: #{list_literal()}, tuple: #{tuple_literal()})
+  end
+end
diff --git a/apps/common/test/support/lexical/test/variations.ex b/apps/common/test/support/lexical/test/variations.ex
new file mode 100644
index 000000000..72854881c
--- /dev/null
+++ b/apps/common/test/support/lexical/test/variations.ex
@@ -0,0 +1,50 @@
+defmodule Lexical.Test.Variations do
+  def wrap_with(:nothing, text) do
+    text
+  end
+
+  def wrap_with(:match, text) do
+    """
+    x = #{text}
+    """
+  end
+
+  def wrap_with(:module, text) do
+    """
+    defmodule MyModule do
+      #{text}
+    end
+    """
+  end
+
+  def wrap_with(:function_body, text) do
+    body = """
+    def func do
+      #{text}
+    end
+    """
+
+    wrap_with(:module, body)
+  end
+
+  def wrap_with(:function_arguments, text) do
+    args = """
+    def func(#{text}) do
+    end
+    """
+
+    wrap_with(:module, args)
+  end
+
+  def wrap_with(:comprehension_generator, text) do
+    """
+    for x <- #{text} do
+      x
+    end
+    """
+  end
+
+  def wrap_with(:function_call, text) do
+    "Enum.map(things, #{text})"
+  end
+end
diff --git a/apps/common/test/test_helper.exs b/apps/common/test/test_helper.exs
index 869559e70..db057cfd5 100644
--- a/apps/common/test/test_helper.exs
+++ b/apps/common/test/test_helper.exs
@@ -1 +1,2 @@
+Application.ensure_all_started(:snowflake)
 ExUnit.start()
diff --git a/apps/proto/.formatter.exs b/apps/proto/.formatter.exs
index d2d325383..ff1e17901 100644
--- a/apps/proto/.formatter.exs
+++ b/apps/proto/.formatter.exs
@@ -7,7 +7,9 @@ proto_dsl = [
   defrequest: 1,
   defrequest: 2,
   defresponse: 1,
-  deftype: 1
+  deftype: 1,
+  server_request: 2,
+  server_request: 3
 ]
 
 [
diff --git a/apps/proto/lib/lexical/proto.ex b/apps/proto/lib/lexical/proto.ex
index bd33686af..5cd0aba6b 100644
--- a/apps/proto/lib/lexical/proto.ex
+++ b/apps/proto/lib/lexical/proto.ex
@@ -10,7 +10,10 @@ defmodule Lexical.Proto do
     import Proto.Alias, only: [defalias: 1]
     import Proto.Enum, only: [defenum: 1]
     import Proto.Notification, only: [defnotification: 1, defnotification: 2]
-    import Proto.Request, only: [defrequest: 1, defrequest: 2]
+
+    import Proto.Request,
+      only: [defrequest: 1, defrequest: 2, server_request: 2, server_request: 3]
+
     import Proto.Response, only: [defresponse: 1]
     import Proto.Type, only: [deftype: 1]
   end
diff --git a/apps/proto/lib/lexical/proto/request.ex b/apps/proto/lib/lexical/proto/request.ex
index ae5a5bbf6..72163551a 100644
--- a/apps/proto/lib/lexical/proto/request.ex
+++ b/apps/proto/lib/lexical/proto/request.ex
@@ -10,13 +10,39 @@ defmodule Lexical.Proto.Request do
   end
 
   defmacro defrequest(method, params_module_ast) do
+    types = fetch_types(params_module_ast, __CALLER__)
+    do_defrequest(method, types, __CALLER__)
+  end
+
+  defmacro server_request(method, params_module_ast, response_module_ast) do
+    types = fetch_types(params_module_ast, __CALLER__)
+
+    quote do
+      unquote(do_defrequest(method, types, __CALLER__))
+
+      def parse_response(response) do
+        unquote(response_module_ast).parse(response)
+      end
+    end
+  end
+
+  defmacro server_request(method, response_module_ast) do
+    quote do
+      unquote(do_defrequest(method, [], __CALLER__))
+
+      def parse_response(response) do
+        unquote(response_module_ast).parse(response)
+      end
+    end
+  end
+
+  defp fetch_types(params_module_ast, env)
do params_module = params_module_ast - |> Macro.expand(__CALLER__) + |> Macro.expand(env) |> Code.ensure_compiled!() - types = params_module.__meta__(:raw_types) - do_defrequest(method, types, __CALLER__) + params_module.__meta__(:raw_types) end defp do_defrequest(method, types, caller) do @@ -78,11 +104,17 @@ defmodule Lexical.Proto.Request do defimpl Jason.Encoder, for: unquote(lsp_module_name) do def encode(request, opts) do + params = + case Map.take(request, unquote(param_names)) do + empty when map_size(empty) == 0 -> nil + params -> params + end + %{ id: request.id, jsonrpc: "2.0", method: unquote(method), - params: Map.take(request, unquote(param_names)) + params: params } |> Jason.Encode.map(opts) end diff --git a/apps/proto/lib/lexical/proto/response.ex b/apps/proto/lib/lexical/proto/response.ex index d3e8a519e..84b26e759 100644 --- a/apps/proto/lib/lexical/proto/response.ex +++ b/apps/proto/lib/lexical/proto/response.ex @@ -4,6 +4,7 @@ defmodule Lexical.Proto.Response do alias Lexical.Proto.Macros.{ Access, Meta, + Parse, Struct, Typespec } @@ -23,8 +24,8 @@ defmodule Lexical.Proto.Response do unquote(Struct.build(jsonrpc_types, __CALLER__)) @type t :: unquote(Typespec.typespec()) unquote(Meta.build(jsonrpc_types)) - unquote(constructors()) + unquote(Parse.build(result: response_type)) defimpl Jason.Encoder, for: unquote(__CALLER__.module) do def encode(%_{error: nil} = response, opts) do diff --git a/apps/proto/mix.exs b/apps/proto/mix.exs index 561fd2bce..593f6cfb8 100644 --- a/apps/proto/mix.exs +++ b/apps/proto/mix.exs @@ -4,7 +4,7 @@ defmodule Proto.MixProject do def project do [ app: :proto, - version: "0.3.0", + version: "0.5.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", diff --git a/apps/protocol/lib/generated/lexical/protocol/types/code_lens.ex b/apps/protocol/lib/generated/lexical/protocol/types/code_lens.ex new file mode 100644 index 000000000..fb2cff13e --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/code_lens.ex @@ -0,0 +1,7 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.CodeLens do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + deftype command: optional(Types.Command), data: optional(any()), range: Types.Range +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/code_lens/params.ex b/apps/protocol/lib/generated/lexical/protocol/types/code_lens/params.ex new file mode 100644 index 000000000..d23491513 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/code_lens/params.ex @@ -0,0 +1,10 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.CodeLens.Params do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype partial_result_token: optional(Types.Progress.Token), + text_document: Types.TextDocument.Identifier, + work_done_token: optional(Types.Progress.Token) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/document/symbol.ex b/apps/protocol/lib/generated/lexical/protocol/types/document/symbol.ex new file mode 100644 index 000000000..ac39f53d7 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/document/symbol.ex @@ -0,0 +1,15 @@ +# This file's contents are auto-generated. Do not edit. 
+defmodule Lexical.Protocol.Types.Document.Symbol do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype children: optional(list_of(Types.Document.Symbol)), + deprecated: optional(boolean()), + detail: optional(string()), + kind: Types.Symbol.Kind, + name: string(), + range: Types.Range, + selection_range: Types.Range, + tags: optional(list_of(Types.Symbol.Tag)) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/document/symbol/params.ex b/apps/protocol/lib/generated/lexical/protocol/types/document/symbol/params.ex new file mode 100644 index 000000000..35f5d65bd --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/document/symbol/params.ex @@ -0,0 +1,10 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.Document.Symbol.Params do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype partial_result_token: optional(Types.Progress.Token), + text_document: Types.TextDocument.Identifier, + work_done_token: optional(Types.Progress.Token) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/execute_command/params.ex b/apps/protocol/lib/generated/lexical/protocol/types/execute_command/params.ex new file mode 100644 index 000000000..b378f92d8 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/execute_command/params.ex @@ -0,0 +1,10 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.ExecuteCommand.Params do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype arguments: optional(list_of(any())), + command: string(), + work_done_token: optional(Types.Progress.Token) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/execute_command/registration/options.ex b/apps/protocol/lib/generated/lexical/protocol/types/execute_command/registration/options.ex new file mode 100644 index 000000000..c9df46a07 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/execute_command/registration/options.ex @@ -0,0 +1,6 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.ExecuteCommand.Registration.Options do + alias Lexical.Proto + use Proto + deftype commands: list_of(string()), work_done_progress: optional(boolean()) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/location/link.ex b/apps/protocol/lib/generated/lexical/protocol/types/location/link.ex new file mode 100644 index 000000000..5472c56a9 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/location/link.ex @@ -0,0 +1,11 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.Location.Link do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype origin_selection_range: optional(Types.Range), + target_range: Types.Range, + target_selection_range: Types.Range, + target_uri: string() +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/lsp_object.ex b/apps/protocol/lib/generated/lexical/protocol/types/lsp_object.ex new file mode 100644 index 000000000..01261f0ee --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/lsp_object.ex @@ -0,0 +1,6 @@ +# This file's contents are auto-generated. Do not edit. 
+defmodule Lexical.Protocol.Types.LSPObject do + alias Lexical.Proto + use Proto + deftype [] +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/message/action_item.ex b/apps/protocol/lib/generated/lexical/protocol/types/message/action_item.ex new file mode 100644 index 000000000..21331da8b --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/message/action_item.ex @@ -0,0 +1,6 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.Message.ActionItem do + alias Lexical.Proto + use Proto + deftype title: string() +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/show_message_request/params.ex b/apps/protocol/lib/generated/lexical/protocol/types/show_message_request/params.ex new file mode 100644 index 000000000..191ae489e --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/show_message_request/params.ex @@ -0,0 +1,10 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.ShowMessageRequest.Params do + alias Lexical.Proto + alias Lexical.Protocol.Types + use Proto + + deftype actions: optional(list_of(Types.Message.ActionItem)), + message: string(), + type: Types.Message.Type +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol.ex b/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol.ex new file mode 100644 index 000000000..4938ed1ca --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol.ex @@ -0,0 +1,19 @@ +# This file's contents are auto-generated. Do not edit. +defmodule Lexical.Protocol.Types.Workspace.Symbol do + alias Lexical.Proto + alias Lexical.Protocol.Types + + defmodule Location do + use Proto + deftype uri: string() + end + + use Proto + + deftype container_name: optional(string()), + data: optional(any()), + kind: Types.Symbol.Kind, + location: one_of([Types.Location, Lexical.Protocol.Types.Workspace.Symbol.Location]), + name: string(), + tags: optional(list_of(Types.Symbol.Tag)) +end diff --git a/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol/params.ex b/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol/params.ex new file mode 100644 index 000000000..288eac723 --- /dev/null +++ b/apps/protocol/lib/generated/lexical/protocol/types/workspace/symbol/params.ex @@ -0,0 +1,10 @@ +# This file's contents are auto-generated. Do not edit. 
+defmodule Lexical.Protocol.Types.Workspace.Symbol.Params do
+  alias Lexical.Proto
+  alias Lexical.Protocol.Types
+  use Proto
+
+  deftype partial_result_token: optional(Types.Progress.Token),
+          query: string(),
+          work_done_token: optional(Types.Progress.Token)
+end
diff --git a/apps/protocol/lib/lexical/protocol/conversions.ex b/apps/protocol/lib/lexical/protocol/conversions.ex
index 59877a2a5..26dc384c4 100644
--- a/apps/protocol/lib/lexical/protocol/conversions.ex
+++ b/apps/protocol/lib/lexical/protocol/conversions.ex
@@ -143,10 +143,9 @@ defmodule Lexical.Protocol.Conversions do
   end
 
   defp extract_lsp_character(%ElixirPosition{context_line: line(text: utf8_text)} = position) do
-    with {:ok, code_unit} <- CodeUnit.to_utf16(utf8_text, position.character - 1) do
-      character = min(code_unit, CodeUnit.count(:utf16, utf8_text))
-      {:ok, character}
-    end
+    code_unit = CodeUnit.utf8_position_to_utf16_offset(utf8_text, position.character - 1)
+    character = min(code_unit, CodeUnit.count(:utf16, utf8_text))
+    {:ok, character}
   end
 
   defp extract_elixir_character(%LSPosition{} = position, line(ascii?: true, text: text)) do
@@ -155,7 +154,7 @@ defmodule Lexical.Protocol.Conversions do
   end
 
   defp extract_elixir_character(%LSPosition{} = position, line(text: utf8_text)) do
-    with {:ok, code_unit} <- CodeUnit.to_utf8(utf8_text, position.character) do
+    with {:ok, code_unit} <- CodeUnit.utf16_offset_to_utf8_offset(utf8_text, position.character) do
       character = min(code_unit, byte_size(utf8_text) + 1)
       {:ok, character}
     end
diff --git a/apps/protocol/lib/lexical/protocol/json_rpc.ex b/apps/protocol/lib/lexical/protocol/json_rpc.ex
index 8860251ec..970bf220d 100644
--- a/apps/protocol/lib/lexical/protocol/json_rpc.ex
+++ b/apps/protocol/lib/lexical/protocol/json_rpc.ex
@@ -2,6 +2,8 @@ defmodule Lexical.Protocol.JsonRpc do
   alias Lexical.Protocol.Notifications
   alias Lexical.Protocol.Requests
 
+  require Logger
+
   @crlf "\r\n"
 
   def decode(message_string) do
@@ -30,14 +32,23 @@ defmodule Lexical.Protocol.JsonRpc do
     {:ok, json_rpc}
   end
 
+  # These messages appear to be empty Responses (per the LSP spec) sent to
+  # acknowledge Requests that the language server sent to the client.
   defp do_decode(%{"id" => _id, "result" => nil}) do
-    :error
+    {:error, :empty_response}
   end
 
   defp do_decode(%{"method" => method, "id" => _id} = request) do
     Requests.decode(method, request)
   end
 
+  defp do_decode(%{"id" => _id, "result" => _result} = response) do
+    # This is due to a client -> server message, but we can't decode it
+    # properly yet, since we can't match up the response type to the message.
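+    #
+    # For example (hypothetical payload), the client's reply to the server's
+    # "window/showMessageRequest" could arrive here as
+    # %{"jsonrpc" => "2.0", "id" => 42, "result" => %{"title" => "Retry"}};
+    # nothing in that payload names the request it answers, so we can't pick
+    # a response module to parse the result with, and we pass the raw map
+    # through instead.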
+ + {:ok, response} + end + defp do_decode(%{"method" => method} = notification) do Notifications.decode(method, notification) end diff --git a/apps/protocol/lib/lexical/protocol/requests.ex b/apps/protocol/lib/lexical/protocol/requests.ex index 8bcf0af5f..29ac9f721 100644 --- a/apps/protocol/lib/lexical/protocol/requests.ex +++ b/apps/protocol/lib/lexical/protocol/requests.ex @@ -1,5 +1,6 @@ defmodule Lexical.Protocol.Requests do alias Lexical.Proto + alias Lexical.Protocol.Responses alias Lexical.Protocol.Types # Client -> Server request @@ -51,6 +52,12 @@ defmodule Lexical.Protocol.Requests do defrequest "textDocument/codeAction", Types.CodeAction.Params end + defmodule CodeLens do + use Proto + + defrequest "textDocument/codeLens", Types.CodeLens.Params + end + defmodule Completion do use Proto @@ -63,12 +70,44 @@ defmodule Lexical.Protocol.Requests do defrequest "textDocument/hover", Types.Hover.Params end + defmodule ExecuteCommand do + use Proto + + defrequest "workspace/executeCommand", Types.ExecuteCommand.Params + end + + defmodule DocumentSymbols do + use Proto + + defrequest "textDocument/documentSymbol", Types.Document.Symbol.Params + end + + defmodule WorkspaceSymbol do + use Proto + + defrequest "workspace/symbol", Types.Workspace.Symbol.Params + end + # Server -> Client requests defmodule RegisterCapability do use Proto - defrequest "client/registerCapability", Types.Registration.Params + server_request "client/registerCapability", Types.Registration.Params, Responses.Empty + end + + defmodule ShowMessageRequest do + use Proto + + server_request "window/showMessageRequest", + Types.ShowMessageRequest.Params, + Responses.ShowMessage + end + + defmodule CodeLensRefresh do + use Proto + + server_request "workspace/codeLens/refresh", Responses.Empty end use Proto, decoders: :requests diff --git a/apps/protocol/lib/lexical/protocol/responses.ex b/apps/protocol/lib/lexical/protocol/responses.ex index a26c27a81..847805dd9 100644 --- a/apps/protocol/lib/lexical/protocol/responses.ex +++ b/apps/protocol/lib/lexical/protocol/responses.ex @@ -3,6 +3,12 @@ defmodule Lexical.Protocol.Responses do alias Lexical.Proto.Typespecs alias Lexical.Protocol.Types + defmodule Empty do + use Proto + + defresponse optional(Types.LSPObject) + end + defmodule InitializeResult do use Proto @@ -33,12 +39,29 @@ defmodule Lexical.Protocol.Responses do defresponse optional(list_of(Types.CodeAction)) end + defmodule CodeLens do + use Proto + defresponse optional(list_of(Types.CodeLens)) + end + defmodule Completion do use Proto defresponse optional(list_of(one_of([list_of(Types.Completion.Item), Types.Completion.List]))) end + defmodule DocumentSymbols do + use Proto + + defresponse optional(list_of(Types.Document.Symbol)) + end + + defmodule WorkspaceSymbol do + use Proto + + defresponse optional(list_of(Types.Workspace.Symbol)) + end + defmodule Shutdown do use Proto # yeah, this is odd... 
it has no params @@ -51,5 +74,18 @@ defmodule Lexical.Protocol.Responses do defresponse optional(Types.Hover) end + defmodule ExecuteCommand do + use Proto + + defresponse optional(any()) + end + + # Client -> Server responses + + defmodule ShowMessage do + use Proto + defresponse optional(Types.Message.ActionItem) + end + use Typespecs, for: :responses end diff --git a/apps/protocol/mix.exs b/apps/protocol/mix.exs index 89be824f5..02150fb66 100644 --- a/apps/protocol/mix.exs +++ b/apps/protocol/mix.exs @@ -4,7 +4,7 @@ defmodule Lexical.Protocol.MixProject do def project do [ app: :protocol, - version: "0.3.0", + version: "0.5.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", diff --git a/apps/protocol/test/lexical/protocol/conversions_test.exs b/apps/protocol/test/lexical/protocol/conversions_test.exs index 8c2e4a3cb..c6ae2f302 100644 --- a/apps/protocol/test/lexical/protocol/conversions_test.exs +++ b/apps/protocol/test/lexical/protocol/conversions_test.exs @@ -87,7 +87,7 @@ defmodule Lexical.Protocol.ConversionsTest do test "single line utf8" do doc = doc("🏳️‍🌈abcde") - assert {:ok, pos} = Conversions.to_lsp(ex_position(doc, 1, 15)) + assert {:ok, pos} = Conversions.to_lsp(ex_position(doc, 1, 2)) assert %LSPosition{character: 6, line: 0} == pos end diff --git a/apps/remote_control/.formatter.exs b/apps/remote_control/.formatter.exs index 41b7c5c9b..62e3d4dd5 100644 --- a/apps/remote_control/.formatter.exs +++ b/apps/remote_control/.formatter.exs @@ -20,7 +20,7 @@ impossible_to_format = [ Path.join([current_directory, "test", "fixtures", "parse_errors", "lib", "parse_errors.ex"]) ] -locals_without_parens = [with_progress: 2, with_progress: 3, defkey: 2, defkey: 3] +locals_without_parens = [with_progress: 2, with_progress: 3, defkey: 2, defkey: 3, with_wal: 2] [ locals_without_parens: locals_without_parens, diff --git a/apps/remote_control/benchmarks/ast_analyze.exs b/apps/remote_control/benchmarks/ast_analyze.exs new file mode 100644 index 000000000..95511ee24 --- /dev/null +++ b/apps/remote_control/benchmarks/ast_analyze.exs @@ -0,0 +1,20 @@ +alias Lexical.Ast +alias Lexical.Document + +path = + [__DIR__, "**", "enum.ex"] + |> Path.join() + |> Path.wildcard() + |> List.first() + +{:ok, contents} = File.read(path) +doc = Document.new("file://#{path}", contents, 1) + +Benchee.run( + %{ + "Ast.analyze" => fn -> + Ast.analyze(doc) + end + }, + profile_after: true +) diff --git a/apps/remote_control/benchmarks/data/enum.ex b/apps/remote_control/benchmarks/data/enum.ex new file mode 100644 index 000000000..3144b8b40 --- /dev/null +++ b/apps/remote_control/benchmarks/data/enum.ex @@ -0,0 +1,4871 @@ +# credo:disable-for-this-file + +defprotocol Enumerable do + @moduledoc """ + Enumerable protocol used by `Enum` and `Stream` modules. + + When you invoke a function in the `Enum` module, the first argument + is usually a collection that must implement this protocol. + For example, the expression `Enum.map([1, 2, 3], &(&1 * 2))` + invokes `Enumerable.reduce/3` to perform the reducing operation that + builds a mapped list by calling the mapping function `&(&1 * 2)` on + every element in the collection and consuming the element with an + accumulated list. 
+ + Internally, `Enum.map/2` is implemented as follows: + + def map(enumerable, fun) do + reducer = fn x, acc -> {:cont, [fun.(x) | acc]} end + Enumerable.reduce(enumerable, {:cont, []}, reducer) |> elem(1) |> :lists.reverse() + end + + Note that the user-supplied function is wrapped into a `t:reducer/0` function. + The `t:reducer/0` function must return a tagged tuple after each step, + as described in the `t:acc/0` type. At the end, `Enumerable.reduce/3` + returns `t:result/0`. + + This protocol uses tagged tuples to exchange information between the + reducer function and the data type that implements the protocol. This + allows enumeration of resources, such as files, to be done efficiently + while also guaranteeing the resource will be closed at the end of the + enumeration. This protocol also allows suspension of the enumeration, + which is useful when interleaving between many enumerables is required + (as in the `zip/1` and `zip/2` functions). + + This protocol requires four functions to be implemented, `reduce/3`, + `count/1`, `member?/2`, and `slice/1`. The core of the protocol is the + `reduce/3` function. All other functions exist as optimizations paths + for data structures that can implement certain properties in better + than linear time. + """ + + @typedoc """ + An enumerable of elements of type `element`. + + This type is equivalent to `t:t/0` but is especially useful for documentation. + + For example, imagine you define a function that expects an enumerable of + integers and returns an enumerable of strings: + + @spec integers_to_strings(Enumerable.t(integer())) :: Enumerable.t(String.t()) + def integers_to_strings(integers) do + Stream.map(integers, &Integer.to_string/1) + end + + """ + @typedoc since: "1.14.0" + @type t(_element) :: t() + + @typedoc """ + The accumulator value for each step. + + It must be a tagged tuple with one of the following "tags": + + * `:cont` - the enumeration should continue + * `:halt` - the enumeration should halt immediately + * `:suspend` - the enumeration should be suspended immediately + + Depending on the accumulator value, the result returned by + `Enumerable.reduce/3` will change. Please check the `t:result/0` + type documentation for more information. + + In case a `t:reducer/0` function returns a `:suspend` accumulator, + it must be explicitly handled by the caller and never leak. + """ + @type acc :: {:cont, term} | {:halt, term} | {:suspend, term} + + @typedoc """ + The reducer function. + + Should be called with the `enumerable` element and the + accumulator contents. + + Returns the accumulator for the next enumeration step. + """ + @type reducer :: (element :: term, current_acc :: acc -> updated_acc :: acc) + + @typedoc """ + The result of the reduce operation. + + It may be *done* when the enumeration is finished by reaching + its end, or *halted*/*suspended* when the enumeration was halted + or suspended by the tagged accumulator. + + In case the tagged `:halt` accumulator is given, the `:halted` tuple + with the accumulator must be returned. Functions like `Enum.take_while/2` + use `:halt` underneath and can be used to test halting enumerables. + + In case the tagged `:suspend` accumulator is given, the caller must + return the `:suspended` tuple with the accumulator and a continuation. + The caller is then responsible of managing the continuation and the + caller must always call the continuation, eventually halting or continuing + until the end. 
`Enum.zip/2` uses suspension, so it can be used to test + whether your implementation handles suspension correctly. You can also use + `Stream.zip/2` with `Enum.take_while/2` to test the combination of + `:suspend` with `:halt`. + """ + @type result :: + {:done, term} + | {:halted, term} + | {:suspended, term, continuation} + + @typedoc """ + A partially applied reduce function. + + The continuation is the closure returned as a result when + the enumeration is suspended. When invoked, it expects + a new accumulator and it returns the result. + + A continuation can be trivially implemented as long as the reduce + function is defined in a tail recursive fashion. If the function + is tail recursive, all the state is passed as arguments, so + the continuation is the reducing function partially applied. + """ + @type continuation :: (acc -> result) + + @typedoc """ + A slicing function that receives the initial position, + the number of elements in the slice, and the step. + + The `start` position is a number `>= 0` and guaranteed to + exist in the `enumerable`. The length is a number `>= 1` + in a way that `start + length * step <= count`, where + `count` is the maximum amount of elements in the enumerable. + + The function should return a non empty list where + the amount of elements is equal to `length`. + """ + @type slicing_fun :: + (start :: non_neg_integer, length :: pos_integer, step :: pos_integer -> [term()]) + + @typedoc """ + Receives an enumerable and returns a list. + """ + @type to_list_fun :: (t -> [term()]) + + @doc """ + Reduces the `enumerable` into an element. + + Most of the operations in `Enum` are implemented in terms of reduce. + This function should apply the given `t:reducer/0` function to each + element in the `enumerable` and proceed as expected by the returned + accumulator. + + See the documentation of the types `t:result/0` and `t:acc/0` for + more information. + + ## Examples + + As an example, here is the implementation of `reduce` for lists: + + def reduce(_list, {:halt, acc}, _fun), do: {:halted, acc} + def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} + def reduce([], {:cont, acc}, _fun), do: {:done, acc} + def reduce([head | tail], {:cont, acc}, fun), do: reduce(tail, fun.(head, acc), fun) + + """ + @spec reduce(t, acc, reducer) :: result + def reduce(enumerable, acc, fun) + + @doc """ + Retrieves the number of elements in the `enumerable`. + + It should return `{:ok, count}` if you can count the number of elements + in `enumerable` in a faster way than fully traversing it. + + Otherwise it should return `{:error, __MODULE__}` and a default algorithm + built on top of `reduce/3` that runs in linear time will be used. + """ + @spec count(t) :: {:ok, non_neg_integer} | {:error, module} + def count(enumerable) + + @doc """ + Checks if an `element` exists within the `enumerable`. + + It should return `{:ok, boolean}` if you can check the membership of a + given element in `enumerable` with `===/2` without traversing the whole + of it. + + Otherwise it should return `{:error, __MODULE__}` and a default algorithm + built on top of `reduce/3` that runs in linear time will be used. + + When called outside guards, the [`in`](`in/2`) and [`not in`](`in/2`) + operators work by using this function. + """ + @spec member?(t, term) :: {:ok, boolean} | {:error, module} + def member?(enumerable, element) + + @doc """ + Returns a function that slices the data structure contiguously. 
+ + It should return either: + + * `{:ok, size, slicing_fun}` - if the `enumerable` has a known + bound and can access a position in the `enumerable` without + traversing all previous elements. The `slicing_fun` will receive + a `start` position, the `amount` of elements to fetch, and a + `step`. + + * `{:ok, size, to_list_fun}` - if the `enumerable` has a known bound + and can access a position in the `enumerable` by first converting + it to a list via `to_list_fun`. + + * `{:error, __MODULE__}` - the enumerable cannot be sliced efficiently + and a default algorithm built on top of `reduce/3` that runs in + linear time will be used. + + ## Differences to `count/1` + + The `size` value returned by this function is used for boundary checks, + therefore it is extremely important that this function only returns `:ok` + if retrieving the `size` of the `enumerable` is cheap, fast, and takes + constant time. Otherwise the simplest of operations, such as + `Enum.at(enumerable, 0)`, will become too expensive. + + On the other hand, the `count/1` function in this protocol should be + implemented whenever you can count the number of elements in the collection + without traversing it. + """ + @spec slice(t) :: + {:ok, size :: non_neg_integer(), slicing_fun() | to_list_fun()} + | {:error, module()} + def slice(enumerable) +end + +defmodule Enum do + import Kernel, except: [max: 2, min: 2] + + @moduledoc """ + Functions for working with collections (known as enumerables). + + In Elixir, an enumerable is any data type that implements the + `Enumerable` protocol. `List`s (`[1, 2, 3]`), `Map`s (`%{foo: 1, bar: 2}`) + and `Range`s (`1..3`) are common data types used as enumerables: + + iex> Enum.map([1, 2, 3], fn x -> x * 2 end) + [2, 4, 6] + + iex> Enum.sum([1, 2, 3]) + 6 + + iex> Enum.map(1..3, fn x -> x * 2 end) + [2, 4, 6] + + iex> Enum.sum(1..3) + 6 + + iex> map = %{"a" => 1, "b" => 2} + iex> Enum.map(map, fn {k, v} -> {k, v * 2} end) + [{"a", 2}, {"b", 4}] + + However, many other enumerables exist in the language, such as `MapSet`s + and the data type returned by `File.stream!/3` which allows a file to be + traversed as if it was an enumerable. + + The functions in this module work in linear time. This means that, the + time it takes to perform an operation grows at the same rate as the length + of the enumerable. This is expected on operations such as `Enum.map/2`. + After all, if we want to traverse every element on a list, the longer the + list, the more elements we need to traverse, and the longer it will take. + + This linear behaviour should also be expected on operations like `count/1`, + `member?/2`, `at/2` and similar. While Elixir does allow data types to + provide performant variants for such operations, you should not expect it + to always be available, since the `Enum` module is meant to work with a + large variety of data types and not all data types can provide optimized + behaviour. + + Finally, note the functions in the `Enum` module are eager: they will + traverse the enumerable as soon as they are invoked. This is particularly + dangerous when working with infinite enumerables. In such cases, you should + use the `Stream` module, which allows you to lazily express computations, + without traversing collections, and work with possibly infinite collections. + See the `Stream` module for examples and documentation. + """ + + @compile :inline_list_funcs + + @type t :: Enumerable.t() + @type acc :: any + @type element :: any + + @typedoc "Zero-based index. 
It can also be a negative integer." + @type index :: integer + + @type default :: any + + require Stream.Reducers, as: R + + defmacrop skip(acc) do + acc + end + + defmacrop next(_, entry, acc) do + quote(do: [unquote(entry) | unquote(acc)]) + end + + defmacrop acc(head, state, _) do + quote(do: {unquote(head), unquote(state)}) + end + + defmacrop next_with_acc(_, entry, head, state, _) do + quote do + {[unquote(entry) | unquote(head)], unquote(state)} + end + end + + @doc """ + Returns `true` if all elements in `enumerable` are truthy. + + When an element has a falsy value (`false` or `nil`) iteration stops immediately + and `false` is returned. In all other cases `true` is returned. + + ## Examples + + iex> Enum.all?([1, 2, 3]) + true + + iex> Enum.all?([1, nil, 3]) + false + + iex> Enum.all?([]) + true + + """ + @spec all?(t) :: boolean + def all?(enumerable) when is_list(enumerable) do + all_list(enumerable) + end + + def all?(enumerable) do + Enumerable.reduce(enumerable, {:cont, true}, fn entry, _ -> + if entry, do: {:cont, true}, else: {:halt, false} + end) + |> elem(1) + end + + @doc """ + Returns `true` if `fun.(element)` is truthy for all elements in `enumerable`. + + Iterates over `enumerable` and invokes `fun` on each element. If `fun` ever + returns a falsy value (`false` or `nil`), iteration stops immediately and + `false` is returned. Otherwise, `true` is returned. + + ## Examples + + iex> Enum.all?([2, 4, 6], fn x -> rem(x, 2) == 0 end) + true + + iex> Enum.all?([2, 3, 4], fn x -> rem(x, 2) == 0 end) + false + + iex> Enum.all?([], fn _ -> nil end) + true + + As the last example shows, `Enum.all?/2` returns `true` if `enumerable` is + empty, regardless of `fun`. In an empty enumerable there is no element for + which `fun` returns a falsy value, so the result must be `true`. This is a + well-defined logical argument for empty collections. + + """ + @spec all?(t, (element -> as_boolean(term))) :: boolean + def all?(enumerable, fun) when is_list(enumerable) do + predicate_list(enumerable, true, fun) + end + + def all?(first..last//step, fun) do + predicate_range(first, last, step, true, fun) + end + + def all?(enumerable, fun) do + Enumerable.reduce(enumerable, {:cont, true}, fn entry, _ -> + if fun.(entry), do: {:cont, true}, else: {:halt, false} + end) + |> elem(1) + end + + @doc """ + Returns `true` if at least one element in `enumerable` is truthy. + + When an element has a truthy value (neither `false` nor `nil`) iteration stops + immediately and `true` is returned. In all other cases `false` is returned. + + ## Examples + + iex> Enum.any?([false, false, false]) + false + + iex> Enum.any?([false, true, false]) + true + + iex> Enum.any?([]) + false + + """ + @spec any?(t) :: boolean + def any?(enumerable) when is_list(enumerable) do + any_list(enumerable) + end + + def any?(enumerable) do + Enumerable.reduce(enumerable, {:cont, false}, fn entry, _ -> + if entry, do: {:halt, true}, else: {:cont, false} + end) + |> elem(1) + end + + @doc """ + Returns `true` if `fun.(element)` is truthy for at least one element in `enumerable`. + + Iterates over the `enumerable` and invokes `fun` on each element. When an invocation + of `fun` returns a truthy value (neither `false` nor `nil`) iteration stops + immediately and `true` is returned. In all other cases `false` is returned. 
+ + ## Examples + + iex> Enum.any?([2, 4, 6], fn x -> rem(x, 2) == 1 end) + false + + iex> Enum.any?([2, 3, 4], fn x -> rem(x, 2) == 1 end) + true + + iex> Enum.any?([], fn x -> x > 0 end) + false + + """ + @spec any?(t, (element -> as_boolean(term))) :: boolean + def any?(enumerable, fun) when is_list(enumerable) do + predicate_list(enumerable, false, fun) + end + + def any?(first..last//step, fun) do + predicate_range(first, last, step, false, fun) + end + + def any?(enumerable, fun) do + Enumerable.reduce(enumerable, {:cont, false}, fn entry, _ -> + if fun.(entry), do: {:halt, true}, else: {:cont, false} + end) + |> elem(1) + end + + @doc """ + Finds the element at the given `index` (zero-based). + + Returns `default` if `index` is out of bounds. + + A negative `index` can be passed, which means the `enumerable` is + enumerated once and the `index` is counted from the end (for example, + `-1` finds the last element). + + ## Examples + + iex> Enum.at([2, 4, 6], 0) + 2 + + iex> Enum.at([2, 4, 6], 2) + 6 + + iex> Enum.at([2, 4, 6], 4) + nil + + iex> Enum.at([2, 4, 6], 4, :none) + :none + + """ + @spec at(t, index, default) :: element | default + def at(enumerable, index, default \\ nil) when is_integer(index) do + case slice_forward(enumerable, index, 1, 1) do + [value] -> value + [] -> default + end + end + + @doc false + @deprecated "Use Enum.chunk_every/2 instead" + def chunk(enumerable, count), do: chunk(enumerable, count, count, nil) + + @doc false + @deprecated "Use Enum.chunk_every/3 instead" + def chunk(enum, n, step) do + chunk_every(enum, n, step, :discard) + end + + @doc false + @deprecated "Use Enum.chunk_every/4 instead" + def chunk(enumerable, count, step, leftover) do + chunk_every(enumerable, count, step, leftover || :discard) + end + + @doc """ + Shortcut to `chunk_every(enumerable, count, count)`. + """ + @doc since: "1.5.0" + @spec chunk_every(t, pos_integer) :: [list] + def chunk_every(enumerable, count), do: chunk_every(enumerable, count, count, []) + + @doc """ + Returns list of lists containing `count` elements each, where + each new chunk starts `step` elements into the `enumerable`. + + `step` is optional and, if not passed, defaults to `count`, i.e. + chunks do not overlap. Chunking will stop as soon as the collection + ends or when we emit an incomplete chunk. + + If the last chunk does not have `count` elements to fill the chunk, + elements are taken from `leftover` to fill in the chunk. If `leftover` + does not have enough elements to fill the chunk, then a partial chunk + is returned with less than `count` elements. + + If `:discard` is given in `leftover`, the last chunk is discarded + unless it has exactly `count` elements. 
+ + ## Examples + + iex> Enum.chunk_every([1, 2, 3, 4, 5, 6], 2) + [[1, 2], [3, 4], [5, 6]] + + iex> Enum.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, :discard) + [[1, 2, 3], [3, 4, 5]] + + iex> Enum.chunk_every([1, 2, 3, 4, 5, 6], 3, 2, [7]) + [[1, 2, 3], [3, 4, 5], [5, 6, 7]] + + iex> Enum.chunk_every([1, 2, 3, 4], 3, 3, []) + [[1, 2, 3], [4]] + + iex> Enum.chunk_every([1, 2, 3, 4], 10) + [[1, 2, 3, 4]] + + iex> Enum.chunk_every([1, 2, 3, 4, 5], 2, 3, []) + [[1, 2], [4, 5]] + + iex> Enum.chunk_every([1, 2, 3, 4], 3, 3, Stream.cycle([0])) + [[1, 2, 3], [4, 0, 0]] + + """ + @doc since: "1.5.0" + @spec chunk_every(t, pos_integer, pos_integer, t | :discard) :: [list] + def chunk_every(enumerable, count, step, leftover \\ []) + when is_integer(count) and count > 0 and is_integer(step) and step > 0 do + R.chunk_every(&chunk_while/4, enumerable, count, step, leftover) + end + + @doc """ + Chunks the `enumerable` with fine grained control when every chunk is emitted. + + `chunk_fun` receives the current element and the accumulator and must return: + + * `{:cont, chunk, acc}` to emit a chunk and continue with the accumulator + * `{:cont, acc}` to not emit any chunk and continue with the accumulator + * `{:halt, acc}` to halt chunking over the `enumerable`. + + `after_fun` is invoked with the final accumulator when iteration is + finished (or `halt`ed) to handle any trailing elements that were returned + as part of an accumulator, but were not emitted as a chunk by `chunk_fun`. + It must return: + + * `{:cont, chunk, acc}` to emit a chunk. The chunk will be appended to the + list of already emitted chunks. + * `{:cont, acc}` to not emit a chunk + + The `acc` in `after_fun` is required in order to mirror the tuple format + from `chunk_fun` but it will be discarded since the traversal is complete. + + Returns a list of emitted chunks. + + ## Examples + + iex> chunk_fun = fn element, acc -> + ...> if rem(element, 2) == 0 do + ...> {:cont, Enum.reverse([element | acc]), []} + ...> else + ...> {:cont, [element | acc]} + ...> end + ...> end + iex> after_fun = fn + ...> [] -> {:cont, []} + ...> acc -> {:cont, Enum.reverse(acc), []} + ...> end + iex> Enum.chunk_while(1..10, [], chunk_fun, after_fun) + [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]] + iex> Enum.chunk_while([1, 2, 3, 5, 7], [], chunk_fun, after_fun) + [[1, 2], [3, 5, 7]] + + """ + @doc since: "1.5.0" + @spec chunk_while( + t, + acc, + (element, acc -> {:cont, chunk, acc} | {:cont, acc} | {:halt, acc}), + (acc -> {:cont, chunk, acc} | {:cont, acc}) + ) :: Enumerable.t() + when chunk: any + def chunk_while(enumerable, acc, chunk_fun, after_fun) do + {_, {res, acc}} = + Enumerable.reduce(enumerable, {:cont, {[], acc}}, fn entry, {buffer, acc} -> + case chunk_fun.(entry, acc) do + {:cont, chunk, acc} -> {:cont, {[chunk | buffer], acc}} + {:cont, acc} -> {:cont, {buffer, acc}} + {:halt, acc} -> {:halt, {buffer, acc}} + end + end) + + case after_fun.(acc) do + {:cont, _acc} -> :lists.reverse(res) + {:cont, chunk, _acc} -> :lists.reverse([chunk | res]) + end + end + + @doc """ + Splits enumerable on every element for which `fun` returns a new + value. + + Returns a list of lists. + + ## Examples + + iex> Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) + [[1], [2, 2], [3], [4, 4, 6], [7, 7]] + + """ + @spec chunk_by(t, (element -> any)) :: [list] + def chunk_by(enumerable, fun) do + R.chunk_by(&chunk_while/4, enumerable, fun) + end + + @doc """ + Given an enumerable of enumerables, concatenates the `enumerables` into + a single list. 
+ + ## Examples + + iex> Enum.concat([1..3, 4..6, 7..9]) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + + iex> Enum.concat([[1, [2], 3], [4], [5, 6]]) + [1, [2], 3, 4, 5, 6] + + """ + @spec concat(t) :: t + def concat(enumerables) + + def concat(list) when is_list(list) do + concat_list(list) + end + + def concat(enums) do + concat_enum(enums) + end + + @doc """ + Concatenates the enumerable on the `right` with the enumerable on the + `left`. + + This function produces the same result as the `++/2` operator + for lists. + + ## Examples + + iex> Enum.concat(1..3, 4..6) + [1, 2, 3, 4, 5, 6] + + iex> Enum.concat([1, 2, 3], [4, 5, 6]) + [1, 2, 3, 4, 5, 6] + + """ + @spec concat(t, t) :: t + def concat(left, right) when is_list(left) and is_list(right) do + left ++ right + end + + def concat(left, right) do + concat_enum([left, right]) + end + + @doc """ + Returns the size of the `enumerable`. + + ## Examples + + iex> Enum.count([1, 2, 3]) + 3 + + """ + @spec count(t) :: non_neg_integer + def count(enumerable) when is_list(enumerable) do + length(enumerable) + end + + def count(enumerable) do + case Enumerable.count(enumerable) do + {:ok, value} when is_integer(value) -> + value + + {:error, module} -> + enumerable |> module.reduce({:cont, 0}, fn _, acc -> {:cont, acc + 1} end) |> elem(1) + end + end + + @doc """ + Returns the count of elements in the `enumerable` for which `fun` returns + a truthy value. + + ## Examples + + iex> Enum.count([1, 2, 3, 4, 5], fn x -> rem(x, 2) == 0 end) + 2 + + """ + @spec count(t, (element -> as_boolean(term))) :: non_neg_integer + def count(enumerable, fun) do + reduce(enumerable, 0, fn entry, acc -> + if(fun.(entry), do: acc + 1, else: acc) + end) + end + + @doc """ + Counts the enumerable stopping at `limit`. + + This is useful for checking certain properties of the count of an enumerable + without having to actually count the entire enumerable. For example, if you + wanted to check that the count was exactly, at least, or more than a value. + + If the enumerable implements `c:Enumerable.count/1`, the enumerable is + not traversed and we return the lower of the two numbers. To force + enumeration, use `count_until/3` with `fn _ -> true end` as the second + argument. + + ## Examples + + iex> Enum.count_until(1..20, 5) + 5 + iex> Enum.count_until(1..20, 50) + 20 + iex> Enum.count_until(1..10, 10) == 10 # At least 10 + true + iex> Enum.count_until(1..11, 10 + 1) > 10 # More than 10 + true + iex> Enum.count_until(1..5, 10) < 10 # Less than 10 + true + iex> Enum.count_until(1..10, 10 + 1) == 10 # Exactly ten + true + + """ + @doc since: "1.12.0" + @spec count_until(t, pos_integer) :: non_neg_integer + def count_until(enumerable, limit) when is_integer(limit) and limit > 0 do + stop_at = limit - 1 + + case Enumerable.count(enumerable) do + {:ok, value} -> + Kernel.min(value, limit) + + {:error, module} -> + enumerable + |> module.reduce( + {:cont, 0}, + fn + _, ^stop_at -> + {:halt, limit} + + _, acc -> + {:cont, acc + 1} + end + ) + |> elem(1) + end + end + + @doc """ + Counts the elements in the enumerable for which `fun` returns a truthy value, stopping at `limit`. + + See `count/2` and `count_until/2` for more information. 
+ + ## Examples + + iex> Enum.count_until(1..20, fn x -> rem(x, 2) == 0 end, 7) + 7 + iex> Enum.count_until(1..20, fn x -> rem(x, 2) == 0 end, 11) + 10 + """ + @doc since: "1.12.0" + @spec count_until(t, (element -> as_boolean(term)), pos_integer) :: non_neg_integer + def count_until(enumerable, fun, limit) when is_integer(limit) and limit > 0 do + stop_at = limit - 1 + + Enumerable.reduce(enumerable, {:cont, 0}, fn + entry, ^stop_at -> + if fun.(entry) do + {:halt, limit} + else + {:cont, stop_at} + end + + entry, acc -> + if fun.(entry) do + {:cont, acc + 1} + else + {:cont, acc} + end + end) + |> elem(1) + end + + @doc """ + Enumerates the `enumerable`, returning a list where all consecutive + duplicated elements are collapsed to a single element. + + Elements are compared using `===/2`. + + If you want to remove all duplicated elements, regardless of order, + see `uniq/1`. + + ## Examples + + iex> Enum.dedup([1, 2, 3, 3, 2, 1]) + [1, 2, 3, 2, 1] + + iex> Enum.dedup([1, 1, 2, 2.0, :three, :three]) + [1, 2, 2.0, :three] + + """ + @spec dedup(t) :: list + def dedup(enumerable) when is_list(enumerable) do + dedup_list(enumerable, []) |> :lists.reverse() + end + + def dedup(enumerable) do + Enum.reduce(enumerable, [], fn x, acc -> + case acc do + [^x | _] -> acc + _ -> [x | acc] + end + end) + |> :lists.reverse() + end + + @doc """ + Enumerates the `enumerable`, returning a list where all consecutive + duplicated elements are collapsed to a single element. + + The function `fun` maps every element to a term which is used to + determine if two elements are duplicates. + + ## Examples + + iex> Enum.dedup_by([{1, :a}, {2, :b}, {2, :c}, {1, :a}], fn {x, _} -> x end) + [{1, :a}, {2, :b}, {1, :a}] + + iex> Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) + [5, 1, 3, 2] + + """ + @spec dedup_by(t, (element -> term)) :: list + def dedup_by(enumerable, fun) do + {list, _} = reduce(enumerable, {[], []}, R.dedup(fun)) + :lists.reverse(list) + end + + @doc """ + Drops the `amount` of elements from the `enumerable`. + + If a negative `amount` is given, the `amount` of last values will be dropped. + The `enumerable` will be enumerated once to retrieve the proper index and + the remaining calculation is performed from the end. + + ## Examples + + iex> Enum.drop([1, 2, 3], 2) + [3] + + iex> Enum.drop([1, 2, 3], 10) + [] + + iex> Enum.drop([1, 2, 3], 0) + [1, 2, 3] + + iex> Enum.drop([1, 2, 3], -1) + [1, 2] + + """ + @spec drop(t, integer) :: list + def drop(enumerable, amount) + when is_list(enumerable) and is_integer(amount) and amount >= 0 do + drop_list(enumerable, amount) + end + + def drop(enumerable, 0) do + to_list(enumerable) + end + + def drop(enumerable, amount) when is_integer(amount) and amount > 0 do + {result, _} = reduce(enumerable, {[], amount}, R.drop()) + if is_list(result), do: :lists.reverse(result), else: [] + end + + def drop(enumerable, amount) when is_integer(amount) and amount < 0 do + {count, fun} = slice_count_and_fun(enumerable, 1) + amount = Kernel.min(amount + count, count) + + if amount > 0 do + fun.(0, amount, 1) + else + [] + end + end + + @doc """ + Returns a list of every `nth` element in the `enumerable` dropped, + starting with the first element. + + The first element is always dropped, unless `nth` is 0. + + The second argument specifying every `nth` element must be a non-negative + integer. 
+ + ## Examples + + iex> Enum.drop_every(1..10, 2) + [2, 4, 6, 8, 10] + + iex> Enum.drop_every(1..10, 0) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + + iex> Enum.drop_every([1, 2, 3], 1) + [] + + """ + @spec drop_every(t, non_neg_integer) :: list + def drop_every(enumerable, nth) + + def drop_every(_enumerable, 1), do: [] + def drop_every(enumerable, 0), do: to_list(enumerable) + def drop_every([], nth) when is_integer(nth), do: [] + + def drop_every(enumerable, nth) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.drop_every(nth)) + :lists.reverse(res) + end + + @doc """ + Drops elements at the beginning of the `enumerable` while `fun` returns a + truthy value. + + ## Examples + + iex> Enum.drop_while([1, 2, 3, 2, 1], fn x -> x < 3 end) + [3, 2, 1] + + """ + @spec drop_while(t, (element -> as_boolean(term))) :: list + def drop_while(enumerable, fun) when is_list(enumerable) do + drop_while_list(enumerable, fun) + end + + def drop_while(enumerable, fun) do + {res, _} = reduce(enumerable, {[], true}, R.drop_while(fun)) + :lists.reverse(res) + end + + @doc """ + Invokes the given `fun` for each element in the `enumerable`. + + Returns `:ok`. + + ## Examples + + Enum.each(["some", "example"], fn x -> IO.puts(x) end) + "some" + "example" + #=> :ok + + """ + @spec each(t, (element -> any)) :: :ok + def each(enumerable, fun) when is_list(enumerable) do + :lists.foreach(fun, enumerable) + end + + def each(enumerable, fun) do + reduce(enumerable, nil, fn entry, _ -> + fun.(entry) + nil + end) + + :ok + end + + @doc """ + Determines if the `enumerable` is empty. + + Returns `true` if `enumerable` is empty, otherwise `false`. + + ## Examples + + iex> Enum.empty?([]) + true + + iex> Enum.empty?([1, 2, 3]) + false + + """ + @spec empty?(t) :: boolean + def empty?(enumerable) when is_list(enumerable) do + enumerable == [] + end + + def empty?(enumerable) do + case Enumerable.slice(enumerable) do + {:ok, value, _} -> + value == 0 + + {:error, module} -> + enumerable + |> module.reduce({:cont, true}, fn _, _ -> {:halt, false} end) + |> elem(1) + end + end + + @doc """ + Finds the element at the given `index` (zero-based). + + Returns `{:ok, element}` if found, otherwise `:error`. + + A negative `index` can be passed, which means the `enumerable` is + enumerated once and the `index` is counted from the end (for example, + `-1` fetches the last element). + + ## Examples + + iex> Enum.fetch([2, 4, 6], 0) + {:ok, 2} + + iex> Enum.fetch([2, 4, 6], -3) + {:ok, 2} + + iex> Enum.fetch([2, 4, 6], 2) + {:ok, 6} + + iex> Enum.fetch([2, 4, 6], 4) + :error + + """ + @spec fetch(t, index) :: {:ok, element} | :error + def fetch(enumerable, index) when is_integer(index) do + case slice_forward(enumerable, index, 1, 1) do + [value] -> {:ok, value} + [] -> :error + end + end + + @doc """ + Finds the element at the given `index` (zero-based). + + Raises `OutOfBoundsError` if the given `index` is outside the range of + the `enumerable`. + + ## Examples + + iex> Enum.fetch!([2, 4, 6], 0) + 2 + + iex> Enum.fetch!([2, 4, 6], 2) + 6 + + iex> Enum.fetch!([2, 4, 6], 4) + ** (Enum.OutOfBoundsError) out of bounds error + + """ + @spec fetch!(t, index) :: element + def fetch!(enumerable, index) when is_integer(index) do + case slice_forward(enumerable, index, 1, 1) do + [value] -> value + [] -> raise Enum.OutOfBoundsError + end + end + + @doc """ + Filters the `enumerable`, i.e. returns only those elements + for which `fun` returns a truthy value. 
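+
+ Conceptually (a sketch, not the implementation), this is equivalent to the
+ comprehension `for element <- enumerable, fun.(element), do: element`.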
+ + See also `reject/2` which discards all elements where the + function returns a truthy value. + + ## Examples + + iex> Enum.filter([1, 2, 3], fn x -> rem(x, 2) == 0 end) + [2] + + Keep in mind that `filter` is not capable of filtering and + transforming an element at the same time. If you would like + to do so, consider using `flat_map/2`. For example, if you + want to convert all strings that represent an integer and + discard the invalid one in one pass: + + strings = ["1234", "abc", "12ab"] + + Enum.flat_map(strings, fn string -> + case Integer.parse(string) do + # transform to integer + {int, _rest} -> [int] + # skip the value + :error -> [] + end + end) + + """ + @spec filter(t, (element -> as_boolean(term))) :: list + def filter(enumerable, fun) when is_list(enumerable) do + filter_list(enumerable, fun) + end + + def filter(enumerable, fun) do + reduce(enumerable, [], R.filter(fun)) |> :lists.reverse() + end + + @doc false + @deprecated "Use Enum.filter/2 + Enum.map/2 or for comprehensions instead" + def filter_map(enumerable, filter, mapper) when is_list(enumerable) do + for element <- enumerable, filter.(element), do: mapper.(element) + end + + def filter_map(enumerable, filter, mapper) do + enumerable + |> reduce([], R.filter_map(filter, mapper)) + |> :lists.reverse() + end + + @doc """ + Returns the first element for which `fun` returns a truthy value. + If no such element is found, returns `default`. + + ## Examples + + iex> Enum.find([2, 3, 4], fn x -> rem(x, 2) == 1 end) + 3 + + iex> Enum.find([2, 4, 6], fn x -> rem(x, 2) == 1 end) + nil + iex> Enum.find([2, 4, 6], 0, fn x -> rem(x, 2) == 1 end) + 0 + + """ + @spec find(t, default, (element -> any)) :: element | default + def find(enumerable, default \\ nil, fun) + + def find(enumerable, default, fun) when is_list(enumerable) do + find_list(enumerable, default, fun) + end + + def find(enumerable, default, fun) do + Enumerable.reduce(enumerable, {:cont, default}, fn entry, default -> + if fun.(entry), do: {:halt, entry}, else: {:cont, default} + end) + |> elem(1) + end + + @doc """ + Similar to `find/3`, but returns the index (zero-based) + of the element instead of the element itself. + + ## Examples + + iex> Enum.find_index([2, 4, 6], fn x -> rem(x, 2) == 1 end) + nil + + iex> Enum.find_index([2, 3, 4], fn x -> rem(x, 2) == 1 end) + 1 + + """ + @spec find_index(t, (element -> any)) :: non_neg_integer | nil + def find_index(enumerable, fun) when is_list(enumerable) do + find_index_list(enumerable, 0, fun) + end + + def find_index(enumerable, fun) do + result = + Enumerable.reduce(enumerable, {:cont, {:not_found, 0}}, fn entry, {_, index} -> + if fun.(entry), do: {:halt, {:found, index}}, else: {:cont, {:not_found, index + 1}} + end) + + case elem(result, 1) do + {:found, index} -> index + {:not_found, _} -> nil + end + end + + @doc """ + Similar to `find/3`, but returns the value of the function + invocation instead of the element itself. + + The return value is considered to be found when the result is truthy + (neither `nil` nor `false`). + + ## Examples + + iex> Enum.find_value([2, 3, 4], fn x -> + ...> if x > 2, do: x * x + ...> end) + 9 + + iex> Enum.find_value([2, 4, 6], fn x -> rem(x, 2) == 1 end) + nil + + iex> Enum.find_value([2, 3, 4], fn x -> rem(x, 2) == 1 end) + true + + iex> Enum.find_value([1, 2, 3], "no bools!", &is_boolean/1) + "no bools!" 
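+
+ Because the result of invoking `fun` (rather than the element itself) is
+ returned, a search and a lookup can be combined; for instance, fetching the
+ first available `:b` value from a list of keyword lists:
+
+ iex> Enum.find_value([[a: 1], [b: 2], [b: 3]], fn kw -> kw[:b] end)
+ 2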
+ + """ + @spec find_value(t, any, (element -> any)) :: any | nil + def find_value(enumerable, default \\ nil, fun) + + def find_value(enumerable, default, fun) when is_list(enumerable) do + find_value_list(enumerable, default, fun) + end + + def find_value(enumerable, default, fun) do + Enumerable.reduce(enumerable, {:cont, default}, fn entry, default -> + fun_entry = fun.(entry) + if fun_entry, do: {:halt, fun_entry}, else: {:cont, default} + end) + |> elem(1) + end + + @doc """ + Maps the given `fun` over `enumerable` and flattens the result. + + This function returns a new enumerable built by appending the result of invoking `fun` + on each element of `enumerable` together; conceptually, this is similar to a + combination of `map/2` and `concat/1`. + + ## Examples + + iex> Enum.flat_map([:a, :b, :c], fn x -> [x, x] end) + [:a, :a, :b, :b, :c, :c] + + iex> Enum.flat_map([{1, 3}, {4, 6}], fn {x, y} -> x..y end) + [1, 2, 3, 4, 5, 6] + + iex> Enum.flat_map([:a, :b, :c], fn x -> [[x]] end) + [[:a], [:b], [:c]] + + """ + @spec flat_map(t, (element -> t)) :: list + def flat_map(enumerable, fun) when is_list(enumerable) do + flat_map_list(enumerable, fun) + end + + def flat_map(enumerable, fun) do + reduce(enumerable, [], fn entry, acc -> + case fun.(entry) do + list when is_list(list) -> [list | acc] + other -> [to_list(other) | acc] + end + end) + |> flat_reverse([]) + end + + defp flat_reverse([h | t], acc), do: flat_reverse(t, h ++ acc) + defp flat_reverse([], acc), do: acc + + @doc """ + Maps and reduces an `enumerable`, flattening the given results (only one level deep). + + It expects an accumulator and a function that receives each enumerable + element, and must return a tuple containing a new enumerable (often a list) + with the new accumulator or a tuple with `:halt` as first element and + the accumulator as second. + + ## Examples + + iex> enumerable = 1..100 + iex> n = 3 + iex> Enum.flat_map_reduce(enumerable, 0, fn x, acc -> + ...> if acc < n, do: {[x], acc + 1}, else: {:halt, acc} + ...> end) + {[1, 2, 3], 3} + + iex> Enum.flat_map_reduce(1..5, 0, fn x, acc -> {[[x]], acc + x} end) + {[[1], [2], [3], [4], [5]], 15} + + """ + @spec flat_map_reduce(t, acc, fun) :: {[any], acc} + when fun: (element, acc -> {t, acc} | {:halt, acc}) + def flat_map_reduce(enumerable, acc, fun) do + {_, {list, acc}} = + Enumerable.reduce(enumerable, {:cont, {[], acc}}, fn entry, {list, acc} -> + case fun.(entry, acc) do + {:halt, acc} -> + {:halt, {list, acc}} + + {[], acc} -> + {:cont, {list, acc}} + + {[entry], acc} -> + {:cont, {[entry | list], acc}} + + {entries, acc} -> + {:cont, {reduce(entries, list, &[&1 | &2]), acc}} + end + end) + + {:lists.reverse(list), acc} + end + + @doc """ + Returns a map with keys as unique elements of `enumerable` and values + as the count of every element. + + ## Examples + + iex> Enum.frequencies(~w{ant buffalo ant ant buffalo dingo}) + %{"ant" => 3, "buffalo" => 2, "dingo" => 1} + + """ + @doc since: "1.10.0" + @spec frequencies(t) :: map + def frequencies(enumerable) do + reduce(enumerable, %{}, fn key, acc -> + case acc do + %{^key => value} -> %{acc | key => value + 1} + %{} -> Map.put(acc, key, 1) + end + end) + end + + @doc """ + Returns a map with keys as unique elements given by `key_fun` and values + as the count of every element. 
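+
+ Conceptually (a sketch, not the actual single-pass implementation), this is
+ equivalent to grouping by `key_fun` and then counting each group:
+
+ enumerable
+ |> Enum.group_by(key_fun)
+ |> Map.new(fn {key, values} -> {key, length(values)} end)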
+ + ## Examples + + iex> Enum.frequencies_by(~w{aa aA bb cc}, &String.downcase/1) + %{"aa" => 2, "bb" => 1, "cc" => 1} + + iex> Enum.frequencies_by(~w{aaa aA bbb cc c}, &String.length/1) + %{3 => 2, 2 => 2, 1 => 1} + + """ + @doc since: "1.10.0" + @spec frequencies_by(t, (element -> any)) :: map + def frequencies_by(enumerable, key_fun) when is_function(key_fun) do + reduce(enumerable, %{}, fn entry, acc -> + key = key_fun.(entry) + + case acc do + %{^key => value} -> %{acc | key => value + 1} + %{} -> Map.put(acc, key, 1) + end + end) + end + + @doc """ + Splits the `enumerable` into groups based on `key_fun`. + + The result is a map where each key is given by `key_fun` + and each value is a list of elements given by `value_fun`. + The order of elements within each list is preserved from the `enumerable`. + However, like all maps, the resulting map is unordered. + + ## Examples + + iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1) + %{3 => ["ant", "cat"], 5 => ["dingo"], 7 => ["buffalo"]} + + iex> Enum.group_by(~w{ant buffalo cat dingo}, &String.length/1, &String.first/1) + %{3 => ["a", "c"], 5 => ["d"], 7 => ["b"]} + + The key can be any Elixir value. For example, you may use a tuple + to group by multiple keys: + + iex> collection = [ + ...> %{id: 1, lang: "Elixir", seq: 1}, + ...> %{id: 1, lang: "Java", seq: 1}, + ...> %{id: 1, lang: "Ruby", seq: 2}, + ...> %{id: 2, lang: "Python", seq: 1}, + ...> %{id: 2, lang: "C#", seq: 2}, + ...> %{id: 2, lang: "Haskell", seq: 2}, + ...> ] + iex> Enum.group_by(collection, &{&1.id, &1.seq}) + %{ + {1, 1} => [%{id: 1, lang: "Elixir", seq: 1}, %{id: 1, lang: "Java", seq: 1}], + {1, 2} => [%{id: 1, lang: "Ruby", seq: 2}], + {2, 1} => [%{id: 2, lang: "Python", seq: 1}], + {2, 2} => [%{id: 2, lang: "C#", seq: 2}, %{id: 2, lang: "Haskell", seq: 2}] + } + iex> Enum.group_by(collection, &{&1.id, &1.seq}, &{&1.id, &1.lang}) + %{ + {1, 1} => [{1, "Elixir"}, {1, "Java"}], + {1, 2} => [{1, "Ruby"}], + {2, 1} => [{2, "Python"}], + {2, 2} => [{2, "C#"}, {2, "Haskell"}] + } + + """ + @spec group_by(t, (element -> any), (element -> any)) :: map + def group_by(enumerable, key_fun, value_fun \\ fn x -> x end) + + def group_by(enumerable, key_fun, value_fun) when is_function(key_fun) do + reduce(reverse(enumerable), %{}, fn entry, acc -> + key = key_fun.(entry) + value = value_fun.(entry) + + case acc do + %{^key => existing} -> %{acc | key => [value | existing]} + %{} -> Map.put(acc, key, [value]) + end + end) + end + + def group_by(enumerable, dict, fun) do + IO.warn( + "Enum.group_by/3 with a map/dictionary as second element is deprecated. " <> + "A map is used by default and it is no longer required to pass one to this function" + ) + + # Avoid warnings about Dict + dict_module = Dict + + reduce(reverse(enumerable), dict, fn entry, categories -> + dict_module.update(categories, fun.(entry), [entry], &[entry | &1]) + end) + end + + @doc """ + Intersperses `separator` between each element of the enumeration. 
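+
+ A hand-rolled list version (a sketch, not the implementation) makes the
+ behavior clear:
+
+ def intersperse_list([a, b | rest], sep), do: [a, sep | intersperse_list([b | rest], sep)]
+ def intersperse_list(short, _sep), do: short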
+ + ## Examples + + iex> Enum.intersperse([1, 2, 3], 0) + [1, 0, 2, 0, 3] + + iex> Enum.intersperse([1], 0) + [1] + + iex> Enum.intersperse([], 0) + [] + + """ + @spec intersperse(t, element) :: list + def intersperse(enumerable, separator) when is_list(enumerable) do + case enumerable do + [] -> [] + list -> intersperse_non_empty_list(list, separator) + end + end + + def intersperse(enumerable, separator) do + list = + enumerable + |> reduce([], fn x, acc -> [x, separator | acc] end) + |> :lists.reverse() + + # Head is a superfluous separator + case list do + [] -> [] + [_ | t] -> t + end + end + + @doc """ + Inserts the given `enumerable` into a `collectable`. + + Note that passing a non-empty list as the `collectable` is deprecated. + If you're collecting into a non-empty keyword list, consider using + `Keyword.merge(collectable, Enum.to_list(enumerable))`. If you're collecting + into a non-empty list, consider something like `Enum.to_list(enumerable) ++ collectable`. + + ## Examples + + iex> Enum.into([1, 2], []) + [1, 2] + + iex> Enum.into([a: 1, b: 2], %{}) + %{a: 1, b: 2} + + iex> Enum.into(%{a: 1}, %{b: 2}) + %{a: 1, b: 2} + + iex> Enum.into([a: 1, a: 2], %{}) + %{a: 2} + + """ + @spec into(Enumerable.t(), Collectable.t()) :: Collectable.t() + def into(enumerable, collectable) + + def into(enumerable, []) do + to_list(enumerable) + end + + def into(%_{} = enumerable, collectable) do + into_protocol(enumerable, collectable) + end + + def into(enumerable, %_{} = collectable) do + into_protocol(enumerable, collectable) + end + + def into(enumerable, %{} = collectable) do + if map_size(collectable) == 0 do + into_map(enumerable) + else + into_map(enumerable, collectable) + end + end + + def into(enumerable, collectable) do + into_protocol(enumerable, collectable) + end + + defp into_map(%{} = enumerable), do: enumerable + defp into_map(enumerable) when is_list(enumerable), do: :maps.from_list(enumerable) + defp into_map(enumerable), do: enumerable |> Enum.to_list() |> :maps.from_list() + + defp into_map(%{} = enumerable, collectable), + do: Map.merge(collectable, enumerable) + + defp into_map(enumerable, collectable) when is_list(enumerable), + do: Map.merge(collectable, :maps.from_list(enumerable)) + + defp into_map(enumerable, collectable), + do: Enum.reduce(enumerable, collectable, fn {key, val}, acc -> Map.put(acc, key, val) end) + + defp into_protocol(enumerable, collectable) do + {initial, fun} = Collectable.into(collectable) + + try do + reduce_into_protocol(enumerable, initial, fun) + catch + kind, reason -> + fun.(initial, :halt) + :erlang.raise(kind, reason, __STACKTRACE__) + else + acc -> fun.(acc, :done) + end + end + + defp reduce_into_protocol(enumerable, initial, fun) when is_list(enumerable) do + :lists.foldl(fn x, acc -> fun.(acc, {:cont, x}) end, initial, enumerable) + end + + defp reduce_into_protocol(enumerable, initial, fun) do + enumerable + |> Enumerable.reduce({:cont, initial}, fn x, acc -> + {:cont, fun.(acc, {:cont, x})} + end) + |> elem(1) + end + + @doc """ + Inserts the given `enumerable` into a `collectable` according to the + transformation function. 
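+
+ Behaviorally (a sketch; the implementation avoids the intermediate list
+ where it can), this is equivalent to mapping first and then collecting:
+
+ enumerable |> Enum.map(transform) |> Enum.into(collectable)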
+ + ## Examples + + iex> Enum.into([1, 2, 3], [], fn x -> x * 3 end) + [3, 6, 9] + + iex> Enum.into(%{a: 1, b: 2}, %{c: 3}, fn {k, v} -> {k, v * 2} end) + %{a: 2, b: 4, c: 3} + + """ + @spec into(Enumerable.t(), Collectable.t(), (term -> term)) :: Collectable.t() + def into(enumerable, [], transform) do + Enum.map(enumerable, transform) + end + + def into(%_{} = enumerable, collectable, transform) do + into_protocol(enumerable, collectable, transform) + end + + def into(enumerable, %_{} = collectable, transform) do + into_protocol(enumerable, collectable, transform) + end + + def into(enumerable, %{} = collectable, transform) do + if map_size(collectable) == 0 do + enumerable |> Enum.map(transform) |> :maps.from_list() + else + Enum.reduce(enumerable, collectable, fn entry, acc -> + {key, val} = transform.(entry) + Map.put(acc, key, val) + end) + end + end + + def into(enumerable, collectable, transform) do + into_protocol(enumerable, collectable, transform) + end + + defp into_protocol(enumerable, collectable, transform) do + {initial, fun} = Collectable.into(collectable) + + try do + reduce_into_protocol(enumerable, initial, transform, fun) + catch + kind, reason -> + fun.(initial, :halt) + :erlang.raise(kind, reason, __STACKTRACE__) + else + acc -> fun.(acc, :done) + end + end + + defp reduce_into_protocol(enumerable, initial, transform, fun) when is_list(enumerable) do + :lists.foldl(fn x, acc -> fun.(acc, {:cont, transform.(x)}) end, initial, enumerable) + end + + defp reduce_into_protocol(enumerable, initial, transform, fun) do + enumerable + |> Enumerable.reduce({:cont, initial}, fn x, acc -> + {:cont, fun.(acc, {:cont, transform.(x)})} + end) + |> elem(1) + end + + @doc """ + Joins the given `enumerable` into a string using `joiner` as a + separator. + + If `joiner` is not passed at all, it defaults to an empty string. + + All elements in the `enumerable` must be convertible to a string, + otherwise an error is raised. + + ## Examples + + iex> Enum.join([1, 2, 3]) + "123" + + iex> Enum.join([1, 2, 3], " = ") + "1 = 2 = 3" + + iex> Enum.join([["a", "b"], ["c", "d", "e", ["f", "g"]], "h", "i"], " ") + "ab cdefg h i" + + """ + @spec join(t, String.t()) :: String.t() + def join(enumerable, joiner \\ "") + + def join(enumerable, "") do + enumerable + |> map(&entry_to_string(&1)) + |> IO.iodata_to_binary() + end + + def join(enumerable, joiner) when is_list(enumerable) and is_binary(joiner) do + join_list(enumerable, joiner) + end + + def join(enumerable, joiner) when is_binary(joiner) do + reduced = + reduce(enumerable, :first, fn + entry, :first -> entry_to_string(entry) + entry, acc -> [acc, joiner | entry_to_string(entry)] + end) + + if reduced == :first do + "" + else + IO.iodata_to_binary(reduced) + end + end + + @doc """ + Returns a list where each element is the result of invoking + `fun` on each corresponding element of `enumerable`. + + For maps, the function expects a key-value tuple. 
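+
+ Note that the result is always a list, even when mapping over a map; for
+ example:
+
+ iex> Enum.map(%{a: 1, b: 2}, fn {k, v} -> {k, v * 2} end)
+ [a: 2, b: 4]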
+ + ## Examples + + iex> Enum.map([1, 2, 3], fn x -> x * 2 end) + [2, 4, 6] + + iex> Enum.map([a: 1, b: 2], fn {k, v} -> {k, -v} end) + [a: -1, b: -2] + + """ + @spec map(t, (element -> any)) :: list + def map(enumerable, fun) + + def map(enumerable, fun) when is_list(enumerable) do + :lists.map(fun, enumerable) + end + + def map(first..last//step, fun) do + map_range(first, last, step, fun) + end + + def map(enumerable, fun) do + reduce(enumerable, [], R.map(fun)) |> :lists.reverse() + end + + @doc """ + Returns a list of results of invoking `fun` on every `nth` + element of `enumerable`, starting with the first element. + + The first element is always passed to the given function, unless `nth` is `0`. + + The second argument specifying every `nth` element must be a non-negative + integer. + + If `nth` is `0`, then `enumerable` is directly converted to a list, + without `fun` being ever applied. + + ## Examples + + iex> Enum.map_every(1..10, 2, fn x -> x + 1000 end) + [1001, 2, 1003, 4, 1005, 6, 1007, 8, 1009, 10] + + iex> Enum.map_every(1..10, 3, fn x -> x + 1000 end) + [1001, 2, 3, 1004, 5, 6, 1007, 8, 9, 1010] + + iex> Enum.map_every(1..5, 0, fn x -> x + 1000 end) + [1, 2, 3, 4, 5] + + iex> Enum.map_every([1, 2, 3], 1, fn x -> x + 1000 end) + [1001, 1002, 1003] + + """ + @doc since: "1.4.0" + @spec map_every(t, non_neg_integer, (element -> any)) :: list + def map_every(enumerable, nth, fun) + + def map_every(enumerable, 1, fun), do: map(enumerable, fun) + def map_every(enumerable, 0, _fun), do: to_list(enumerable) + def map_every([], nth, _fun) when is_integer(nth) and nth > 1, do: [] + + def map_every(enumerable, nth, fun) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.map_every(nth, fun)) + :lists.reverse(res) + end + + @doc """ + Maps and intersperses the given enumerable in one pass. + + ## Examples + + iex> Enum.map_intersperse([1, 2, 3], :a, &(&1 * 2)) + [2, :a, 4, :a, 6] + """ + @doc since: "1.10.0" + @spec map_intersperse(t, element(), (element -> any())) :: list() + def map_intersperse(enumerable, separator, mapper) + + def map_intersperse(enumerable, separator, mapper) when is_list(enumerable) do + map_intersperse_list(enumerable, separator, mapper) + end + + def map_intersperse(enumerable, separator, mapper) do + reduced = + reduce(enumerable, :first, fn + entry, :first -> [mapper.(entry)] + entry, acc -> [mapper.(entry), separator | acc] + end) + + if reduced == :first do + [] + else + :lists.reverse(reduced) + end + end + + @doc """ + Maps and joins the given `enumerable` in one pass. + + If `joiner` is not passed at all, it defaults to an empty string. + + All elements returned from invoking the `mapper` must be convertible to + a string, otherwise an error is raised. + + ## Examples + + iex> Enum.map_join([1, 2, 3], &(&1 * 2)) + "246" + + iex> Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) + "2 = 4 = 6" + + """ + @spec map_join(t, String.t(), (element -> String.Chars.t())) :: String.t() + def map_join(enumerable, joiner \\ "", mapper) when is_binary(joiner) do + enumerable + |> map_intersperse(joiner, &entry_to_string(mapper.(&1))) + |> IO.iodata_to_binary() + end + + @doc """ + Invokes the given function to each element in the `enumerable` to reduce + it to a single element, while keeping an accumulator. + + Returns a tuple where the first element is the mapped enumerable and + the second one is the final accumulator. + + The function, `fun`, receives two arguments: the first one is the + element, and the second one is the accumulator. 
`fun` must return + a tuple with two elements in the form of `{result, accumulator}`. + + For maps, the first tuple element must be a `{key, value}` tuple. + + ## Examples + + iex> Enum.map_reduce([1, 2, 3], 0, fn x, acc -> {x * 2, x + acc} end) + {[2, 4, 6], 6} + + """ + @spec map_reduce(t, acc, (element, acc -> {element, acc})) :: {list, acc} + def map_reduce(enumerable, acc, fun) when is_list(enumerable) do + :lists.mapfoldl(fun, acc, enumerable) + end + + def map_reduce(enumerable, acc, fun) do + {list, acc} = + reduce(enumerable, {[], acc}, fn entry, {list, acc} -> + {new_entry, acc} = fun.(entry, acc) + {[new_entry | list], acc} + end) + + {:lists.reverse(list), acc} + end + + @doc false + def max(list = [_ | _]), do: :lists.max(list) + + @doc false + def max(list = [_ | _], empty_fallback) when is_function(empty_fallback, 0) do + :lists.max(list) + end + + @doc false + @spec max(t, (() -> empty_result)) :: element | empty_result when empty_result: any + def max(enumerable, empty_fallback) when is_function(empty_fallback, 0) do + max(enumerable, &>=/2, empty_fallback) + end + + @doc """ + Returns the maximal element in the `enumerable` according + to Erlang's term ordering. + + By default, the comparison is done with the `>=` sorter function. + If multiple elements are considered maximal, the first one that + was found is returned. If you want the last element considered + maximal to be returned, the sorter function should not return true + for equal elements. + + If the enumerable is empty, the provided `empty_fallback` is called. + The default `empty_fallback` raises `Enum.EmptyError`. + + ## Examples + + iex> Enum.max([1, 2, 3]) + 3 + + The fact this function uses Erlang's term ordering means that the comparison + is structural and not semantic. For example: + + iex> Enum.max([~D[2017-03-31], ~D[2017-04-01]]) + ~D[2017-03-31] + + In the example above, `max/2` returned March 31st instead of April 1st + because the structural comparison compares the day before the year. + For this reason, most structs provide a "compare" function, such as + `Date.compare/2`, which receives two structs and returns `:lt` (less-than), + `:eq` (equal to), and `:gt` (greater-than). If you pass a module as the + sorting function, Elixir will automatically use the `compare/2` function + of said module: + + iex> Enum.max([~D[2017-03-31], ~D[2017-04-01]], Date) + ~D[2017-04-01] + + Finally, if you don't want to raise on empty enumerables, you can pass + the empty fallback: + + iex> Enum.max([], &>=/2, fn -> 0 end) + 0 + + """ + @spec max(t, (element, element -> boolean) | module()) :: + element | empty_result + when empty_result: any + @spec max(t, (element, element -> boolean) | module(), (() -> empty_result)) :: + element | empty_result + when empty_result: any + def max(enumerable, sorter \\ &>=/2, empty_fallback \\ fn -> raise Enum.EmptyError end) do + aggregate(enumerable, max_sort_fun(sorter), empty_fallback) + end + + defp max_sort_fun(sorter) when is_function(sorter, 2), do: sorter + defp max_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) != :lt) + + @doc false + @spec max_by( + t, + (element -> any), + (() -> empty_result) | (element, element -> boolean) | module() + ) :: element | empty_result + when empty_result: any + def max_by(enumerable, fun, empty_fallback) + when is_function(fun, 1) and is_function(empty_fallback, 0) do + max_by(enumerable, fun, &>=/2, empty_fallback) + end + + @doc """ + Returns the maximal element in the `enumerable` as calculated + by the given `fun`. 
+ + By default, the comparison is done with the `>=` sorter function. + If multiple elements are considered maximal, the first one that + was found is returned. If you want the last element considered + maximal to be returned, the sorter function should not return true + for equal elements. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. + + ## Examples + + iex> Enum.max_by(["a", "aa", "aaa"], fn x -> String.length(x) end) + "aaa" + + iex> Enum.max_by(["a", "aa", "aaa", "b", "bbb"], &String.length/1) + "aaa" + + The fact this function uses Erlang's term ordering means that the + comparison is structural and not semantic. Therefore, if you want + to compare structs, most structs provide a "compare" function, such as + `Date.compare/2`, which receives two structs and returns `:lt` (less-than), + `:eq` (equal to), and `:gt` (greater-than). If you pass a module as the + sorting function, Elixir will automatically use the `compare/2` function + of said module: + + iex> users = [ + ...> %{name: "Ellis", birthday: ~D[1943-05-11]}, + ...> %{name: "Lovelace", birthday: ~D[1815-12-10]}, + ...> %{name: "Turing", birthday: ~D[1912-06-23]} + ...> ] + iex> Enum.max_by(users, &(&1.birthday), Date) + %{name: "Ellis", birthday: ~D[1943-05-11]} + + Finally, if you don't want to raise on empty enumerables, you can pass + the empty fallback: + + iex> Enum.max_by([], &String.length/1, fn -> nil end) + nil + + """ + @spec max_by( + t, + (element -> any), + (element, element -> boolean) | module(), + (() -> empty_result) + ) :: element | empty_result + when empty_result: any + def max_by(enumerable, fun, sorter \\ &>=/2, empty_fallback \\ fn -> raise Enum.EmptyError end) + when is_function(fun, 1) do + aggregate_by(enumerable, fun, max_sort_fun(sorter), empty_fallback) + end + + @doc """ + Checks if `element` exists within the `enumerable`. + + Membership is tested with the match (`===/2`) operator. + + ## Examples + + iex> Enum.member?(1..10, 5) + true + iex> Enum.member?(1..10, 5.0) + false + + iex> Enum.member?([1.0, 2.0, 3.0], 2) + false + iex> Enum.member?([1.0, 2.0, 3.0], 2.000) + true + + iex> Enum.member?([:a, :b, :c], :d) + false + + + When called outside guards, the [`in`](`in/2`) and [`not in`](`in/2`) + operators work by using this function. + """ + @spec member?(t, element) :: boolean + def member?(enumerable, element) when is_list(enumerable) do + :lists.member(element, enumerable) + end + + def member?(enumerable, element) do + case Enumerable.member?(enumerable, element) do + {:ok, element} when is_boolean(element) -> + element + + {:error, module} -> + module.reduce(enumerable, {:cont, false}, fn + v, _ when v === element -> {:halt, true} + _, _ -> {:cont, false} + end) + |> elem(1) + end + end + + @doc false + def min(list = [_ | _]), do: :lists.min(list) + + @doc false + def min(list = [_ | _], empty_fallback) when is_function(empty_fallback, 0) do + :lists.min(list) + end + + @doc false + @spec min(t, (() -> empty_result)) :: element | empty_result when empty_result: any + def min(enumerable, empty_fallback) when is_function(empty_fallback, 0) do + min(enumerable, &<=/2, empty_fallback) + end + + @doc """ + Returns the minimal element in the `enumerable` according + to Erlang's term ordering. + + By default, the comparison is done with the `<=` sorter function. + If multiple elements are considered minimal, the first one that + was found is returned. 
If you want the last element considered + minimal to be returned, the sorter function should not return true + for equal elements. + + If the enumerable is empty, the provided `empty_fallback` is called. + The default `empty_fallback` raises `Enum.EmptyError`. + + ## Examples + + iex> Enum.min([1, 2, 3]) + 1 + + The fact this function uses Erlang's term ordering means that the comparison + is structural and not semantic. For example: + + iex> Enum.min([~D[2017-03-31], ~D[2017-04-01]]) + ~D[2017-04-01] + + In the example above, `min/2` returned April 1st instead of March 31st + because the structural comparison compares the day before the year. + For this reason, most structs provide a "compare" function, such as + `Date.compare/2`, which receives two structs and returns `:lt` (less-than), + `:eq` (equal to), and `:gt` (greater-than). If you pass a module as the + sorting function, Elixir will automatically use the `compare/2` function + of said module: + + iex> Enum.min([~D[2017-03-31], ~D[2017-04-01]], Date) + ~D[2017-03-31] + + Finally, if you don't want to raise on empty enumerables, you can pass + the empty fallback: + + iex> Enum.min([], fn -> 0 end) + 0 + + """ + @spec min(t, (element, element -> boolean) | module()) :: + element | empty_result + when empty_result: any + @spec min(t, (element, element -> boolean) | module(), (() -> empty_result)) :: + element | empty_result + when empty_result: any + def min(enumerable, sorter \\ &<=/2, empty_fallback \\ fn -> raise Enum.EmptyError end) do + aggregate(enumerable, min_sort_fun(sorter), empty_fallback) + end + + defp min_sort_fun(sorter) when is_function(sorter, 2), do: sorter + defp min_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) != :gt) + + @doc false + @spec min_by( + t, + (element -> any), + (() -> empty_result) | (element, element -> boolean) | module() + ) :: element | empty_result + when empty_result: any + def min_by(enumerable, fun, empty_fallback) + when is_function(fun, 1) and is_function(empty_fallback, 0) do + min_by(enumerable, fun, &<=/2, empty_fallback) + end + + @doc """ + Returns the minimal element in the `enumerable` as calculated + by the given `fun`. + + By default, the comparison is done with the `<=` sorter function. + If multiple elements are considered minimal, the first one that + was found is returned. If you want the last element considered + minimal to be returned, the sorter function should not return true + for equal elements. + + Calls the provided `empty_fallback` function and returns its value if + `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`. + + ## Examples + + iex> Enum.min_by(["a", "aa", "aaa"], fn x -> String.length(x) end) + "a" + + iex> Enum.min_by(["a", "aa", "aaa", "b", "bbb"], &String.length/1) + "a" + + The fact this function uses Erlang's term ordering means that the + comparison is structural and not semantic. Therefore, if you want + to compare structs, most structs provide a "compare" function, such as + `Date.compare/2`, which receives two structs and returns `:lt` (less-than), + `:eq` (equal to), and `:gt` (greater-than). 
If you pass a module as the
+ sorting function, Elixir will automatically use the `compare/2` function
+ of said module:
+
+ iex> users = [
+ ...> %{name: "Ellis", birthday: ~D[1943-05-11]},
+ ...> %{name: "Lovelace", birthday: ~D[1815-12-10]},
+ ...> %{name: "Turing", birthday: ~D[1912-06-23]}
+ ...> ]
+ iex> Enum.min_by(users, &(&1.birthday), Date)
+ %{name: "Lovelace", birthday: ~D[1815-12-10]}
+
+ Finally, if you don't want to raise on empty enumerables, you can pass
+ the empty fallback:
+
+ iex> Enum.min_by([], &String.length/1, fn -> nil end)
+ nil
+
+ """
+ @spec min_by(
+ t,
+ (element -> any),
+ (element, element -> boolean) | module(),
+ (() -> empty_result)
+ ) :: element | empty_result
+ when empty_result: any
+ def min_by(enumerable, fun, sorter \\ &<=/2, empty_fallback \\ fn -> raise Enum.EmptyError end)
+ when is_function(fun, 1) do
+ aggregate_by(enumerable, fun, min_sort_fun(sorter), empty_fallback)
+ end
+
+ @doc """
+ Returns a tuple with the minimal and the maximal elements in the
+ enumerable according to Erlang's term ordering.
+
+ If multiple elements are considered maximal or minimal, the first one
+ that was found is returned.
+
+ Calls the provided `empty_fallback` function and returns its value if
+ `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`.
+
+ ## Examples
+
+ iex> Enum.min_max([2, 3, 1])
+ {1, 3}
+
+ iex> Enum.min_max([], fn -> {nil, nil} end)
+ {nil, nil}
+
+ """
+ @spec min_max(t, (() -> empty_result)) :: {element, element} | empty_result
+ when empty_result: any
+ def min_max(enumerable, empty_fallback \\ fn -> raise Enum.EmptyError end)
+
+ def min_max(first..last//step = range, empty_fallback) when is_function(empty_fallback, 0) do
+ case Range.size(range) do
+ 0 ->
+ empty_fallback.()
+
+ _ ->
+ last = last - rem(last - first, step)
+ {Kernel.min(first, last), Kernel.max(first, last)}
+ end
+ end
+
+ def min_max(enumerable, empty_fallback) when is_function(empty_fallback, 0) do
+ first_fun = &[&1 | &1]
+
+ reduce_fun = fn entry, [min | max] ->
+ [Kernel.min(min, entry) | Kernel.max(max, entry)]
+ end
+
+ case reduce_by(enumerable, first_fun, reduce_fun) do
+ :empty -> empty_fallback.()
+ [min | max] -> {min, max}
+ end
+ end
+
+ @doc false
+ @spec min_max_by(t, (element -> any), (() -> empty_result)) :: {element, element} | empty_result
+ when empty_result: any
+ def min_max_by(enumerable, fun, empty_fallback)
+ when is_function(fun, 1) and is_function(empty_fallback, 0) do
+ min_max_by(enumerable, fun, &</2, empty_fallback)
+ end
+
+ @doc """
+ Returns a tuple with the minimal and the maximal elements in the
+ enumerable as calculated by the given function.
+
+ If multiple elements are considered maximal or minimal, the first one
+ that was found is returned.
+
+ Calls the provided `empty_fallback` function and returns its value if
+ `enumerable` is empty. The default `empty_fallback` raises `Enum.EmptyError`.
+
+ ## Examples
+
+ iex> Enum.min_max_by(["aaa", "bb", "c"], fn x -> String.length(x) end)
+ {"c", "aaa"}
+
+ iex> Enum.min_max_by(["aaa", "a", "bb", "c", "ccc"], &String.length/1)
+ {"a", "aaa"}
+
+ iex> Enum.min_max_by([], &String.length/1, fn -> {nil, nil} end)
+ {nil, nil}
+
+ The fact this function uses Erlang's term ordering means that the
+ comparison is structural and not semantic. Therefore, if you want
+ to compare structs, most structs provide a "compare" function, such as
+ `Date.compare/2`, which receives two structs and returns `:lt` (less-than),
+ `:eq` (equal to), and `:gt` (greater-than). 
If you pass a module as the
+ sorting function, Elixir will automatically use the `compare/2` function
+ of said module:
+
+ iex> users = [
+ ...> %{name: "Ellis", birthday: ~D[1943-05-11]},
+ ...> %{name: "Lovelace", birthday: ~D[1815-12-10]},
+ ...> %{name: "Turing", birthday: ~D[1912-06-23]}
+ ...> ]
+ iex> Enum.min_max_by(users, &(&1.birthday), Date)
+ {
+ %{name: "Lovelace", birthday: ~D[1815-12-10]},
+ %{name: "Ellis", birthday: ~D[1943-05-11]}
+ }
+
+ Finally, if you don't want to raise on empty enumerables, you can pass
+ the empty fallback:
+
+ iex> Enum.min_max_by([], &String.length/1, fn -> nil end)
+ nil
+
+ """
+ @spec min_max_by(t, (element -> any), (element, element -> boolean) | module()) ::
+ {element, element} | empty_result
+ when empty_result: any
+ @spec min_max_by(
+ t,
+ (element -> any),
+ (element, element -> boolean) | module(),
+ (() -> empty_result)
+ ) :: {element, element} | empty_result
+ when empty_result: any
+ def min_max_by(
+ enumerable,
+ fun,
+ sorter_or_empty_fallback \\ &</2,
+ empty_fallback \\ fn -> raise Enum.EmptyError end
+ )
+
+ def min_max_by(enumerable, fun, sorter, empty_fallback)
+ when is_function(fun, 1) and is_atom(sorter) and is_function(empty_fallback, 0) do
+ min_max_by(enumerable, fun, min_max_by_sort_fun(sorter), empty_fallback)
+ end
+
+ def min_max_by(enumerable, fun, sorter, empty_fallback)
+ when is_function(fun, 1) and is_function(sorter, 2) and is_function(empty_fallback, 0) do
+ first_fun = fn entry ->
+ fun_entry = fun.(entry)
+ {entry, entry, fun_entry, fun_entry}
+ end
+
+ reduce_fun = fn entry, {prev_min, prev_max, fun_min, fun_max} = acc ->
+ fun_entry = fun.(entry)
+
+ cond do
+ sorter.(fun_entry, fun_min) ->
+ {entry, prev_max, fun_entry, fun_max}
+
+ sorter.(fun_max, fun_entry) ->
+ {prev_min, entry, fun_min, fun_entry}
+
+ true ->
+ acc
+ end
+ end
+
+ case reduce_by(enumerable, first_fun, reduce_fun) do
+ :empty -> empty_fallback.()
+ {min, max, _, _} -> {min, max}
+ end
+ end
+
+ defp min_max_by_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) == :lt)
+
+ @doc """
+ Splits the `enumerable` in two lists according to the given function `fun`.
+
+ Splits the given `enumerable` in two lists by calling `fun` with each element
+ in the `enumerable` as its only argument. Returns a tuple with the first list
+ containing all the elements in `enumerable` for which applying `fun` returned
+ a truthy value, and a second list with all the elements for which applying
+ `fun` returned a falsy value (`false` or `nil`).
+
+ The elements in both the returned lists are in the same relative order as they
+ were in the original enumerable (if such enumerable was ordered, like a
+ list). See the examples below.
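+
+ Conceptually (a sketch; the actual implementation traverses the enumerable
+ only once), this is equivalent to:
+
+ {Enum.filter(enumerable, fun), Enum.reject(enumerable, fun)}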
+ + ## Examples + + iex> Enum.split_with([5, 4, 3, 2, 1, 0], fn x -> rem(x, 2) == 0 end) + {[4, 2, 0], [5, 3, 1]} + + iex> Enum.split_with([a: 1, b: -2, c: 1, d: -3], fn {_k, v} -> v < 0 end) + {[b: -2, d: -3], [a: 1, c: 1]} + + iex> Enum.split_with([a: 1, b: -2, c: 1, d: -3], fn {_k, v} -> v > 50 end) + {[], [a: 1, b: -2, c: 1, d: -3]} + + iex> Enum.split_with([], fn {_k, v} -> v > 50 end) + {[], []} + + """ + @doc since: "1.4.0" + @spec split_with(t, (element -> as_boolean(term))) :: {list, list} + def split_with(enumerable, fun) do + {acc1, acc2} = + reduce(enumerable, {[], []}, fn entry, {acc1, acc2} -> + if fun.(entry) do + {[entry | acc1], acc2} + else + {acc1, [entry | acc2]} + end + end) + + {:lists.reverse(acc1), :lists.reverse(acc2)} + end + + @doc false + @deprecated "Use Enum.split_with/2 instead" + def partition(enumerable, fun) do + split_with(enumerable, fun) + end + + @doc """ + Returns a random element of an `enumerable`. + + Raises `Enum.EmptyError` if `enumerable` is empty. + + This function uses Erlang's [`:rand` module](`:rand`) to calculate + the random value. Check its documentation for setting a + different random algorithm or a different seed. + + The implementation is based on the + [reservoir sampling](https://en.wikipedia.org/wiki/Reservoir_sampling#Relation_to_Fisher-Yates_shuffle) + algorithm. + It assumes that the sample being returned can fit into memory; + the input `enumerable` doesn't have to, as it is traversed just once. + + If a range is passed into the function, this function will pick a + random value between the range limits, without traversing the whole + range (thus executing in constant time and constant memory). + + ## Examples + + The examples below use the `:exsss` pseudorandom algorithm since it's + the default from Erlang/OTP 22: + + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsss, {100, 101, 102}) + iex> Enum.random([1, 2, 3]) + 2 + iex> Enum.random([1, 2, 3]) + 1 + iex> Enum.random(1..1_000) + 309 + + """ + @spec random(t) :: element + def random(enumerable) + + def random(enumerable) when is_list(enumerable) do + case length(enumerable) do + 0 -> raise Enum.EmptyError + length -> enumerable |> drop_list(random_integer(0, length - 1)) |> hd() + end + end + + def random(enumerable) do + result = + case Enumerable.slice(enumerable) do + {:ok, 0, _} -> + [] + + {:ok, count, fun} when is_function(fun, 1) -> + slice_list(fun.(enumerable), random_integer(0, count - 1), 1, 1) + + # TODO: Deprecate me in Elixir v1.18. + {:ok, count, fun} when is_function(fun, 2) -> + fun.(random_integer(0, count - 1), 1) + + {:ok, count, fun} when is_function(fun, 3) -> + fun.(random_integer(0, count - 1), 1, 1) + + {:error, _} -> + take_random(enumerable, 1) + end + + case result do + [] -> raise Enum.EmptyError + [elem] -> elem + end + end + + @doc """ + Invokes `fun` for each element in the `enumerable` with the + accumulator. + + Raises `Enum.EmptyError` if `enumerable` is empty. + + The first element of the `enumerable` is used as the initial value + of the accumulator. Then, the function is invoked with the next + element and the accumulator. The result returned by the function + is used as the accumulator for the next iteration, recursively. + When the `enumerable` is done, the last accumulator is returned. + + Since the first element of the enumerable is used as the initial + value of the accumulator, `fun` will only be executed `n - 1` times + where `n` is the length of the enumerable. 
This function won't call + the specified function for enumerables that are one-element long. + + If you wish to use another value for the accumulator, use + `Enum.reduce/3`. + + ## Examples + + iex> Enum.reduce([1, 2, 3, 4], fn x, acc -> x * acc end) + 24 + + """ + @spec reduce(t, (element, acc -> acc)) :: acc + def reduce(enumerable, fun) + + def reduce([h | t], fun) do + reduce(t, h, fun) + end + + def reduce([], _fun) do + raise Enum.EmptyError + end + + def reduce(enumerable, fun) do + Enumerable.reduce(enumerable, {:cont, :first}, fn + x, {:acc, acc} -> {:cont, {:acc, fun.(x, acc)}} + x, :first -> {:cont, {:acc, x}} + end) + |> elem(1) + |> case do + :first -> raise Enum.EmptyError + {:acc, acc} -> acc + end + end + + @doc """ + Invokes `fun` for each element in the `enumerable` with the accumulator. + + The initial value of the accumulator is `acc`. The function is invoked for + each element in the enumerable with the accumulator. The result returned + by the function is used as the accumulator for the next iteration. + The function returns the last accumulator. + + ## Examples + + iex> Enum.reduce([1, 2, 3], 0, fn x, acc -> x + acc end) + 6 + + iex> Enum.reduce(%{a: 2, b: 3, c: 4}, 0, fn {_key, val}, acc -> acc + val end) + 9 + + ## Reduce as a building block + + Reduce (sometimes called `fold`) is a basic building block in functional + programming. Almost all of the functions in the `Enum` module can be + implemented on top of reduce. Those functions often rely on other operations, + such as `Enum.reverse/1`, which are optimized by the runtime. + + For example, we could implement `map/2` in terms of `reduce/3` as follows: + + def my_map(enumerable, fun) do + enumerable + |> Enum.reduce([], fn x, acc -> [fun.(x) | acc] end) + |> Enum.reverse() + end + + In the example above, `Enum.reduce/3` accumulates the result of each call + to `fun` into a list in reverse order, which is correctly ordered at the + end by calling `Enum.reverse/1`. + + Implementing functions like `map/2`, `filter/2` and others are a good + exercise for understanding the power behind `Enum.reduce/3`. When an + operation cannot be expressed by any of the functions in the `Enum` + module, developers will most likely resort to `reduce/3`. + """ + @spec reduce(t, acc, (element, acc -> acc)) :: acc + def reduce(enumerable, acc, fun) when is_list(enumerable) do + :lists.foldl(fun, acc, enumerable) + end + + def reduce(first..last//step, acc, fun) do + reduce_range(first, last, step, acc, fun) + end + + def reduce(%_{} = enumerable, acc, fun) do + reduce_enumerable(enumerable, acc, fun) + end + + def reduce(%{} = enumerable, acc, fun) do + :maps.fold(fn k, v, acc -> fun.({k, v}, acc) end, acc, enumerable) + end + + def reduce(enumerable, acc, fun) do + reduce_enumerable(enumerable, acc, fun) + end + + @doc """ + Reduces `enumerable` until `fun` returns `{:halt, term}`. + + The return value for `fun` is expected to be + + * `{:cont, acc}` to continue the reduction with `acc` as the new + accumulator or + * `{:halt, acc}` to halt the reduction + + If `fun` returns `{:halt, acc}` the reduction is halted and the function + returns `acc`. Otherwise, if the enumerable is exhausted, the function returns + the accumulator of the last `{:cont, acc}`. 
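+
+ As an illustration (a sketch, not how `Enum.find/3` is actually implemented),
+ an early-returning search can be written with `reduce_while/3`:
+
+ def find_first(enumerable, default \\ nil, fun) do
+ Enum.reduce_while(enumerable, default, fn element, acc ->
+ if fun.(element), do: {:halt, element}, else: {:cont, acc}
+ end)
+ end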
+ + ## Examples + + iex> Enum.reduce_while(1..100, 0, fn x, acc -> + ...> if x < 5 do + ...> {:cont, acc + x} + ...> else + ...> {:halt, acc} + ...> end + ...> end) + 10 + iex> Enum.reduce_while(1..100, 0, fn x, acc -> + ...> if x > 0 do + ...> {:cont, acc + x} + ...> else + ...> {:halt, acc} + ...> end + ...> end) + 5050 + + """ + @spec reduce_while(t, any, (element, any -> {:cont, any} | {:halt, any})) :: any + def reduce_while(enumerable, acc, fun) do + Enumerable.reduce(enumerable, {:cont, acc}, fun) |> elem(1) + end + + @doc """ + Returns a list of elements in `enumerable` excluding those for which the function `fun` returns + a truthy value. + + See also `filter/2`. + + ## Examples + + iex> Enum.reject([1, 2, 3], fn x -> rem(x, 2) == 0 end) + [1, 3] + + """ + @spec reject(t, (element -> as_boolean(term))) :: list + def reject(enumerable, fun) when is_list(enumerable) do + reject_list(enumerable, fun) + end + + def reject(enumerable, fun) do + reduce(enumerable, [], R.reject(fun)) |> :lists.reverse() + end + + @doc """ + Returns a list of elements in `enumerable` in reverse order. + + ## Examples + + iex> Enum.reverse([1, 2, 3]) + [3, 2, 1] + + """ + @spec reverse(t) :: list + def reverse(enumerable) + + def reverse([]), do: [] + def reverse([_] = list), do: list + def reverse([element1, element2]), do: [element2, element1] + def reverse([element1, element2 | rest]), do: :lists.reverse(rest, [element2, element1]) + def reverse(enumerable), do: reduce(enumerable, [], &[&1 | &2]) + + @doc """ + Reverses the elements in `enumerable`, appends the `tail`, and returns + it as a list. + + This is an optimization for + `enumerable |> Enum.reverse() |> Enum.concat(tail)`. + + ## Examples + + iex> Enum.reverse([1, 2, 3], [4, 5, 6]) + [3, 2, 1, 4, 5, 6] + + """ + @spec reverse(t, t) :: list + def reverse(enumerable, tail) when is_list(enumerable) do + :lists.reverse(enumerable, to_list(tail)) + end + + def reverse(enumerable, tail) do + reduce(enumerable, to_list(tail), fn entry, acc -> + [entry | acc] + end) + end + + @doc """ + Reverses the `enumerable` in the range from initial `start_index` + through `count` elements. + + If `count` is greater than the size of the rest of the `enumerable`, + then this function will reverse the rest of the enumerable. + + ## Examples + + iex> Enum.reverse_slice([1, 2, 3, 4, 5, 6], 2, 4) + [1, 2, 6, 5, 4, 3] + + """ + @spec reverse_slice(t, non_neg_integer, non_neg_integer) :: list + def reverse_slice(enumerable, start_index, count) + when is_integer(start_index) and start_index >= 0 and is_integer(count) and count >= 0 do + list = reverse(enumerable) + length = length(list) + count = Kernel.min(count, length - start_index) + + if count > 0 do + reverse_slice(list, length, start_index + count, count, []) + else + :lists.reverse(list) + end + end + + @doc """ + Slides a single or multiple elements given by `range_or_single_index` from `enumerable` + to `insertion_index`. + + The semantics of the range to be moved match the semantics of `Enum.slice/2`. + Specifically, that means: + + * Indices are normalized, meaning that negative indexes will be counted from the end + (for example, -1 means the last element of the enumerable). This will result in *two* + traversals of your enumerable on types like lists that don't provide a constant-time count. + + * If the normalized index range's `last` is out of bounds, the range is truncated to the last element. 
+ + * If the normalized index range's `first` is out of bounds, the selected range for sliding + will be empty, so you'll get back your input list. + + * Decreasing ranges (such as `5..0//1`) also select an empty range to be moved, + so you'll get back your input list. + + * Ranges with any step but 1 will raise an error. + + ## Examples + + # Slide a single element + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], 5, 1) + [:a, :f, :b, :c, :d, :e, :g] + + # Slide a range of elements backward + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], 3..5, 1) + [:a, :d, :e, :f, :b, :c, :g] + + # Slide a range of elements forward + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], 1..3, 5) + [:a, :e, :f, :b, :c, :d, :g] + + # Slide with negative indices (counting from the end) + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], 3..-1//1, 2) + [:a, :b, :d, :e, :f, :g, :c] + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], -4..-2, 1) + [:a, :d, :e, :f, :b, :c, :g] + + # Insert at negative indices (counting from the end) + iex> Enum.slide([:a, :b, :c, :d, :e, :f, :g], 3, -1) + [:a, :b, :c, :e, :f, :g, :d] + + """ + @doc since: "1.13.0" + @spec slide(t, Range.t() | index, index) :: list + def slide(enumerable, range_or_single_index, insertion_index) + + def slide(enumerable, single_index, insertion_index) when is_integer(single_index) do + slide(enumerable, single_index..single_index, insertion_index) + end + + # This matches the behavior of Enum.slice/2 + def slide(_, _.._//step = index_range, _insertion_index) when step != 1 do + raise ArgumentError, + "Enum.slide/3 does not accept ranges with custom steps, got: #{inspect(index_range)}" + end + + # Normalize negative input ranges like Enum.slice/2 + def slide(enumerable, first..last, insertion_index) + when first < 0 or last < 0 or insertion_index < 0 do + count = Enum.count(enumerable) + normalized_first = if first >= 0, do: first, else: Kernel.max(first + count, 0) + normalized_last = if last >= 0, do: last, else: last + count + + normalized_insertion_index = + if insertion_index >= 0, do: insertion_index, else: insertion_index + count + + if normalized_first < count and normalized_first != normalized_insertion_index do + normalized_range = normalized_first..normalized_last//1 + slide(enumerable, normalized_range, normalized_insertion_index) + else + Enum.to_list(enumerable) + end + end + + def slide(enumerable, insertion_index.._, insertion_index) do + Enum.to_list(enumerable) + end + + def slide(_, first..last, insertion_index) + when insertion_index > first and insertion_index <= last do + raise ArgumentError, + "insertion index for slide must be outside the range being moved " <> + "(tried to insert #{first}..#{last} at #{insertion_index})" + end + + def slide(enumerable, first..last, _insertion_index) when first > last do + Enum.to_list(enumerable) + end + + # Guarantees at this point: step size == 1 and first <= last and (insertion_index < first or insertion_index > last) + def slide(enumerable, first..last, insertion_index) do + impl = if is_list(enumerable), do: &slide_list_start/4, else: &slide_any/4 + + cond do + insertion_index <= first -> impl.(enumerable, insertion_index, first, last) + insertion_index > last -> impl.(enumerable, first, last + 1, insertion_index) + end + end + + # Takes the range from middle..last and moves it to be in front of index start + defp slide_any(enumerable, start, middle, last) do + # We're going to deal with 4 "chunks" of the enumerable: + # 0. "Head," before the start index + # 1. 
"Slide back," between start (inclusive) and middle (exclusive) + # 2. "Slide front," between middle (inclusive) and last (inclusive) + # 3. "Tail," after last + # + # But, we're going to accumulate these into only two lists: pre and post. + # We'll reverse-accumulate the head into our pre list, then "slide back" into post, + # then "slide front" into pre, then "tail" into post. + # + # Then at the end, we're going to reassemble and reverse them, and end up with the + # chunks in the correct order. + {_size, pre, post} = + Enum.reduce(enumerable, {0, [], []}, fn item, {index, pre, post} -> + {pre, post} = + cond do + index < start -> {[item | pre], post} + index >= start and index < middle -> {pre, [item | post]} + index >= middle and index <= last -> {[item | pre], post} + true -> {pre, [item | post]} + end + + {index + 1, pre, post} + end) + + :lists.reverse(pre, :lists.reverse(post)) + end + + # Like slide_any/4 above, this optimized implementation of slide for lists depends + # on the indices being sorted such that we're moving middle..last to be in front of start. + defp slide_list_start([h | t], start, middle, last) + when start > 0 and start <= middle and middle <= last do + [h | slide_list_start(t, start - 1, middle - 1, last - 1)] + end + + defp slide_list_start(list, 0, middle, last), do: slide_list_middle(list, middle, last, []) + defp slide_list_start([], _start, _middle, _last), do: [] + + defp slide_list_middle([h | t], middle, last, acc) when middle > 0 do + slide_list_middle(t, middle - 1, last - 1, [h | acc]) + end + + defp slide_list_middle(list, 0, last, start_to_middle) do + {slid_range, tail} = slide_list_last(list, last + 1, []) + slid_range ++ :lists.reverse(start_to_middle, tail) + end + + # You asked for a middle index off the end of the list... you get what we've got + defp slide_list_middle([], _, _, acc) do + :lists.reverse(acc) + end + + defp slide_list_last([h | t], last, acc) when last > 0 do + slide_list_last(t, last - 1, [h | acc]) + end + + defp slide_list_last(rest, 0, acc) do + {:lists.reverse(acc), rest} + end + + defp slide_list_last([], _, acc) do + {:lists.reverse(acc), []} + end + + @doc """ + Applies the given function to each element in the `enumerable`, + storing the result in a list and passing it as the accumulator + for the next computation. Uses the first element in the `enumerable` + as the starting value. + + ## Examples + + iex> Enum.scan(1..5, &(&1 + &2)) + [1, 3, 6, 10, 15] + + """ + @spec scan(t, (element, any -> any)) :: list + def scan(enumerable, fun) + + def scan([], _fun), do: [] + + def scan([elem | rest], fun) do + scanned = scan_list(rest, elem, fun) + [elem | scanned] + end + + def scan(enumerable, fun) do + {res, _} = reduce(enumerable, {[], :first}, R.scan2(fun)) + :lists.reverse(res) + end + + @doc """ + Applies the given function to each element in the `enumerable`, + storing the result in a list and passing it as the accumulator + for the next computation. Uses the given `acc` as the starting value. + + ## Examples + + iex> Enum.scan(1..5, 0, &(&1 + &2)) + [1, 3, 6, 10, 15] + + """ + @spec scan(t, any, (element, any -> any)) :: list + def scan(enumerable, acc, fun) when is_list(enumerable) do + scan_list(enumerable, acc, fun) + end + + def scan(enumerable, acc, fun) do + {res, _} = reduce(enumerable, {[], acc}, R.scan3(fun)) + :lists.reverse(res) + end + + @doc """ + Returns a list with the elements of `enumerable` shuffled. + + This function uses Erlang's [`:rand` module](`:rand`) to calculate + the random value. 
Check its documentation for setting a
+ different random algorithm or a different seed.
+
+ ## Examples
+
+ The examples below use the `:exsss` pseudorandom algorithm since it's
+ the default from Erlang/OTP 22:
+
+ # Although not necessary, let's seed the random algorithm
+ iex> :rand.seed(:exsss, {1, 2, 3})
+ iex> Enum.shuffle([1, 2, 3])
+ [3, 2, 1]
+ iex> Enum.shuffle([1, 2, 3])
+ [2, 1, 3]
+
+ """
+ @spec shuffle(t) :: list
+ def shuffle(enumerable) do
+ randomized =
+ reduce(enumerable, [], fn x, acc ->
+ [{:rand.uniform(), x} | acc]
+ end)
+
+ shuffle_unwrap(:lists.keysort(1, randomized), [])
+ end
+
+ @doc """
+ Returns a subset list of the given `enumerable` by `index_range`.
+
+ `index_range` must be a `Range`. Given an `enumerable`, it drops
+ elements before `index_range.first` (zero-base), then it takes elements
+ until element `index_range.last` (inclusively).
+
+ Indexes are normalized, meaning that negative indexes will be counted
+ from the end (for example, `-1` means the last element of the `enumerable`).
+
+ If `index_range.last` is out of bounds, then it is assigned as the index
+ of the last element.
+
+ If the normalized `index_range.first` is out of bounds of the given
+ `enumerable`, or this one is greater than the normalized `index_range.last`,
+ then `[]` is returned.
+
+ If a step `n` (other than `1`) is used in `index_range`, then it takes
+ every `n`th element from `index_range.first` to `index_range.last`
+ (according to the same rules described above).
+
+ ## Examples
+
+ iex> Enum.slice([1, 2, 3, 4, 5], 1..3)
+ [2, 3, 4]
+
+ iex> Enum.slice([1, 2, 3, 4, 5], 3..10)
+ [4, 5]
+
+ # Last three elements (negative indexes)
+ iex> Enum.slice([1, 2, 3, 4, 5], -3..-1)
+ [3, 4, 5]
+
+ For ranges where `start > stop`, you need to explicitly
+ mark them as increasing:
+
+ iex> Enum.slice([1, 2, 3, 4, 5], 1..-2//1)
+ [2, 3, 4]
+
+ The step can be any positive number. For example, to
+ get every 2 elements of the collection:
+
+ iex> Enum.slice([1, 2, 3, 4, 5], 0..-1//2)
+ [1, 3, 5]
+
+ To get every third element of the first ten elements:
+
+ iex> integers = Enum.to_list(1..20)
+ iex> Enum.slice(integers, 0..9//3)
+ [1, 4, 7, 10]
+
+ If the first position is after the end of the enumerable
+ or after the last position of the range, it returns an
+ empty list:
+
+ iex> Enum.slice([1, 2, 3, 4, 5], 6..10)
+ []
+
+ # first is greater than last
+ iex> Enum.slice([1, 2, 3, 4, 5], 6..5)
+ []
+
+ """
+ @doc since: "1.6.0"
+ @spec slice(t, Range.t()) :: list
+ def slice(enumerable, first..last//step = index_range) do
+ # TODO: Deprecate negative steps on Elixir v1.16
+ # TODO: Support negative steps as a reverse on Elixir v2.0.
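+ # A positive step slices forward as documented. A decreasing range without
+ # an explicit step (step of -1) is still accepted and treated as a step of 1;
+ # any other negative step raises ArgumentError.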
+ cond do + step > 0 -> + slice_range(enumerable, first, last, step) + + step == -1 and first > last -> + slice_range(enumerable, first, last, 1) + + true -> + raise ArgumentError, + "Enum.slice/2 does not accept ranges with negative steps, got: #{inspect(index_range)}" + end + end + + # TODO: Remove me on v2.0 + def slice(enumerable, %{__struct__: Range, first: first, last: last} = index_range) do + step = if first <= last, do: 1, else: -1 + slice(enumerable, Map.put(index_range, :step, step)) + end + + defp slice_range(enumerable, first, -1, step) when first >= 0 do + if step == 1 do + drop(enumerable, first) + else + enumerable |> drop(first) |> take_every_list(step - 1) + end + end + + defp slice_range(enumerable, first, last, step) + when last >= first and last >= 0 and first >= 0 do + slice_forward(enumerable, first, last - first + 1, step) + end + + defp slice_range(enumerable, first, last, step) do + {count, fun} = slice_count_and_fun(enumerable, step) + first = if first >= 0, do: first, else: Kernel.max(first + count, 0) + last = if last >= 0, do: last, else: last + count + amount = last - first + 1 + + if first < count and amount > 0 do + amount = Kernel.min(amount, count - first) + amount = amount_with_step(amount, step) + fun.(first, amount, step) + else + [] + end + end + + defp amount_with_step(amount, 1), do: amount + defp amount_with_step(amount, step), do: div(amount - 1, step) + 1 + + @doc """ + Returns a subset list of the given `enumerable`, from `start_index` (zero-based) + with `amount` number of elements if available. + + Given an `enumerable`, it drops elements right before element `start_index`; + then, it takes `amount` of elements, returning as many elements as possible if + there are not enough elements. + + A negative `start_index` can be passed, which means the `enumerable` is + enumerated once and the index is counted from the end (for example, + `-1` starts slicing from the last element). + + It returns `[]` if `amount` is `0` or if `start_index` is out of bounds. + + ## Examples + + iex> Enum.slice(1..100, 5, 10) + [6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + + # amount to take is greater than the number of elements + iex> Enum.slice(1..10, 5, 100) + [6, 7, 8, 9, 10] + + iex> Enum.slice(1..10, 5, 0) + [] + + # using a negative start index + iex> Enum.slice(1..10, -6, 3) + [5, 6, 7] + iex> Enum.slice(1..10, -11, 5) + [1, 2, 3, 4, 5] + + # out of bound start index + iex> Enum.slice(1..10, 10, 5) + [] + + """ + @spec slice(t, index, non_neg_integer) :: list + def slice(_enumerable, start_index, 0) when is_integer(start_index), do: [] + + def slice(enumerable, start_index, amount) + when is_integer(start_index) and start_index < 0 and is_integer(amount) and amount >= 0 do + {count, fun} = slice_count_and_fun(enumerable, 1) + start_index = Kernel.max(count + start_index, 0) + amount = Kernel.min(amount, count - start_index) + + if amount > 0 do + fun.(start_index, amount, 1) + else + [] + end + end + + def slice(enumerable, start_index, amount) + when is_integer(start_index) and is_integer(amount) and amount >= 0 do + slice_forward(enumerable, start_index, amount, 1) + end + + @doc """ + Sorts the `enumerable` according to Erlang's term ordering. + + This function uses the merge sort algorithm. Do not use this + function to sort structs, see `sort/2` for more information. 
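+
+  Because the sort is based on Erlang's term ordering, enumerables mixing
+  different types are sorted by type first, with numbers before atoms and
+  atoms before strings:
+
+      iex> Enum.sort(["b", :a, 1])
+      [1, :a, "b"]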
+
+  ## Examples
+
+      iex> Enum.sort([3, 2, 1])
+      [1, 2, 3]
+
+  """
+  @spec sort(t) :: list
+  def sort(enumerable) when is_list(enumerable) do
+    :lists.sort(enumerable)
+  end
+
+  def sort(enumerable) do
+    sort(enumerable, &(&1 <= &2))
+  end
+
+  @doc """
+  Sorts the `enumerable` by the given function.
+
+  This function uses the merge sort algorithm. The given function should compare
+  two arguments, and return `true` if the first argument precedes or is in the
+  same place as the second one.
+
+  ## Examples
+
+      iex> Enum.sort([1, 2, 3], &(&1 >= &2))
+      [3, 2, 1]
+
+  The sorting algorithm will be stable as long as the given function
+  returns `true` for values considered equal:
+
+      iex> Enum.sort(["some", "kind", "of", "monster"], &(byte_size(&1) <= byte_size(&2)))
+      ["of", "some", "kind", "monster"]
+
+  If the function does not return `true` for equal values, the sorting
+  is not stable and the order of equal terms may be shuffled.
+  For example:
+
+      iex> Enum.sort(["some", "kind", "of", "monster"], &(byte_size(&1) < byte_size(&2)))
+      ["of", "kind", "some", "monster"]
+
+  ## Ascending and descending (since v1.10.0)
+
+  `sort/2` allows a developer to pass `:asc` or `:desc` as the sorter, which is a convenience for
+  [`&<=/2`](`<=/2`) and [`&>=/2`](`>=/2`) respectively.
+
+      iex> Enum.sort([2, 3, 1], :asc)
+      [1, 2, 3]
+      iex> Enum.sort([2, 3, 1], :desc)
+      [3, 2, 1]
+
+  ## Sorting structs
+
+  Do not use `<=/2`, `>=/2` and friends when sorting structs.
+  That's because the built-in operators above perform structural comparison
+  and not a semantic one. Imagine we sort the following list of dates:
+
+      iex> dates = [~D[2019-01-01], ~D[2020-03-02], ~D[2019-06-06]]
+      iex> Enum.sort(dates)
+      [~D[2019-01-01], ~D[2020-03-02], ~D[2019-06-06]]
+
+  Note that the returned result is incorrect, because `sort/1` by default uses
+  `<=/2`, which will compare their structure. When comparing structures, the
+  fields are compared in alphabetical order, which means the dates above will
+  be compared by `day`, `month` and then `year`, which is the opposite of what
+  we want.
+
+  For this reason, most structs provide a "compare" function, such as
+  `Date.compare/2`, which receives two structs and returns `:lt` (less-than),
+  `:eq` (equal to), and `:gt` (greater-than). If you pass a module as the
+  sorting function, Elixir will automatically use the `compare/2` function
+  of said module:
+
+      iex> dates = [~D[2019-01-01], ~D[2020-03-02], ~D[2019-06-06]]
+      iex> Enum.sort(dates, Date)
+      [~D[2019-01-01], ~D[2019-06-06], ~D[2020-03-02]]
+
+  To retrieve all dates in descending order, you can wrap the module in
+  a tuple with `:asc` or `:desc` as the first element:
+
+      iex> dates = [~D[2019-01-01], ~D[2020-03-02], ~D[2019-06-06]]
+      iex> Enum.sort(dates, {:asc, Date})
+      [~D[2019-01-01], ~D[2019-06-06], ~D[2020-03-02]]
+      iex> dates = [~D[2019-01-01], ~D[2020-03-02], ~D[2019-06-06]]
+      iex> Enum.sort(dates, {:desc, Date})
+      [~D[2020-03-02], ~D[2019-06-06], ~D[2019-01-01]]
+
+  """
+  @spec sort(
+          t,
+          (element, element -> boolean) | :asc | :desc | module() | {:asc | :desc, module()}
+        ) :: list
+  def sort(enumerable, sorter) when is_list(enumerable) do
+    case sorter do
+      :asc -> :lists.sort(enumerable)
+      :desc -> :lists.sort(enumerable) |> :lists.reverse()
+      _ -> :lists.sort(to_sort_fun(sorter), enumerable)
+    end
+  end
+
+  def sort(enumerable, sorter) do
+    fun = to_sort_fun(sorter)
+
+    reduce(enumerable, [], &sort_reducer(&1, &2, fun))
+    |> sort_terminator(fun)
+  end
+
+  defp to_sort_fun(sorter) when is_function(sorter, 2), do: sorter
+  defp to_sort_fun(:asc), do: &<=/2
+  defp to_sort_fun(:desc), do: &>=/2
+  defp to_sort_fun(module) when is_atom(module), do: &(module.compare(&1, &2) != :gt)
+  defp to_sort_fun({:asc, module}) when is_atom(module), do: &(module.compare(&1, &2) != :gt)
+  defp to_sort_fun({:desc, module}) when is_atom(module), do: &(module.compare(&1, &2) != :lt)
+
+  @doc """
+  Sorts the mapped results of the `enumerable` according to the provided `sorter`
+  function.
+
+  This function maps each element of the `enumerable` using the
+  provided `mapper` function. The enumerable is then sorted by
+  the mapped elements using the `sorter`, which defaults to `:asc`
+  and sorts the elements ascendingly.
+
+  `sort_by/3` differs from `sort/2` in that it only calculates the
+  comparison value for each element in the enumerable once instead of
+  once for each element in each comparison. If the same function is
+  being called on both elements, it's more efficient to use `sort_by/3`.
+
+  ## Ascending and descending (since v1.10.0)
+
+  `sort_by/3` allows a developer to pass `:asc` or `:desc` as the sorter,
+  which is a convenience for [`&<=/2`](`<=/2`) and [`&>=/2`](`>=/2`) respectively:
+
+      iex> Enum.sort_by([2, 3, 1], &(&1), :asc)
+      [1, 2, 3]
+
+      iex> Enum.sort_by([2, 3, 1], &(&1), :desc)
+      [3, 2, 1]
+
+  ## Examples
+
+  Using the default `sorter` of `:asc`:
+
+      iex> Enum.sort_by(["some", "kind", "of", "monster"], &byte_size/1)
+      ["of", "some", "kind", "monster"]
+
+  Sorting by multiple properties - first by size, then by first letter
+  (this takes advantage of the fact that tuples are compared element-by-element):
+
+      iex> Enum.sort_by(["some", "kind", "of", "monster"], &{byte_size(&1), String.first(&1)})
+      ["of", "kind", "some", "monster"]
+
+  Similar to `sort/2`, you can pass a custom sorter:
+
+      iex> Enum.sort_by(["some", "kind", "of", "monster"], &byte_size/1, :desc)
+      ["monster", "some", "kind", "of"]
+
+  As in `sort/2`, avoid using the default sorting function to sort
+  structs, as by default it performs structural comparison instead of
+  a semantic one. In such cases, you should pass a sorting function as
+  the third argument or any module that implements a `compare/2` function.
+ For example, to sort users by their birthday in both ascending and + descending order respectively: + + iex> users = [ + ...> %{name: "Ellis", birthday: ~D[1943-05-11]}, + ...> %{name: "Lovelace", birthday: ~D[1815-12-10]}, + ...> %{name: "Turing", birthday: ~D[1912-06-23]} + ...> ] + iex> Enum.sort_by(users, &(&1.birthday), Date) + [ + %{name: "Lovelace", birthday: ~D[1815-12-10]}, + %{name: "Turing", birthday: ~D[1912-06-23]}, + %{name: "Ellis", birthday: ~D[1943-05-11]} + ] + iex> Enum.sort_by(users, &(&1.birthday), {:desc, Date}) + [ + %{name: "Ellis", birthday: ~D[1943-05-11]}, + %{name: "Turing", birthday: ~D[1912-06-23]}, + %{name: "Lovelace", birthday: ~D[1815-12-10]} + ] + + ## Performance characteristics + + As detailed in the initial section, `sort_by/3` calculates the comparison + value for each element in the enumerable once instead of once for each + element in each comparison. This implies `sort_by/3` must do an initial + pass on the data to compute those values. + + However, if those values are cheap to compute, for example, you have + already extracted the field you want to sort by into a tuple, then those + extra passes become overhead. In such cases, consider using `List.keysort/3` + instead. + + Let's see an example. Imagine you have a list of products and you have a + list of IDs. You want to keep all products that are in the given IDs and + return their names sorted by their price. You could write it like this: + + for( + product <- products, + product.id in ids, + do: product + ) + |> Enum.sort_by(& &1.price) + |> Enum.map(& &1.name) + + However, you could also write it like this: + + for( + product <- products, + product.id in ids, + do: {product.name, product.price} + ) + |> List.keysort(1) + |> Enum.map(&elem(&1, 0)) + + Using `List.keysort/3` will be a better choice for performance sensitive + code as it avoids additional traversals. + """ + @spec sort_by( + t, + (element -> mapped_element), + (element, element -> boolean) | :asc | :desc | module() | {:asc | :desc, module()} + ) :: + list + when mapped_element: element + def sort_by(enumerable, mapper, sorter \\ :asc) + + def sort_by(enumerable, mapper, :desc) when is_function(mapper, 1) do + enumerable + |> Enum.reduce([], &[{&1, mapper.(&1)} | &2]) + |> List.keysort(1, :asc) + |> List.foldl([], &[elem(&1, 0) | &2]) + end + + def sort_by(enumerable, mapper, sorter) when is_function(mapper, 1) do + enumerable + |> map(&{&1, mapper.(&1)}) + |> List.keysort(1, sorter) + |> map(&elem(&1, 0)) + end + + @doc """ + Splits the `enumerable` into two enumerables, leaving `count` + elements in the first one. + + If `count` is a negative number, it starts counting from the + back to the beginning of the `enumerable`. + + Be aware that a negative `count` implies the `enumerable` + will be enumerated twice: once to calculate the position, and + a second time to do the actual splitting. 
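+
+  For a negative `count`, `Enum.split(enumerable, -n)` is equivalent to
+  `Enum.split(enumerable, max(length - n, 0))`, where `length` is the
+  number of elements in the `enumerable`:
+
+      iex> Enum.split([1, 2, 3, 4], -3)
+      {[1], [2, 3, 4]}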
+ + ## Examples + + iex> Enum.split([1, 2, 3], 2) + {[1, 2], [3]} + + iex> Enum.split([1, 2, 3], 10) + {[1, 2, 3], []} + + iex> Enum.split([1, 2, 3], 0) + {[], [1, 2, 3]} + + iex> Enum.split([1, 2, 3], -1) + {[1, 2], [3]} + + iex> Enum.split([1, 2, 3], -5) + {[], [1, 2, 3]} + + """ + @spec split(t, integer) :: {list, list} + def split(enumerable, count) when is_list(enumerable) and is_integer(count) and count >= 0 do + split_list(enumerable, count, []) + end + + def split(enumerable, count) when is_integer(count) and count >= 0 do + {_, list1, list2} = + reduce(enumerable, {count, [], []}, fn entry, {counter, acc1, acc2} -> + if counter > 0 do + {counter - 1, [entry | acc1], acc2} + else + {counter, acc1, [entry | acc2]} + end + end) + + {:lists.reverse(list1), :lists.reverse(list2)} + end + + def split(enumerable, count) when is_integer(count) and count < 0 do + split_reverse_list(reverse(enumerable), -count, []) + end + + @doc """ + Splits enumerable in two at the position of the element for which + `fun` returns a falsy value (`false` or `nil`) for the first time. + + It returns a two-element tuple with two lists of elements. + The element that triggered the split is part of the second list. + + ## Examples + + iex> Enum.split_while([1, 2, 3, 4], fn x -> x < 3 end) + {[1, 2], [3, 4]} + + iex> Enum.split_while([1, 2, 3, 4], fn x -> x < 0 end) + {[], [1, 2, 3, 4]} + + iex> Enum.split_while([1, 2, 3, 4], fn x -> x > 0 end) + {[1, 2, 3, 4], []} + + """ + @spec split_while(t, (element -> as_boolean(term))) :: {list, list} + def split_while(enumerable, fun) when is_list(enumerable) do + split_while_list(enumerable, fun, []) + end + + def split_while(enumerable, fun) do + {list1, list2} = + reduce(enumerable, {[], []}, fn + entry, {acc1, []} -> + if(fun.(entry), do: {[entry | acc1], []}, else: {acc1, [entry]}) + + entry, {acc1, acc2} -> + {acc1, [entry | acc2]} + end) + + {:lists.reverse(list1), :lists.reverse(list2)} + end + + @doc """ + Returns the sum of all elements. + + Raises `ArithmeticError` if `enumerable` contains a non-numeric value. + + ## Examples + + iex> Enum.sum([1, 2, 3]) + 6 + + iex> Enum.sum(1..10) + 55 + + iex> Enum.sum(1..10//2) + 25 + + """ + @spec sum(t) :: number + def sum(enumerable) + + def sum(first..last//step = range) do + range + |> Range.size() + |> Kernel.*(first + last - rem(last - first, step)) + |> div(2) + end + + def sum(enumerable) do + reduce(enumerable, 0, &+/2) + end + + @doc """ + Returns the product of all elements. + + Raises `ArithmeticError` if `enumerable` contains a non-numeric value. + + ## Examples + + iex> Enum.product([]) + 1 + iex> Enum.product([2, 3, 4]) + 24 + iex> Enum.product([2.0, 3.0, 4.0]) + 24.0 + + """ + @doc since: "1.12.0" + @spec product(t) :: number + def product(enumerable) do + reduce(enumerable, 1, &*/2) + end + + @doc """ + Takes an `amount` of elements from the beginning or the end of the `enumerable`. + + If a positive `amount` is given, it takes the `amount` elements from the + beginning of the `enumerable`. + + If a negative `amount` is given, the `amount` of elements will be taken from the end. + The `enumerable` will be enumerated once to retrieve the proper index and + the remaining calculation is performed from the end. + + If amount is `0`, it returns `[]`. 
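+
+  For non-negative amounts, `take/2` is the complement of `drop/2`:
+  concatenating the taken prefix with the dropped remainder rebuilds the
+  original list:
+
+      iex> Enum.take([1, 2, 3], 2) ++ Enum.drop([1, 2, 3], 2)
+      [1, 2, 3]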
+ + ## Examples + + iex> Enum.take([1, 2, 3], 2) + [1, 2] + + iex> Enum.take([1, 2, 3], 10) + [1, 2, 3] + + iex> Enum.take([1, 2, 3], 0) + [] + + iex> Enum.take([1, 2, 3], -1) + [3] + + """ + @spec take(t, integer) :: list + def take(enumerable, amount) + + def take(_enumerable, 0), do: [] + + def take(enumerable, amount) + when is_list(enumerable) and is_integer(amount) and amount > 0 do + take_list(enumerable, amount) + end + + def take(enumerable, amount) when is_integer(amount) and amount > 0 do + {_, {res, _}} = + Enumerable.reduce(enumerable, {:cont, {[], amount}}, fn entry, {list, n} -> + case n do + 1 -> {:halt, {[entry | list], n - 1}} + _ -> {:cont, {[entry | list], n - 1}} + end + end) + + :lists.reverse(res) + end + + def take(enumerable, amount) when is_integer(amount) and amount < 0 do + {count, fun} = slice_count_and_fun(enumerable, 1) + first = Kernel.max(amount + count, 0) + fun.(first, count - first, 1) + end + + @doc """ + Returns a list of every `nth` element in the `enumerable`, + starting with the first element. + + The first element is always included, unless `nth` is 0. + + The second argument specifying every `nth` element must be a non-negative + integer. + + ## Examples + + iex> Enum.take_every(1..10, 2) + [1, 3, 5, 7, 9] + + iex> Enum.take_every(1..10, 0) + [] + + iex> Enum.take_every([1, 2, 3], 1) + [1, 2, 3] + + """ + @spec take_every(t, non_neg_integer) :: list + def take_every(enumerable, nth) + + def take_every(_enumerable, 0), do: [] + def take_every(enumerable, 1), do: to_list(enumerable) + + def take_every(list, nth) when is_list(list) and is_integer(nth) and nth > 1 do + take_every_list(list, nth - 1) + end + + def take_every(enumerable, nth) when is_integer(nth) and nth > 1 do + {res, _} = reduce(enumerable, {[], :first}, R.take_every(nth)) + :lists.reverse(res) + end + + @doc """ + Takes `count` random elements from `enumerable`. + + Note that this function will traverse the whole `enumerable` to + get the random sublist. + + See `random/1` for notes on implementation and random seed. 
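+
+  The selection is essentially reservoir sampling: the sample is filled with
+  the first `count` elements, and each element after that replaces a random
+  slot in the sample with decreasing probability, so every element has an
+  equal chance of being picked.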
+ + ## Examples + + # Although not necessary, let's seed the random algorithm + iex> :rand.seed(:exsss, {1, 2, 3}) + iex> Enum.take_random(1..10, 2) + [3, 1] + iex> Enum.take_random(?a..?z, 5) + 'mikel' + + """ + @spec take_random(t, non_neg_integer) :: list + def take_random(enumerable, count) + def take_random(_enumerable, 0), do: [] + + def take_random([], _), do: [] + def take_random([h | t], 1), do: take_random_list_one(t, h, 1) + + def take_random(enumerable, 1) do + enumerable + |> reduce([], fn + x, [current | index] -> + if :rand.uniform(index + 1) == 1 do + [x | index + 1] + else + [current | index + 1] + end + + x, [] -> + [x | 1] + end) + |> case do + [] -> [] + [current | _index] -> [current] + end + end + + def take_random(enumerable, count) when is_integer(count) and count in 0..128 do + sample = Tuple.duplicate(nil, count) + + reducer = fn elem, {idx, sample} -> + jdx = random_integer(0, idx) + + cond do + idx < count -> + value = elem(sample, jdx) + {idx + 1, put_elem(sample, idx, value) |> put_elem(jdx, elem)} + + jdx < count -> + {idx + 1, put_elem(sample, jdx, elem)} + + true -> + {idx + 1, sample} + end + end + + {size, sample} = reduce(enumerable, {0, sample}, reducer) + sample |> Tuple.to_list() |> take(Kernel.min(count, size)) + end + + def take_random(enumerable, count) when is_integer(count) and count >= 0 do + reducer = fn elem, {idx, sample} -> + jdx = random_integer(0, idx) + + cond do + idx < count -> + value = Map.get(sample, jdx) + {idx + 1, Map.put(sample, idx, value) |> Map.put(jdx, elem)} + + jdx < count -> + {idx + 1, Map.put(sample, jdx, elem)} + + true -> + {idx + 1, sample} + end + end + + {size, sample} = reduce(enumerable, {0, %{}}, reducer) + take_random(sample, Kernel.min(count, size), []) + end + + defp take_random(_sample, 0, acc), do: acc + + defp take_random(sample, position, acc) do + position = position - 1 + take_random(sample, position, [Map.get(sample, position) | acc]) + end + + defp take_random_list_one([h | t], current, index) do + if :rand.uniform(index + 1) == 1 do + take_random_list_one(t, h, index + 1) + else + take_random_list_one(t, current, index + 1) + end + end + + defp take_random_list_one([], current, _), do: [current] + + @doc """ + Takes the elements from the beginning of the `enumerable` while `fun` returns + a truthy value. + + ## Examples + + iex> Enum.take_while([1, 2, 3], fn x -> x < 3 end) + [1, 2] + + """ + @spec take_while(t, (element -> as_boolean(term))) :: list + def take_while(enumerable, fun) when is_list(enumerable) do + take_while_list(enumerable, fun) + end + + def take_while(enumerable, fun) do + {_, res} = + Enumerable.reduce(enumerable, {:cont, []}, fn entry, acc -> + if fun.(entry) do + {:cont, [entry | acc]} + else + {:halt, acc} + end + end) + + :lists.reverse(res) + end + + @doc """ + Converts `enumerable` to a list. + + ## Examples + + iex> Enum.to_list(1..3) + [1, 2, 3] + + """ + @spec to_list(t) :: [element] + def to_list(enumerable) when is_list(enumerable), do: enumerable + def to_list(%{__struct__: Range} = range), do: Range.to_list(range) + def to_list(%_{} = enumerable), do: reverse(enumerable) |> :lists.reverse() + def to_list(%{} = enumerable), do: Map.to_list(enumerable) + def to_list(enumerable), do: reverse(enumerable) |> :lists.reverse() + + @doc """ + Enumerates the `enumerable`, removing all duplicated elements. 
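+
+  The first occurrence of each element is kept and the relative order of
+  the elements is preserved:
+
+      iex> Enum.uniq([:b, :a, :b, :c, :a])
+      [:b, :a, :c]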
+
+  ## Examples
+
+      iex> Enum.uniq([1, 2, 3, 3, 2, 1])
+      [1, 2, 3]
+
+  """
+  @spec uniq(t) :: list
+  def uniq(enumerable) do
+    uniq_by(enumerable, fn x -> x end)
+  end
+
+  @doc false
+  @deprecated "Use Enum.uniq_by/2 instead"
+  def uniq(enumerable, fun) do
+    uniq_by(enumerable, fun)
+  end
+
+  @doc """
+  Enumerates the `enumerable`, removing the elements for which the
+  function `fun` returned duplicate values.
+
+  The function `fun` maps every element to a term. Two elements are
+  considered duplicates if the return value of `fun` is equal for
+  both of them.
+
+  The first occurrence of each element is kept.
+
+  ## Examples
+
+      iex> Enum.uniq_by([{1, :x}, {2, :y}, {1, :z}], fn {x, _} -> x end)
+      [{1, :x}, {2, :y}]
+
+      iex> Enum.uniq_by([a: {:tea, 2}, b: {:tea, 2}, c: {:coffee, 1}], fn {_, y} -> y end)
+      [a: {:tea, 2}, c: {:coffee, 1}]
+
+  """
+  @spec uniq_by(t, (element -> term)) :: list
+
+  def uniq_by(enumerable, fun) when is_list(enumerable) do
+    uniq_list(enumerable, %{}, fun)
+  end
+
+  def uniq_by(enumerable, fun) do
+    {list, _} = reduce(enumerable, {[], %{}}, R.uniq_by(fun))
+    :lists.reverse(list)
+  end
+
+  @doc """
+  Opposite of `zip/2`. Extracts two-element tuples from the
+  given `enumerable` and groups them together.
+
+  It takes an `enumerable` with elements being two-element tuples and returns
+  a tuple with two lists, each of which is formed by the first and
+  second element of each tuple, respectively.
+
+  This function fails unless `enumerable` is or can be converted into a
+  list of tuples with *exactly* two elements in each tuple.
+
+  ## Examples
+
+      iex> Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}])
+      {[:a, :b, :c], [1, 2, 3]}
+
+      iex> Enum.unzip(%{a: 1, b: 2})
+      {[:a, :b], [1, 2]}
+
+  """
+  @spec unzip(t) :: {[element], [element]}
+
+  def unzip([_ | _] = list) do
+    :lists.reverse(list) |> unzip([], [])
+  end
+
+  def unzip([]) do
+    {[], []}
+  end
+
+  def unzip(enumerable) do
+    {list1, list2} =
+      reduce(enumerable, {[], []}, fn {el1, el2}, {list1, list2} ->
+        {[el1 | list1], [el2 | list2]}
+      end)
+
+    {:lists.reverse(list1), :lists.reverse(list2)}
+  end
+
+  defp unzip([{el1, el2} | reversed_list], list1, list2) do
+    unzip(reversed_list, [el1 | list1], [el2 | list2])
+  end
+
+  defp unzip([], list1, list2) do
+    {list1, list2}
+  end
+
+  @doc """
+  Returns the `enumerable` with each element wrapped in a tuple
+  alongside its index.
+
+  May receive a function or an integer offset.
+
+  If an `offset` is given, it will index from the given offset instead of from
+  zero.
+
+  If a `function` is given, it will index by invoking the function for each
+  element and index (zero-based) of the enumerable.
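+
+  For example, combining `with_index/1` with `Map.new/1` gives a quick
+  element-to-index lookup table:
+
+      iex> [:a, :b, :c] |> Enum.with_index() |> Map.new()
+      %{a: 0, b: 1, c: 2}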
+ + ## Examples + + iex> Enum.with_index([:a, :b, :c]) + [a: 0, b: 1, c: 2] + + iex> Enum.with_index([:a, :b, :c], 3) + [a: 3, b: 4, c: 5] + + iex> Enum.with_index([:a, :b, :c], fn element, index -> {index, element} end) + [{0, :a}, {1, :b}, {2, :c}] + + """ + @spec with_index(t, integer) :: [{term, integer}] + @spec with_index(t, (element, index -> value)) :: [value] when value: any + def with_index(enumerable, fun_or_offset \\ 0) + + def with_index(enumerable, offset) when is_list(enumerable) and is_integer(offset) do + with_index_list(enumerable, offset) + end + + def with_index(enumerable, fun) when is_list(enumerable) and is_function(fun, 2) do + with_index_list(enumerable, 0, fun) + end + + def with_index(enumerable, offset) when is_integer(offset) do + enumerable + |> map_reduce(offset, fn x, i -> {{x, i}, i + 1} end) + |> elem(0) + end + + def with_index(enumerable, fun) when is_function(fun, 2) do + enumerable + |> map_reduce(0, fn x, i -> {fun.(x, i), i + 1} end) + |> elem(0) + end + + @doc """ + Zips corresponding elements from two enumerables into a list + of tuples. + + The zipping finishes as soon as either enumerable completes. + + ## Examples + + iex> Enum.zip([1, 2, 3], [:a, :b, :c]) + [{1, :a}, {2, :b}, {3, :c}] + + iex> Enum.zip([1, 2, 3, 4, 5], [:a, :b, :c]) + [{1, :a}, {2, :b}, {3, :c}] + + """ + @spec zip(t, t) :: [{any, any}] + def zip(enumerable1, enumerable2) when is_list(enumerable1) and is_list(enumerable2) do + zip_list(enumerable1, enumerable2, []) + end + + def zip(enumerable1, enumerable2) do + zip([enumerable1, enumerable2]) + end + + @doc """ + Zips corresponding elements from a finite collection of enumerables + into a list of tuples. + + The zipping finishes as soon as any enumerable in the given collection completes. + + ## Examples + + iex> Enum.zip([[1, 2, 3], [:a, :b, :c], ["foo", "bar", "baz"]]) + [{1, :a, "foo"}, {2, :b, "bar"}, {3, :c, "baz"}] + + iex> Enum.zip([[1, 2, 3, 4, 5], [:a, :b, :c]]) + [{1, :a}, {2, :b}, {3, :c}] + + """ + @doc since: "1.4.0" + @spec zip(enumerables) :: [tuple()] when enumerables: [t()] | t() + def zip([]), do: [] + + def zip(enumerables) do + zip_reduce(enumerables, [], &[List.to_tuple(&1) | &2]) + |> :lists.reverse() + end + + @doc """ + Zips corresponding elements from two enumerables into a list, transforming them with + the `zip_fun` function as it goes. + + The corresponding elements from each collection are passed to the provided two-arity `zip_fun` + function in turn. Returns a list that contains the result of calling `zip_fun` for each pair of + elements. + + The zipping finishes as soon as either enumerable runs out of elements. + + ## Zipping Maps + + It's important to remember that zipping inherently relies on order. + If you zip two lists you get the element at the index from each list in turn. + If we zip two maps together it's tempting to think that you will get the given + key in the left map and the matching key in the right map, but there is no such + guarantee because map keys are not ordered! Consider the following: + + left = %{:a => 1, 1 => 3} + right = %{:a => 1, :b => :c} + Enum.zip(left, right) + # [{{1, 3}, {:a, 1}}, {{:a, 1}, {:b, :c}}] + + As you can see `:a` does not get paired with `:a`. If this is what you want, + you should use `Map.merge/3`. 
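+
+  For example, to pair values key by key (keys present in only one of the
+  maps keep their original value):
+
+      iex> left = %{a: 1, b: 2}
+      iex> right = %{a: :one, b: :two}
+      iex> Map.merge(left, right, fn _key, v1, v2 -> {v1, v2} end)
+      %{a: {1, :one}, b: {2, :two}}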
+
+  ## Examples
+
+      iex> Enum.zip_with([1, 2], [3, 4], fn x, y -> x + y end)
+      [4, 6]
+
+      iex> Enum.zip_with([1, 2], [3, 4, 5, 6], fn x, y -> x + y end)
+      [4, 6]
+
+      iex> Enum.zip_with([1, 2, 5, 6], [3, 4], fn x, y -> x + y end)
+      [4, 6]
+
+  """
+  @doc since: "1.12.0"
+  @spec zip_with(t, t, (enum1_elem :: term, enum2_elem :: term -> term)) :: [term]
+  def zip_with(enumerable1, enumerable2, zip_fun)
+      when is_list(enumerable1) and is_list(enumerable2) and is_function(zip_fun, 2) do
+    zip_with_list(enumerable1, enumerable2, zip_fun)
+  end
+
+  def zip_with(enumerable1, enumerable2, zip_fun) when is_function(zip_fun, 2) do
+    zip_reduce(enumerable1, enumerable2, [], fn l, r, acc -> [zip_fun.(l, r) | acc] end)
+    |> :lists.reverse()
+  end
+
+  @doc """
+  Zips corresponding elements from a finite collection of enumerables
+  into a list, transforming them with the `zip_fun` function as it goes.
+
+  The first element from each of the enums in `enumerables` will be put
+  into a list which is then passed to the one-arity `zip_fun` function.
+  Then, the second elements from each of the enums are put into a list
+  and passed to `zip_fun`, and so on until any one of the enums in
+  `enumerables` runs out of elements.
+
+  Returns a list with all the results of calling `zip_fun`.
+
+  ## Examples
+
+      iex> Enum.zip_with([[1, 2], [3, 4], [5, 6]], fn [x, y, z] -> x + y + z end)
+      [9, 12]
+
+      iex> Enum.zip_with([[1, 2], [3, 4]], fn [x, y] -> x + y end)
+      [4, 6]
+
+  """
+  @doc since: "1.12.0"
+  @spec zip_with(t, ([term] -> term)) :: [term]
+  def zip_with([], _fun), do: []
+
+  def zip_with(enumerables, zip_fun) do
+    zip_reduce(enumerables, [], fn values, acc -> [zip_fun.(values) | acc] end)
+    |> :lists.reverse()
+  end
+
+  @doc """
+  Reduces over two enumerables, halting as soon as either enumerable is empty.
+
+  In practice, the behaviour provided by this function can be achieved with:
+
+      Enum.reduce(Stream.zip(left, right), acc, reducer)
+
+  But `zip_reduce/4` exists for convenience purposes.
+
+  ## Examples
+
+      iex> Enum.zip_reduce([1, 2], [3, 4], 0, fn x, y, acc -> x + y + acc end)
+      10
+
+      iex> Enum.zip_reduce([1, 2], [3, 4], [], fn x, y, acc -> [x + y | acc] end)
+      [6, 4]
+
+  """
+  @doc since: "1.12.0"
+  @spec zip_reduce(t, t, acc, (enum1_elem :: term, enum2_elem :: term, acc -> acc)) :: acc
+        when acc: term
+  def zip_reduce(left, right, acc, reducer)
+      when is_list(left) and is_list(right) and is_function(reducer, 3) do
+    zip_reduce_list(left, right, acc, reducer)
+  end
+
+  def zip_reduce(left, right, acc, reducer) when is_function(reducer, 3) do
+    reduce = fn [l, r], acc -> {:cont, reducer.(l, r, acc)} end
+    Stream.zip_with([left, right], & &1).({:cont, acc}, reduce) |> elem(1)
+  end
+
+  @doc """
+  Reduces over all of the given enumerables, halting as soon as any enumerable is
+  empty.
+
+  The reducer will receive two arguments: a list of elements (one from each
+  enum) and the accumulator.
+
+  In practice, the behaviour provided by this function can be achieved with:
+
+      Enum.reduce(Stream.zip(enums), acc, reducer)
+
+  But `zip_reduce/3` exists for convenience purposes.
+ + ## Examples + + iex> enums = [[1, 1], [2, 2], [3, 3]] + ...> Enum.zip_reduce(enums, [], fn elements, acc -> + ...> [List.to_tuple(elements) | acc] + ...> end) + [{1, 2, 3}, {1, 2, 3}] + + iex> enums = [[1, 2], %{a: 3, b: 4}, [5, 6]] + ...> Enum.zip_reduce(enums, [], fn elements, acc -> + ...> [List.to_tuple(elements) | acc] + ...> end) + [{2, {:b, 4}, 6}, {1, {:a, 3}, 5}] + """ + @doc since: "1.12.0" + @spec zip_reduce(t, acc, ([term], acc -> acc)) :: acc when acc: term + def zip_reduce([], acc, reducer) when is_function(reducer, 2), do: acc + + def zip_reduce(enums, acc, reducer) when is_function(reducer, 2) do + Stream.zip_with(enums, & &1).({:cont, acc}, &{:cont, reducer.(&1, &2)}) |> elem(1) + end + + ## Helpers + + @compile {:inline, + entry_to_string: 1, + reduce: 3, + reduce_by: 3, + reduce_enumerable: 3, + reduce_range: 5, + map_range: 4} + + defp entry_to_string(entry) when is_binary(entry), do: entry + defp entry_to_string(entry), do: String.Chars.to_string(entry) + + defp aggregate([head | tail], fun, _empty) do + aggregate_list(tail, head, fun) + end + + defp aggregate([], _fun, empty) do + empty.() + end + + defp aggregate(first..last//step = range, fun, empty) do + case Range.size(range) do + 0 -> + empty.() + + _ -> + last = last - rem(last - first, step) + + case fun.(first, last) do + true -> first + false -> last + end + end + end + + defp aggregate(enumerable, fun, empty) do + ref = make_ref() + + enumerable + |> reduce(ref, fn + element, ^ref -> + element + + element, acc -> + case fun.(acc, element) do + true -> acc + false -> element + end + end) + |> case do + ^ref -> empty.() + result -> result + end + end + + defp aggregate_list([head | tail], acc, fun) do + acc = + case fun.(acc, head) do + true -> acc + false -> head + end + + aggregate_list(tail, acc, fun) + end + + defp aggregate_list([], acc, _fun), do: acc + + defp aggregate_by(enumerable, fun, sorter, empty_fallback) do + first_fun = &[&1 | fun.(&1)] + + reduce_fun = fn entry, [_ | fun_ref] = old -> + fun_entry = fun.(entry) + + case sorter.(fun_ref, fun_entry) do + true -> old + false -> [entry | fun_entry] + end + end + + case reduce_by(enumerable, first_fun, reduce_fun) do + :empty -> empty_fallback.() + [entry | _] -> entry + end + end + + defp reduce_by([head | tail], first, fun) do + :lists.foldl(fun, first.(head), tail) + end + + defp reduce_by([], _first, _fun) do + :empty + end + + defp reduce_by(enumerable, first, fun) do + reduce(enumerable, :empty, fn + element, :empty -> first.(element) + element, acc -> fun.(element, acc) + end) + end + + defp random_integer(limit, limit) when is_integer(limit) do + limit + end + + defp random_integer(lower_limit, upper_limit) when upper_limit < lower_limit do + random_integer(upper_limit, lower_limit) + end + + defp random_integer(lower_limit, upper_limit) do + lower_limit + :rand.uniform(upper_limit - lower_limit + 1) - 1 + end + + ## Implementations + + ## all?/1 + + defp all_list([h | t]) do + if h do + all_list(t) + else + false + end + end + + defp all_list([]) do + true + end + + ## any?/1 + + defp any_list([h | t]) do + if h do + true + else + any_list(t) + end + end + + defp any_list([]) do + false + end + + ## any?/2 all?/2 + + defp predicate_list([h | t], initial, fun) do + if !!fun.(h) == initial do + predicate_list(t, initial, fun) + else + not initial + end + end + + defp predicate_list([], initial, _) do + initial + end + + defp predicate_range(first, last, step, initial, fun) + when step > 0 and first <= last + when step < 0 and first >= 
last do + if !!fun.(first) == initial do + predicate_range(first + step, last, step, initial, fun) + else + not initial + end + end + + defp predicate_range(_first, _last, _step, initial, _fun) do + initial + end + + ## concat + + defp concat_list([h | t]) when is_list(h), do: h ++ concat_list(t) + defp concat_list([h | t]), do: concat_enum([h | t]) + defp concat_list([]), do: [] + + defp concat_enum(enum) do + fun = &[&1 | &2] + enum |> reduce([], &reduce(&1, &2, fun)) |> :lists.reverse() + end + + # dedup + + defp dedup_list([value | tail], acc) do + acc = + case acc do + [^value | _] -> acc + _ -> [value | acc] + end + + dedup_list(tail, acc) + end + + defp dedup_list([], acc) do + acc + end + + ## drop + + defp drop_list(list, 0), do: list + defp drop_list([_ | tail], counter), do: drop_list(tail, counter - 1) + defp drop_list([], _), do: [] + + ## drop_while + + defp drop_while_list([head | tail], fun) do + if fun.(head) do + drop_while_list(tail, fun) + else + [head | tail] + end + end + + defp drop_while_list([], _) do + [] + end + + ## filter + + defp filter_list([head | tail], fun) do + if fun.(head) do + [head | filter_list(tail, fun)] + else + filter_list(tail, fun) + end + end + + defp filter_list([], _fun) do + [] + end + + ## find + + defp find_list([head | tail], default, fun) do + if fun.(head) do + head + else + find_list(tail, default, fun) + end + end + + defp find_list([], default, _) do + default + end + + ## find_index + + defp find_index_list([head | tail], counter, fun) do + if fun.(head) do + counter + else + find_index_list(tail, counter + 1, fun) + end + end + + defp find_index_list([], _, _) do + nil + end + + ## find_value + + defp find_value_list([head | tail], default, fun) do + fun.(head) || find_value_list(tail, default, fun) + end + + defp find_value_list([], default, _) do + default + end + + ## flat_map + + defp flat_map_list([head | tail], fun) do + case fun.(head) do + list when is_list(list) -> list ++ flat_map_list(tail, fun) + other -> to_list(other) ++ flat_map_list(tail, fun) + end + end + + defp flat_map_list([], _fun) do + [] + end + + ## intersperse + + defp intersperse_non_empty_list([head], _separator), do: [head] + + defp intersperse_non_empty_list([head | rest], separator) do + [head, separator | intersperse_non_empty_list(rest, separator)] + end + + ## join + + defp join_list([], _joiner), do: "" + + defp join_list(list, joiner) do + join_non_empty_list(list, joiner, []) + |> :lists.reverse() + |> IO.iodata_to_binary() + end + + defp join_non_empty_list([first], _joiner, acc), do: [entry_to_string(first) | acc] + + defp join_non_empty_list([first | rest], joiner, acc) do + join_non_empty_list(rest, joiner, [joiner, entry_to_string(first) | acc]) + end + + ## map + + defp map_range(first, last, step, fun) + when step > 0 and first <= last + when step < 0 and first >= last do + [fun.(first) | map_range(first + step, last, step, fun)] + end + + defp map_range(_first, _last, _step, _fun) do + [] + end + + ## map_intersperse + + defp map_intersperse_list([], _, _), + do: [] + + defp map_intersperse_list([last], _, mapper), + do: [mapper.(last)] + + defp map_intersperse_list([head | rest], separator, mapper), + do: [mapper.(head), separator | map_intersperse_list(rest, separator, mapper)] + + ## reduce + + defp reduce_range(first, last, step, acc, fun) + when step > 0 and first <= last + when step < 0 and first >= last do + reduce_range(first + step, last, step, fun.(first, acc), fun) + end + + defp reduce_range(_first, _last, _step, acc, _fun) 
do + acc + end + + defp reduce_enumerable(enumerable, acc, fun) do + Enumerable.reduce(enumerable, {:cont, acc}, fn x, acc -> {:cont, fun.(x, acc)} end) |> elem(1) + end + + ## reject + + defp reject_list([head | tail], fun) do + if fun.(head) do + reject_list(tail, fun) + else + [head | reject_list(tail, fun)] + end + end + + defp reject_list([], _fun) do + [] + end + + ## reverse_slice + + defp reverse_slice(rest, idx, idx, count, acc) do + {slice, rest} = head_slice(rest, count, []) + :lists.reverse(rest, :lists.reverse(slice, acc)) + end + + defp reverse_slice([elem | rest], idx, start, count, acc) do + reverse_slice(rest, idx - 1, start, count, [elem | acc]) + end + + defp head_slice(rest, 0, acc), do: {acc, rest} + + defp head_slice([elem | rest], count, acc) do + head_slice(rest, count - 1, [elem | acc]) + end + + ## scan + + defp scan_list([], _acc, _fun), do: [] + + defp scan_list([elem | rest], acc, fun) do + acc = fun.(elem, acc) + [acc | scan_list(rest, acc, fun)] + end + + ## shuffle + + defp shuffle_unwrap([{_, h} | enumerable], t) do + shuffle_unwrap(enumerable, [h | t]) + end + + defp shuffle_unwrap([], t), do: t + + ## slice + + defp slice_forward(enumerable, start, amount, step) when start < 0 do + {count, fun} = slice_count_and_fun(enumerable, step) + start = count + start + + if start >= 0 do + amount = Kernel.min(amount, count - start) + amount = amount_with_step(amount, step) + fun.(start, amount, step) + else + [] + end + end + + defp slice_forward(list, start, amount, step) when is_list(list) do + amount = amount_with_step(amount, step) + slice_list(list, start, amount, step) + end + + defp slice_forward(enumerable, start, amount, step) do + case Enumerable.slice(enumerable) do + {:ok, count, _} when start >= count -> + [] + + {:ok, count, fun} when is_function(fun, 1) -> + amount = Kernel.min(amount, count - start) |> amount_with_step(step) + enumerable |> fun.() |> slice_exact(start, amount, step, count) + + # TODO: Deprecate me in Elixir v1.18. 
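+      # Arity-2 slicing functions predate step support: fetch one contiguous
+      # chunk, then keep every `step`-th element of it.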
+ {:ok, count, fun} when is_function(fun, 2) -> + amount = Kernel.min(amount, count - start) + + if step == 1 do + fun.(start, amount) + else + fun.(start, Kernel.min(amount * step, count - start)) + |> take_every_list(amount, step - 1) + end + + {:ok, count, fun} when is_function(fun, 3) -> + amount = Kernel.min(amount, count - start) |> amount_with_step(step) + fun.(start, amount, step) + + {:error, module} -> + slice_enum(enumerable, module, start, amount, step) + end + end + + defp slice_list(list, start, amount, step) do + if step == 1 do + list |> drop_list(start) |> take_list(amount) + else + list |> drop_list(start) |> take_every_list(amount, step - 1) + end + end + + defp slice_enum(enumerable, module, start, amount, 1) do + {_, {_, _, slice}} = + module.reduce(enumerable, {:cont, {start, amount, []}}, fn + _entry, {start, amount, _list} when start > 0 -> + {:cont, {start - 1, amount, []}} + + entry, {start, amount, list} when amount > 1 -> + {:cont, {start, amount - 1, [entry | list]}} + + entry, {start, amount, list} -> + {:halt, {start, amount, [entry | list]}} + end) + + :lists.reverse(slice) + end + + defp slice_enum(enumerable, module, start, amount, step) do + {_, {_, _, _, slice}} = + module.reduce(enumerable, {:cont, {start, amount, 1, []}}, fn + _entry, {start, amount, to_drop, _list} when start > 0 -> + {:cont, {start - 1, amount, to_drop, []}} + + entry, {start, amount, to_drop, list} when amount > 1 -> + case to_drop do + 1 -> {:cont, {start, amount - 1, step, [entry | list]}} + _ -> {:cont, {start, amount - 1, to_drop - 1, list}} + end + + entry, {start, amount, to_drop, list} -> + case to_drop do + 1 -> {:halt, {start, amount, to_drop, [entry | list]}} + _ -> {:halt, {start, amount, to_drop, list}} + end + end) + + :lists.reverse(slice) + end + + defp slice_count_and_fun(list, _step) when is_list(list) do + length = length(list) + {length, &slice_exact(list, &1, &2, &3, length)} + end + + defp slice_count_and_fun(enumerable, step) do + case Enumerable.slice(enumerable) do + {:ok, count, fun} when is_function(fun, 3) -> + {count, fun} + + # TODO: Deprecate me in Elixir v1.18. 
+ {:ok, count, fun} when is_function(fun, 2) -> + if step == 1 do + {count, fn start, amount, 1 -> fun.(start, amount) end} + else + {count, + fn start, amount, step -> + fun.(start, Kernel.min(amount * step, count - start)) + |> take_every_list(amount, step - 1) + end} + end + + {:ok, count, fun} when is_function(fun, 1) -> + {count, &slice_exact(fun.(enumerable), &1, &2, &3, count)} + + {:error, module} -> + {list, count} = + enumerable + |> module.reduce({:cont, {[], 0}}, fn elem, {acc, count} -> + {:cont, {[elem | acc], count + 1}} + end) + |> elem(1) + + {count, + fn start, amount, step -> + list |> :lists.reverse() |> slice_exact(start, amount, step, count) + end} + end + end + + # Slice a list when we know the bounds + defp slice_exact(_list, _start, 0, _step, _), do: [] + + defp slice_exact(list, start, amount, 1, size) when start + amount == size, + do: list |> drop_exact(start) + + defp slice_exact(list, start, amount, 1, _), + do: list |> drop_exact(start) |> take_exact(amount) + + defp slice_exact(list, start, amount, step, _), + do: list |> drop_exact(start) |> take_every_list(amount, step - 1) + + defp drop_exact(list, 0), do: list + defp drop_exact([_ | tail], amount), do: drop_exact(tail, amount - 1) + + defp take_exact(_list, 0), do: [] + defp take_exact([head | tail], amount), do: [head | take_exact(tail, amount - 1)] + + ## sort + + defp sort_reducer(entry, {:split, y, x, r, rs, bool}, fun) do + cond do + fun.(y, entry) == bool -> + {:split, entry, y, [x | r], rs, bool} + + fun.(x, entry) == bool -> + {:split, y, entry, [x | r], rs, bool} + + r == [] -> + {:split, y, x, [entry], rs, bool} + + true -> + {:pivot, y, x, r, rs, entry, bool} + end + end + + defp sort_reducer(entry, {:pivot, y, x, r, rs, s, bool}, fun) do + cond do + fun.(y, entry) == bool -> + {:pivot, entry, y, [x | r], rs, s, bool} + + fun.(x, entry) == bool -> + {:pivot, y, entry, [x | r], rs, s, bool} + + fun.(s, entry) == bool -> + {:split, entry, s, [], [[y, x | r] | rs], bool} + + true -> + {:split, s, entry, [], [[y, x | r] | rs], bool} + end + end + + defp sort_reducer(entry, [x], fun) do + {:split, entry, x, [], [], fun.(x, entry)} + end + + defp sort_reducer(entry, acc, _fun) do + [entry | acc] + end + + defp sort_terminator({:split, y, x, r, rs, bool}, fun) do + sort_merge([[y, x | r] | rs], fun, bool) + end + + defp sort_terminator({:pivot, y, x, r, rs, s, bool}, fun) do + sort_merge([[s], [y, x | r] | rs], fun, bool) + end + + defp sort_terminator(acc, _fun) do + acc + end + + defp sort_merge(list, fun, true), do: reverse_sort_merge(list, [], fun, true) + + defp sort_merge(list, fun, false), do: sort_merge(list, [], fun, false) + + defp sort_merge([t1, [h2 | t2] | l], acc, fun, true), + do: sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, false) | acc], fun, true) + + defp sort_merge([[h2 | t2], t1 | l], acc, fun, false), + do: sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, false) | acc], fun, false) + + defp sort_merge([l], [], _fun, _bool), do: l + + defp sort_merge([l], acc, fun, bool), + do: reverse_sort_merge([:lists.reverse(l, []) | acc], [], fun, bool) + + defp sort_merge([], acc, fun, bool), do: reverse_sort_merge(acc, [], fun, bool) + + defp reverse_sort_merge([[h2 | t2], t1 | l], acc, fun, true), + do: reverse_sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, true) | acc], fun, true) + + defp reverse_sort_merge([t1, [h2 | t2] | l], acc, fun, false), + do: reverse_sort_merge(l, [sort_merge1(t1, h2, t2, [], fun, true) | acc], fun, false) + + defp reverse_sort_merge([l], acc, fun, bool), + 
do: sort_merge([:lists.reverse(l, []) | acc], [], fun, bool) + + defp reverse_sort_merge([], acc, fun, bool), do: sort_merge(acc, [], fun, bool) + + defp sort_merge1([h1 | t1], h2, t2, m, fun, bool) do + if fun.(h1, h2) == bool do + sort_merge2(h1, t1, t2, [h2 | m], fun, bool) + else + sort_merge1(t1, h2, t2, [h1 | m], fun, bool) + end + end + + defp sort_merge1([], h2, t2, m, _fun, _bool), do: :lists.reverse(t2, [h2 | m]) + + defp sort_merge2(h1, t1, [h2 | t2], m, fun, bool) do + if fun.(h1, h2) == bool do + sort_merge2(h1, t1, t2, [h2 | m], fun, bool) + else + sort_merge1(t1, h2, t2, [h1 | m], fun, bool) + end + end + + defp sort_merge2(h1, t1, [], m, _fun, _bool), do: :lists.reverse(t1, [h1 | m]) + + ## split + + defp split_list([head | tail], counter, acc) when counter > 0 do + split_list(tail, counter - 1, [head | acc]) + end + + defp split_list(list, 0, acc) do + {:lists.reverse(acc), list} + end + + defp split_list([], _, acc) do + {:lists.reverse(acc), []} + end + + defp split_reverse_list([head | tail], counter, acc) when counter > 0 do + split_reverse_list(tail, counter - 1, [head | acc]) + end + + defp split_reverse_list(list, 0, acc) do + {:lists.reverse(list), acc} + end + + defp split_reverse_list([], _, acc) do + {[], acc} + end + + ## split_while + + defp split_while_list([head | tail], fun, acc) do + if fun.(head) do + split_while_list(tail, fun, [head | acc]) + else + {:lists.reverse(acc), [head | tail]} + end + end + + defp split_while_list([], _, acc) do + {:lists.reverse(acc), []} + end + + ## take + + defp take_list(_list, 0), do: [] + defp take_list([head | tail], counter), do: [head | take_list(tail, counter - 1)] + defp take_list([], _counter), do: [] + + defp take_every_list([head | tail], to_drop), + do: [head | tail |> drop_list(to_drop) |> take_every_list(to_drop)] + + defp take_every_list([], _to_drop), do: [] + + defp take_every_list(_list, 0, _to_drop), do: [] + + defp take_every_list([head | tail], counter, to_drop), + do: [head | tail |> drop_list(to_drop) |> take_every_list(counter - 1, to_drop)] + + defp take_every_list([], _counter, _to_drop), do: [] + + ## take_while + + defp take_while_list([head | tail], fun) do + if fun.(head) do + [head | take_while_list(tail, fun)] + else + [] + end + end + + defp take_while_list([], _) do + [] + end + + ## uniq + + defp uniq_list([head | tail], set, fun) do + value = fun.(head) + + case set do + %{^value => true} -> uniq_list(tail, set, fun) + %{} -> [head | uniq_list(tail, Map.put(set, value, true), fun)] + end + end + + defp uniq_list([], _set, _fun) do + [] + end + + ## with_index + + defp with_index_list([head | tail], offset) do + [{head, offset} | with_index_list(tail, offset + 1)] + end + + defp with_index_list([], _offset), do: [] + + defp with_index_list([head | tail], offset, fun) do + [fun.(head, offset) | with_index_list(tail, offset + 1, fun)] + end + + defp with_index_list([], _offset, _fun), do: [] + + ## zip + + defp zip_list([head1 | next1], [head2 | next2], acc) do + zip_list(next1, next2, [{head1, head2} | acc]) + end + + defp zip_list([], _, acc), do: :lists.reverse(acc) + defp zip_list(_, [], acc), do: :lists.reverse(acc) + + defp zip_with_list([head1 | next1], [head2 | next2], fun) do + [fun.(head1, head2) | zip_with_list(next1, next2, fun)] + end + + defp zip_with_list(_, [], _fun), do: [] + defp zip_with_list([], _, _fun), do: [] + + defp zip_reduce_list([head1 | next1], [head2 | next2], acc, fun) do + zip_reduce_list(next1, next2, fun.(head1, head2, acc), fun) + end + + defp 
zip_reduce_list(_, [], acc, _fun), do: acc + defp zip_reduce_list([], _, acc, _fun), do: acc +end + +defimpl Enumerable, for: List do + def count(list), do: {:ok, length(list)} + + def member?([], _value), do: {:ok, false} + def member?(_list, _value), do: {:error, __MODULE__} + + def slice([]), do: {:ok, 0, fn _, _, _ -> [] end} + def slice(_list), do: {:error, __MODULE__} + + def reduce(_list, {:halt, acc}, _fun), do: {:halted, acc} + def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)} + def reduce([], {:cont, acc}, _fun), do: {:done, acc} + def reduce([head | tail], {:cont, acc}, fun), do: reduce(tail, fun.(head, acc), fun) +end + +defimpl Enumerable, for: Map do + def count(map) do + {:ok, map_size(map)} + end + + def member?(map, {key, value}) do + {:ok, match?(%{^key => ^value}, map)} + end + + def member?(_map, _other) do + {:ok, false} + end + + def slice(map) do + size = map_size(map) + {:ok, size, &:maps.to_list/1} + end + + def reduce(map, acc, fun) do + Enumerable.List.reduce(:maps.to_list(map), acc, fun) + end +end + +defimpl Enumerable, for: Function do + def count(_function), do: {:error, __MODULE__} + def member?(_function, _value), do: {:error, __MODULE__} + def slice(_function), do: {:error, __MODULE__} + + def reduce(function, acc, fun) when is_function(function, 2), do: function.(acc, fun) + + def reduce(function, _acc, _fun) do + raise Protocol.UndefinedError, + protocol: @protocol, + value: function, + description: "only anonymous functions of arity 2 are enumerable" + end +end diff --git a/apps/remote_control/benchmarks/data/source.index.v1.ets b/apps/remote_control/benchmarks/data/source.index.v1.ets index 9cd21d195..00cd76189 100644 --- a/apps/remote_control/benchmarks/data/source.index.v1.ets +++ b/apps/remote_control/benchmarks/data/source.index.v1.ets @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:e8b938d9c25331df2752ecf44b1b7823f80e68a0c34489aa0df58edd75d096ee -size 99882388 +oid sha256:6aa5313736d6431963476b7b0b38cc20f2c95b252b3358ca6fba17b8d116d6a0 +size 221782947 diff --git a/apps/remote_control/benchmarks/enum_index.exs b/apps/remote_control/benchmarks/enum_index.exs new file mode 100644 index 000000000..6f55b57d3 --- /dev/null +++ b/apps/remote_control/benchmarks/enum_index.exs @@ -0,0 +1,16 @@ +alias Lexical.RemoteControl.Search.Indexer + +path = + [__DIR__, "**", "enum.ex"] + |> Path.join() + |> Path.wildcard() + |> List.first() + +{:ok, source} = File.read(path) + +Benchee.run( + %{ + "indexing source code" => fn -> Indexer.Source.index(path, source) end + }, + profile_after: true +) diff --git a/apps/remote_control/benchmarks/ets_bench.exs b/apps/remote_control/benchmarks/ets_bench.exs index 0c85dc6f4..fd35aab52 100644 --- a/apps/remote_control/benchmarks/ets_bench.exs +++ b/apps/remote_control/benchmarks/ets_bench.exs @@ -3,6 +3,7 @@ alias Lexical.RemoteControl alias Lexical.RemoteControl.Search.Store.Backends.Ets alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schema alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas +alias Lexical.VM.Versions defmodule BenchHelper do def wait_for_registration do @@ -20,14 +21,14 @@ defmodule BenchHelper do Enum.random(entries).path end - def random_ref(entries) do - Enum.random(entries).ref + def random_id(entries) do + Enum.random(entries).id end - def random_refs(entries, count) do + def random_ids(entries, count) do entries |> Enum.take_random(count) - |> Enum.map(& &1.ref) + |> Enum.map(& &1.id) end end @@ -37,7 +38,8 @@ project = 
Project.new("file://#{cwd}") RemoteControl.set_project(project) Project.ensure_workspace(project) -indexes_path = Project.workspace_path(project, "indexes") +versions = Versions.current() +indexes_path = Project.workspace_path(project, ["indexes", versions.erlang, versions.elixir]) data_dir = Path.join(cwd, "data") File.mkdir_p!(indexes_path) @@ -64,17 +66,17 @@ Benchee.run( "find_by_subject, two wildcards" => fn _ -> Ets.find_by_subject(Enum, :_, :_) end, - "find_by_references" => fn %{refs: refs} -> - Ets.find_by_refs(refs, :module, :_) + "find_by_references" => fn %{ids: ids} -> + Ets.find_by_ids(ids, :module, :_) end, "delete_by_path" => fn %{path: path} -> Ets.delete_by_path(path) end }, before_each: fn _ -> - refs = BenchHelper.random_refs(entries, 50) + ids = BenchHelper.random_ids(entries, 50) path = BenchHelper.random_path(entries) - %{path: path, refs: refs} + %{path: path, ids: ids} end ) diff --git a/apps/remote_control/benchmarks/snowflake_bench.exs b/apps/remote_control/benchmarks/snowflake_bench.exs new file mode 100644 index 000000000..09bad770e --- /dev/null +++ b/apps/remote_control/benchmarks/snowflake_bench.exs @@ -0,0 +1,8 @@ +Benchee.run( + %{ + "next_id" => fn -> + Snowflake.next_id() + end + }, + profile_after: true +) diff --git a/apps/remote_control/lib/lexical/remote_control.ex b/apps/remote_control/lib/lexical/remote_control.ex index b7d776815..2ed819287 100644 --- a/apps/remote_control/lib/lexical/remote_control.ex +++ b/apps/remote_control/lib/lexical/remote_control.ex @@ -6,12 +6,62 @@ defmodule Lexical.RemoteControl do """ alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api.Proxy + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeIntelligence alias Lexical.RemoteControl.ProjectNode + require Logger @excluded_apps [:patch, :nimble_parsec] @allowed_apps [:remote_control | Mix.Project.deps_apps()] -- @excluded_apps + defdelegate schedule_compile(force?), to: Proxy + + defdelegate compile_document(document), to: Proxy + + defdelegate format(document), to: Proxy + + defdelegate reindex, to: Proxy + + defdelegate index_running?, to: Proxy + + defdelegate broadcast(message), to: Proxy + + defdelegate expand_alias(segments_or_module, analysis, position), to: RemoteControl.Analyzer + + defdelegate list_modules, to: :code, as: :all_available + + defdelegate code_actions(document, range, diagnostics, kinds), to: CodeAction, as: :for_range + + defdelegate complete(env), to: RemoteControl.Completion, as: :elixir_sense_expand + + defdelegate complete_struct_fields(analysis, position), + to: RemoteControl.Completion, + as: :struct_fields + + defdelegate definition(document, position), to: CodeIntelligence.Definition + + defdelegate references(analysis, position, include_definitions?), + to: CodeIntelligence.References + + defdelegate modules_with_prefix(prefix), to: RemoteControl.Modules, as: :with_prefix + + defdelegate modules_with_prefix(prefix, predicate), to: RemoteControl.Modules, as: :with_prefix + + defdelegate docs(module, opts \\ []), to: CodeIntelligence.Docs, as: :for_module + + defdelegate register_listener(listener_pid, message_types), to: RemoteControl.Dispatch + + defdelegate resolve_entity(analysis, position), to: CodeIntelligence.Entity, as: :resolve + + defdelegate struct_definitions, to: CodeIntelligence.Structs, as: :for_project + + defdelegate document_symbols(document), to: CodeIntelligence.Symbols, as: :for_document + + defdelegate workspace_symbols(query), to: CodeIntelligence.Symbols, as: 
:for_workspace + def start_link(%Project{} = project) do :ok = ensure_epmd_started() start_net_kernel(project) @@ -25,6 +75,23 @@ defmodule Lexical.RemoteControl do end end + def deps_paths do + case :persistent_term.get({__MODULE__, :deps_paths}, :error) do + :error -> + {:ok, deps_paths} = + RemoteControl.Mix.in_project(fn _ -> + Mix.Task.run("loadpaths") + Mix.Project.deps_paths() + end) + + :persistent_term.put({__MODULE__, :deps_paths}, deps_paths) + deps_paths + + deps_paths -> + deps_paths + end + end + def with_lock(lock_type, func) do :global.trans({lock_type, self()}, func, [Node.self()]) end @@ -49,15 +116,16 @@ defmodule Lexical.RemoteControl do |> :erpc.call(m, f, a) end - defp start_net_kernel(%Project{} = project) do - :net_kernel.start([manager_node_name(project)]) - end - def manager_node_name(%Project{} = project) do :"manager-#{Project.name(project)}-#{Project.entropy(project)}@127.0.0.1" end - def ensure_apps_started(node, app_names) do + defp start_net_kernel(%Project{} = project) do + manager = manager_node_name(project) + :net_kernel.start(manager, %{name_domain: :longnames}) + end + + defp ensure_apps_started(node, app_names) do Enum.reduce_while(app_names, :ok, fn app_name, _ -> case :rpc.call(node, :application, :ensure_all_started, [app_name]) do {:ok, _} -> {:cont, :ok} @@ -66,7 +134,7 @@ defmodule Lexical.RemoteControl do end) end - def glob_paths do + defp glob_paths do for entry <- :code.get_path(), entry_string = List.to_string(entry), entry_string != ".", @@ -80,6 +148,7 @@ defmodule Lexical.RemoteControl do {path_result, env} = with nil <- version_manager_path_and_env("asdf", root_path), + nil <- version_manager_path_and_env("mise", root_path), nil <- version_manager_path_and_env("rtx", root_path) do {File.cd!(root_path, fn -> System.find_executable("elixir") end), System.get_env()} end @@ -153,4 +222,25 @@ defmodule Lexical.RemoteControl do end) |> Enum.reject(&is_nil/1) end + + defp reset_env("mise", root_path) do + {env, _} = System.cmd("mise", ~w(env -s bash), cd: root_path) + + env + |> String.trim() + |> String.split("\n") + |> Enum.map(fn + "export " <> key_and_value -> + [key, value] = + key_and_value + |> String.split("=", parts: 2) + |> Enum.map(&String.trim/1) + + {key, value} + + _ -> + nil + end) + |> Enum.reject(&is_nil/1) + end end diff --git a/apps/remote_control/lib/lexical/remote_control/analyzer.ex b/apps/remote_control/lib/lexical/remote_control/analyzer.ex new file mode 100644 index 000000000..872c81714 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/analyzer.ex @@ -0,0 +1,213 @@ +defmodule Lexical.RemoteControl.Analyzer do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Require + alias Lexical.Ast.Analysis.Use + alias Lexical.Document.Position + alias Lexical.RemoteControl.Analyzer.Aliases + alias Lexical.RemoteControl.Analyzer.Imports + alias Lexical.RemoteControl.Analyzer.Requires + alias Lexical.RemoteControl.Analyzer.Uses + + require Logger + + defdelegate aliases_at(analysis, position), to: Aliases, as: :at + defdelegate imports_at(analysis, position), to: Imports, as: :at + + @spec requires_at(Analysis.t(), Position.t()) :: [module()] + def requires_at(%Analysis{} = analysis, %Position{} = position) do + analysis + |> Requires.at(position) + |> Enum.reduce([], fn %Require{} = require, acc -> + case expand_alias(require.module, analysis, position) do + {:ok, expanded} -> [expanded | acc] + _ -> [Module.concat(require.as) | acc] + end + end) + end + + @spec uses_at(Analysis.t(), 
Position.t()) :: [module()] + def uses_at(%Analysis{} = analysis, %Position{} = position) do + analysis + |> Uses.at(position) + |> Enum.reduce([], fn %Use{} = use, acc -> + case expand_alias(use.module, analysis, position) do + {:ok, expanded} -> [expanded | acc] + _ -> [Module.concat(use.module) | acc] + end + end) + end + + def resolve_local_call(%Analysis{} = analysis, %Position{} = position, function_name, arity) do + maybe_imported_mfa = + analysis + |> imports_at(position) + |> Enum.find(fn + {_, ^function_name, ^arity} -> true + _ -> false + end) + + if is_nil(maybe_imported_mfa) do + aliases = aliases_at(analysis, position) + current_module = aliases[:__MODULE__] + {current_module, function_name, arity} + else + maybe_imported_mfa + end + end + + @doc """ + Expands an alias at the given position in the context of a document + analysis. + + When we refer to a module, it's usually a short name, often aliased or + in a nested module. This function finds the full name of the module at + a cursor position. + + For example, if we have: + + defmodule Project do + defmodule Issue do + defstruct [:message] + end + + def message(%Issue{|} = issue) do # cursor marked as `|` + end + end + + We could get the expansion for the `Issue` alias at the cursor position + like so: + + iex> Analyzer.expand_alias([:Issue], analysis, position) + {:ok, Project.Issue} + + Another example: + + defmodule Project do + defmodule Issue do + defstruct [:message] + end + end + + defmodule MyModule do + alias Project, as: MyProject + + def message(%MyProject.Issue{|} = issue) do + end + end + + This would yield the same result: + + iex> Analyzer.expand_alias([:MyProject, :Issue], analysis, position) + {:ok, Project.Issue} + + If no aliases are present at the given position, no expansion occurs: + + iex> Analyzer.expand_alias([:Some, :Other, :Module], analysis, position) + {:ok, Some.Other.Module} + + """ + @spec expand_alias( + Ast.alias_segments() | module(), + Analysis.t(), + Position.t() | {Position.line(), Position.character()} + ) :: + {:ok, module()} | :error + def expand_alias([_ | _] = segments, %Analysis{} = analysis, %Position{} = position) do + with %Analysis{valid?: true} = analysis <- Lexical.Ast.reanalyze_to(analysis, position), + aliases <- aliases_at(analysis, position), + {:ok, resolved} <- resolve_alias(segments, aliases) do + {:ok, Module.concat(resolved)} + else + _ -> + case segments do + [:__MODULE__] -> + # we've had a request for the current module, but none was found + # so we've failed. This can happen if we're resolving the current + # module in a script, or inside the defmodule call. 
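+              # In either case there is no current module in scope to expand into, so +              # we return :error rather than guess.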
+              :error + + _ -> + if Enum.all?(segments, &is_atom/1) do + {:ok, Module.concat(segments)} + else + :error + end + end + end + end + + def expand_alias(module, %Analysis{} = analysis, %Position{} = position) + when is_atom(module) and not is_nil(module) do + {:elixir, segments} = Ast.Module.safe_split(module, as: :atoms) + expand_alias(segments, analysis, position) + end + + def expand_alias(empty, _, _) when empty in [nil, []] do + Logger.warning("nothing to expand (expand_alias was passed #{inspect(empty)})") + :error + end + + @doc """ + Returns the current module at the given position in the analysis + """ + def current_module(%Analysis{} = analysis, %Position{} = position) do + expand_alias([:__MODULE__], analysis, position) + end + + defp resolve_alias([{:@, _, [{:protocol, _, _}]} | rest], alias_mapping) do + with {:ok, protocol} <- Map.fetch(alias_mapping, :"@protocol") do + Ast.reify_alias(protocol, rest) + end + end + + defp resolve_alias( + [{:__aliases__, _, [{:@, _, [{:protocol, _, _}]} | _] = protocol}], + alias_mapping + ) do + resolve_alias(protocol, alias_mapping) + end + + defp resolve_alias([{:@, _, [{:for, _, _} | _]} | rest], alias_mapping) do + with {:ok, protocol_for} <- Map.fetch(alias_mapping, :"@for") do + Ast.reify_alias(protocol_for, rest) + end + end + + defp resolve_alias( + [{:__aliases__, _, [{:@, _, [{:for, _, _}]} | _] = protocol_for}], + alias_mapping + ) do + resolve_alias(protocol_for, alias_mapping) + end + + defp resolve_alias([first | _] = segments, aliases_mapping) when is_tuple(first) do + with {:ok, current_module} <- Map.fetch(aliases_mapping, :__MODULE__) do + Ast.reify_alias(current_module, segments) + end + end + + defp resolve_alias([first | _] = segments, aliases_mapping) when is_atom(first) do + with :error <- fetch_leading_alias(segments, aliases_mapping) do + fetch_trailing_alias(segments, aliases_mapping) + end + end + + defp resolve_alias(_, _), do: :error + + defp fetch_leading_alias([first | rest], aliases_mapping) do + with {:ok, resolved} <- Map.fetch(aliases_mapping, first) do + {:ok, [resolved | rest]} + end + end + + defp fetch_trailing_alias(segments, aliases_mapping) do + # Trailing aliases happen when you use the curly syntax to define multiple aliases + # in one go, like Foo.{First, Second.Third, Fourth} + # Our alias mapping will have Third mapped to Foo.Second.Third, so we need to look + # for Third, whereas the leading alias will look for Second in the mappings.
+ with {:ok, resolved} <- Map.fetch(aliases_mapping, List.last(segments)) do + {:ok, List.wrap(resolved)} + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/analyzer/aliases.ex b/apps/remote_control/lib/lexical/remote_control/analyzer/aliases.ex new file mode 100644 index 000000000..399877797 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/analyzer/aliases.ex @@ -0,0 +1,62 @@ +defmodule Lexical.RemoteControl.Analyzer.Aliases do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Alias + alias Lexical.Ast.Analysis.Scope + alias Lexical.Document.Position + + @spec at(Analysis.t(), Position.t()) :: %{atom() => module()} + def at(%Analysis{} = analysis, %Position{} = position) do + case Analysis.scopes_at(analysis, position) do + [%Scope{} = scope | _] -> + scope + |> Scope.alias_map(position) + |> Map.new(fn {as, %Alias{} = alias} -> + {as, Alias.to_module(alias)} + end) + + [] -> + %{} + end + end + + @doc """ + Resolves an alias in the context of a line and a scope + (used internally when calculating imports) + """ + def resolve_at(%Scope{} = scope, module, line) do + aliases = Scope.alias_map(scope, line) + + case module do + # unquote(__MODULE__).SubModule + [{:unquote, _, [{:__MODULE__, _, _}]} | suffix] -> + resolve_current_module(aliases, suffix) + + [{:__MODULE__, _, _} | suffix] -> + resolve_current_module(aliases, suffix) + + [prefix | suffix] -> + case aliases do + %{^prefix => _} -> + current_module = resolve_alias(aliases, prefix, suffix) + + Module.concat([current_module | suffix]) + + _ -> + Module.concat(module) + end + end + end + + defp resolve_current_module(aliases, suffix) do + resolve_alias(aliases, :__MODULE__, suffix) + end + + defp resolve_alias(aliases, prefix, suffix) do + current_module = + aliases + |> Map.get(prefix) + |> Alias.to_module() + + Module.concat([current_module | suffix]) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/analyzer/imports.ex b/apps/remote_control/lib/lexical/remote_control/analyzer/imports.ex new file mode 100644 index 000000000..77a1bb001 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/analyzer/imports.ex @@ -0,0 +1,144 @@ +defmodule Lexical.RemoteControl.Analyzer.Imports do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Import + alias Lexical.Ast.Analysis.Scope + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.ProcessCache + alias Lexical.RemoteControl.Analyzer.Aliases + alias Lexical.RemoteControl.Module.Loader + + @spec at(Analysis.t(), Position.t()) :: [Scope.import_mfa()] + def at(%Analysis{} = analysis, %Position{} = position) do + case Analysis.scopes_at(analysis, position) do + [%Scope{} = scope | _] -> + imports(scope, position) + + _ -> + [] + end + end + + @spec imports(Scope.t(), Scope.scope_position()) :: [Scope.import_mfa()] + def imports(%Scope{} = scope, position \\ :end) do + scope + |> import_map(position) + |> Map.values() + |> List.flatten() + end + + defp import_map(%Scope{} = scope, position) do + end_line = Scope.end_line(scope, position) + + (kernel_imports(scope) ++ scope.imports) + # sorting by line ensures that imports on later lines + # override imports on earlier lines + |> Enum.sort_by(& &1.range.start.line) + |> Enum.take_while(&(&1.range.start.line <= end_line)) + |> Enum.reduce(%{}, fn %Import{} = import, current_imports -> + apply_to_scope(import, scope, current_imports) + end) + end + + defp apply_to_scope(%Import{} = import, current_scope, %{} = current_imports) do + 
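+    # Resolve the imported module against the aliases in scope (an import may +    # name an alias), then narrow the function/macro list according to the +    # import's selector below.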
import_module = Aliases.resolve_at(current_scope, import.module, import.range.start.line) + + functions = mfas_for(import_module, :functions) + macros = mfas_for(import_module, :macros) + + case import.selector do + :all -> + Map.put(current_imports, import_module, functions ++ macros) + + [only: :functions] -> + Map.put(current_imports, import_module, functions) + + [only: :macros] -> + Map.put(current_imports, import_module, macros) + + [only: :sigils] -> + sigils = mfas_for(import_module, :sigils) + Map.put(current_imports, import_module, sigils) + + [only: functions_to_import] -> + functions_to_import = function_and_arity_to_mfa(import_module, functions_to_import) + Map.put(current_imports, import_module, functions_to_import) + + [except: functions_to_except] -> + # This one is a little tricky. Imports using except have two cases. + # In the first case, if the module hasn't been previously imported, we + # collect all the functions in the current module and remove the ones in the + # except clause. + # If the module has been previously imported, we just remove the functions from + # the except clause from those that have been previously imported. + # See: https://hexdocs.pm/elixir/1.13.0/Kernel.SpecialForms.html#import/2-selector + + functions_to_except = function_and_arity_to_mfa(import_module, functions_to_except) + + if already_imported?(current_imports, import_module) do + Map.update!(current_imports, import_module, fn old_imports -> + old_imports -- functions_to_except + end) + else + to_import = (functions ++ macros) -- functions_to_except + Map.put(current_imports, import_module, to_import) + end + end + end + + defp already_imported?(%{} = current_imports, imported_module) do + case current_imports do + %{^imported_module => [_ | _]} -> true + _ -> false + end + end + + defp function_and_arity_to_mfa(current_module, fa_list) when is_list(fa_list) do + Enum.map(fa_list, fn {function, arity} -> {current_module, function, arity} end) + end + + defp mfas_for(current_module, type) do + if Loader.ensure_loaded?(current_module) do + fa_list = function_and_arities_for_module(current_module, type) + + function_and_arity_to_mfa(current_module, fa_list) + else + [] + end + end + + defp function_and_arities_for_module(module, :sigils) do + ProcessCache.trans({module, :info, :sigils}, fn -> + for {name, arity} <- module.__info__(:functions), + string_name = Atom.to_string(name), + sigil?(string_name, arity) do + {name, arity} + end + end) + end + + defp function_and_arities_for_module(module, type) do + ProcessCache.trans({module, :info, type}, fn -> + type + |> module.__info__() + |> Enum.reject(fn {name, arity} -> + string_name = Atom.to_string(name) + String.starts_with?(string_name, "_") or sigil?(string_name, arity) + end) + end) + end + + defp sigil?(string_name, arity) do + String.starts_with?(string_name, "sigil_") and arity in [1, 2] + end + + defp kernel_imports(%Scope{} = scope) do + start_pos = scope.range.start + range = Range.new(start_pos, start_pos) + + [ + Import.implicit(range, [:Kernel]), + Import.implicit(range, [:Kernel, :SpecialForms]) + ] + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/analyzer/requires.ex b/apps/remote_control/lib/lexical/remote_control/analyzer/requires.ex new file mode 100644 index 000000000..2faebdf77 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/analyzer/requires.ex @@ -0,0 +1,26 @@ +defmodule Lexical.RemoteControl.Analyzer.Requires do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Require + 
alias Lexical.Ast.Analysis.Scope + alias Lexical.Document.Position + + def at(%Analysis{} = analysis, %Position{} = position) do + case Analysis.scopes_at(analysis, position) do + [%Scope{} = scope | _] -> + scope.requires + |> Enum.filter(fn %Require{} = require -> + require_end = require.range.end + + if require_end.line == position.line do + require_end.character <= position.character + else + require_end.line < position.line + end + end) + |> Enum.uniq_by(& &1.as) + + _ -> + [] + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/analyzer/uses.ex b/apps/remote_control/lib/lexical/remote_control/analyzer/uses.ex new file mode 100644 index 000000000..c0921e654 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/analyzer/uses.ex @@ -0,0 +1,23 @@ +defmodule Lexical.RemoteControl.Analyzer.Uses do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Scope + alias Lexical.Document.Position + + def at(%Analysis{} = analysis, %Position{} = position) do + case Analysis.scopes_at(analysis, position) do + [%Scope{} = scope | _] -> + Enum.filter(scope.uses, fn use -> + use_end = use.range.end + + if position.line == use_end.line do + position.character >= use_end.character + else + position.line > use_end.line + end + end) + + _ -> + [] + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/api.ex b/apps/remote_control/lib/lexical/remote_control/api.ex index 309319c8f..ffb723188 100644 --- a/apps/remote_control/lib/lexical/remote_control/api.ex +++ b/apps/remote_control/lib/lexical/remote_control/api.ex @@ -1,90 +1,136 @@ defmodule Lexical.RemoteControl.Api do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Env alias Lexical.Document alias Lexical.Document.Position + alias Lexical.Document.Range alias Lexical.Project alias Lexical.RemoteControl - alias Lexical.RemoteControl.Build alias Lexical.RemoteControl.CodeIntelligence - alias Lexical.RemoteControl.CodeMod require Logger - defdelegate schedule_compile(project, force?), to: Build - defdelegate compile_document(project, document), to: Build + def schedule_compile(%Project{} = project, force?) 
do + RemoteControl.call(project, RemoteControl, :schedule_compile, [force?]) + end + + def compile_document(%Project{} = project, %Document{} = document) do + RemoteControl.call(project, RemoteControl, :compile_document, [document]) + end + + def expand_alias( + %Project{} = project, + segments_or_module, + %Analysis{} = analysis, + %Position{} = position + ) do + RemoteControl.call(project, RemoteControl, :expand_alias, [ + segments_or_module, + analysis, + position + ]) + end def list_modules(%Project{} = project) do - RemoteControl.call(project, :code, :all_available) + RemoteControl.call(project, RemoteControl, :list_modules) end def format(%Project{} = project, %Document{} = document) do - RemoteControl.call(project, CodeMod.Format, :edits, [project, document]) + RemoteControl.call(project, RemoteControl, :format, [document]) end - def replace_with_underscore( + def code_actions( %Project{} = project, %Document{} = document, - line_number, - variable_name + %Range{} = range, + diagnostics, + kinds ) do - RemoteControl.call(project, CodeMod.ReplaceWithUnderscore, :edits, [ + RemoteControl.call(project, RemoteControl, :code_actions, [ document, - line_number, - variable_name + range, + diagnostics, + kinds ]) end - def complete(%Project{} = project, %Document{} = document, %Position{} = position) do - document_string = Document.to_string(document) - complete(project, document_string, position) + def complete(%Project{} = project, %Env{} = env) do + Logger.info("Completion for #{inspect(env.position)}") + RemoteControl.call(project, RemoteControl, :complete, [env]) end - def complete(%Project{} = project, document_string, %Position{} = position) do - Logger.info("Completion for #{inspect(position)}") - - RemoteControl.call(project, RemoteControl.Completion, :elixir_sense_expand, [ - document_string, + def complete_struct_fields(%Project{} = project, %Analysis{} = analysis, %Position{} = position) do + RemoteControl.call(project, RemoteControl, :complete_struct_fields, [ + analysis, position ]) end - def complete_struct_fields(%Project{} = project, %Document{} = document, %Position{} = position) do - RemoteControl.call(project, RemoteControl.Completion, :struct_fields, [ - document, - position - ]) + def definition(%Project{} = project, %Document{} = document, %Position{} = position) do + RemoteControl.call(project, RemoteControl, :definition, [document, position]) end - def definition(%Project{} = project, %Document{} = document, %Position{} = position) do - RemoteControl.call(project, CodeIntelligence.Definition, :definition, [ - document, - position + def references( + %Project{} = project, + %Analysis{} = analysis, + %Position{} = position, + include_definitions? + ) do + RemoteControl.call(project, RemoteControl, :references, [ + analysis, + position, + include_definitions? 
]) end def modules_with_prefix(%Project{} = project, prefix) when is_binary(prefix) or is_atom(prefix) do - RemoteControl.call(project, RemoteControl.Modules, :with_prefix, [prefix]) + RemoteControl.call(project, RemoteControl, :modules_with_prefix, [prefix]) end def modules_with_prefix(%Project{} = project, prefix, predicate) when is_binary(prefix) or is_atom(prefix) do - RemoteControl.call(project, RemoteControl.Modules, :with_prefix, [prefix, predicate]) + RemoteControl.call(project, RemoteControl, :modules_with_prefix, [prefix, predicate]) end @spec docs(Project.t(), module()) :: {:ok, CodeIntelligence.Docs.t()} | {:error, any()} def docs(%Project{} = project, module, opts \\ []) when is_atom(module) do - RemoteControl.call(project, CodeIntelligence.Docs, :for_module, [module, opts]) + RemoteControl.call(project, RemoteControl, :docs, [module, opts]) end def register_listener(%Project{} = project, listener_pid, message_types) when is_pid(listener_pid) and is_list(message_types) do - RemoteControl.call(project, RemoteControl.Dispatch, :register_listener, [ + RemoteControl.call(project, RemoteControl, :register_listener, [ listener_pid, message_types ]) end def broadcast(%Project{} = project, message) do - RemoteControl.call(project, RemoteControl.Dispatch, :broadcast, [message]) + RemoteControl.call(project, RemoteControl, :broadcast, [message]) + end + + def reindex(%Project{} = project) do + RemoteControl.call(project, RemoteControl, :reindex, []) + end + + def index_running?(%Project{} = project) do + RemoteControl.call(project, RemoteControl, :index_running?, []) + end + + def resolve_entity(%Project{} = project, %Analysis{} = analysis, %Position{} = position) do + RemoteControl.call(project, RemoteControl, :resolve_entity, [analysis, position]) + end + + def struct_definitions(%Project{} = project) do + RemoteControl.call(project, RemoteControl, :struct_definitions, []) + end + + def document_symbols(%Project{} = project, %Document{} = document) do + RemoteControl.call(project, RemoteControl, :document_symbols, [document]) + end + + def workspace_symbols(%Project{} = project, query) do + RemoteControl.call(project, RemoteControl, :workspace_symbols, [query]) end end diff --git a/apps/remote_control/lib/lexical/remote_control/api/messages.ex b/apps/remote_control/lib/lexical/remote_control/api/messages.ex index 7a0e334d8..fa1baaad7 100644 --- a/apps/remote_control/lib/lexical/remote_control/api/messages.ex +++ b/apps/remote_control/lib/lexical/remote_control/api/messages.ex @@ -17,8 +17,6 @@ defmodule Lexical.RemoteControl.Api.Messages do defrecord :file_compile_requested, project: nil, build_number: 0, uri: nil - defrecord :file_quoted, project: nil, document: nil, quoted_ast: nil - defrecord :file_compiled, project: nil, build_number: 0, @@ -32,12 +30,21 @@ defmodule Lexical.RemoteControl.Api.Messages do defrecord :module_updated, file: nil, name: nil, functions: [], macros: [], struct: nil defrecord :project_diagnostics, project: nil, build_number: 0, diagnostics: [] + defrecord :file_diagnostics, project: nil, build_number: 0, uri: nil, diagnostics: [] defrecord :project_progress, label: nil, message: nil, stage: :report + defrecord :percent_progress, label: nil, message: nil, stage: :report, max: 0, delta: 0 + defrecord :struct_discovered, module: nil, fields: [] + defrecord :project_index_ready, project: nil + + defrecord :project_reindex_requested, project: nil + + defrecord :project_reindexed, project: nil, elapsed_ms: 0, status: :success + @type compile_status :: 
:successful | :error @type name_and_arity :: {atom, non_neg_integer} @type field_list :: Keyword.t() | [atom] @@ -71,6 +78,7 @@ defmodule Lexical.RemoteControl.Api.Messages do to_version: maybe_version, open?: boolean() ) + @type file_compile_requested :: record(:file_compile_requested, project: Lexical.Project.t(), @@ -78,13 +86,6 @@ defmodule Lexical.RemoteControl.Api.Messages do uri: Lexical.uri() ) - @type file_quoted :: - record(:file_quoted, - project: Lexical.Project.t(), - document: Lexical.Document.t(), - quoted_ast: Macro.t() - ) - @type file_compiled :: record(:file_compiled, project: Lexical.Project.t(), @@ -123,4 +124,16 @@ defmodule Lexical.RemoteControl.Api.Messages do ) @type struct_discovered :: record(:struct_discovered, module: module(), fields: field_list()) + + @type project_index_ready :: record(:project_index_ready, project: Lexical.Project.t()) + + @type project_reindex_requested :: + record(:project_reindex_requested, project: Lexical.Project.t()) + + @type project_reindexed :: + record(:project_reindexed, + project: Lexical.Project.t(), + elapsed_ms: non_neg_integer(), + status: :success | {:error, term()} + ) end diff --git a/apps/remote_control/lib/lexical/remote_control/api/proxy.ex b/apps/remote_control/lib/lexical/remote_control/api/proxy.ex new file mode 100644 index 000000000..790af722e --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api/proxy.ex @@ -0,0 +1,247 @@ +defmodule Lexical.RemoteControl.Api.Proxy do + @moduledoc """ + A bimodal buffering proxy + + This proxy has two modes. In its default mode, it simply forwards function calls to another module, but when + buffering is activated, it will buffer requests and return canned responses. + When a process calls `start_buffering`, it is monitored, and while it's alive, all messages are buffered. When the + process that called `start_buffering` exits, the buffered messages have the potential to be emitted. + + Buffered requests are subject to the proxy's internal logic. Some requests that are time-sensitive + (like formatting) are dropped. Others are deduplicated, while others are reordered. + + The logic is as follows: + `broadcast` - Buffered, though messages related to other events (like compilation) are subject to + the rules that govern their source events. Progress messages are sent regardless of + buffering. + `schedule_compile` - Buffered - Only one call is kept + `compile_document` - Buffered, though only one call per URI is kept, and if a `schedule_compile` call + was buffered, all `compile_document` calls are dropped + `reindex` - Buffered, though only one call is kept, and it is the last thing run. + `index_running?` - Dropped, because it requires an immediate response + `format` - Dropped, as it requires an immediate response. Responds immediately with empty changes + + Internally, there are three states: proxying, draining and buffering. + The proxy starts in proxying mode. Then, when `start_buffering` is called, it changes to draining mode. This + mode checks if there are any in-flight calls. If there aren't any, it changes immediately to buffering mode. + If there are in-flight requests, it waits for them to finish, and then switches to buffering mode. Once in buffering + mode, requests are buffered until the process that called `start_buffering` exits. When that happens, the + buffered requests are deduplicated and run, and then the proxy returns to proxying mode.
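+ + An illustrative session, assuming `caller` is the process that initiates buffering (the names here are only examples): + +     :ok = Proxy.start_buffering(caller) +     Proxy.schedule_compile(false) # buffered; replies :ok immediately +     Proxy.index_running?()        # dropped; replies false immediately +     # once `caller` exits, the buffered compile is replayed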
+ + """ + + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api.Messages + alias Lexical.RemoteControl.Api.Proxy.BufferingState + alias Lexical.RemoteControl.Api.Proxy.DrainingState + alias Lexical.RemoteControl.Api.Proxy.ProxyingState + alias Lexical.RemoteControl.Api.Proxy.Records + alias Lexical.RemoteControl.CodeMod + alias Lexical.RemoteControl.Commands + + import Messages + import Record + import Records, only: :macros + + @behaviour :gen_statem + + defrecord :buffer, contents: nil, return: :ok + defrecord :drop, contents: nil, return: :ok + + # public API + + def start_buffering do + start_buffering(self()) + end + + def start_buffering(caller) when is_pid(caller) do + :gen_statem.call(__MODULE__, {:start_buffering, caller}) + end + + # proxied functions + + def broadcast(percent_progress() = message) do + RemoteControl.Dispatch.broadcast(message) + end + + def broadcast(message) do + mfa = to_mfa(RemoteControl.Dispatch.broadcast(message)) + :gen_statem.call(__MODULE__, buffer(contents: mfa)) + end + + def schedule_compile(force? \\ false) do + project = RemoteControl.get_project() + + mfa = to_mfa(RemoteControl.Build.schedule_compile(project, force?)) + :gen_statem.call(__MODULE__, buffer(contents: mfa)) + end + + def compile_document(document) do + project = RemoteControl.get_project() + + mfa = to_mfa(RemoteControl.Build.compile_document(project, document)) + + :gen_statem.call(__MODULE__, buffer(contents: mfa)) + end + + def reindex do + mfa = to_mfa(Commands.Reindex.perform()) + :gen_statem.call(__MODULE__, buffer(contents: mfa)) + end + + def index_running? do + mfa = to_mfa(Commands.Reindex.running?()) + :gen_statem.call(__MODULE__, drop(contents: mfa, return: false)) + end + + def format(%Document{} = document) do + mfa = to_mfa(CodeMod.Format.edits(document)) + drop = drop(contents: mfa, return: {:ok, Changes.new(document, [])}) + :gen_statem.call(__MODULE__, drop) + end + + # utility functions + + def buffering? do + :gen_statem.call(__MODULE__, :buffering?) 
+ end + + # :gen_statem callbacks + def start_link do + :gen_statem.start_link({:local, __MODULE__}, __MODULE__, [], []) + end + + @impl :gen_statem + def init(_) do + {:ok, :proxying, ProxyingState.new()} + end + + def child_spec(_) do + %{ + id: __MODULE__, + start: {__MODULE__, :start_link, []} + } + end + + @impl :gen_statem + def callback_mode, do: :state_functions + + # callbacks for proxying mode + + def proxying({:call, from}, {:start_buffering, caller}, %ProxyingState{} = state) do + Process.monitor(caller) + buffering_state = BufferingState.new(caller) + + if ProxyingState.empty?(state) do + {:next_state, :buffering, buffering_state, {:reply, from, :ok}} + else + draining_state = DrainingState.new(buffering_state, state) + {:next_state, :draining, draining_state, {:reply, from, :ok}} + end + end + + def proxying({:call, from}, buffer(contents: contents), %ProxyingState{} = state) do + state = ProxyingState.apply_mfa(state, from, contents) + + {:keep_state, state} + end + + def proxying({:call, from}, drop(contents: contents), state) do + state = ProxyingState.apply_mfa(state, from, contents) + {:keep_state, state} + end + + def proxying({:call, from}, :buffering?, state) do + {:keep_state, state, {:reply, from, false}} + end + + def proxying(:info, {ref, reply}, %ProxyingState{} = state) when is_reference(ref) do + ProxyingState.reply(state, ref, reply) + {:keep_state, state} + end + + def proxying(:info, {:DOWN, ref, _, _, _}, %ProxyingState{} = state) do + # Handle the DOWN from the task + new_state = ProxyingState.consume_reply(state, ref) + {:keep_state, new_state} + end + + # Callbacks for the draining mode + + def draining(:info, {ref, reply}, %DrainingState{} = state) when is_reference(ref) do + DrainingState.reply(state, ref, reply) + + {:keep_state, state} + end + + def draining({:call, from}, {:start_buffering, _}, %DrainingState{} = state) do + initiator_pid = state.buffering_state.initiator_pid + {:keep_state, state, {:reply, from, {:error, {:already_buffering, initiator_pid}}}} + end + + def draining( + {:call, from}, + buffer(contents: mfa() = mfa, return: return), + %DrainingState{} = state + ) do + state = DrainingState.add_mfa(state, mfa) + {:keep_state, state, {:reply, from, return}} + end + + def draining({:call, from}, drop(return: return), %DrainingState{} = state) do + {:keep_state, state, {:reply, from, return}} + end + + def draining({:call, from}, :buffering?, state) do + {:keep_state, state, {:reply, from, true}} + end + + def draining(:info, {:DOWN, ref, _, _, _}, %DrainingState{} = state) do + new_state = DrainingState.consume_reply(state, ref) + + if DrainingState.drained?(new_state) do + {:next_state, :buffering, state.buffering_state} + else + {:keep_state, state} + end + end + + # Callbacks for buffering mode + + def buffering({:call, from}, {:start_buffering, _}, %BufferingState{} = state) do + {:keep_state, state, {:reply, from, {:error, {:already_buffering, state.initiator_pid}}}} + end + + def buffering( + {:call, from}, + buffer(contents: mfa() = mfa, return: return), + %BufferingState{} = state + ) do + state = BufferingState.add_mfa(state, mfa) + {:keep_state, state, {:reply, from, return}} + end + + def buffering({:call, from}, drop(return: return), %BufferingState{} = state) do + {:keep_state, state, {:reply, from, return}} + end + + def buffering(:info, {:DOWN, _, :process, pid, _}, %BufferingState{initiator_pid: pid} = state) do + state + |> BufferingState.flush() + |> Enum.each(&apply/1) + + {:next_state, :proxying, ProxyingState.new()} 
+ end + + def buffering({:call, from}, :buffering?, state) do + {:keep_state, state, {:reply, from, true}} + end + + # Private + + defp apply(mfa(module: module, function: function, arguments: arguments)) do + apply(module, function, arguments) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/api/proxy/buffering_state.ex b/apps/remote_control/lib/lexical/remote_control/api/proxy/buffering_state.ex new file mode 100644 index 000000000..ea00e51e0 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api/proxy/buffering_state.ex @@ -0,0 +1,146 @@ +defmodule Lexical.RemoteControl.Api.Proxy.BufferingState do + alias Lexical.Document + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.Commands + + import Api.Messages + import Api.Proxy.Records + + defstruct initiator_pid: nil, buffer: [] + + def new(initiator_pid) do + %__MODULE__{initiator_pid: initiator_pid} + end + + def add_mfa(%__MODULE__{} = state, mfa() = mfa_record) do + %__MODULE__{state | buffer: [mfa_record | state.buffer]} + end + + def flush(%__MODULE__{} = state) do + {messages, commands} = + state.buffer + |> Enum.reverse() + |> Enum.split_with(fn value -> + match?(mfa(module: RemoteControl.Dispatch, function: :broadcast), value) + end) + + {project_compile, document_compiles, reindex} = collapse_commands(commands) + + all_commands = [project_compile | Map.values(document_compiles)] + + all_commands + |> Enum.concat(collapse_messages(messages, project_compile, document_compiles)) + |> Enum.filter(&match?(mfa(), &1)) + |> Enum.sort_by(fn mfa(seq: seq) -> seq end) + |> then(fn commands -> + if reindex do + commands ++ [reindex] + else + commands + end + end) + end + + defp collapse_commands(commands) do + # Rules for collapsing commands + # 1. If there's a project compilation requested, remove all document compilations + # 2. Formats can be dropped, as they're only valid for a short time. + # 3. If there's a reindex, do it after the project compilation has finished + + initial_state = %{project_compiles: [], document_compiles: %{}, reindex: nil} + + grouped = + commands + |> Enum.reduce( + initial_state, + fn + mfa(module: Build, function: :schedule_compile) = mfa, acc -> + Map.update(acc, :project_compiles, [mfa], &[mfa | &1]) + + mfa(module: Build, function: :compile_document) = mfa, acc -> + mfa(arguments: [_, document]) = mfa + uri = document.uri + put_in(acc, [:document_compiles, uri], mfa) + + mfa(module: Commands.Reindex) = mfa, acc -> + Map.put(acc, :reindex, mfa) + + _, acc -> + acc + end + ) + + %{ + project_compiles: project_compiles, + document_compiles: document_compiles, + reindex: reindex + } = grouped + + project_compile = + Enum.reduce(project_compiles, nil, fn + mfa(arguments: [_, true]) = mfa, _ -> + mfa + + mfa(arguments: [true]) = mfa, _ -> + mfa + + mfa() = mfa, nil -> + mfa + + _, acc -> + acc + end) + + document_compiles = + if project_compile do + %{} + else + for {uri, indexed} <- document_compiles, Document.Store.open?(uri), into: %{} do + {uri, indexed} + end + end + + {project_compile, document_compiles, reindex} + end + + defp collapse_messages(messages, project_compile, document_compiles) do + # Rules for collapsing messages + # 1. If the message is document-centric, discard it if the document isn't open. + # 2. It's probably safe to drop all file compile requested messages + # 3. File diagnostics can be dropped if + # a. There is a document compile command for that uri + # b. 
There is a project compile requested + # 4. Progress messages should still be sent to dispatch, even when buffering + + Enum.filter(messages, fn + mfa(arguments: [file_compile_requested()]) -> + false + + mfa(arguments: [project_compile_requested()]) -> + false + + mfa(arguments: [file_diagnostics(uri: uri)]) -> + not (Map.has_key?(document_compiles, uri) or + match?(project_compile_requested(), project_compile)) + + mfa(arguments: [body]) -> + case fetch_uri(body) do + {:ok, uri} -> + Document.Store.open?(uri) + + :error -> + true + end + end) + end + + defp fetch_uri(filesystem_event(uri: uri)), do: {:ok, uri} + defp fetch_uri(file_changed(uri: uri)), do: {:ok, uri} + defp fetch_uri(file_compile_requested(uri: uri)), do: {:ok, uri} + defp fetch_uri(file_compiled(uri: uri)), do: {:ok, uri} + defp fetch_uri(file_deleted(uri: uri)), do: {:ok, uri} + defp fetch_uri(file_diagnostics(uri: uri)), do: {:ok, uri} + defp fetch_uri(_), do: :error +end diff --git a/apps/remote_control/lib/lexical/remote_control/api/proxy/draining_state.ex b/apps/remote_control/lib/lexical/remote_control/api/proxy/draining_state.ex new file mode 100644 index 000000000..4941fc121 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api/proxy/draining_state.ex @@ -0,0 +1,30 @@ +defmodule Lexical.RemoteControl.Api.Proxy.DrainingState do + alias Lexical.RemoteControl.Api.Proxy.BufferingState + alias Lexical.RemoteControl.Api.Proxy.ProxyingState + alias Lexical.RemoteControl.Api.Proxy.Records + + import Records + + defstruct [:proxying_state, :buffering_state] + + def new(%BufferingState{} = buffering_state, %ProxyingState{} = proxying_state) do + %__MODULE__{buffering_state: buffering_state, proxying_state: proxying_state} + end + + def drained?(%__MODULE__{} = state) do + ProxyingState.empty?(state.proxying_state) + end + + def consume_reply(%__MODULE__{} = state, ref) when is_reference(ref) do + %__MODULE__{state | proxying_state: ProxyingState.consume_reply(state.proxying_state, ref)} + end + + def reply(%__MODULE__{} = state, ref, reply) do + ProxyingState.reply(state.proxying_state, ref, reply) + end + + def add_mfa(%__MODULE__{} = state, mfa() = mfa) do + new_buffering_state = BufferingState.add_mfa(state.buffering_state, mfa) + %__MODULE__{state | buffering_state: new_buffering_state} + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/api/proxy/proxying_state.ex b/apps/remote_control/lib/lexical/remote_control/api/proxy/proxying_state.ex new file mode 100644 index 000000000..81a723e46 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api/proxy/proxying_state.ex @@ -0,0 +1,42 @@ +defmodule Lexical.RemoteControl.Api.Proxy.ProxyingState do + alias Lexical.RemoteControl.Api.Proxy.Records + + defstruct refs_to_from: %{} + + import Records + + def new do + %__MODULE__{} + end + + def apply_mfa( + %__MODULE__{} = state, + from, + mfa(module: module, function: function, arguments: arguments) + ) do + task = Task.async(module, function, arguments) + + %__MODULE__{ + state + | refs_to_from: Map.put(state.refs_to_from, task.ref, from) + } + end + + def reply(%__MODULE__{} = state, ref, reply) when is_reference(ref) do + case Map.fetch(state.refs_to_from, ref) do + {:ok, from} -> + :gen_statem.reply(from, reply) + + _ -> + :ok + end + end + + def consume_reply(%__MODULE__{} = state, ref) when is_reference(ref) do + %__MODULE__{state | refs_to_from: Map.delete(state.refs_to_from, ref)} + end + + def empty?(%__MODULE__{} = state) do + Enum.empty?(state.refs_to_from) + end +end 
diff --git a/apps/remote_control/lib/lexical/remote_control/api/proxy/records.ex b/apps/remote_control/lib/lexical/remote_control/api/proxy/records.ex new file mode 100644 index 000000000..5f625b73f --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/api/proxy/records.ex @@ -0,0 +1,37 @@ +defmodule Lexical.RemoteControl.Api.Proxy.Records do + alias Lexical.Formats + + import Record + + defrecord :mfa, module: nil, function: nil, arguments: [], seq: nil + + def mfa(module, function, arguments) do + mfa( + module: module, + function: function, + arguments: arguments, + seq: System.unique_integer([:monotonic]) + ) + end + + defmacro to_mfa(ast) do + {m, f, a} = Macro.decompose_call(ast) + module = Macro.expand(m, __CALLER__) + arity = length(a) + + Code.ensure_compiled!(module) + Code.ensure_loaded!(module) + + unless function_exported?(module, f, arity) do + mfa = Formats.mfa(module, f, arity) + + raise CompileError.message(%{ + file: __CALLER__.file, + line: __CALLER__.line, + description: "No function named #{mfa} defined. Proxy will fail" + }) + end + + quote(do: unquote(__MODULE__).mfa(unquote(m), unquote(f), unquote(a))) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/application.ex b/apps/remote_control/lib/lexical/remote_control/application.ex index 373a8e32a..907fc43d4 100644 --- a/apps/remote_control/lib/lexical/remote_control/application.ex +++ b/apps/remote_control/lib/lexical/remote_control/application.ex @@ -1,30 +1,36 @@ defmodule Lexical.RemoteControl.Application do - # See https://hexdocs.pm/elixir/Application.html - # for more information on OTP Applications @moduledoc false alias Lexical.RemoteControl use Application + require Logger @impl true def start(_type, _args) do children = if RemoteControl.project_node?() do [ + RemoteControl.Api.Proxy, + RemoteControl.Commands.Reindex, + RemoteControl.Module.Loader, {RemoteControl.Dispatch, progress: true}, RemoteControl.ModuleMappings, RemoteControl.Build, RemoteControl.Build.CaptureServer, RemoteControl.Plugin.Runner.Supervisor, - RemoteControl.Plugin.Runner.Coordinator + RemoteControl.Plugin.Runner.Coordinator, + RemoteControl.Search.Store.Backends.Ets, + {RemoteControl.Search.Store, + [ + &RemoteControl.Search.Indexer.create_index/1, + &RemoteControl.Search.Indexer.update_index/2 + ]} ] else [] end - # See https://hexdocs.pm/elixir/Supervisor.html - # for other strategies and supported options opts = [strategy: :one_for_one, name: Lexical.RemoteControl.Supervisor] Supervisor.start_link(children, opts) end diff --git a/apps/remote_control/lib/lexical/remote_control/bootstrap.ex b/apps/remote_control/lib/lexical/remote_control/bootstrap.ex index 8f49f8d8b..d7c43b9af 100644 --- a/apps/remote_control/lib/lexical/remote_control/bootstrap.ex +++ b/apps/remote_control/lib/lexical/remote_control/bootstrap.ex @@ -10,9 +10,12 @@ defmodule Lexical.RemoteControl.Bootstrap do alias Lexical.RemoteControl require Logger - def init(%Project{} = project, remote_control_config) do + def init(%Project{} = project, document_store_entropy, app_configs) do + Lexical.Document.Store.set_entropy(document_store_entropy) + + Application.put_all_env(app_configs) + maybe_append_hex_path() - Application.put_all_env(remote_control: remote_control_config) project_root = Project.root_path(project) diff --git a/apps/remote_control/lib/lexical/remote_control/build.ex b/apps/remote_control/lib/lexical/remote_control/build.ex index 83cbed5ff..0137a3527 100644 --- a/apps/remote_control/lib/lexical/remote_control/build.ex +++ 
b/apps/remote_control/lib/lexical/remote_control/build.ex @@ -2,34 +2,44 @@ defmodule Lexical.RemoteControl.Build do alias Lexical.Document alias Lexical.Project alias Lexical.RemoteControl + alias Lexical.RemoteControl.Build.Document.Compilers.HEEx alias Lexical.RemoteControl.Build.State + alias Lexical.VM.Versions require Logger use GenServer - @tick_interval_millis 50 + @timeout_interval_millis 50 # Public interface - def schedule_compile(%Project{} = project, force? \\ false) do - RemoteControl.call(project, GenServer, :cast, [__MODULE__, {:compile, force?}]) + def path(%Project{} = project) do + %{elixir: elixir, erlang: erlang} = Versions.current() + erlang_major = erlang |> String.split(".") |> List.first() + elixir_version = Version.parse!(elixir) + elixir_major = "#{elixir_version.major}.#{elixir_version.minor}" + build_root = Project.build_path(project) + Path.join([build_root, "erl-#{erlang_major}", "elixir-#{elixir_major}"]) end - def compile_document(%Project{} = project, %Document{} = document) do - unless Path.absname(document.path) == "mix.exs" do - RemoteControl.call(project, GenServer, :cast, [__MODULE__, {:compile_file, document}]) + def schedule_compile(%Project{} = _project, force? \\ false) do + GenServer.cast(__MODULE__, {:compile, force?}) + end + + def compile_document(%Project{} = _project, %Document{} = document) do + with false <- Path.absname(document.path) == "mix.exs", + false <- HEEx.recognizes?(document) do + GenServer.cast(__MODULE__, {:compile_file, document}) end :ok end # this is for testing - def force_compile_document(%Project{} = project, %Document{} = document) do - unless Path.absname(document.path) == "mix.exs" do - RemoteControl.call(project, GenServer, :call, [ - __MODULE__, - {:force_compile_file, document} - ]) + def force_compile_document(%Document{} = document) do + with false <- Path.absname(document.path) == "mix.exs", + false <- HEEx.recognizes?(document) do + GenServer.call(__MODULE__, {:force_compile_file, document}) end :ok @@ -57,32 +67,30 @@ defmodule Lexical.RemoteControl.Build do @impl GenServer def handle_continue(:ensure_build_directory, %State{} = state) do State.ensure_build_directory(state) - schedule_tick() {:noreply, state} end @impl GenServer def handle_call({:force_compile_file, %Document{} = document}, _from, %State{} = state) do State.compile_file(state, document) - {:reply, :ok, state} + {:reply, :ok, state, @timeout_interval_millis} end @impl GenServer def handle_cast({:compile, force?}, %State{} = state) do - State.compile_project(state, force?) - {:noreply, state} + new_state = State.on_project_compile(state, force?) 
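+    # The timeout in the reply re-arms the GenServer's :timeout message, which +    # now drives State.on_timeout/1 instead of the old fixed-interval tick.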
+ {:noreply, new_state, @timeout_interval_millis} end @impl GenServer def handle_cast({:compile_file, %Document{} = document}, %State{} = state) do new_state = State.on_file_compile(state, document) - {:noreply, new_state} + {:noreply, new_state, @timeout_interval_millis} end @impl GenServer - def handle_info(:tick, %State{} = state) do - new_state = State.on_tick(state) - schedule_tick() + def handle_info(:timeout, %State{} = state) do + new_state = State.on_timeout(state) {:noreply, new_state} end @@ -91,8 +99,4 @@ defmodule Lexical.RemoteControl.Build do Logger.warning("Undefined message: #{inspect(msg)}") {:noreply, project} end - - defp schedule_tick do - Process.send_after(self(), :tick, @tick_interval_millis) - end end diff --git a/apps/remote_control/lib/lexical/remote_control/build/document.ex b/apps/remote_control/lib/lexical/remote_control/build/document.ex index 16d4065e1..d1e3a41f2 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document.ex @@ -1,12 +1,23 @@ defmodule Lexical.RemoteControl.Build.Document do - alias Elixir.Features alias Lexical.Document + alias Lexical.RemoteControl.Build alias Lexical.RemoteControl.Build.Document.Compilers + alias Lexical.RemoteControl.Build.Isolation @compilers [Compilers.Config, Compilers.Elixir, Compilers.EEx, Compilers.HEEx, Compilers.NoOp] def compile(%Document{} = document) do compiler = Enum.find(@compilers, & &1.recognizes?(document)) - compiler.compile(document) + compile_fun = fn -> compiler.compile(document) end + + case Isolation.invoke(compile_fun) do + {:ok, result} -> + result + + {:error, {exception, stack}} -> + diagnostic = Build.Error.error_to_diagnostic(document, exception, stack, nil) + diagnostics = Build.Error.refine_diagnostics([diagnostic]) + {:error, diagnostics} + end end end diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/config.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/config.ex index bc4cbbc46..d135f5805 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/config.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/config.ex @@ -6,10 +6,17 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Config do alias Lexical.Document alias Lexical.Plugin.V1.Diagnostic alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.Build.Error.Location + + @elixir_source "Elixir" @behaviour Build.Document.Compiler require Logger + @impl true + def enabled?, do: true + + @impl true def recognizes?(%Document{} = document) do in_config_dir? = document.path @@ -19,12 +26,13 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Config do in_config_dir? and Path.extname(document.path) == ".exs" end - def enabled? 
do - Features.config_reader?() - end - + @impl true def compile(%Document{} = document) do - do_compile(document) + if Features.with_diagnostics?() do + compile_with_diagnostics(document) + else + raw_compile(document) + end end defp config_dir do @@ -36,23 +44,15 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Config do |> Path.dirname() end - defp do_compile(%Document{} = document) do - if Features.with_diagnostics?() do - compile_with_diagnostics(document) - else - raw_compile(document) - end - end - defp raw_compile(%Document{} = document) do contents = Document.to_string(document) try do - Config.Reader.eval!(document.path, contents) + Config.Reader.eval!(document.path, contents, env: :test) {:ok, []} rescue e -> - {:error, [to_result(document, e)]} + {:error, [to_result(document, e, __STACKTRACE__)]} end end @@ -76,33 +76,41 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Config do end end - defp to_result(%Document{} = document, %CompileError{} = error) do - Diagnostic.Result.new(document.uri, error.line, Exception.message(error), :error, "Elixir") + defp to_result(document, error, stack \\ []) + + defp to_result(%Document{} = document, %CompileError{} = error, _stack) do + Diagnostic.Result.new( + document.uri, + error.line, + Exception.message(error), + :error, + @elixir_source + ) end - defp to_result(%Document{} = document, %error_type{} = error) + defp to_result(%Document{} = document, %error_type{} = error, _stack) when error_type in [SyntaxError, TokenMissingError] do Diagnostic.Result.new( document.uri, {error.line, error.column}, Exception.message(error), :error, - "Elixir" + @elixir_source ) end - defp to_result(%Document{} = document, %{ - position: position, - message: message, - severity: severity - }) do - Diagnostic.Result.new( - document.path, - position, - message, - severity, - "Elixir" - ) + defp to_result( + %Document{} = document, + %{position: position, message: message, severity: severity}, + _stack + ) do + Diagnostic.Result.new(document.path, position, message, severity, @elixir_source) + end + + defp to_result(%Document{} = document, %{__exception__: true} = exception, stack) do + message = Exception.message(exception) + position = Location.stack_to_position(stack) + Diagnostic.Result.new(document.path, position, message, :error, @elixir_source) end defp reject_logged_messages(results) do diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/eex.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/eex.ex index 89c2671f7..243ef2249 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/eex.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/eex.ex @@ -10,14 +10,14 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EEx do @behaviour Compiler - def recognizes?(%Document{} = document) do - Path.extname(document.path) == ".eex" - end + @impl true + def recognizes?(%Document{language_id: "eex"}), do: true + def recognizes?(_), do: false - def enabled? 
do - true - end + @impl true + def enabled?, do: true + @impl true def compile(%Document{} = document) do with {:ok, quoted} <- eex_to_quoted(document), :ok <- eval_quoted(document, quoted) do @@ -45,7 +45,8 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EEx do end end - defp eval_quoted(%Document{} = document, quoted_ast) do + @spec eval_quoted(Document.t(), Macro.t()) :: :ok | {:error, [Result.t()]} + def eval_quoted(%Document{} = document, quoted_ast) do result = if Elixir.Features.with_diagnostics?() do eval_quoted_with_diagnostics(quoted_ast, document.path) @@ -92,8 +93,21 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EEx do end def do_eval_quoted(quoted_ast, path) do + eval_heex_quoted? = + quoted_ast + |> Future.Macro.path(&match?({:require, [context: Phoenix.LiveView.TagEngine], _}, &1)) + |> then(&(not is_nil(&1))) + + env = + if eval_heex_quoted? do + # __ENV__ is required for heex quoted evaluations. + Map.put(__ENV__, :file, path) + else + [file: path] + end + try do - {result, _} = Code.eval_quoted(quoted_ast, [assigns: %{}], file: path) + {result, _} = Code.eval_quoted(quoted_ast, [assigns: %{}], env) {:ok, result} rescue exception -> diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/elixir.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/elixir.ex index 934168033..e53ab498c 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/elixir.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/elixir.ex @@ -9,23 +9,23 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Elixir do alias Lexical.RemoteControl.Build.Document.Compilers @behaviour Build.Document.Compiler - @valid_extensions ~w(.ex .exs) + @impl true def recognizes?(%Document{} = doc) do - Path.extname(doc.path) in @valid_extensions + doc.language_id in ["elixir", "elixir-script"] end - def enabled? do - true - end + @impl true + def enabled?, do: true + @impl true def compile(%Document{} = document) do case to_quoted(document) do {:ok, quoted} -> Compilers.Quoted.compile(document, quoted, "Elixir") {:error, {meta, message_info, token}} -> - diagnostics = Build.Error.parse_error_to_diagnostics(document, meta, message_info, token) + diagnostics = Build.Error.Parse.to_diagnostics(document, meta, message_info, token) {:error, diagnostics} end end diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/heex.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/heex.ex index a08459782..69a0338b2 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/heex.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/heex.ex @@ -10,24 +10,39 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.HEEx do @behaviour Compiler - def recognizes?(%Document{} = document) do - Path.extname(document.path) == ".heex" - end + def recognizes?(%Document{language_id: "phoenix-heex"}), do: true + def recognizes?(%Document{language_id: "heex"}), do: true + def recognizes?(_), do: false def enabled? 
do true end def compile(%Document{} = document) do - case heex_to_quoted(document) do - {:ok, _} -> - Compilers.EEx.compile(document) + with :ok <- eval_heex_quoted(document) do + compile_eex_quoted(document) + end + end + + defp eval_heex_quoted(document) do + with {:ok, quoted} <- heex_to_quoted(document) do + Compilers.EEx.eval_quoted(document, quoted) + end + end - other -> - other + defp compile_eex_quoted(document) do + with {:error, errors} <- Compilers.EEx.compile(document) do + {:error, reject_undefined_variables(errors)} end end + defp reject_undefined_variables(errors) do + # undefined variable errors are already reported by `eval_heex_quoted/1` + Enum.reject(errors, fn error -> + error.message =~ "undefined variable" + end) + end + defp heex_to_quoted(%Document{} = document) do try do source = Document.to_string(document) @@ -59,6 +74,7 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.HEEx do defp error_to_result(document, %error_struct{} = error) when error_struct in [ + SyntaxError, TokenMissingError, Phoenix.LiveView.Tokenizer.ParseError ] do diff --git a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex index 3a4176ef7..94551b504 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/document/compilers/quoted.ex @@ -1,26 +1,22 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Quoted do alias Elixir.Features + alias Lexical.Ast alias Lexical.Document - alias Lexical.RemoteControl - alias Lexical.RemoteControl.Api alias Lexical.RemoteControl.Build - alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.ModuleMappings - import Api.Messages import Lexical.RemoteControl.Build.CaptureIO, only: [capture_io: 2] def compile(%Document{} = document, quoted_ast, compiler_name) do - Dispatch.broadcast( - file_quoted( - project: RemoteControl.get_project(), - document: document, - quoted_ast: quoted_ast - ) - ) - prepare_compile(document.path) + quoted_ast = + if document.language_id == "elixir-script" do + wrap_top_level_forms(quoted_ast) + else + quoted_ast + end + {status, diagnostics} = if Features.with_diagnostics?() do do_compile(quoted_ast, document) @@ -78,8 +74,9 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Quoted do {captured_messages, {:exception, exception, stack, quoted_ast}} -> error = Build.Error.error_to_diagnostic(document, exception, stack, quoted_ast) diagnostics = Build.Error.message_to_diagnostic(document, captured_messages) + refined = Build.Error.refine_diagnostics([error | diagnostics]) - {:error, [error | diagnostics]} + {:error, refined} {"", {:ok, modules}} -> purge_removed_modules(old_modules, modules) @@ -87,7 +84,12 @@ {captured_warnings, {:ok, modules}} -> purge_removed_modules(old_modules, modules) + + diagnostics = + document + |> Build.Error.message_to_diagnostic(captured_warnings) + |> Build.Error.refine_diagnostics() + {:ok, diagnostics} end end @@ -136,4 +138,145 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.Quoted do defp replace_source(result, source) do Map.put(result, :source, source) end + + @doc false + def wrap_top_level_forms({:__block__, meta, nodes}) do + {chunks, _vars} = + nodes + |> Enum.chunk_by(&should_wrap?/1) + |>
Enum.with_index() + |> Enum.flat_map_reduce([], fn {[node | _] = nodes, i}, vars -> + if should_wrap?(node) do + {wrapped, vars} = wrap_nodes(nodes, vars, i) + {[wrapped], vars} + else + {nodes, vars} + end + end) + + {:__block__, meta, chunks} + end + + def wrap_top_level_forms(ast) do + wrap_top_level_forms({:__block__, [], [ast]}) + end + + defp wrap_nodes(nodes, vars, i) do + module_name = :"lexical_wrapper_#{i}" + {nodes, new_vars} = suppress_and_extract_vars(nodes) + + quoted = + quote do + defmodule unquote(module_name) do + def __lexical_wrapper__([unquote_splicing(vars)]) do + (unquote_splicing(nodes)) + end + end + end + + {quoted, new_vars ++ vars} + end + + @allowed_top_level [:defmodule, :alias, :import, :require, :use] + defp should_wrap?({allowed, _, _}) when allowed in @allowed_top_level, do: false + defp should_wrap?(_), do: true + + @doc false + # This function replaces all unused variables with `_` in order + # to suppress warnings while accumulating those vars. The approach + # here is bottom-up, starting from the last expression and working + # back to the beginning: + # + # - If the expression is an assignment, collect vars from the LHS, + # replacing them with `_` if they haven't been referenced, then + # collect references from the RHS. + # - If the expression isn't an assignment, just collect references. + # - Note that pinned vars on the LHS of an assignment are references. + # + def suppress_and_extract_vars(quoted) + + def suppress_and_extract_vars(list) when is_list(list) do + list + |> Enum.reverse() + |> do_suppress_and_extract_vars() + end + + def suppress_and_extract_vars({:__block__, meta, nodes}) do + {nodes, vars} = suppress_and_extract_vars(nodes) + {{:__block__, meta, nodes}, vars} + end + + def suppress_and_extract_vars(expr) do + {[expr], vars} = suppress_and_extract_vars([expr]) + {expr, vars} + end + + defp do_suppress_and_extract_vars(list, acc \\ [], references \\ [], vars \\ []) + + defp do_suppress_and_extract_vars([expr | rest], acc, references, vars) do + {expr, new_vars} = suppress_and_extract_vars_from_expr(expr, references) + new_references = extract_references_from_expr(expr) + + do_suppress_and_extract_vars( + rest, + [expr | acc], + new_references ++ references, + new_vars ++ vars + ) + end + + defp do_suppress_and_extract_vars([], acc, _references, vars) do + {acc, vars} + end + + defp suppress_and_extract_vars_from_expr({:=, meta, [left, right]}, references) do + {left, left_vars} = + Ast.prewalk_vars(left, [], fn + {:^, _, _} = pinned, acc -> + {pinned, acc} + + {name, meta, context} = var, acc -> + if Ast.has_var?(references, name, context) do + {var, [{name, [], context} | acc]} + else + {{:_, meta, nil}, [var | acc]} + end + end) + + {right, right_vars} = suppress_and_extract_vars_from_expr(right, references) + + {{:=, meta, [left, right]}, left_vars ++ right_vars} + end + + defp suppress_and_extract_vars_from_expr(other, _references) do + {other, []} + end + + defp extract_references_from_expr({:=, _, [left, right]}) do + {_, left_references} = + Ast.prewalk_vars(left, [], fn + {:^, _, [referenced_var]}, acc -> + {:ok, [referenced_var | acc]} + + node, acc -> + {node, acc} + end) + + right_references = extract_references_from_expr(right) + + left_references ++ right_references + end + + defp extract_references_from_expr(expr) do + {_, references} = + Ast.prewalk_vars(expr, [], fn + {:^, _, _}, acc -> + {:ok, acc} + + var, acc -> + {:ok, [var | acc]} + end) + + references + end end diff --git 
a/apps/remote_control/lib/lexical/remote_control/build/error.ex b/apps/remote_control/lib/lexical/remote_control/build/error.ex index c9dced449..f1ae13fe4 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/error.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/error.ex @@ -1,6 +1,8 @@ defmodule Lexical.RemoteControl.Build.Error do + alias Lexical.Ast alias Lexical.Document alias Lexical.Plugin.V1.Diagnostic.Result + alias Lexical.RemoteControl.Build.Error.Location alias Mix.Task.Compiler require Logger @@ -24,7 +26,7 @@ defmodule Lexical.RemoteControl.Build.Error do |> normalize() |> format() end) - |> uniq() + |> Location.uniq() end defp normalize(%Compiler.Diagnostic{} = diagnostic) do @@ -67,119 +69,24 @@ defmodule Lexical.RemoteControl.Build.Error do end end - defp reject_zeroth_line(diagnostics) do - # Since 1.15, Elixir has some nonsensical error on line 0, - # e.g.: Can't compile this file - # We can simply ignore it, as there is a more accurate one - Enum.reject(diagnostics, fn diagnostic -> - diagnostic.position == 0 - end) - end - - defp uniq(diagnostics) do - # We need to uniq by position because the same position can be reported - # and the `end_line_diagnostic` is always the precise one - extract_line = fn - %Result{position: {line, _column}} -> line - %Result{position: {start_line, _start_col, _end_line, _end_col}} -> start_line - %Result{position: line} -> line - end - - # Note: Sometimes error and warning appear on one line at the same time - # So we need to uniq by line and severity, - # and :error is always more important than :warning - extract_line_and_severity = &{extract_line.(&1), &1.severity} - - diagnostics - |> Enum.sort_by(extract_line_and_severity) - |> Enum.uniq_by(extract_line) - |> reject_zeroth_line() - end - - # Parse errors happen during Code.string_to_quoted and are raised as SyntaxErrors, and TokenMissingErrors. 
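The de-duplication helpers deleted here move into the new `Lexical.RemoteControl.Build.Error.Location` module added later in this diff. Their behavior, sketched with hypothetical diagnostics (struct fields abbreviated):

```elixir
alias Lexical.Plugin.V1.Diagnostic.Result
alias Lexical.RemoteControl.Build.Error.Location

diagnostics = [
  %Result{position: 3, severity: :warning, message: "unused variable \"x\""},
  %Result{position: {3, 7}, severity: :error, message: "undefined function"},
  %Result{position: 0, severity: :error, message: "cannot compile file"}
]

# Sorting by {line, severity} puts the line-3 :error ahead of the
# :warning, uniq-ing by line then keeps only that error, and the
# nonsensical line-0 diagnostic is rejected outright:
Location.uniq(diagnostics)
# => [%Result{position: {3, 7}, severity: :error, ...}]
```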
- def parse_error_to_diagnostics( - %Document{} = source, - context, - {_error, detail} = message_info, - token - ) - when is_binary(detail) do - detail_diagnostics = detail_diagnostics(source, detail) - error = message_info_to_binary(message_info, token) - error_diagnostics = parse_error_to_diagnostics(source, context, error, token) - uniq(error_diagnostics ++ detail_diagnostics) - end - - def parse_error_to_diagnostics(%Document{} = source, context, message_info, token) - when is_exception(message_info) do - parse_error_to_diagnostics(source, context, Exception.message(message_info), token) - end - - def parse_error_to_diagnostics(%Document{} = source, context, message_info, token) do - parse_error_diagnostic_functions = [ - &build_end_line_diagnostics/4, - &build_start_line_diagnostics/4, - &build_hint_diagnostics/4 - ] - - Enum.flat_map( - parse_error_diagnostic_functions, - & &1.(source, context, message_info, token) - ) - end - - defp build_end_line_diagnostics(%Document{} = source, context, message_info, token) do - [end_line_message | _] = String.split(message_info, "\n") - - message = - if String.ends_with?(end_line_message, token) do - end_line_message - else - end_line_message <> token - end - - diagnostic = Result.new(source.uri, context_to_position(context), message, :error, "Elixir") - [diagnostic] - end - - @start_line_regex ~r/(\w+) \(for (.*) starting at line (\d+)\)/ - defp build_start_line_diagnostics(%Document{} = source, _context, message_info, _token) do - case Regex.run(@start_line_regex, message_info) do - [_, missing, token, start_line] -> - message = "The #{token} here is missing a terminator: #{inspect(missing)}" - position = String.to_integer(start_line) - result = Result.new(source.uri, position, message, :error, @elixir_source) - [result] - - _ -> - [] - end - end - - @hint_regex ~r/HINT: .*on line (\d+).*/m - defp build_hint_diagnostics(%Document{} = source, _context, message_info, _token) do - case Regex.run(@hint_regex, message_info) do - [message, hint_line] -> - message = String.replace(message, ~r/on line \d+/, "here") - position = String.to_integer(hint_line) - result = Result.new(source.uri, position, message, :error, @elixir_source) - [result] - - _ -> - [] - end - end - @doc """ The `diagnostics_from_mix/2` is only for Elixir version > 1.15 - From 1.15 onwards with_diagnostics can return some compile-time errors, + From 1.15 onwards `with_diagnostics` can return some compile-time errors, more details: https://github.com/elixir-lang/elixir/pull/12742 """ def diagnostics_from_mix(%Document{} = doc, all_errors_and_warnings) when is_list(all_errors_and_warnings) do for error_or_wanning <- all_errors_and_warnings do - %{position: position, message: message, severity: severity} = error_or_wanning + %{position: pos, message: message, severity: severity} = error_or_wanning + + position = + if span = error_or_wanning[:span] do + Location.range(doc, pos, span) + else + pos + end + Result.new(doc.uri, position, message, severity, @elixir_source) end end @@ -194,7 +101,7 @@ defmodule Lexical.RemoteControl.Build.Error do Result.new( path, - position(compile_error.line), + Location.position(compile_error.line), compile_error.description, :error, @elixir_source @@ -209,7 +116,7 @@ defmodule Lexical.RemoteControl.Build.Error do ) do [{_module, _function, _arity, context} | _] = stack message = Exception.message(function_clause) - position = context_to_position(context) + position = Location.context_to_position(context) Result.new(source.uri, position, message, 
:error, @elixir_source) end @@ -220,7 +127,7 @@ defmodule Lexical.RemoteControl.Build.Error do _quoted_ast ) do message = Exception.message(error) - position = position(1) + position = Location.position(1) Result.new(source.uri, position, message, :error, @elixir_source) end @@ -234,12 +141,12 @@ defmodule Lexical.RemoteControl.Build.Error do message = Exception.message(undefined_function) position = - if context == [] do + if context == [] and is_list(arguments) do arity = length(arguments) mfa = {module, function, arity} mfa_to_position(mfa, quoted_ast) else - stack_to_position(stack) + Location.stack_to_position(stack) end Result.new(source.uri, position, message, :error, @elixir_source) @@ -273,9 +180,9 @@ defmodule Lexical.RemoteControl.Build.Error do position = if pipe_or_struct? or expanding? do - context_to_position(context) + Location.context_to_position(context) else - stack_to_position(stack) + Location.stack_to_position(stack) end Result.new(source.uri, position, message, :error, @elixir_source) @@ -288,7 +195,7 @@ defmodule Lexical.RemoteControl.Build.Error do ExUnit.DuplicateDescribeError ] do message = Exception.message(exception) - position = stack_to_position(stack) + position = Location.stack_to_position(stack) Result.new(source.uri, position, message, :error, @elixir_source) end @@ -354,13 +261,13 @@ defmodule Lexical.RemoteControl.Build.Error do cond do is_nil(context) -> - position(0) + Location.position(0) Keyword.has_key?(context, :line) and Keyword.has_key?(context, :column) -> - position(context[:line], context[:column]) + Location.position(context[:line], context[:column]) Keyword.has_key?(context, :line) -> - position(context[:line]) + Location.position(context[:line]) true -> nil @@ -368,39 +275,9 @@ defmodule Lexical.RemoteControl.Build.Error do end defp safe_split(module) do - module - |> Atom.to_string() - |> String.split(".") - |> case do - [erlang_module] -> String.to_atom(erlang_module) - ["Elixir" | elixir_module_path] -> Enum.map(elixir_module_path, &String.to_atom/1) - end - end - - defp stack_to_position([{_, target, _, _} | rest]) - when target not in [:__FILE__, :__MODULE__] do - stack_to_position(rest) - end - - defp stack_to_position([{_, target, _, context} | _rest]) - when target in [:__FILE__, :__MODULE__] do - context_to_position(context) - end - - defp stack_to_position([]) do - nil - end - - defp context_to_position(context) do - cond do - Keyword.has_key?(context, :line) and Keyword.has_key?(context, :column) -> - position(context[:line], context[:column]) - - Keyword.has_key?(context, :line) -> - position(context[:line]) - - true -> - nil + case Ast.Module.safe_split(module, as: :atoms) do + {:elixir, segments} -> segments + {:erlang, [erlang_module]} -> erlang_module end end @@ -471,32 +348,6 @@ defmodule Lexical.RemoteControl.Build.Error do end end - defp position(line) do - line - end - - defp position(line, column) do - {line, column} - end - - defp message_info_to_binary({header, footer}, token) do - header <> token <> footer - end - - @detail_location_re ~r/at line (\d+)/ - defp detail_diagnostics(%Document{} = source, detail) do - case Regex.scan(@detail_location_re, detail) do - [[matched, line_number]] -> - line_number = String.to_integer(line_number) - message = String.replace(detail, matched, "here") - result = Result.new(source.uri, line_number, message, :error, @elixir_source) - [result] - - _ -> - [] - end - end - defp blank?(s) when is_binary(s) do String.trim(s) == "" end diff --git 
a/apps/remote_control/lib/lexical/remote_control/build/error/location.ex b/apps/remote_control/lib/lexical/remote_control/build/error/location.ex new file mode 100644 index 000000000..d8f4e92c4 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build/error/location.ex @@ -0,0 +1,97 @@ +defmodule Lexical.RemoteControl.Build.Error.Location do + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.Plugin.V1.Diagnostic.Result + + require Logger + + def stack_to_position([{_, target, _, _} | rest]) + when target not in [:__FILE__, :__MODULE__] do + stack_to_position(rest) + end + + def stack_to_position([{_, target, _, context} | _rest]) + when target in [:__FILE__, :__MODULE__] do + context_to_position(context) + end + + def stack_to_position([]) do + nil + end + + def context_to_position(context) do + case {context[:line], context[:column]} do + {nil, nil} -> + Logger.error("Invalid context: #{inspect(context)}") + nil + + {line, nil} -> + line + + {line, column} -> + position(line, column) + end + end + + def position(line) do + line + end + + def position(line, column) do + {line, column} + end + + def fetch_range(%Document{} = document, context) do + case {context[:end_line], context[:end_column]} do + {nil, _} -> + :error + + {end_line, end_column} -> + {line, column} = {context[:line], context[:column]} + {:ok, range(document, line, column, end_line, end_column)} + end + end + + def range(%Document{} = document, {line, column}, {end_line, end_column}) do + range(document, line, column, end_line, end_column) + end + + def range(%Document{} = document, line, column, end_line, end_column) do + start_position = Position.new(document, line, column) + end_position = Position.new(document, end_line, end_column) + Range.new(start_position, end_position) + end + + def uniq(diagnostics) do + exacts = Enum.filter(diagnostics, fn diagnostic -> match?(%Range{}, diagnostic.position) end) + + extract_line = fn + %Result{position: {line, _column}} -> line + %Result{position: line} -> line + end + + # Note: Sometimes error and warning appear on one line at the same time + # So we need to uniq by line and severity, + # and :error is always more important than :warning + extract_line_and_severity = &{extract_line.(&1), &1.severity} + + filtered = + diagnostics + |> Enum.filter(fn diagnostic -> not match?(%Range{}, diagnostic.position) end) + |> Enum.sort_by(extract_line_and_severity) + |> Enum.uniq_by(extract_line) + |> reject_zeroth_line() + + exacts ++ filtered + end + + defp reject_zeroth_line(diagnostics) do + # Since 1.15, Elixir has some nonsensical error on line 0, + # e.g.: Can't compile this file + # We can simply ignore it, as there is a more accurate one + Enum.reject(diagnostics, fn diagnostic -> + diagnostic.position == 0 + end) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build/error/parse.ex b/apps/remote_control/lib/lexical/remote_control/build/error/parse.ex new file mode 100644 index 000000000..e02416016 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build/error/parse.ex @@ -0,0 +1,198 @@ +defmodule Lexical.RemoteControl.Build.Error.Parse do + alias Lexical.Document + alias Lexical.Document.Range + alias Lexical.Plugin.V1.Diagnostic.Result + alias Lexical.RemoteControl.Build.Error.Location + + @elixir_source "Elixir" + + # Parse errors happen during Code.string_to_quoted and are raised as SyntaxErrors, and TokenMissingErrors. 
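The `Location` helpers above are what `diagnostics_from_mix/2` now leans on to turn a diagnostic's `:span` into a precise range. A minimal sketch, assuming the position/span shapes that newer Elixir compiler diagnostics carry and an already-open `document`:

```elixir
alias Lexical.RemoteControl.Build.Error.Location

diagnostic = %{position: {4, 3}, span: {4, 12}, message: "...", severity: :error}

position =
  if span = diagnostic[:span] do
    # the {line, column} start/end pairs become a Range of Position structs
    Location.range(document, diagnostic.position, span)
  else
    # fall back to the bare position (a line, or a {line, column} tuple)
    diagnostic.position
  end
```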
+ def to_diagnostics( + %Document{} = source, + context, + {_error, detail} = message_info, + token + ) + when is_binary(detail) do + # NOTE: mainly for `unexpected token` errors `< 1.16`, + # its details consist of multiple lines, so it is a tuple. + detail_diagnostics = detail_diagnostics(source, detail) + error = message_info_to_binary(message_info, token) + error_diagnostics = to_diagnostics(source, context, error, token) + Location.uniq(detail_diagnostics ++ error_diagnostics) + end + + def to_diagnostics(%Document{} = source, context, message_info, token) + when is_exception(message_info) do + to_diagnostics(source, context, Exception.message(message_info), token) + end + + def to_diagnostics(%Document{} = source, context, message_info, token) do + {start_line_fn, end_line_fn} = + if Features.details_in_context?() do + {&build_end_line_diagnostics_from_context/4, &build_start_line_diagnostics_from_context/4} + else + {&build_start_line_diagnostics/4, &build_end_line_diagnostics/4} + end + + parse_error_diagnostic_functions = [ + end_line_fn, + start_line_fn, + &build_hint_diagnostics/4 + ] + + parse_error_diagnostic_functions + |> Enum.flat_map(& &1.(source, context, message_info, token)) + |> Location.uniq() + end + + @missing_terminator_pattern ~r/missing terminator: \w+/ + defp build_end_line_diagnostics_from_context( + %Document{} = source, + context, + message_info, + token + ) do + message = + cond do + String.starts_with?(message_info, "unexpected") -> + ~s/#{message_info}#{token}, expected `#{context[:expected_delimiter]}`/ + + Regex.match?(@missing_terminator_pattern, message_info) -> + [message] = Regex.run(@missing_terminator_pattern, message_info) + message + + true -> + "#{message_info}#{token}" + end + + case Location.fetch_range(source, context) do + {:ok, %Range{end: end_pos}} -> + [ + Result.new( + source.uri, + {end_pos.line, end_pos.character}, + message, + :error, + @elixir_source + ) + ] + + :error -> + [] + end + end + + defp build_end_line_diagnostics(%Document{} = source, context, message_info, token) do + [end_line_message | _] = String.split(message_info, "\n") + + message = + if String.ends_with?(end_line_message, token) do + end_line_message + else + end_line_message <> token + end + + diagnostic = + Result.new(source.uri, Location.context_to_position(context), message, :error, "Elixir") + + [diagnostic] + end + + defp build_start_line_diagnostics_from_context( + %Document{} = source, + context, + message_info, + token + ) do + opening_delimiter = context[:opening_delimiter] + + if opening_delimiter do + build_opening_delimiter_diagnostics(source, context, opening_delimiter) + else + build_syntax_error_diagnostic(source, context, message_info, token) + end + end + + defp build_opening_delimiter_diagnostics(%Document{} = source, context, opening_delimiter) do + message = + ~s/The `#{opening_delimiter}` here is missing terminator `#{context[:expected_delimiter]}`/ + + opening_delimiter_length = opening_delimiter |> Atom.to_string() |> String.length() + + pos = + Location.range( + source, + context[:line], + context[:column], + context[:line], + context[:column] + opening_delimiter_length + ) + + result = Result.new(source.uri, pos, message, :error, @elixir_source) + [result] + end + + defp build_syntax_error_diagnostic(%Document{} = source, context, message_info, token) do + message = "#{message_info}#{token}" + pos = Location.position(context[:line], context[:column]) + result = Result.new(source.uri, pos, message, :error, @elixir_source) + [result] + end + 
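On Elixir versions where parse errors carry delimiter details in their context (the `Features.details_in_context?()` branch above), a missing terminator can be pinpointed to the delimiter itself. A sketch with an assumed context shape:

```elixir
# Assumed TokenMissingError context for a document whose only line is
#
#     defmodule Broken do
#
context = [
  opening_delimiter: :do,
  expected_delimiter: :end,
  line: 1,
  column: 18,
  end_line: 2,
  end_column: 1
]

# build_opening_delimiter_diagnostics/3 then highlights just the `do`
# (columns 18..20, derived from the delimiter's printed length) with:
#   The `do` here is missing terminator `end`
```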
+ @start_line_regex ~r/(\w+) \(for (.*) starting at line (\d+)\)/ + defp build_start_line_diagnostics(%Document{} = source, _context, message_info, _token) do + case Regex.run(@start_line_regex, message_info) do + [_, missing, token, start_line] -> + message = + ~s[The #{format_token(token)} here is missing terminator #{format_token(missing)}] + + position = String.to_integer(start_line) + result = Result.new(source.uri, position, message, :error, @elixir_source) + [result] + + _ -> + [] + end + end + + @hint_regex ~r/(HINT:|hint:\e\[0m|hint:)( .*on line (\d+).*)/m + defp build_hint_diagnostics(%Document{} = source, _context, message_info, _token) do + case Regex.run(@hint_regex, message_info) do + [_whole_message, _hint, message, hint_line] -> + message = "HINT:" <> String.replace(message, ~r/on line \d+/, "here") + position = String.to_integer(hint_line) + result = Result.new(source.uri, position, message, :error, @elixir_source) + [result] + + _ -> + [] + end + end + + defp message_info_to_binary({header, footer}, token) do + header <> token <> footer + end + + @detail_location_re ~r/at line (\d+)/ + defp detail_diagnostics(%Document{} = source, detail) do + case Regex.scan(@detail_location_re, detail) do + [[matched, line_number]] -> + line_number = String.to_integer(line_number) + message = String.replace(detail, matched, "here") + result = Result.new(source.uri, line_number, message, :error, @elixir_source) + [result] + + _ -> + [] + end + end + + defp format_token(token) when is_binary(token) do + if String.contains?(token, "\"") do + String.replace(token, "\"", "`") + else + "`#{token}`" + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build/isolation.ex b/apps/remote_control/lib/lexical/remote_control/build/isolation.ex new file mode 100644 index 000000000..4bfd3e00b --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/build/isolation.ex @@ -0,0 +1,25 @@ +defmodule Lexical.RemoteControl.Build.Isolation do + @moduledoc """ + Runs functions in an isolated, monitored process + """ + + @spec invoke((-> term())) :: {:ok, term()} | {:error, term()} + def invoke(function) when is_function(function, 0) do + me = self() + + {pid, ref} = + spawn_monitor(fn -> + send(me, {:result, function.()}) + end) + + receive do + {:result, result} -> + # clean up the DOWN message from the above process in the mailbox. 
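+ # Even on success a :DOWN message may already be queued, because the + # spawned process exits right after sending its result; the :flush + # option removes it so it never leaks into the caller's mailbox.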
+ Process.demonitor(ref, [:flush]) + {:ok, result} + + {:DOWN, ^ref, :process, ^pid, reason} -> + {:error, reason} + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/build/progress.ex b/apps/remote_control/lib/lexical/remote_control/build/progress.ex deleted file mode 100644 index 83e90fe50..000000000 --- a/apps/remote_control/lib/lexical/remote_control/build/progress.ex +++ /dev/null @@ -1,19 +0,0 @@ -defmodule Lexical.RemoteControl.Build.Progress do - alias Lexical.RemoteControl.Dispatch - import Lexical.RemoteControl.Api.Messages - - defmacro __using__(_) do - quote do - import unquote(__MODULE__), only: [with_progress: 2] - end - end - - def with_progress(label, func) when is_function(func, 0) do - try do - Dispatch.broadcast(project_progress(label: label, stage: :begin)) - func.() - after - Dispatch.broadcast(project_progress(label: label, stage: :complete)) - end - end -end diff --git a/apps/remote_control/lib/lexical/remote_control/build/project.ex b/apps/remote_control/lib/lexical/remote_control/build/project.ex index 399f78eb9..3f72999fd 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/project.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/project.ex @@ -2,22 +2,24 @@ defmodule Lexical.RemoteControl.Build.Project do alias Lexical.Project alias Lexical.RemoteControl alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.Build.Isolation alias Lexical.RemoteControl.Plugin + alias Mix.Task.Compiler.Diagnostic - use Build.Progress + use RemoteControl.Progress require Logger - def compile(%Project{} = project, force?) do + def compile(%Project{} = project, initial?) do RemoteControl.Mix.in_project(fn _ -> Mix.Task.clear() - prepare_for_project_build(force?) + prepare_for_project_build(initial?) compile_fun = fn -> Mix.Task.clear() with_progress building_label(project), fn -> - result = Mix.Task.run(:compile, mix_compile_opts(force?)) + result = compile_in_isolation() Mix.Task.run(:loadpaths) result end @@ -44,11 +46,31 @@ defmodule Lexical.RemoteControl.Build.Project do end) end - defp prepare_for_project_build(false = _force?) do + defp compile_in_isolation do + compile_fun = fn -> Mix.Task.run(:compile, mix_compile_opts()) end + + case Isolation.invoke(compile_fun) do + {:ok, result} -> + result + + {:error, {exception, [{_mod, _fun, _arity, meta} | _]}} -> + diagnostic = %Diagnostic{ + file: Keyword.get(meta, :file), + severity: :error, + message: Exception.message(exception), + compiler_name: "Elixir", + position: Keyword.get(meta, :line, 1) + } + + {:error, [diagnostic]} + end + end + + defp prepare_for_project_build(false = _initial?) do :ok end - defp prepare_for_project_build(true = _force?) do + defp prepare_for_project_build(true = _initial?) do if connected_to_internet?() do with_progress "mix local.hex", fn -> Mix.Task.run("local.hex", ~w(--force --if-missing)) @@ -69,22 +91,15 @@ defmodule Lexical.RemoteControl.Build.Project do Mix.Task.run(:loadconfig) end - with_progress "mix deps.compile", fn -> - deps_compile = - if Features.compile_wont_change_directory?() do - "deps.compile" - else - "deps.safe_compile" - end - - Mix.Task.run(deps_compile, ~w(--skip-umbrella-children)) + unless Elixir.Features.compile_keeps_current_directory?() do + with_progress "mix deps.compile", fn -> + Mix.Task.run("deps.safe_compile", ~w(--skip-umbrella-children)) + end end with_progress "loading plugins", fn -> Plugin.Discovery.run() end - - Mix.Task.run("clean") end defp connected_to_internet? 
do @@ -102,8 +117,8 @@ defmodule Lexical.RemoteControl.Build.Project do "Building #{Project.display_name(project)}" end - defp mix_compile_opts(force?) do - opts = ~w( + defp mix_compile_opts do + ~w( --return-errors --ignore-module-conflict --all-warnings @@ -111,11 +126,5 @@ defmodule Lexical.RemoteControl.Build.Project do --debug-info --no-protocol-consolidation ) - - if force? do - ["--force " | opts] - else - opts - end end end diff --git a/apps/remote_control/lib/lexical/remote_control/build/state.ex b/apps/remote_control/lib/lexical/remote_control/build/state.ex index cf6677c10..baa7aa35a 100644 --- a/apps/remote_control/lib/lexical/remote_control/build/state.ex +++ b/apps/remote_control/lib/lexical/remote_control/build/state.ex @@ -5,8 +5,6 @@ defmodule Lexical.RemoteControl.Build.State do alias Lexical.RemoteControl alias Lexical.RemoteControl.Api.Messages alias Lexical.RemoteControl.Build - alias Lexical.RemoteControl.CodeIntelligence - alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.Plugin alias Lexical.VM.Versions @@ -14,42 +12,56 @@ defmodule Lexical.RemoteControl.Build.State do import Messages - use Build.Progress + use RemoteControl.Progress - defstruct project: nil, build_number: 0, uri_to_source_and_edit_time: %{} + defstruct project: nil, + build_number: 0, + uri_to_document: %{}, + project_compile: :none def new(%Project{} = project) do %__MODULE__{project: project} end - def on_tick(%__MODULE__{} = state) do - {new_state, compiled_uris} = - Enum.reduce(state.uri_to_source_and_edit_time, {state, []}, fn - {uri, {document, edit_time}}, {state, compiled_uris} -> - if should_compile?(edit_time) do - new_state = increment_build_number(state) - compile_file(new_state, document) - {new_state, [uri | compiled_uris]} - else - {state, compiled_uris} - end + def on_timeout(%__MODULE__{} = state) do + new_state = + case state.project_compile do + :none -> state + :force -> compile_project(state, true) + :normal -> compile_project(state, false) + end + + # We need to compile the individual documents even after the project is + # compiled because they might have unsaved changes, and we want that state + # to be the latest state of the project. + new_state = + Enum.reduce(new_state.uri_to_document, state, fn {_uri, document}, state -> + compile_file(state, document) end) + %__MODULE__{new_state | uri_to_document: %{}, project_compile: :none} + end + + def on_file_compile(%__MODULE__{} = state, %Document{} = document) do %__MODULE__{ - new_state - | uri_to_source_and_edit_time: Map.drop(state.uri_to_source_and_edit_time, compiled_uris) + state + | uri_to_document: Map.put(state.uri_to_document, document.uri, document) } end - def compile_scheduled?(%__MODULE__{} = state, uri) do - Map.has_key?(state.uri_to_source_and_edit_time, uri) + def on_project_compile(%__MODULE__{} = state, force?) do + if force? do + %__MODULE__{state | project_compile: :force} + else + %__MODULE__{state | project_compile: :normal} + end end def ensure_build_directory(%__MODULE__{} = state) do # If the project directory isn't there, for some reason the main build fails, so we create it here # to ensure that the build will succeed. project = state.project - build_path = Project.build_path(project) + build_path = RemoteControl.Build.path(project) unless Versions.compatible?(build_path) do Logger.info("Build path #{build_path} was compiled on a previous erlang version. 
Deleting") @@ -59,13 +71,15 @@ defmodule Lexical.RemoteControl.Build.State do end end + maybe_delete_old_builds(project) + unless File.exists?(build_path) do File.mkdir_p!(build_path) Versions.write(build_path) end end - def compile_project(%__MODULE__{} = state, force?) do + defp compile_project(%__MODULE__{} = state, initial?) do state = increment_build_number(state) project = state.project @@ -73,8 +87,8 @@ defmodule Lexical.RemoteControl.Build.State do compile_requested_message = project_compile_requested(project: project, build_number: state.build_number) - Dispatch.broadcast(compile_requested_message) - {elapsed_us, result} = :timer.tc(fn -> Build.Project.compile(project, force?) end) + RemoteControl.broadcast(compile_requested_message) + {elapsed_us, result} = :timer.tc(fn -> Build.Project.compile(project, initial?) end) elapsed_ms = to_ms(elapsed_us) {compile_message, diagnostics} = @@ -102,26 +116,20 @@ defmodule Lexical.RemoteControl.Build.State do diagnostics: diagnostics ) - Dispatch.broadcast(compile_message) - Dispatch.broadcast(diagnostics_message) - CodeIntelligence.Structs.discover_deps_structs() + RemoteControl.broadcast(compile_message) + RemoteControl.broadcast(diagnostics_message) Plugin.diagnose(project, state.build_number) end) - end - def on_file_compile(%__MODULE__{} = state, %Document{} = document) do - %__MODULE__{ - state - | uri_to_source_and_edit_time: - Map.put(state.uri_to_source_and_edit_time, document.uri, {document, now()}) - } + state end def compile_file(%__MODULE__{} = state, %Document{} = document) do + state = increment_build_number(state) project = state.project Build.with_lock(fn -> - Dispatch.broadcast(file_compile_requested(uri: document.uri)) + RemoteControl.broadcast(file_compile_requested(uri: document.uri)) safe_compile_func = fn -> RemoteControl.Mix.in_project(fn _ -> Build.Document.compile(document) end) @@ -166,10 +174,12 @@ defmodule Lexical.RemoteControl.Build.State do diagnostics: List.wrap(diagnostics) ) - Dispatch.broadcast(compile_message) - Dispatch.broadcast(diagnostics) + RemoteControl.broadcast(compile_message) + RemoteControl.broadcast(diagnostics) Plugin.diagnose(project, state.build_number, document) end) + + state end def set_compiler_options do @@ -181,7 +191,7 @@ defmodule Lexical.RemoteControl.Build.State do :ok end - def mix_compile_opts(force?) do + def mix_compile_opts(initial?) do opts = ~w( --return-errors --ignore-module-conflict @@ -191,7 +201,7 @@ defmodule Lexical.RemoteControl.Build.State do --no-protocol-consolidation ) - if force? do + if initial? 
do ["--force " | opts] else opts @@ -202,15 +212,6 @@ defmodule Lexical.RemoteControl.Build.State do "Building #{Project.display_name(project)}" end - defp now do - System.system_time(:millisecond) - end - - defp should_compile?(last_edit_time) do - millis_since_last_edit = now() - last_edit_time - millis_since_last_edit >= edit_window_millis() - end - defp to_ms(microseconds) do microseconds / 1000 end @@ -219,11 +220,42 @@ defmodule Lexical.RemoteControl.Build.State do [columns: true, token_metadata: true] end - defp edit_window_millis do - Application.get_env(:remote_control, :edit_window_millis, 250) - end - defp increment_build_number(%__MODULE__{} = state) do %__MODULE__{state | build_number: state.build_number + 1} end + + @two_month_seconds 86_400 * 31 * 2 + defp maybe_delete_old_builds(%Project{} = project) do + build_root = Project.build_path(project) + two_months_ago = System.system_time(:second) - @two_month_seconds + + case File.ls(build_root) do + {:ok, entries} -> + for file_name <- entries, + absolute_path = Path.join(build_root, file_name), + File.dir?(absolute_path), + newest_beam_mtime(absolute_path) <= + two_months_ago do + File.rm_rf!(absolute_path) + end + + _ -> + :ok + end + end + + defp newest_beam_mtime(directory) do + directory + |> Path.join("**/*.beam") + |> Path.wildcard() + |> then(fn + [] -> + 0 + + beam_files -> + beam_files + |> Enum.map(&File.stat!(&1, time: :posix).mtime) + |> Enum.max() + end) + end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action.ex b/apps/remote_control/lib/lexical/remote_control/code_action.ex new file mode 100644 index 000000000..4bbef10de --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action.ex @@ -0,0 +1,62 @@ +defmodule Lexical.RemoteControl.CodeAction do + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Lexical.RemoteControl.CodeAction.Handlers + + defstruct [:title, :kind, :changes, :uri] + + @type code_action_kind :: + :empty + | :quick_fix + | :refactor + | :refactor_extract + | :refactor_inline + | :refactor_rewrite + | :source + | :source_organize_imports + | :source_fix_all + + @type t :: %__MODULE__{ + title: String.t(), + kind: code_action_kind, + changes: Changes.t(), + uri: Lexical.uri() + } + + @handlers [ + Handlers.ReplaceRemoteFunction, + Handlers.ReplaceWithUnderscore, + Handlers.OrganizeAliases, + Handlers.AddAlias, + Handlers.RemoveUnusedAlias + ] + + @spec new(Lexical.uri(), String.t(), code_action_kind(), Changes.t()) :: t() + def new(uri, title, kind, changes) do + %__MODULE__{uri: uri, title: title, changes: changes, kind: kind} + end + + @spec for_range(Document.t(), Range.t(), [Diagnostic.t()], [code_action_kind] | :all) :: [t()] + def for_range(%Document{} = doc, %Range{} = range, diagnostics, kinds) do + results = + Enum.flat_map(@handlers, fn handler -> + if applies?(kinds, handler) do + handler.actions(doc, range, diagnostics) + else + [] + end + end) + + results + end + + defp applies?(:all, _handler_module) do + true + end + + defp applies?(kinds, handler_module) do + kinds -- handler_module.kinds() != kinds + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/diagnostic.ex b/apps/remote_control/lib/lexical/remote_control/code_action/diagnostic.ex new file mode 100644 index 000000000..78b6ee83d --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/diagnostic.ex @@ -0,0 +1,17 @@ +defmodule 
Lexical.RemoteControl.CodeAction.Diagnostic do + alias Lexical.Document.Range + + defstruct [:range, :message, :source] + @type message :: String.t() + @type source :: String.t() + @type t :: %__MODULE__{ + range: Range.t(), + message: message() | nil, + source: source() | nil + } + + @spec new(Range.t(), message(), source() | nil) :: t + def new(%Range{} = range, message, source) do + %__MODULE__{range: range, message: message, source: source} + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handler.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handler.ex new file mode 100644 index 000000000..e02a61b45 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handler.ex @@ -0,0 +1,9 @@ +defmodule Lexical.RemoteControl.CodeAction.Handler do + alias Lexical.Document + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeAction.Diagnostic + + @callback actions(Document.t(), Range.t(), [Diagnostic.t()]) :: [CodeAction.t()] + @callback kinds() :: [CodeAction.code_action_kind()] +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handlers/add_alias.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/add_alias.ex new file mode 100644 index 000000000..f3203cdc9 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/add_alias.ex @@ -0,0 +1,211 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.AddAlias do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Alias + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.Formats + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeIntelligence.Entity + alias Lexical.RemoteControl.CodeMod + alias Lexical.RemoteControl.Modules + alias Lexical.RemoteControl.Search.Fuzzy + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Mix.Tasks.Namespace + alias Sourceror.Zipper + + @behaviour CodeAction.Handler + + @impl CodeAction.Handler + def actions(%Document{} = doc, %Range{} = range, _diagnostics) do + with {:ok, _doc, %Analysis{valid?: true} = analysis} <- + Document.Store.fetch(doc.uri, :analysis), + {:ok, resolved, _} <- Entity.resolve(analysis, range.start), + {:ok, unaliased_module} <- fetch_unaliased_module(analysis, range.start, resolved) do + current_aliases = CodeMod.Aliases.in_scope(analysis, range) + + unaliased_module + |> possible_aliases() + |> filter_by_resolution(resolved) + |> Stream.map(&build_code_action(analysis, range, current_aliases, &1)) + |> Enum.reject(&is_nil/1) + else + _ -> + [] + end + end + + @impl CodeAction.Handler + def kinds do + [:quick_fix] + end + + defp build_code_action(%Analysis{} = analysis, range, current_aliases, potential_alias_module) do + case Ast.Module.safe_split(potential_alias_module, as: :atoms) do + {:erlang, _} -> + nil + + {:elixir, segments} -> + {insert_position, trailer} = CodeMod.Aliases.insert_position(analysis, range.start) + alias_to_add = %Alias{module: segments, as: List.last(segments), explicit?: true} + replace_current_alias = get_current_replacement(analysis, range, segments) + + alias_edits = + CodeMod.Aliases.to_edits( + [alias_to_add | current_aliases], + insert_position, + trailer + ) + + changes = Changes.new(analysis.document, replace_current_alias ++ alias_edits) + + 
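# e.g. a quick fix titled `alias MyApp.Accounts.User` (module name hypothetical) +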
CodeAction.new( + analysis.document.uri, + "alias #{Formats.module(potential_alias_module)}", + :quick_fix, + changes + ) + end + end + + def fetch_unaliased_module(%Analysis{} = analysis, %Position{} = position, resolved) do + with {:ok, module} <- fetch_module(resolved), + %{} = aliases <- Analyzer.aliases_at(analysis, position), + false <- module in Map.values(aliases) do + {:ok, module} + else + _ -> + :error + end + end + + defp fetch_module({:module, module}), do: {:ok, module} + defp fetch_module({:struct, module}), do: {:ok, module} + defp fetch_module({:call, module, _function, _arity}), do: {:ok, module} + defp fetch_module(_), do: :error + + defp get_current_replacement(%Analysis{} = analysis, %Range{} = range, segments) do + with {:ok, patches} <- replace_full_module_on_line(analysis, range.start.line, segments), + {:ok, edits} <- Ast.patches_to_edits(analysis.document, patches) do + edits + else + _ -> + [] + end + end + + defp replace_full_module_on_line(%Analysis{} = analysis, line, segments) do + aliased_module = + segments + |> List.last() + |> List.wrap() + |> Module.concat() + |> Formats.module() + + analysis.document + |> Ast.traverse_line(line, [], fn + %Zipper{node: {:__aliases__, _, ^segments}} = zipper, patches -> + range = Sourceror.get_range(zipper.node) + + patch = %{range: range, change: aliased_module} + {zipper, [patch | patches]} + + zipper, acc -> + {zipper, acc} + end) + |> case do + {:ok, _, patches} -> {:ok, patches} + error -> error + end + end + + @similarity_threshold 0.75 + defp similar?(a, b), do: String.jaro_distance(a, b) >= @similarity_threshold + + defp filter_by_resolution(modules_stream, {:call, _module, function, _arity}) do + query_function = Atom.to_string(function) + + Stream.filter(modules_stream, fn module -> + case Modules.fetch_functions(module) do + {:ok, functions} -> + Enum.any?(functions, fn {name, _arity} -> + module_function = Atom.to_string(name) + similar?(module_function, query_function) + end) + + _ -> + false + end + end) + end + + defp filter_by_resolution(modules_stream, {:struct, _}) do + Stream.filter(modules_stream, fn module -> + case Modules.fetch_functions(module) do + {:ok, functions} -> Keyword.has_key?(functions, :__struct__) + _ -> false + end + end) + end + + defp filter_by_resolution(modules_stream, _) do + modules_stream + end + + def possible_aliases(unaliased_module) do + module_subject = Formats.module(unaliased_module) + + case Ast.Module.safe_split(unaliased_module) do + {:elixir, unaliased_strings} -> + module_subject + |> do_fuzzy_search() + |> Stream.filter(fn module -> + {:elixir, split} = Ast.Module.safe_split(module) + alias_as = List.last(split) + subject_module = module + RemoteControl.Module.Loader.ensure_loaded(subject_module) + + protocol_or_implementation? = function_exported?(module, :__impl__, 1) + + not protocol_or_implementation? 
and + Enum.any?(unaliased_strings, &similar?(&1, alias_as)) + end) + + _ -> + [] + end + end + + defp do_fuzzy_search(subject) do + # Note: we can't use the indexer's fuzzy matcher here, since it + # ignores all deps, so we wouldn't be able to alias any dependency module + + for {mod, _, _} <- all_modules(), + elixir_module?(mod), + not Namespace.Module.prefixed?(mod) do + module_name = List.to_atom(mod) + + %Entry{ + id: module_name, + path: "", + subject: module_name, + subtype: :definition, + type: :module + } + end + |> Fuzzy.from_entries() + |> Fuzzy.match(subject) + end + + defp all_modules do + # Note: this is for testing + :code.all_available() + end + + defp elixir_module?([?E, ?l, ?i, ?x, ?i, ?r, ?. | _]), do: true + defp elixir_module?(_), do: false +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handlers/organize_aliases.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/organize_aliases.ex new file mode 100644 index 000000000..01638ba12 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/organize_aliases.ex @@ -0,0 +1,45 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.OrganizeAliases do + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Scope + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeMod + + require Logger + + @behaviour CodeAction.Handler + + @impl CodeAction.Handler + def actions(%Document{} = doc, %Range{} = range, _diagnostics) do + with {:ok, _doc, analysis} <- Document.Store.fetch(doc.uri, :analysis), + :ok <- check_aliases(doc, analysis, range) do + aliases = CodeMod.Aliases.in_scope(analysis, range) + {insert_position, trailer} = CodeMod.Aliases.insert_position(analysis, range.start) + edits = CodeMod.Aliases.to_edits(aliases, insert_position, trailer) + + if Enum.empty?(edits) do + [] + else + changes = Changes.new(doc, edits) + [CodeAction.new(doc.uri, "Organize aliases", :source_organize_imports, changes)] + end + else + _ -> + [] + end + end + + @impl CodeAction.Handler + def kinds do + [:source, :source_organize_imports] + end + + defp check_aliases(%Document{}, %Analysis{} = analysis, %Range{} = range) do + case Analysis.module_scope(analysis, range) do + %Scope{aliases: [_ | _]} -> :ok + _ -> :error + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handlers/remove_unused_alias.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/remove_unused_alias.ex new file mode 100644 index 000000000..2dfa432fd --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/remove_unused_alias.ex @@ -0,0 +1,245 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.RemoveUnusedAlias do + @moduledoc """ + A code action that removes an unused alias + + Most of the code actions are fairly straightforward, but I think this one deserves a couple of comments on + the approach. I initially tried the following: + + * Finding the alias via Sourceror's Zipper.find + * Rewriting the AST via Macro.prewalk / postwalk + * Using Macro.traverse where I'd mark the metadata as being deleted in the prewalker, and + delete it in the postwalker. + + They had the following problems. Sourceror would consistently produce an AST that was not recognized by Elixir 1.14's code normalizer, causing a crash. 
+ Using AST rewriting was susceptible to infinite recursion, and it was extremely difficult to delete blocks reliably. + Blocks in one context would be deleted, but with a different formulation, nils would appear in the output code. + It was also very difficult to pop up the stack and delete an entire multiple alias without zippers. + + So the approach we have here utilizes a hybrid of AST walking / text replacement. It works for all the examples + I could come up with, but it's a bit longer than I desired. Dorgan said he'd take a look at the errors in the + normalizer and possibly fix sourceror, so until then, this is what we have. + """ + + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Edit + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Sourceror.Zipper + + import Record + + defrecordp :multi_alias_metadata, [ + :document, + :multi_alias_range, + :removed_alias_range, + :alias_count + ] + + defrecordp :single_alias_metadata, [:document, :range] + @behaviour CodeAction.Handler + + @impl CodeAction.Handler + def actions(%Document{} = document, %Range{} = range, diagnostics) do + Enum.reduce(diagnostics, [], fn %Diagnostic{} = diagnostic, acc -> + case to_edit(document, range.start, diagnostic) do + {:ok, module_name, edit} -> + changes = Changes.new(document, [edit]) + action = CodeAction.new(document.uri, "Remove alias #{module_name}", :source, changes) + + [action | acc] + + _ -> + acc + end + end) + end + + @impl CodeAction.Handler + def kinds do + [:source] + end + + defp to_edit(%Document{} = document, %Position{} = position, %Diagnostic{} = diagnostic) do + with {:ok, module_string} <- fetch_unused_alias_module_string(diagnostic), + {:ok, _doc, %Analysis{} = analysis} <- Document.Store.fetch(document.uri, :analysis), + last_segment = String.to_atom(module_string), + {:ok, full_alias} <- fetch_full_alias(analysis, position, last_segment), + {:ok, alias_meta} <- fetch_alias_metadata(position, analysis, full_alias, last_segment), + {:ok, edit} <- fetch_edit(alias_meta) do + {:ok, module_string, edit} + else + _ -> + :error + end + end + + @alias_regex ~r/unused alias (\w+)/ + defp fetch_unused_alias_module_string(%Diagnostic{} = diagnostic) do + case Regex.scan(@alias_regex, diagnostic.message) do + [[_, module_string]] -> {:ok, module_string} + _ -> :error + end + end + + defp fetch_alias_metadata( + %Position{} = cursor, + %Analysis{} = analysis, + full_alias, + last_segment + ) do + zipper = Zipper.zip(analysis.ast) + document = analysis.document + + with :error <- find_single_alias(document, cursor, zipper, full_alias, last_segment) do + find_multi_alias(document, cursor, zipper, last_segment) + end + end + + defp find_single_alias( + %Document{} = document, + %Position{} = cursor, + %Zipper{} = zipper, + full_alias, + last_segment + ) do + finder = fn + {:alias, _, [{:__aliases__, _, ^full_alias}]} = node -> + Ast.contains_position?(node, cursor) + + {:alias, _, + [ + {:__aliases__, _, ^full_alias}, + [{{:__block__, _, [:as]}, {:__aliases__, _, [^last_segment]}}] + ]} = node -> + Ast.contains_position?(node, cursor) + + _ -> + false + end + + case Zipper.find(zipper, finder) do + nil -> + :error + + %Zipper{node: node} -> + metadata = + single_alias_metadata(document: document, range: Ast.Range.fetch!(node, document)) + + {:ok, metadata} + end 
+ end + + defp find_multi_alias( + %Document{} = document, + %Position{} = cursor, + %Zipper{} = zipper, + last_segment + ) do + finder = fn + {:alias, _, [{{:., _, _}, _, multi_alias_list}]} = node -> + Enum.find_value(multi_alias_list, &segment_matches?(&1, last_segment)) and + Ast.contains_position?(node, cursor) + + _ -> + false + end + + case Zipper.find(zipper, finder) do + nil -> + :error + + %Zipper{node: {:alias, _, [{{:., _, _}, _, multi_alias_list}]}} = zipper -> + alias_node = Enum.find(multi_alias_list, &segment_matches?(&1, last_segment)) + + multi_alias = + multi_alias_metadata( + document: document, + multi_alias_range: Ast.Range.fetch!(zipper.node, document), + removed_alias_range: Ast.Range.fetch!(alias_node, document), + alias_count: length(multi_alias_list) + ) + + {:ok, multi_alias} + end + end + + defp fetch_full_alias(%Analysis{} = analysis, %Position{} = position, last_segment) do + aliases = Analyzer.aliases_at(analysis, position) + + with {:ok, aliased_module} <- Map.fetch(aliases, last_segment), + {:elixir, full_alias} <- Ast.Module.safe_split(aliased_module, as: :atoms) do + {:ok, full_alias} + end + end + + defp segment_matches?({:__aliases__, _, segments}, last_segment) do + List.last(segments) == last_segment + end + + defp segment_matches?(_, _), do: false + + defp fetch_edit(single_alias_metadata(range: %Range{} = range)) do + updated_range = + range + |> put_in([:start, :character], 1) + |> include_next_line() + + {:ok, Edit.new("", updated_range)} + end + + defp fetch_edit(multi_alias_metadata(alias_count: 1, multi_alias_range: range)) do + # we're removing the last alias, so we can remove the entire thing. + {:ok, Edit.new("", range)} + end + + defp fetch_edit( + multi_alias_metadata( + document: %Document{} = document, + removed_alias_range: %Range{} = range + ) + ) do + current_line = line_text(document, range.start.line) + previous_line = line_text(document, range.start.line - 1) + + {range, edit_text} = + if not String.ends_with?(current_line, ",") and String.ends_with?(previous_line, ",") do + # delete the previous line's comma + range = %Range{ + range + | start: Position.new(document, range.start.line - 1, String.length(previous_line)) + } + + {range, "\n"} + else + {put_in(range.start.character, 1), ""} + end + + {:ok, Edit.new(edit_text, include_next_line(range))} + end + + defp fetch_edit(_), do: :error + + defp line_text(%Document{} = document, line_number) do + case Document.fetch_text_at(document, line_number) do + {:ok, line_text} -> line_text + _ -> "" + end + end + + defp include_next_line(%Range{} = range) do + update_in(range.end, fn old_position -> + %Position{ + old_position + | line: old_position.line + 1, + character: 1 + } + end) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_remote_function.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_remote_function.ex new file mode 100644 index 000000000..d7062bc53 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_remote_function.ex @@ -0,0 +1,146 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.ReplaceRemoteFunction do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Edit + alias Lexical.Document.Range + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Lexical.RemoteControl.Modules + alias Sourceror.Zipper + + @behaviour 
CodeAction.Handler + + @impl CodeAction.Handler + def actions(%Document{} = doc, %Range{}, diagnostics) do + Enum.flat_map(diagnostics, fn %Diagnostic{} = diagnostic -> + with {:ok, module, function, arity, line_number} <- extract_function_and_line(diagnostic), + {:ok, suggestions} <- prepare_suggestions(module, function, arity) do + to_code_actions(doc, line_number, module, function, suggestions) + else + _ -> + [] + end + end) + end + + @impl CodeAction.Handler + def kinds do + [:quick_fix] + end + + @spec to_code_actions(Document.t(), non_neg_integer(), module(), String.t(), [atom()]) :: + [CodeAction.t()] + defp to_code_actions(%Document{} = doc, line_number, module, function, suggestions) do + suggestions + |> Enum.reduce([], fn suggestion, acc -> + case apply_transform(doc, line_number, module, function, suggestion) do + {:ok, edits} -> + changes = Changes.new(doc, edits) + code_action = CodeAction.new(doc.uri, "Rename to #{suggestion}", :quick_fix, changes) + + [code_action | acc] + + :error -> + acc + end + end) + |> Enum.reverse() + end + + @spec apply_transform(Document.t(), non_neg_integer(), module(), String.t(), atom()) :: + {:ok, [Edit.t()]} | :error + defp apply_transform(%Document{} = doc, line_number, module, function, suggestion) do + {:ok, doc, analysis} = Document.Store.fetch(doc.uri, :analysis) + function_atom = String.to_atom(function) + position = Document.Position.new(doc, line_number, 0) + + doc + |> Ast.traverse_line(line_number, [], fn + %Zipper{node: {{:., _, [{:__aliases__, _, module_alias}, ^function_atom]}, _, _}} = zipper, + patches -> + case RemoteControl.Analyzer.expand_alias(module_alias, analysis, position) do + {:ok, ^module} -> + patch = Sourceror.Patch.rename_call(zipper.node, suggestion) + {zipper, [patch | patches]} + + _ -> + {zipper, patches} + end + + %Zipper{node: {{:., _, [{:__block__, _, [^module]}, ^function_atom]}, _, _}} = zipper, + patches -> + # this is an erlang call :ets.insert(...) + patch = Sourceror.Patch.rename_call(zipper.node, suggestion) + + {zipper, [patch | patches]} + + zipper, patches -> + {zipper, patches} + end) + |> case do + {:ok, _zipper, patches} -> + Ast.patches_to_edits(doc, patches) + + _ -> + :error + end + end + + defp extract_function_and_line(%Diagnostic{} = diagnostic) do + with {:ok, module, function, arity} <- extract_function(diagnostic.message) do + {:ok, module, function, arity, diagnostic.range.start.line} + end + end + + @function_re ~r/(warning: |function )?([^\/]+)\/(.*) is undefined or private. 
Did you mean:.*/ + defp extract_function(message) do + result = + with [[_, _, module_and_function, arity]] <- Regex.scan(@function_re, message), + {:ok, module, function_name} <- separate_module_from_function(module_and_function) do + {:ok, module, function_name, String.to_integer(arity)} + end + + result + end + + defp separate_module_from_function(module_and_function) do + module_and_function + |> String.split(".") + |> List.pop_at(-1) + |> case do + {function_name, [_ | _] = module_alias} -> + {:ok, alias_to_module(module_alias), function_name} + + _ -> + :error + end + end + + defp alias_to_module([":" <> erlang_alias]) do + String.to_atom(erlang_alias) + end + + defp alias_to_module(module_alias) do + Module.concat(module_alias) + end + + @function_threshold 0.77 + @max_suggestions 5 + defp prepare_suggestions(module, function, arity) do + with {:ok, module_functions} <- Modules.fetch_functions(module) do + suggestions = + for {module_function, ^arity} <- module_functions, + distance = module_function |> Atom.to_string() |> String.jaro_distance(function), + distance >= @function_threshold do + {distance, module_function} + end + |> Enum.sort(:desc) + |> Enum.take(@max_suggestions) + |> Enum.map(fn {_distance, module_function} -> module_function end) + + {:ok, suggestions} + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_with_underscore.ex b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_with_underscore.ex new file mode 100644 index 000000000..620d06046 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_action/handlers/replace_with_underscore.ex @@ -0,0 +1,83 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.ReplaceWithUnderscore do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeAction + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Sourceror.Zipper + + @behaviour CodeAction.Handler + + @impl CodeAction.Handler + def actions(%Document{} = doc, %Range{}, diagnostics) do + Enum.reduce(diagnostics, [], fn %Diagnostic{} = diagnostic, acc -> + with {:ok, variable_name, line_number} <- extract_variable_and_line(diagnostic), + {:ok, changes} <- to_changes(doc, line_number, variable_name) do + action = CodeAction.new(doc.uri, "Rename to _#{variable_name}", :quick_fix, changes) + + [action | acc] + else + _ -> + acc + end + end) + end + + @impl CodeAction.Handler + def kinds do + [:quick_fix] + end + + @spec to_changes(Document.t(), non_neg_integer(), String.t() | atom) :: + {:ok, Changes.t()} | :error + defp to_changes(%Document{} = document, line_number, variable_name) do + case apply_transform(document, line_number, variable_name) do + {:ok, edits} -> + {:ok, Changes.new(document, edits)} + + error -> + error + end + end + + defp apply_transform(document, line_number, unused_variable_name) do + underscored_variable_name = :"_#{unused_variable_name}" + + result = + Ast.traverse_line(document, line_number, [], fn + %Zipper{node: {^unused_variable_name, _meta, nil} = node} = zipper, patches -> + patch = Sourceror.Patch.rename_identifier(node, underscored_variable_name) + {zipper, [patch | patches]} + + zipper, acc -> + {zipper, acc} + end) + + with {:ok, _, patches} <- result do + Ast.patches_to_edits(document, patches) + end + end + + defp extract_variable_and_line(%Diagnostic{} = diagnostic) do + with {:ok, variable_name} <- extract_variable_name(diagnostic.message), + {:ok, 
line} <- extract_line(diagnostic) do + {:ok, variable_name, line} + end + end + + @variable_re ~r/variable "([^"]+)" is unused/ + defp extract_variable_name(message) do + case Regex.scan(@variable_re, message) do + [[_, variable_name]] -> + {:ok, String.to_atom(variable_name)} + + _ -> + {:error, {:no_variable, message}} + end + end + + defp extract_line(%Diagnostic{} = diagnostic) do + {:ok, diagnostic.range.start.line} + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/definition.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/definition.ex index 8e9bfdae8..b854f00b8 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/definition.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/definition.ex @@ -1,25 +1,115 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Definition do - alias Future.Code, as: Code + alias ElixirSense.Providers.Location, as: ElixirSenseLocation + alias Future.Code + alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Location alias Lexical.Document.Position + alias Lexical.Formats alias Lexical.RemoteControl.CodeIntelligence.Entity + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store alias Lexical.Text + require Logger + + @spec definition(Document.t(), Position.t()) :: {:ok, [Location.t()]} | {:error, String.t()} def definition(%Document{} = document, %Position{} = position) do - document + with {:ok, _, analysis} <- Document.Store.fetch(document.uri, :analysis), + {:ok, entity, _range} <- Entity.resolve(analysis, position) do + fetch_definition(entity, analysis, position) + end + end + + defp fetch_definition({type, entity} = resolved, %Analysis{} = analysis, %Position{} = position) + when type in [:struct, :module] do + module = Formats.module(entity) + + locations = + case Store.exact(module, type: type, subtype: :definition) do + {:ok, entries} -> + for entry <- entries, + result = to_location(entry), + match?({:ok, _}, result) do + {:ok, location} = result + location + end + + _ -> + [] + end + + maybe_fallback_to_elixir_sense(resolved, locations, analysis, position) + end + + defp fetch_definition( + {:call, module, function, arity} = resolved, + %Analysis{} = analysis, + %Position{} = position + ) do + mfa = Formats.mfa(module, function, arity) + + definitions = + mfa + |> query_search_index(subtype: :definition) + |> Stream.flat_map(fn entry -> + case entry do + %Entry{type: {:function, :delegate}} -> + mfa = get_in(entry, [:metadata, :original_mfa]) + query_search_index(mfa, subtype: :definition) ++ [entry] + + _ -> + [entry] + end + end) + |> Stream.uniq_by(& &1.subject) + + locations = + for entry <- definitions, + result = to_location(entry), + match?({:ok, _}, result) do + {:ok, location} = result + location + end + + maybe_fallback_to_elixir_sense(resolved, locations, analysis, position) + end + + defp fetch_definition(_, %Analysis{} = analysis, %Position{} = position) do + elixir_sense_definition(analysis, position) + end + + defp maybe_fallback_to_elixir_sense(resolved, locations, analysis, position) do + case locations do + [] -> + Logger.info("No definition found for #{inspect(resolved)} with Indexer.") + + elixir_sense_definition(analysis, position) + + [location] -> + {:ok, location} + + _ -> + {:ok, locations} + end + end + + defp elixir_sense_definition(%Analysis{} = analysis, %Position{} = position) do + analysis.document |> Document.to_string() |> 
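+ # ElixirSense works from the raw source string, unlike the index-based paths above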
ElixirSense.definition(position.line, position.character) - |> parse_location(document) + |> parse_location(analysis.document) end - defp parse_location(%ElixirSense.Location{} = location, document) do - %{file: file, line: line, column: column} = location + defp parse_location(%ElixirSenseLocation{} = location, document) do + %{file: file, line: line, column: column, type: type} = location file_path = file || document.path uri = Document.Path.ensure_uri(file_path) with {:ok, document} <- Document.Store.open_temporary(uri), {:ok, text} <- Document.fetch_text_at(document, line) do + {line, column} = maybe_move_cursor_to_next_token(type, document, line, column) range = to_precise_range(document, text, line, column) {:ok, Location.new(range, document)} else @@ -32,6 +122,29 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Definition do {:ok, nil} end + defp maybe_move_cursor_to_next_token(type, document, line, column) + when type in [:function, :module, :macro] do + position = Position.new(document, line, column) + + with {:ok, zipper} <- Ast.zipper_at(document, position), + %{node: {entity_name, meta, _}} <- Sourceror.Zipper.next(zipper) do + meta = + if entity_name == :when do + %{node: {_entity_name, meta, _}} = Sourceror.Zipper.next(zipper) + meta + else + meta + end + + {meta[:line], meta[:column]} + else + _ -> + {line, column} + end + end + + defp maybe_move_cursor_to_next_token(_, _, line, column), do: {line, column} + defp to_precise_range(%Document{} = document, text, line, column) do case Code.Fragment.surround_context(text, {line, column}) do %{begin: start_pos, end: end_pos} -> @@ -45,4 +158,26 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Definition do Entity.to_range(document, pos, pos) end end + + defp to_location(entry) do + uri = Document.Path.ensure_uri(entry.path) + + case Document.Store.open_temporary(uri) do + {:ok, document} -> + {:ok, Location.new(entry.range, document)} + + _ -> + :error + end + end + + defp query_search_index(subject, condition) do + case Store.exact(subject, condition) do + {:ok, entries} -> + entries + + _ -> + [] + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex index 3a95bd9f6..5fa5da56d 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/docs.ex @@ -31,9 +31,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs do def for_module(module, opts) when is_atom(module) do exclude_hidden? = Keyword.get(opts, :exclude_hidden, false) - with {:ok, beam} <- Modules.ensure_beam(module) do - %__MODULE__{} = docs = parse_docs(module, beam) - + with {:ok, beam} <- Modules.ensure_beam(module), + {:ok, docs} <- parse_docs(module, beam) do if docs.doc == :hidden and exclude_hidden? 
do {:error, :hidden} else @@ -43,25 +42,31 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Docs do end defp parse_docs(module, beam) do - with {:ok, {:docs_v1, _anno, _lang, _format, module_doc, _meta, entries}} <- - Modules.fetch_docs(beam) do - entries_by_kind = Enum.group_by(entries, &doc_kind/1) - function_entries = Map.get(entries_by_kind, :function, []) - macro_entries = Map.get(entries_by_kind, :macro, []) - callback_entries = Map.get(entries_by_kind, :callback, []) - type_entries = Map.get(entries_by_kind, :type, []) - - spec_defs = beam |> Modules.fetch_specs() |> ok_or([]) - callback_defs = beam |> Modules.fetch_callbacks() |> ok_or([]) - type_defs = beam |> Modules.fetch_types() |> ok_or([]) - - %__MODULE__{ - module: module, - doc: Entry.parse_doc(module_doc), - functions_and_macros: parse_entries(module, function_entries ++ macro_entries, spec_defs), - callbacks: parse_entries(module, callback_entries, callback_defs), - types: parse_entries(module, type_entries, type_defs) - } + case Modules.fetch_docs(beam) do + {:ok, {:docs_v1, _anno, _lang, _format, module_doc, _meta, entries}} -> + entries_by_kind = Enum.group_by(entries, &doc_kind/1) + function_entries = Map.get(entries_by_kind, :function, []) + macro_entries = Map.get(entries_by_kind, :macro, []) + callback_entries = Map.get(entries_by_kind, :callback, []) + type_entries = Map.get(entries_by_kind, :type, []) + + spec_defs = beam |> Modules.fetch_specs() |> ok_or([]) + callback_defs = beam |> Modules.fetch_callbacks() |> ok_or([]) + type_defs = beam |> Modules.fetch_types() |> ok_or([]) + + result = %__MODULE__{ + module: module, + doc: Entry.parse_doc(module_doc), + functions_and_macros: + parse_entries(module, function_entries ++ macro_entries, spec_defs), + callbacks: parse_entries(module, callback_entries, callback_defs), + types: parse_entries(module, type_entries, type_defs) + } + + {:ok, result} + + _ -> + {:error, :no_docs} end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex index f975ed6c8..5f89024d6 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/entity.ex @@ -1,18 +1,25 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do alias Future.Code, as: Code alias Lexical.Ast + alias Lexical.Ast.Analysis alias Lexical.Document alias Lexical.Document.Position alias Lexical.Document.Range + alias Lexical.Formats + alias Lexical.RemoteControl + alias Sourceror.Zipper require Logger require Sourceror.Identifier + @type maybe_module :: module() | nil @type resolved :: - {:module, module()} - | {:struct, module()} - | {:call, module(), fun_name :: atom(), arity :: non_neg_integer()} - | {:type, module(), type_name :: atom(), arity :: non_neg_integer()} + {:module, maybe_module()} + | {:struct, maybe_module()} + | {:call, maybe_module(), fun_name :: atom(), arity :: non_neg_integer()} + | {:type, maybe_module(), type_name :: atom(), arity :: non_neg_integer()} + | {:module_attribute, container_module :: maybe_module(), attribute_name :: atom()} + | {:variable, variable_name :: atom()} defguardp is_call(form) when Sourceror.Identifier.is_call(form) and elem(form, 0) != :. @@ -21,15 +28,20 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do Returns `{:ok, resolved, range}` if successful, `{:error, error}` otherwise. 
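+
+  For illustration (module and range invented), hovering `Enum` in a call to
+  `Enum.map/2` resolves to a module, while hovering `map` resolves to a call:
+
+      {:ok, {:module, Enum}, range}
+      {:ok, {:call, Enum, :map, 2}, range}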
""" - @spec resolve(Document.t(), Position.t()) :: {:ok, resolved, Range.t()} | {:error, term()} - def resolve(%Document{} = document, %Position{} = position) do - with {:ok, surround_context} <- Ast.surround_context(document, position), - {:ok, resolved, {begin_pos, end_pos}} <- resolve(surround_context, document, position) do + @spec resolve(Analysis.t(), Position.t()) :: {:ok, resolved, Range.t()} | {:error, term()} + def resolve(%Analysis{} = analysis, %Position{} = position) do + analysis = Ast.reanalyze_to(analysis, position) + + with :ok <- check_commented(analysis, position), + {:ok, surround_context} <- Ast.surround_context(analysis, position), + {:ok, resolved, {begin_pos, end_pos}} <- + resolve(surround_context, analysis, position) do Logger.info("Resolved entity: #{inspect(resolved)}") - {:ok, resolved, to_range(document, begin_pos, end_pos)} + {:ok, resolved, to_range(analysis.document, begin_pos, end_pos)} else - {:error, :surround_context} -> {:error, :not_found} - error -> error + :error -> {:error, :not_found} + {:error, :surround_context} -> maybe_local_capture_func(analysis, position) + {:error, _} = error -> error end end @@ -41,38 +53,88 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do ) end - defp resolve(%{context: context, begin: begin_pos, end: end_pos}, document, position) do - resolve(context, {begin_pos, end_pos}, document, position) + defp check_commented(%Analysis{} = analysis, %Position{} = position) do + if Analysis.commented?(analysis, position) do + :error + else + :ok + end + end + + defp resolve(%{context: context, begin: begin_pos, end: end_pos}, analysis, position) do + resolve(context, {begin_pos, end_pos}, analysis, position) end - defp resolve({:alias, charlist}, node_range, document, position) do - resolve_alias(charlist, node_range, document, position) + defp resolve({:alias, charlist}, node_range, analysis, position) do + resolve_alias(charlist, node_range, analysis, position) end - defp resolve({:alias, {:local_or_var, prefix}, charlist}, node_range, document, position) do - resolve_alias(prefix ++ [?.] ++ charlist, node_range, document, position) + defp resolve({:alias, {:local_or_var, prefix}, charlist}, node_range, analysis, position) do + resolve_alias(prefix ++ [?.] 
++ charlist, node_range, analysis, position) end - defp resolve({:local_or_var, ~c"__MODULE__" = chars}, node_range, document, position) do - resolve_alias(chars, node_range, document, position) + defp resolve({:local_or_var, ~c"__MODULE__" = chars}, node_range, analysis, position) do + resolve_alias(chars, node_range, analysis, position) + end + + defp resolve({:local_or_var, chars}, node_range, analysis, position) do + maybe_fun = List.to_atom(chars) + + case Ast.path_at(analysis, position) do + {:ok, [{^maybe_fun, _, nil} = local, {def, _, [local | _]} | _]} + when def in [:def, :defp, :defmacro, :defmacrop] -> + # This case handles resolving calls that come from zero-arg definitions in + # a module, like hovering in `def my_fun| do` + {:ok, module} = RemoteControl.Analyzer.current_module(analysis, position) + {:ok, {:call, module, maybe_fun, 0}, node_range} + + {:ok, [{^maybe_fun, _, args} | _]} -> + # imported functions + arity = + case args do + arg_list when is_list(arg_list) -> length(arg_list) + _ -> 0 + end + + case fetch_module_for_function(analysis, position, maybe_fun, arity) do + {:ok, module} -> {:ok, {:call, module, maybe_fun, arity}, node_range} + _ -> {:ok, {:variable, List.to_atom(chars)}, node_range} + end + + _ -> + {:ok, {:variable, List.to_atom(chars)}, node_range} + end end - defp resolve({:struct, charlist}, {{start_line, start_col}, end_pos}, document, position) do + defp resolve({:local_arity, chars}, node_range, analysis, position) do + current_module = current_module(analysis, position) + + with {:ok, %Zipper{node: {:/, _, [_, {:__block__, _, [arity]}]}} = zipper} <- + Ast.zipper_at(analysis.document, position), + true <- inside_capture?(zipper) do + {:ok, {:call, current_module, List.to_atom(chars), arity}, node_range} + else + _ -> + {:error, :not_found} + end + end + + defp resolve({:struct, charlist}, {{start_line, start_col}, end_pos}, analysis, position) do # exclude the leading % from the node range so that it can be # resolved like a normal module alias node_range = {{start_line, start_col + 1}, end_pos} - case resolve_alias(charlist, node_range, document, position) do + case resolve_alias(charlist, node_range, analysis, position) do {:ok, {struct_or_module, struct}, range} -> {:ok, {struct_or_module, struct}, range} :error -> {:error, :not_found} end end - defp resolve({:dot, alias_node, fun_chars}, node_range, document, position) do + defp resolve({:dot, alias_node, fun_chars}, node_range, analysis, position) do fun = List.to_atom(fun_chars) - with {:ok, module} <- expand_alias(alias_node, document, position) do - case Ast.path_at(document, position) do + with {:ok, module} <- expand_alias(alias_node, analysis, position) do + case Ast.path_at(analysis, position) do {:ok, path} -> arity = arity_at_position(path, position) kind = kind_of_call(path, position) @@ -84,35 +146,68 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do end end - defp resolve(context, _node_range, _document, _position) do + defp resolve({:local_call, fun_chars}, node_range, analysis, position) do + fun = List.to_atom(fun_chars) + + with {:ok, path} <- Ast.path_at(analysis, position), + arity = arity_at_position(path, position), + {module, ^fun, ^arity} <- + RemoteControl.Analyzer.resolve_local_call(analysis, position, fun, arity) do + {:ok, {:call, module, fun, arity}, node_range} + else + _ -> + module = current_module(analysis, position) + {:ok, {:call, module, fun, 0}, node_range} + end + end + + defp resolve({:unquoted_atom, _} = context, node_range, analysis, 
position) do + case expand_alias(context, analysis, position) do + {:ok, module} -> {:ok, {:module, module}, node_range} + _ -> {:error, {:unsupported, context}} + end + end + + defp resolve({:module_attribute, attr_name}, node_range, analysis, position) do + current_module = current_module(analysis, position) + + {:ok, {:module_attribute, current_module, List.to_atom(attr_name)}, node_range} + end + + defp resolve(context, _node_range, _analysis, _position) do {:error, {:unsupported, context}} end - defp resolve_alias(charlist, node_range, document, position) do + defp resolve_alias(charlist, node_range, analysis, position) do {{_line, start_column}, _} = node_range with false <- suffix_contains_module?(charlist, start_column, position), - {:ok, path} <- Ast.path_at(document, position), + {:ok, path} <- Ast.path_at(analysis, position), :struct <- kind_of_alias(path) do - resolve_struct(charlist, node_range, document, position) + resolve_struct(charlist, node_range, analysis, position) else _ -> - resolve_module(charlist, node_range, document, position) + resolve_module(charlist, node_range, analysis, position) end end - defp resolve_struct(charlist, node_range, document, %Position{} = position) do - with {:ok, struct} <- expand_alias(charlist, document, position) do + defp resolve_struct(charlist, node_range, analysis, %Position{} = position) do + with {:ok, struct} <- expand_alias(charlist, analysis, position) do {:ok, {:struct, struct}, node_range} end end # Modules on a single line, e.g. "Foo.Bar.Baz" - defp resolve_module(charlist, {{line, column}, {line, _}}, document, %Position{} = position) do - module_string = module_before_position(charlist, column, position) + defp resolve_module(charlist, {{line, column}, {line, _}}, analysis, %Position{} = position) do + module_before_cursor = module_before_position(charlist, column, position) - with {:ok, module} <- expand_alias(module_string, document, position) do - end_column = column + String.length(module_string) + maybe_prepended = + module_before_cursor + |> maybe_prepend_phoenix_scope_module(analysis, position) + |> maybe_prepend_ecto_schema(analysis, position) + + with {:ok, module} <- expand_alias(maybe_prepended, analysis, position) do + end_column = column + String.length(module_before_cursor) {:ok, {:module, module}, {{line, column}, {line, end_column}}} end end @@ -120,12 +215,93 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do # Modules on multiple lines, e.g. "Foo.\n Bar.\n Baz" # Since we no longer have formatting information at this point, we # just return the entire module for now. 
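+  # For illustration (alias invented): a multi-line alias such as
+  #
+  #     Foo.
+  #       Bar.
+  #       Baz
+  #
+  # resolves to {:module, Foo.Bar.Baz} with the node's full range, no matter
+  # which segment holds the cursor.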
- defp resolve_module(charlist, node_range, document, %Position{} = position) do - with {:ok, module} <- expand_alias(charlist, document, position) do + defp resolve_module(charlist, node_range, analysis, %Position{} = position) do + with {:ok, module} <- expand_alias(charlist, analysis, position) do {:ok, {:module, module}, node_range} end end + defp maybe_prepend_ecto_schema(module_string, %Analysis{} = analysis, %Position{} = position) do + with true <- Ecto.Schema in RemoteControl.Analyzer.uses_at(analysis, position), + true <- in_inline_embed?(analysis, position), + {:ok, parent_module} <- RemoteControl.Analyzer.current_module(analysis, position) do + parent_module + |> Module.concat(module_string) + |> Formats.module() + else + _ -> + module_string + end + end + + @embeds [:embeds_one, :embeds_many] + defp in_inline_embed?(%Analysis{} = analysis, %Position{} = position) do + case Ast.path_at(analysis, position) do + {:ok, path} -> + path + |> Zipper.zip() + |> Zipper.find(fn + {embed, meta, _} when embed in @embeds -> + Keyword.has_key?(meta, :do) + + _ -> + false + end) + |> then(&match?(%Zipper{}, &1)) + end + end + + defp maybe_prepend_phoenix_scope_module(module_string, analysis, position) do + with {:ok, scope_segments} <- fetch_phoenix_scope_alias_segments(analysis, position), + {:ok, scope_module} <- + RemoteControl.Analyzer.expand_alias(scope_segments, analysis, position), + cursor_module = Module.concat(scope_module, module_string), + true <- + phoenix_controller_module?(cursor_module) or phoenix_liveview_module?(cursor_module) do + Formats.module(cursor_module) + else + _ -> + module_string + end + end + + defp fetch_phoenix_scope_alias_segments(analysis, position) do + # fetch the alias segments from the `scope` macro + # e.g. `scope "/foo", FooWeb.Controllers` + # the alias module is `FooWeb.Controllers`, and the segments is `[:FooWeb, :Controllers]` + path = + analysis + |> Ast.cursor_path(position) + |> Enum.filter(&match?({:scope, _, [_ | _]}, &1)) + # There might be nested `scope` macros, we need the immediate ancestor + |> List.last() + + if path do + {_, paths} = + path + |> Zipper.zip() + |> Zipper.traverse([], fn + %Zipper{node: {:scope, _, [_, {:__aliases__, _, segments} | _]}} = zipper, acc -> + {zipper, [segments | acc]} + + zipper, acc -> + {zipper, acc} + end) + + {:ok, paths |> Enum.reverse() |> List.flatten()} + else + :error + end + end + + defp phoenix_controller_module?(module) do + function_exists?(module, :call, 2) and function_exists?(module, :action, 2) + end + + defp phoenix_liveview_module?(module) do + function_exists?(module, :mount, 3) and function_exists?(module, :render, 1) + end + # Take only the segments at and before the cursor, e.g. # Foo|.Bar.Baz -> Foo # Foo.|Bar.Baz -> Foo.Bar @@ -170,27 +346,40 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do String.upcase(first_char) == first_char end - defp expand_alias({:alias, {:local_or_var, prefix}, charlist}, document, %Position{} = position) do - expand_alias(prefix ++ [?.] ++ charlist, document, position) + defp expand_alias({:alias, {:local_or_var, prefix}, charlist}, analysis, %Position{} = position) do + expand_alias(prefix ++ [?.] 
++ charlist, analysis, position) end - defp expand_alias({:alias, charlist}, document, %Position{} = position) do - expand_alias(charlist, document, position) + defp expand_alias({:alias, charlist}, analysis, %Position{} = position) do + expand_alias(charlist, analysis, position) end - defp expand_alias(charlist, document, %Position{} = position) when is_list(charlist) do + defp expand_alias({:unquoted_atom, maybe_module_charlist}, _analysis, _position) do + maybe_module = List.to_existing_atom(maybe_module_charlist) + + if function_exported?(maybe_module, :module_info, 1) do + {:ok, maybe_module} + else + :error + end + rescue + ArgumentError -> + :error + end + + defp expand_alias(charlist, analysis, %Position{} = position) when is_list(charlist) do charlist |> List.to_string() - |> expand_alias(document, position) + |> expand_alias(analysis, position) end - defp expand_alias(module, document, %Position{} = position) when is_binary(module) do + defp expand_alias(module, analysis, %Position{} = position) when is_binary(module) do [module] |> Module.concat() - |> Ast.expand_aliases(document, position) + |> RemoteControl.Analyzer.expand_alias(analysis, position) end - defp expand_alias(_, _document, _position), do: :error + defp expand_alias(_, _analysis, _position), do: :error # Pipes: defp arity_at_position([{:|>, _, _} = pipe | _], %Position{} = position) do @@ -212,6 +401,23 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do length(args) + 1 end + # Calls as part of a capture: + # &MyModule.some_function/2 + defp arity_at_position( + [ + # To correctly identify a fun/arity capture, the zero-arg call + # should be the first argument to a `/` binary op, and that `/` + # should be the only argument to a `&` unary op. + {_, _, []} = call, + {:/, _, [call, {:__block__, _, [arity]}]} = slash, + {:&, _, [slash]} | _ + ], + _position + ) + when is_call(call) and is_integer(arity) do + arity + end + # Calls not inside of a pipe: # MyModule.some_function(1, 2) # some_function.(1, 2) @@ -267,4 +473,67 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Entity do # Catch-all: defp kind_of_alias(_), do: :module + + defp fetch_module_for_function(analysis, position, function_name, arity) do + with :error <- fetch_module_for_local_function(analysis, position, function_name, arity) do + fetch_module_for_imported_function(analysis, position, function_name, arity) + end + end + + defp fetch_module_for_imported_function(analysis, position, function_name, arity) do + analysis + |> RemoteControl.Analyzer.imports_at(position) + |> Enum.find_value({:error, :not_found}, fn + {imported_module, ^function_name, ^arity} -> + {:ok, imported_module} + + _ -> + false + end) + end + + defp fetch_module_for_local_function(analysis, position, function_name, arity) do + with {:ok, current_module} <- RemoteControl.Analyzer.current_module(analysis, position), + true <- function_exported?(current_module, function_name, arity) do + {:ok, current_module} + else + _ -> :error + end + end + + defp function_exists?(module, function, arity) do + # Wrap the `function_exported?` from `Kernel` to simplify testing. 
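+    # For illustration (module name invented), the Phoenix duck-typing checks
+    # above reduce to plain calls such as:
+    #
+    #     function_exists?(MyAppWeb.PageController, :call, 2)   #=> true
+    #     function_exists?(MyAppWeb.PageController, :action, 2) #=> true
+    #
+    # Note that `function_exported?/3` does not load code, so these checks
+    # only hold for modules that are already loaded.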
+ function_exported?(module, function, arity) + end + + defp current_module(%Analysis{} = analysis, %Position{} = position) do + case RemoteControl.Analyzer.current_module(analysis, position) do + {:ok, module} -> module + _ -> nil + end + end + + defp maybe_local_capture_func(analysis, position) do + with {:ok, %Zipper{node: {:/, _, [_, {:__block__, _, _}]}} = zipper} <- + Ast.zipper_at(analysis.document, position), + true <- inside_capture?(zipper) do + {:/, _, [{local_func_name, _meta, _}, {:__block__, _, [arity]}]} = zipper.node + function_name_length = local_func_name |> to_string() |> String.length() + range = Ast.Range.fetch!(zipper.node, analysis.document) + range = put_in(range.end.character, range.start.character + function_name_length) + + current_module = current_module(analysis, position) + {:ok, {:call, current_module, local_func_name, arity}, range} + else + _ -> + {:error, :not_found} + end + end + + defp inside_capture?(zipper) do + case Zipper.up(zipper) do + %Zipper{node: {:&, _, _}} -> true + _ -> false + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex new file mode 100644 index 000000000..878c6b37a --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/references.ex @@ -0,0 +1,100 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.References do + alias Lexical.Ast.Analysis + alias Lexical.Document + alias Lexical.Document.Location + alias Lexical.Document.Position + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.CodeIntelligence.Entity + alias Lexical.RemoteControl.CodeIntelligence.Variable + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store + alias Lexical.RemoteControl.Search.Subject + + require Logger + + def references(%Analysis{} = analysis, %Position{} = position, include_definitions?) do + with {:ok, resolved, _range} <- Entity.resolve(analysis, position) do + resolved + |> maybe_rewrite_resolution(analysis, position) + |> find_references(analysis, position, include_definitions?) + end + end + + defp find_references({:module, module}, _analysis, _position, include_definitions?) do + subject = Subject.module(module) + subtype = subtype(include_definitions?) + + query(subject, type: :module, subtype: subtype) + end + + defp find_references({:struct, struct_module}, _analysis, _position, include_definitions?) do + subject = Subject.module(struct_module) + subtype = subtype(include_definitions?) + + query(subject, type: :struct, subtype: subtype) + end + + defp find_references( + {:call, module, function_name, _arity}, + _analysis, + _position, + include_definitions? + ) do + subject = Subject.mfa(module, function_name, "") + subtype = subtype(include_definitions?) + + case Store.prefix(subject, type: {:function, :_}, subtype: subtype) do + {:ok, entries} -> Enum.map(entries, &to_location/1) + _ -> [] + end + end + + defp find_references( + {:module_attribute, module, attribute_name}, + _analysis, + _position, + include_definitions? + ) do + subject = Subject.module_attribute(module, attribute_name) + subtype = subtype(include_definitions?) + + query(subject, type: :module_attribute, subtype: subtype) + end + + defp find_references({:variable, var_name}, analysis, position, include_definitions?) do + analysis + |> Variable.references(position, var_name, include_definitions?) 
+ |> Enum.map(&to_location/1) + end + + defp find_references(resolved, _, _, _include_definitions?) do + Logger.info("Not attempting to find references for unhandled type: #{inspect(resolved)}") + :error + end + + def maybe_rewrite_resolution({:call, Kernel, :defstruct, 1}, analysis, position) do + case Analyzer.current_module(analysis, position) do + {:ok, struct_module} -> {:struct, struct_module} + orig -> orig + end + end + + def maybe_rewrite_resolution(resolution, _analysis, _position) do + resolution + end + + defp to_location(%Entry{} = entry) do + uri = Document.Path.ensure_uri(entry.path) + Location.new(entry.range, uri) + end + + defp query(subject, opts) do + case Store.exact(subject, opts) do + {:ok, entries} -> Enum.map(entries, &to_location/1) + _ -> [] + end + end + + defp subtype(true = _include_definitions?), do: :_ + defp subtype(false = _include_definitions?), do: :reference +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/structs.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/structs.ex index 564be5c20..8ef9e238f 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_intelligence/structs.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/structs.ex @@ -1,58 +1,27 @@ defmodule Lexical.RemoteControl.CodeIntelligence.Structs do alias Lexical.RemoteControl - alias Lexical.RemoteControl.Api.Messages - alias Lexical.RemoteControl.Dispatch + alias Lexical.RemoteControl.Module.Loader + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store - import Messages - - def discover_deps_structs do + def for_project do if Mix.Project.get() do - deps_projects() + {:ok, structs_from_index()} else - RemoteControl.Mix.in_project(fn _ -> deps_projects() end) + RemoteControl.Mix.in_project(fn _ -> structs_from_index() end) end end - defp elixir_module?(module_name_charlist) when is_list(module_name_charlist) do - List.starts_with?(module_name_charlist, 'Elixir.') - end - - defp elixir_module?(module_atom) when is_atom(module_atom) do - module_atom - |> Atom.to_charlist() - |> elixir_module?() - end - - defp deps_projects do - # This might be a performance / memory issue on larger projects. It - # iterates through all modules, loading each as necessary and then removing them - # if they're not already loaded to try and claw back some memory - - for dep_app <- Mix.Project.deps_apps(), - module_name <- dep_modules(dep_app), - elixir_module?(module_name), - was_loaded? = :code.is_loaded(module_name), - Code.ensure_loaded?(module_name) do - case module_name.__info__(:struct) do - struct_fields when is_list(struct_fields) -> - message = struct_discovered(module: module_name, fields: struct_fields) - Dispatch.broadcast(message) - - _ -> - :ok - end - - unless was_loaded? 
do - :code.delete(module_name) - :code.purge(module_name) - end - end - end + defp structs_from_index do + case Store.exact(type: :struct, subtype: :definition) do + {:ok, entries} -> + for %Entry{subject: struct_module} <- entries, + Loader.ensure_loaded?(struct_module) do + struct_module + end - defp dep_modules(app_name) do - case :application.get_key(app_name, :modules) do - {:ok, modules} -> modules - _ -> [] + _ -> + [] end end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols.ex new file mode 100644 index 000000000..2459e43e9 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols.ex @@ -0,0 +1,125 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.Symbols do + alias Lexical.Document + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeIntelligence.Symbols + alias Lexical.RemoteControl.Search + alias Lexical.RemoteControl.Search.Indexer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Extractors + + @block_types [ + :ex_unit_describe, + :ex_unit_setup, + :ex_unit_setup_all, + :ex_unit_test, + :module + ] + + @symbol_extractors [ + Extractors.FunctionDefinition, + Extractors.Module, + Extractors.ModuleAttribute, + Extractors.StructDefinition, + Extractors.ExUnit + ] + + def for_document(%Document{} = document) do + {:ok, entries} = Indexer.Source.index_document(document, @symbol_extractors) + + definitions = Enum.filter(entries, &(&1.subtype == :definition)) + to_symbols(document, definitions) + end + + def for_workspace(query) do + case Search.Store.fuzzy(query, []) do + {:ok, entries} -> + Enum.map(entries, &Symbols.Workspace.from_entry/1) + + _ -> + [] + end + end + + defp to_symbols(%Document{} = document, entries) do + entries_by_block_id = Enum.group_by(entries, & &1.block_id) + rebuild_structure(entries_by_block_id, document, :root) + end + + defp rebuild_structure(entries_by_block_id, %Document{} = document, block_id) do + block_entries = Map.get(entries_by_block_id, block_id, []) + + Enum.flat_map(block_entries, fn + %Entry{type: {:protocol, _}} = entry -> + map_block_type(document, entry, entries_by_block_id) + + %Entry{type: {:function, type}} = entry when type in [:public, :private] -> + map_block_type(document, entry, entries_by_block_id) + + %Entry{type: type, subtype: :definition} = entry when type in @block_types -> + map_block_type(document, entry, entries_by_block_id) + + %Entry{} = entry -> + case Symbols.Document.from(document, entry) do + {:ok, symbol} -> [symbol] + _ -> [] + end + end) + end + + defp map_block_type(%Document{} = document, %Entry{} = entry, entries_by_block_id) do + result = + if Map.has_key?(entries_by_block_id, entry.id) do + children = + entries_by_block_id + |> rebuild_structure(document, entry.id) + |> Enum.sort_by(&sort_by_start/1) + |> group_functions() + + Symbols.Document.from(document, entry, children) + else + Symbols.Document.from(document, entry) + end + + case result do + {:ok, symbol} -> [symbol] + _ -> [] + end + end + + defp group_functions(children) do + {functions, other} = Enum.split_with(children, &match?({:function, _}, &1.original_type)) + + grouped_functions = + functions + |> Enum.group_by(fn symbol -> + symbol.subject |> String.split(".") |> List.last() |> String.trim() + end) + |> Enum.map(fn + {_name_and_arity, [definition]} -> + definition + + {name_and_arity, [first | _] = defs} -> + last = 
List.last(defs) + [type, _] = String.split(first.name, " ", parts: 2) + name = "#{type} #{name_and_arity}" + + children = + Enum.map(defs, fn child -> + [_, rest] = String.split(child.name, " ", parts: 2) + %Symbols.Document{child | name: rest} + end) + + range = Range.new(first.range.start, last.range.end) + %Symbols.Document{first | name: name, range: range, children: children} + end) + + grouped_functions + |> Enum.concat(other) + |> Enum.sort_by(&sort_by_start/1) + end + + defp sort_by_start(%Symbols.Document{} = symbol) do + start = symbol.range.start + {start.line, start.character} + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/document.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/document.ex new file mode 100644 index 000000000..8009244e8 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/document.ex @@ -0,0 +1,99 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.Symbols.Document do + alias Lexical.Document + alias Lexical.Formats + alias Lexical.RemoteControl.Search.Indexer.Entry + + defstruct [:name, :type, :range, :detail_range, :detail, :original_type, :subject, children: []] + + def from(%Document{} = document, %Entry{} = entry, children \\ []) do + case name_and_type(entry.type, entry, document) do + {name, type} -> + range = entry.block_range || entry.range + + {:ok, + %__MODULE__{ + name: name, + type: type, + range: range, + detail_range: entry.range, + children: children, + original_type: entry.type, + subject: entry.subject + }} + + _ -> + :error + end + end + + @do_regex ~r/\s*do\s*$/ + + defp name_and_type({:function, type}, %Entry{} = entry, %Document{} = document) + when type in [:public, :private, :delegate] do + fragment = + document + |> Document.fragment(entry.range.start, entry.range.end) + |> remove_line_breaks_and_multiple_spaces() + + prefix = + case type do + :public -> "def " + :private -> "defp " + :delegate -> "defdelegate " + end + + {prefix <> fragment, entry.type} + end + + @ignored_attributes ~w[spec doc moduledoc derive impl tag] + @type_name_regex ~r/@type\s+[^\s]+/ + + defp name_and_type(:module_attribute, %Entry{} = entry, document) do + case String.split(entry.subject, "@") do + [_, name] when name in @ignored_attributes -> + nil + + [_, "type"] -> + type_text = Document.fragment(document, entry.range.start, entry.range.end) + + name = + case Regex.scan(@type_name_regex, type_text) do + [[match]] -> match + _ -> "@type ??" 
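+              # fall back to a placeholder when the head of the `@type`
+              # doesn't match @type_name_regex (e.g. unusual formatting)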
+ end + + {name, :type} + + [_, name] -> + {"@#{name}", :module_attribute} + end + end + + defp name_and_type(ex_unit, %Entry{} = entry, document) + when ex_unit in [:ex_unit_describe, :ex_unit_setup, :ex_unit_test] do + name = + document + |> Document.fragment(entry.range.start, entry.range.end) + |> String.trim() + |> String.replace(@do_regex, "") + + {name, ex_unit} + end + + defp name_and_type(:struct, %Entry{} = entry, _document) do + module_name = Formats.module(entry.subject) + {"%#{module_name}{}", :struct} + end + + defp name_and_type(type, %Entry{subject: name}, _document) when is_atom(name) do + {Formats.module(name), type} + end + + defp name_and_type(type, %Entry{} = entry, _document) do + {to_string(entry.subject), type} + end + + defp remove_line_breaks_and_multiple_spaces(string) do + string |> String.split(~r/\s/) |> Enum.reject(&match?("", &1)) |> Enum.join(" ") + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/workspace.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/workspace.ex new file mode 100644 index 000000000..a78a66797 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/symbols/workspace.ex @@ -0,0 +1,55 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.Symbols.Workspace do + defmodule Link do + defstruct [:uri, :range, :detail_range] + + @type t :: %__MODULE__{ + uri: Lexical.uri(), + range: Lexical.Document.Range.t(), + detail_range: Lexical.Document.Range.t() + } + + def new(uri, range, detail_range \\ nil) do + %__MODULE__{uri: uri, range: range, detail_range: detail_range} + end + end + + alias Lexical.Document + alias Lexical.Formats + alias Lexical.RemoteControl.Search.Indexer.Entry + + defstruct [:name, :type, :link, container_name: nil] + + @type t :: %__MODULE__{ + container_name: String.t() | nil, + link: Link.t(), + name: String.t(), + type: atom() + } + + def from_entry(%Entry{} = entry) do + link = + entry.path + |> Document.Path.to_uri() + |> Link.new(entry.block_range, entry.range) + + name = symbol_name(entry.type, entry) + + %__MODULE__{ + name: name, + type: entry.type, + link: link + } + end + + @module_types [:struct, :module] + defp symbol_name(type, entry) when type in @module_types do + Formats.module(entry.subject) + end + + defp symbol_name({:protocol, _}, entry) do + Formats.module(entry.subject) + end + + defp symbol_name(_, entry), + do: entry.subject +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_intelligence/variable.ex b/apps/remote_control/lib/lexical/remote_control/code_intelligence/variable.ex new file mode 100644 index 000000000..241684574 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_intelligence/variable.ex @@ -0,0 +1,249 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.Variable do + alias Lexical.Ast.Analysis + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.RemoteControl.Search.Indexer + alias Lexical.RemoteControl.Search.Indexer.Entry + + require Logger + + @extractors [Indexer.Extractors.Variable] + + @spec definition(Analysis.t(), Position.t(), atom()) :: {:ok, Entry.t()} | :error + def definition(%Analysis{} = analysis, %Position{} = position, variable_name) do + with {:ok, block_structure, entries} <- index_variables(analysis), + {:ok, %Entry{} = definition_entry} <- + do_find_definition(variable_name, block_structure, entries, position) do + {:ok, definition_entry} + else + _ -> + :error + end + end + + @spec 
references(Analysis.t(), Position.t(), atom(), boolean()) :: [Entry.t()] + def references( + %Analysis{} = analysis, + %Position{} = position, + variable_name, + include_definitions? \\ false + ) do + with {:ok, block_structure, entries} <- index_variables(analysis), + {:ok, %Entry{} = definition_entry} <- + do_find_definition(variable_name, block_structure, entries, position) do + references = search_for_references(entries, definition_entry, block_structure) + + entries = + if include_definitions? do + [definition_entry | references] + else + references + end + + Enum.sort_by(entries, fn %Entry{} = entry -> + {entry.range.start.line, entry.range.start.character} + end) + else + _ -> + [] + end + end + + defp index_variables(%Analysis{} = analysis) do + with {:ok, entries} <- Indexer.Quoted.index(analysis, @extractors), + {[block_structure], entries} <- Enum.split_with(entries, &(&1.type == :metadata)) do + {:ok, block_structure.subject, entries} + end + end + + defp do_find_definition(variable_name, block_structure, entries, position) do + with {:ok, entry} <- fetch_entry(entries, variable_name, position) do + search_for_definition(entries, entry, block_structure) + end + end + + defp fetch_entry(entries, variable_name, position) do + entries + |> Enum.find(fn %Entry{} = entry -> + entry.subject == variable_name and entry.type == :variable and + Range.contains?(entry.range, position) + end) + |> case do + %Entry{} = entry -> + {:ok, entry} + + _ -> + :error + end + end + + defp search_for_references(entries, %Entry{} = definition_entry, block_structure) do + block_id_to_children = block_id_to_children(block_structure) + + definition_children = Map.get(block_id_to_children, definition_entry.block_id, []) + + # The algorithm here is to first clean up the entries so they are either definitions of or references to a + # variable with the given name. We sort them by their occurrence in the file, working backwards on a line, so + # definitions earlier in the line shadow definitions later in the line. + # Then we start at the definition entry, and for each entry after that, + # if it's a definition, we mark the state as being shadowed, but reset the state if the block + # id isn't in the children of the current block id. If we're not in a child of the current block + # id, then we're no longer shadowed. + # + # Note: this algorithm doesn't work when we have a block definition whose result rebinds a variable. + # For example: + # entries = [4, 5, 6] + # entries = + # if something() do + # [1 | entries] + # else + # entries + # end + # Searching for the references to the initial variable won't find anything inside the block, but + # searching for the rebound variable will. + + {entries, _, _} = + entries + |> Enum.filter(fn %Entry{} = entry -> + after_definition? = Position.compare(entry.range.start, definition_entry.range.end) == :gt + + variable_type? = entry.type == :variable + correct_subject? = entry.subject == definition_entry.subject + child_of_definition_block? = entry.block_id in definition_children + + variable_type? and correct_subject? and child_of_definition_block? and after_definition?
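+        # keep only same-named variable entries that appear after the
+        # definition and inside the definition's block or one of its children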
+ end) + |> Enum.sort_by(fn %Entry{} = entry -> + start = entry.range.start + {start.line, -start.character, entry.block_id} + end) + |> Enum.reduce({[], false, definition_entry.block_id}, fn + %Entry{subtype: :definition} = entry, {entries, _, _} -> + # we have a definition that's shadowing our definition entry + {entries, true, entry.block_id} + + %Entry{subtype: :reference} = entry, {entries, true, current_block_id} -> + shadowed? = entry.block_id in Map.get(block_id_to_children, current_block_id, []) + + entries = + if shadowed? do + entries + else + [entry | entries] + end + + {entries, shadowed?, entry.block_id} + + %Entry{} = entry, {entries, false, _} -> + # we're a reference and we're not being shadowed; collect it and move on. + {[entry | entries], false, entry.block_id} + end) + + entries + end + + defp search_for_definition(entries, %Entry{} = entry, block_structure) do + block_id_to_parents = collect_parents(block_structure) + block_path = Map.get(block_id_to_parents, entry.block_id) + entries_by_block_id = entries_by_block_id(entries) + + Enum.reduce_while([entry.block_id | block_path], :error, fn block_id, _ -> + block_entries = + entries_by_block_id + |> Map.get(block_id, []) + |> then(fn entries -> + # In the current block, reject all entries that come after the entry whose definition + # we're searching for. This prevents us from finding definitions who are shadowing + # our entry. For example, the definition on the left of the equals in: `param = param + 1`. + + if block_id == entry.block_id do + Enum.drop_while(entries, &(&1.id != entry.id)) + else + entries + end + end) + + case Enum.find(block_entries, &definition_of?(entry, &1)) do + %Entry{} = definition -> + {:halt, {:ok, definition}} + + nil -> + {:cont, :error} + end + end) + end + + defp definition_of?(%Entry{} = needle, %Entry{} = compare) do + compare.type == :variable and compare.subtype == :definition and + compare.subject == needle.subject + end + + defp entries_by_block_id(entries) do + entries + |> Enum.reduce(%{}, fn %Entry{} = entry, acc -> + Map.update(acc, entry.block_id, [entry], &[entry | &1]) + end) + |> Map.new(fn {block_id, entries} -> + entries = + Enum.sort_by( + entries, + fn %Entry{} = entry -> + {entry.range.start.line, -entry.range.start.character} + end, + :desc + ) + + {block_id, entries} + end) + end + + def block_id_to_parents(hierarchy) do + hierarchy + |> flatten_hierarchy() + |> Enum.reduce(%{}, fn {parent_id, child_id}, acc -> + old_parents = [parent_id | Map.get(acc, parent_id, [])] + Map.update(acc, child_id, old_parents, &Enum.concat(&1, old_parents)) + end) + |> Map.put(:root, []) + end + + def block_id_to_children(hierarchy) do + # Note: Parent ids are included in their children list in order to simplify + # checks for "is this id in one of its children" + + hierarchy + |> flatten_hierarchy() + |> Enum.reverse() + |> Enum.reduce(%{root: [:root]}, fn {parent_id, child_id}, current_mapping -> + current_children = [child_id | Map.get(current_mapping, child_id, [parent_id])] + + current_mapping + |> Map.put_new(child_id, [child_id]) + |> Map.update(parent_id, current_children, &Enum.concat(&1, current_children)) + end) + end + + def flatten_hierarchy(hierarchy) do + Enum.flat_map(hierarchy, fn + {k, v} when is_map(v) and map_size(v) > 0 -> + v + |> Map.keys() + |> Enum.map(&{k, &1}) + |> Enum.concat(flatten_hierarchy(v)) + + _ -> + [] + end) + end + + defp collect_parents(block_structure) do + do_collect_parents(block_structure, %{}, []) + end + + defp do_collect_parents(hierarchy, 
parent_map, path) do + Enum.reduce(hierarchy, parent_map, fn {block_id, children}, acc -> + parent_map = Map.put(acc, block_id, path) + do_collect_parents(children, parent_map, [block_id | path]) + end) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_mod/aliases.ex b/apps/remote_control/lib/lexical/remote_control/code_mod/aliases.ex new file mode 100644 index 000000000..105aed952 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/code_mod/aliases.ex @@ -0,0 +1,256 @@ +defmodule Lexical.RemoteControl.CodeMod.Aliases do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Analysis.Alias + alias Lexical.Ast.Analysis.Scope + alias Lexical.Document + alias Lexical.Document.Edit + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.RemoteControl + alias Sourceror.Zipper + + @doc """ + Returns the aliases that are in scope at the given range. + """ + @spec in_scope(Analysis.t(), Range.t()) :: [Alias.t()] + def in_scope(%Analysis{} = analysis, %Range{} = range) do + analysis + |> Analysis.module_scope(range) + |> aliases_in_scope() + end + + @doc """ + Sorts the given aliases according to our rules. + """ + @spec sort(Enumerable.t(Alias.t())) :: [Alias.t()] + def sort(aliases) do + Enum.sort_by(aliases, fn %Alias{} = scope_alias -> + Enum.map(scope_alias.module, fn elem -> elem |> to_string() |> String.downcase() end) + end) + end + + @doc """ + Returns the position in the document where aliases should be inserted. + Since a document can have multiple module definitions, the cursor position is used to + determine the initial starting point. + + This function also returns a string that should be appended to the end of the + edits that are performed. + """ + @spec insert_position(Analysis.t(), Position.t()) :: {Position.t(), String.t() | nil} + def insert_position(%Analysis{} = analysis, %Position{} = cursor_position) do + range = Range.new(cursor_position, cursor_position) + current_aliases = in_scope(analysis, range) + do_insert_position(analysis, current_aliases, range) + end + + @doc """ + Turns a list of aliases into edits. + """ + @spec to_edits([Alias.t()], Position.t(), trailer :: String.t() | nil) :: [Edit.t()] + + def to_edits(aliases, position, trailer \\ nil) + def to_edits([], _, _), do: [] + + def to_edits(aliases, %Position{} = insert_position, trailer) do + aliases = sort(aliases) + initial_spaces = insert_position.character - 1 + + alias_text = + aliases + # get rid of duplicate aliases + |> Enum.uniq_by(& &1.module) + |> Enum.map_join("\n", fn %Alias{} = a -> + text = + if List.last(a.module) == a.as do + "alias #{join(a.module)}" + else + "alias #{join(a.module)}, as: #{join(List.wrap(a.as))}" + end + + indent(text, initial_spaces) + end) + |> String.trim_trailing() + + zeroed = put_in(insert_position.character, 1) + new_alias_range = Range.new(zeroed, zeroed) + + alias_text = + if is_binary(trailer) do + alias_text <> trailer + else + alias_text + end + + edits = remove_old_aliases(aliases) + + edits ++ + [Edit.new(alias_text, new_alias_range)] + end + + defp aliases_in_scope(%Scope{} = scope) do + scope.aliases + |> Enum.filter(fn %Alias{} = scope_alias -> + scope_alias.explicit?
and Range.contains?(scope.range, scope_alias.range.start) + end) + |> sort() + end + + defp join(module) do + Enum.join(module, ".") + end + + defp indent(text, spaces) do + String.duplicate(" ", spaces) <> text + end + + defp remove_old_aliases(aliases) do + ranges = + aliases + # Reject new aliases that don't have a range + |> Enum.reject(&is_nil(&1.range)) + # iterating back to start means we won't have prior edits + # clobber subsequent edits + |> Enum.sort_by(& &1.range.start.line, :desc) + |> Enum.uniq_by(& &1.range) + |> Enum.map(fn %Alias{} = alias -> + orig_range = alias.range + + orig_range + |> put_in([:start, :character], 1) + |> update_in([:end], fn %Position{} = pos -> + %Position{pos | character: 1, line: pos.line + 1} + end) + end) + + first_alias_index = length(ranges) - 1 + + ranges + |> Enum.with_index() + |> Enum.map(fn + {range, ^first_alias_index} -> + # add a new line where the first alias was to make space + # for the rewritten aliases + Edit.new("\n", range) + + {range, _} -> + Edit.new("", range) + end) + |> merge_adjacent_edits() + end + + defp merge_adjacent_edits([]), do: [] + defp merge_adjacent_edits([_] = edit), do: edit + + defp merge_adjacent_edits([edit | rest]) do + rest + |> Enum.reduce([edit], fn %Edit{} = current, [%Edit{} = last | rest] = edits -> + with {same_text, same_text} <- {last.text, current.text}, + {same, same} <- {to_tuple(current.range.end), to_tuple(last.range.start)} do + collapsed = put_in(current.range.end, last.range.end) + + [collapsed | rest] + else + _ -> + [current | edits] + end + end) + |> Enum.reverse() + end + + defp to_tuple(%Position{} = position) do + {position.line, position.character} + end + + defp do_insert_position(%Analysis{}, [%Alias{} | _] = aliases, _) do + first = Enum.min_by(aliases, &{&1.range.start.line, &1.range.start.character}) + {first.range.start, nil} + end + + defp do_insert_position(%Analysis{} = analysis, _, range) do + case Analysis.module_scope(analysis, range) do + %Scope{id: :global} = scope -> + {scope.range.start, "\n"} + + %Scope{} = scope -> + scope_start = scope.range.start + # we use the end position here because the start position is right after + # the do for modules, which puts it well into the line. The end position + # is before the end, which is equal to the indent of the scope. 
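+      #
+      # For illustration (positions invented):
+      #
+      #     defmodule Parent do   # scope.range.start is just after `do`
+      #       alias Foo.Bar       # the insert position we want
+      #     end                   # scope.range.end tracks the scope's indent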
+ + initial_position = + scope_start + |> put_in([:line], scope_start.line + 1) + |> put_in([:character], scope.range.end.character) + |> constrain_to_range(scope.range) + + position = + case Ast.zipper_at(analysis.document, scope_start) do + {:ok, zipper} -> + {_, position} = + Zipper.traverse(zipper, initial_position, fn + %Zipper{node: {:@, _, [{:moduledoc, _, _}]}} = zipper, _acc -> + # If we detect a moduledoc node, place the alias after it + range = Sourceror.get_range(zipper.node) + + {zipper, after_node(analysis.document, scope.range, range)} + + zipper, acc -> + {zipper, acc} + end) + + position + + _ -> + initial_position + end + + maybe_move_cursor_to_token_start(position, analysis) + end + end + + defp after_node(%Document{} = document, %Range{} = scope_range, %{ + start: start_pos, + end: end_pos + }) do + document + |> Position.new(end_pos[:line] + 1, start_pos[:column]) + |> constrain_to_range(scope_range) + end + + defp constrain_to_range(%Position{} = position, %Range{} = scope_range) do + cond do + position.line == scope_range.end.line -> + character = min(scope_range.end.character, position.character) + %Position{position | character: character} + + position.line > scope_range.end.line -> + %Position{scope_range.end | character: 1} + + true -> + position + end + end + + defp maybe_move_cursor_to_token_start(%Position{} = position, %Analysis{} = analysis) do + project = RemoteControl.get_project() + + with {:ok, env} <- Ast.Env.new(project, analysis, position), + false <- String.last(env.prefix) in [" ", ""] do + # ` en|d` -> `2` + # `en|d` -> `2` + non_empty_characters_count = env.prefix |> String.trim_leading() |> String.length() + + new_position = %Position{ + position + | character: position.character - non_empty_characters_count + } + + {new_position, "\n"} + else + _ -> + {position, "\n"} + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/code_mod/format.ex b/apps/remote_control/lib/lexical/remote_control/code_mod/format.ex index dcaabd7dc..e934b2867 100644 --- a/apps/remote_control/lib/lexical/remote_control/code_mod/format.ex +++ b/apps/remote_control/lib/lexical/remote_control/code_mod/format.ex @@ -8,10 +8,97 @@ defmodule Lexical.RemoteControl.CodeMod.Format do require Logger + @built_in_locals_without_parens [ + # Special forms + alias: 1, + alias: 2, + case: 2, + cond: 1, + for: :*, + import: 1, + import: 2, + quote: 1, + quote: 2, + receive: 1, + require: 1, + require: 2, + try: 1, + with: :*, + + # Kernel + def: 1, + def: 2, + defp: 1, + defp: 2, + defguard: 1, + defguardp: 1, + defmacro: 1, + defmacro: 2, + defmacrop: 1, + defmacrop: 2, + defmodule: 2, + defdelegate: 2, + defexception: 1, + defoverridable: 1, + defstruct: 1, + destructure: 2, + raise: 1, + raise: 2, + reraise: 2, + reraise: 3, + if: 2, + unless: 2, + use: 1, + use: 2, + + # Stdlib, + defrecord: 2, + defrecord: 3, + defrecordp: 2, + defrecordp: 3, + + # Testing + assert: 1, + assert: 2, + assert_in_delta: 3, + assert_in_delta: 4, + assert_raise: 2, + assert_raise: 3, + assert_receive: 1, + assert_receive: 2, + assert_receive: 3, + assert_received: 1, + assert_received: 2, + doctest: 1, + doctest: 2, + refute: 1, + refute: 2, + refute_in_delta: 3, + refute_in_delta: 4, + refute_receive: 1, + refute_receive: 2, + refute_receive: 3, + refute_received: 1, + refute_received: 2, + setup: 1, + setup: 2, + setup_all: 1, + setup_all: 2, + test: 1, + test: 2, + + # Mix config + config: 2, + config: 3, + import_config: 1 + ] + @type formatter_function :: (String.t() -> any) | 
nil - @spec edits(Project.t(), Document.t()) :: {:ok, Changes.t()} | {:error, any} - def edits(%Project{} = project, %Document{} = document) do + @spec edits(Document.t()) :: {:ok, Changes.t()} | {:error, any} + def edits(%Document{} = document) do + project = RemoteControl.get_project() + with :ok <- Build.compile_document(project, document), {:ok, formatted} <- do_format(project, document) do edits = Diff.diff(document, formatted) @@ -33,7 +120,7 @@ defmodule Lexical.RemoteControl.CodeMod.Format do @spec formatter_for(Project.t(), String.t()) :: {:ok, formatter_function} defp formatter_for(%Project{} = project, uri_or_path) do path = Document.Path.ensure_path(uri_or_path) - formatter_function = formatter_for_file(project, path) + {formatter_function, _opts} = formatter_for_file(project, path) wrapped_formatter_function = wrap_with_try_catch(formatter_function) {:ok, wrapped_formatter_function} end @@ -69,16 +156,19 @@ defmodule Lexical.RemoteControl.CodeMod.Format do String.starts_with?(child, normalized_parent) end - defp formatter_for_file(%Project{} = project, file_path) do + @doc """ + Returns `{formatter_function, opts}` for the given file. + """ + def formatter_for_file(%Project{} = project, file_path) do fetch_formatter = fn _ -> Mix.Tasks.Format.formatter_for_file(file_path) end - {formatter, _opts} = + {formatter_function, opts} = if RemoteControl.project_node?() do - case RemoteControl.Mix.in_project(project, fetch_formatter) do + case mix_formatter_from_task(project, file_path) do {:ok, result} -> result - _error -> + :error -> formatter_opts = case find_formatter_exs(project, file_path) do {:ok, opts} -> @@ -94,13 +184,21 @@ defmodule Lexical.RemoteControl.CodeMod.Format do IO.iodata_to_binary([formatted_source, ?\n]) end - {formatter, nil} + {formatter, formatter_opts} end else fetch_formatter.(nil) end - formatter + opts = + Keyword.update( + opts, + :locals_without_parens, + @built_in_locals_without_parens, + &(@built_in_locals_without_parens ++ &1) + ) + + {formatter_function, opts} end defp find_formatter_exs(%Project{} = project, file_path) do @@ -113,13 +211,19 @@ defmodule Lexical.RemoteControl.CodeMod.Format do end defp do_find_formatter_exs(root_path, current_path) do - with :error <- formatter_exs_contents(current_path) do - parent = - current_path - |> Path.join("..") - |> Path.expand() + if File.exists?(current_path) do + with :error <- formatter_exs_contents(current_path) do + parent = + current_path + |> Path.join("..") + |> Path.expand() - do_find_formatter_exs(root_path, parent) + do_find_formatter_exs(root_path, parent) + end + else + # the current path doesn't exist, it doesn't make sense to keep looking + # for the .formatter.exs in its parents. 
Look for one in the root directory + do_find_formatter_exs(root_path, Path.join(root_path, ".formatter.exs")) end end @@ -137,4 +241,23 @@ defmodule Lexical.RemoteControl.CodeMod.Format do :error end end + + defp mix_formatter_from_task(%Project{} = project, file_path) do + try do + root_path = Project.root_path(project) + deps_paths = RemoteControl.deps_paths() + + formatter_and_opts = + Mix.Tasks.Future.Format.formatter_for_file(file_path, + root: root_path, + deps_paths: deps_paths, + plugin_loader: fn plugins -> Enum.filter(plugins, &Code.ensure_loaded?/1) end + ) + + {:ok, formatter_and_opts} + rescue + _ -> + :error + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex b/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex deleted file mode 100644 index 6ee6fa8e7..000000000 --- a/apps/remote_control/lib/lexical/remote_control/code_mod/replace_with_underscore.ex +++ /dev/null @@ -1,46 +0,0 @@ -defmodule Lexical.RemoteControl.CodeMod.ReplaceWithUnderscore do - alias Lexical.Ast - alias Lexical.Document - alias Lexical.Document.Changes - alias Sourceror.Zipper - - @spec edits(Document.t(), non_neg_integer(), String.t() | atom) :: - {:ok, Changes.t()} | :error - def edits(%Document{} = document, line_number, variable_name) do - variable_name = ensure_atom(variable_name) - - case apply_transform(document, line_number, variable_name) do - {:ok, edits} -> - {:ok, Changes.new(document, edits)} - - error -> - error - end - end - - defp ensure_atom(variable_name) when is_binary(variable_name) do - String.to_atom(variable_name) - end - - defp ensure_atom(variable_name) when is_atom(variable_name) do - variable_name - end - - defp apply_transform(document, line_number, unused_variable_name) do - underscored_variable_name = :"_#{unused_variable_name}" - - result = - Ast.traverse_line(document, line_number, [], fn - %Zipper{node: {^unused_variable_name, _meta, nil} = node} = zipper, patches -> - [patch] = Sourceror.Patch.rename_identifier(node, underscored_variable_name) - {zipper, [patch | patches]} - - zipper, acc -> - {zipper, acc} - end) - - with {:ok, _, patches} <- result do - Ast.patches_to_edits(document, patches) - end - end -end diff --git a/apps/remote_control/lib/lexical/remote_control/commands/reindex.ex b/apps/remote_control/lib/lexical/remote_control/commands/reindex.ex new file mode 100644 index 000000000..610854b2f --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/commands/reindex.ex @@ -0,0 +1,166 @@ +defmodule Lexical.RemoteControl.Commands.Reindex do + defmodule State do + alias Lexical.Ast.Analysis + alias Lexical.Document + alias Lexical.ProcessCache + alias Lexical.RemoteControl.Search + alias Lexical.RemoteControl.Search.Indexer + + require Logger + require ProcessCache + + defstruct reindex_fun: nil, index_task: nil, pending_updates: %{} + + def new(reindex_fun) do + %__MODULE__{reindex_fun: reindex_fun} + end + + def set_task(%__MODULE__{} = state, {_, _} = task) do + %__MODULE__{state | index_task: task} + end + + def clear_task(%__MODULE__{} = state) do + %__MODULE__{state | index_task: nil} + end + + def reindex_uri(%__MODULE__{index_task: nil} = state, uri) do + case entries_for_uri(uri) do + {:ok, path, entries} -> + Search.Store.update(path, entries) + + _ -> + :ok + end + + state + end + + def reindex_uri(%__MODULE__{} = state, uri) do + case entries_for_uri(uri) do + {:ok, path, entries} -> + put_in(state.pending_updates[path], entries) + + _ -> + state + end + 
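+    # while an index task is running, park the update in `pending_updates`
+    # (last write per path wins); `flush_pending_updates/1` applies them
+    # once the task's :DOWN message arrives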
end + + def flush_pending_updates(%__MODULE__{} = state) do + Enum.each(state.pending_updates, fn {path, entries} -> + Search.Store.update(path, entries) + end) + + %__MODULE__{state | pending_updates: %{}} + end + + defp entries_for_uri(uri) do + with {:ok, %Document{} = document, %Analysis{} = analysis} <- + Document.Store.fetch(uri, :analysis), + {:ok, entries} <- Indexer.Quoted.index_with_cleanup(analysis) do + {:ok, document.path, entries} + else + error -> + Logger.error("Could not update index because #{inspect(error)}") + error + end + end + end + + @moduledoc """ + A simple GenServer that prevents more than one reindexing job from running at the same time. + """ + + alias Lexical.Document + alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.Search + + use GenServer + import Api.Messages + + def start_link(opts) do + [reindex_fun: fun] = Keyword.validate!(opts, reindex_fun: &do_reindex/1) + GenServer.start_link(__MODULE__, fun, name: __MODULE__) + end + + def uri(uri) do + GenServer.cast(__MODULE__, {:reindex_uri, uri}) + end + + def perform do + perform(RemoteControl.get_project()) + end + + def perform(%Project{} = project) do + GenServer.call(__MODULE__, {:perform, project}) + end + + def running? do + GenServer.call(__MODULE__, :running?) + end + + @impl GenServer + def init(reindex_fun) do + Process.flag(:fullsweep_after, 5) + schedule_gc() + {:ok, State.new(reindex_fun)} + end + + @impl GenServer + def handle_call(:running?, _from, %State{index_task: index_task} = state) do + {:reply, match?({_, _}, index_task), state} + end + + def handle_call({:perform, project}, _from, %State{index_task: nil} = state) do + index_task = spawn_monitor(fn -> state.reindex_fun.(project) end) + {:reply, :ok, State.set_task(state, index_task)} + end + + def handle_call({:perform, _project}, _from, state) do + {:reply, {:error, "Already Running"}, state} + end + + @impl GenServer + def handle_cast({:reindex_uri, uri}, %State{} = state) do + {:noreply, State.reindex_uri(state, uri)} + end + + @impl GenServer + def handle_info({:DOWN, ref, :process, pid, _reason}, %State{index_task: {pid, ref}} = state) do + new_state = + state + |> State.flush_pending_updates() + |> State.clear_task() + + {:noreply, new_state} + end + + @impl GenServer + def handle_info(:gc, %State{} = state) do + :erlang.garbage_collect() + schedule_gc() + {:noreply, state} + end + + defp do_reindex(%Project{} = project) do + RemoteControl.broadcast(project_reindex_requested(project: project)) + + {elapsed_us, result} = + :timer.tc(fn -> + with {:ok, entries} <- Search.Indexer.create_index(project) do + Search.Store.replace(entries) + end + end) + + RemoteControl.broadcast( + project_reindexed(project: project, elapsed_ms: round(elapsed_us / 1000), status: :success) + ) + + result + end + + defp schedule_gc do + Process.send_after(self(), :gc, :timer.seconds(5)) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/compilation/tracer.ex b/apps/remote_control/lib/lexical/remote_control/compilation/tracer.ex index 203a6a60f..e779be3d5 100644 --- a/apps/remote_control/lib/lexical/remote_control/compilation/tracer.ex +++ b/apps/remote_control/lib/lexical/remote_control/compilation/tracer.ex @@ -1,14 +1,14 @@ defmodule Lexical.RemoteControl.Compilation.Tracer do alias Lexical.RemoteControl alias Lexical.RemoteControl.Build - alias Lexical.RemoteControl.Dispatch + alias Lexical.RemoteControl.Module.Loader import RemoteControl.Api.Messages def 
trace({:on_module, module_binary, _filename}, %Macro.Env{} = env) do message = extract_module_updated(env.module, module_binary, env.file) maybe_report_progress(env.file) - Dispatch.broadcast(message) + RemoteControl.broadcast(message) :ok end @@ -17,7 +17,7 @@ defmodule Lexical.RemoteControl.Compilation.Tracer do end def extract_module_updated(module, module_binary, filename) do - unless Code.ensure_loaded?(module) do + unless Loader.ensure_loaded?(module) do erlang_filename = filename |> ensure_filename() @@ -60,7 +60,7 @@ defmodule Lexical.RemoteControl.Compilation.Tracer do if Path.extname(file) == ".ex" do file |> progress_message() - |> Dispatch.broadcast() + |> RemoteControl.broadcast() end end diff --git a/apps/remote_control/lib/lexical/remote_control/completion.ex b/apps/remote_control/lib/lexical/remote_control/completion.ex index f21160fcc..fedd3a558 100644 --- a/apps/remote_control/lib/lexical/remote_control/completion.ex +++ b/apps/remote_control/lib/lexical/remote_control/completion.ex @@ -1,10 +1,18 @@ defmodule Lexical.RemoteControl.Completion do - alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Ast.Env alias Lexical.Document alias Lexical.Document.Position + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeMod.Format alias Lexical.RemoteControl.Completion.Candidate - def elixir_sense_expand(doc_string, %Position{} = position) do + import Document.Line + import Lexical.Logging + + def elixir_sense_expand(%Env{} = env) do + {doc_string, position} = strip_struct_operator(env) + line = position.line character = position.character hint = ElixirSense.Core.Source.prefix(doc_string, line, character) @@ -12,21 +20,60 @@ defmodule Lexical.RemoteControl.Completion do if String.trim(hint) == "" do [] else - for suggestion <- ElixirSense.suggestions(doc_string, line, character), - candidate = Candidate.from_elixir_sense(suggestion), + {_formatter, opts} = + timed_log("formatter for file", fn -> + Format.formatter_for_file(env.project, env.document.path) + end) + + locals_without_parens = Keyword.fetch!(opts, :locals_without_parens) + + for suggestion <- + timed_log("ES suggestions", fn -> + ElixirSense.suggestions(doc_string, line, character) + end), + candidate = + timed_log("from_elixir_sense", fn -> + from_elixir_sense(suggestion, locals_without_parens) + end), candidate != nil do candidate end end end - def struct_fields(%Document{} = document, %Position{} = position) do + defp from_elixir_sense(suggestion, locals_without_parens) do + suggestion + |> Candidate.from_elixir_sense() + |> maybe_suppress_parens(locals_without_parens) + end + + defp maybe_suppress_parens(%struct{} = candidate, locals_without_parens) + when struct in [Candidate.Function, Candidate.Macro, Candidate.Typespec] do + atom_name = String.to_atom(candidate.name) + suppress_parens? 
= local_without_parens?(atom_name, candidate.arity, locals_without_parens) + + %{candidate | parens?: not suppress_parens?} + end + + defp maybe_suppress_parens(candidate, _), do: candidate + + defp local_without_parens?(fun, arity, locals_without_parens) do + arity > 0 and + Enum.any?(locals_without_parens, fn + {^fun, :*} -> true + {^fun, ^arity} -> true + _ -> false + end) + end + + def struct_fields(%Analysis{} = analysis, %Position{} = position) do container_struct_module = - document + analysis |> Lexical.Ast.cursor_path(position) |> container_struct_module() - with {:ok, struct_module} <- Ast.expand_aliases(container_struct_module, document, position), + with {:ok, struct_module} <- + RemoteControl.Analyzer.expand_alias(container_struct_module, analysis, position), true <- function_exported?(struct_module, :__struct__, 0) do struct_module |> struct() @@ -49,4 +96,55 @@ defmodule Lexical.RemoteControl.Completion do _ -> nil end) end + + # HACK: This fixes ElixirSense struct completions for certain cases. + # We should try removing it when we update or remove ElixirSense. + defp strip_struct_operator(%Env{} = env) do + with true <- Env.in_context?(env, :struct_reference), + {:ok, completion_length} <- fetch_struct_completion_length(env) do + column = env.position.character + percent_position = column - (completion_length + 1) + + new_line_start = String.slice(env.line, 0, percent_position - 1) + new_line_end = String.slice(env.line, percent_position..-1//1) + new_line = [new_line_start, new_line_end] + new_position = Position.new(env.document, env.position.line, env.position.character - 1) + line_to_replace = env.position.line + + stripped_text = + env.document.lines + |> Enum.with_index(1) + |> Enum.reduce([], fn + {line(ending: ending), ^line_to_replace}, acc -> + [acc, new_line, ending] + + {line(text: line_text, ending: ending), _}, acc -> + [acc, line_text, ending] + end) + |> IO.iodata_to_binary() + + {stripped_text, new_position} + else + _ -> + doc_string = Document.to_string(env.document) + {doc_string, env.position} + end + end + + defp fetch_struct_completion_length(env) do + case Code.Fragment.cursor_context(env.prefix) do + {:struct, {:dot, {:alias, struct_name}, []}} -> + # add one because of the trailing period + {:ok, length(struct_name) + 1} + + {:struct, {:local_or_var, local_name}} -> + {:ok, length(local_name)} + + {:struct, struct_name} -> + {:ok, length(struct_name)} + + {:local_or_var, local_name} -> + {:ok, length(local_name)} + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/completion/candidate.ex b/apps/remote_control/lib/lexical/remote_control/completion/candidate.ex index 5de7e5a39..931252009 100644 --- a/apps/remote_control/lib/lexical/remote_control/completion/candidate.ex +++ b/apps/remote_control/lib/lexical/remote_control/completion/candidate.ex @@ -4,7 +4,18 @@ defmodule Lexical.RemoteControl.Completion.Candidate do defmodule Function do @moduledoc false - defstruct [:argument_names, :arity, :name, :origin, :type, :visibility, :spec, :metadata] + defstruct [ + :argument_names, + :arity, + :name, + :origin, + :type, + :visibility, + :spec, + :summary, + :metadata, + parens?: true + ] def new(%{} = elixir_sense_map) do arg_names = @@ -21,7 +32,17 @@ defmodule Lexical.RemoteControl.Completion.Candidate do defmodule Callback do @moduledoc false - defstruct [:argument_names, :arity, :metadata, :name, :origin, :spec, :summary, :type] + defstruct [ + :argument_names, + :arity, + :metadata, + :name, + :origin, + :spec, + :summary,
:type, + parens?: true + ] def new(%{} = elixir_sense_map) do arg_names = @@ -38,7 +59,45 @@ defmodule Lexical.RemoteControl.Completion.Candidate do defmodule Macro do @moduledoc false - defstruct [:argument_names, :arity, :name, :origin, :type, :visibility, :spec, :metadata] + defstruct [ + :argument_names, + :arity, + :name, + :origin, + :type, + :visibility, + :spec, + :metadata, + parens?: true + ] + + def new(%{} = elixir_sense_map) do + arg_names = + case ArgumentNames.from_elixir_sense_map(elixir_sense_map) do + :error -> [] + names -> names + end + + __MODULE__ + |> struct(elixir_sense_map) + |> Map.put(:argument_names, arg_names) + end + end + + defmodule Typespec do + @moduledoc false + defstruct [ + :argument_names, + :arity, + :doc, + :metadata, + :type, + :name, + :origin, + :signature, + :spec, + parens?: true + ] def new(%{} = elixir_sense_map) do arg_names = @@ -100,7 +159,7 @@ defmodule Lexical.RemoteControl.Completion.Candidate do defmodule StructField do @moduledoc false - defstruct [:call?, :name, :origin] + defstruct [:call?, :name, :origin, :type_spec] def new(%{} = elixir_sense_map) do struct(__MODULE__, elixir_sense_map) @@ -129,15 +188,6 @@ defmodule Lexical.RemoteControl.Completion.Candidate do end end - defmodule Typespec do - @moduledoc false - defstruct [:args_list, :arity, :doc, :metadata, :name, :signature, :spec] - - def new(%{} = elixir_sense_map) do - struct(__MODULE__, elixir_sense_map) - end - end - defmodule Variable do @moduledoc false defstruct [:name] @@ -176,6 +226,10 @@ defmodule Lexical.RemoteControl.Completion.Candidate do Module.new(elixir_sense_map) end + def from_elixir_sense(%{type: :module, subtype: :alias} = elixir_sense_map) do + Module.new(elixir_sense_map) + end + def from_elixir_sense(%{type: :module, subtype: :behaviour} = elixir_sense_map) do Behaviour.new(elixir_sense_map) end @@ -196,6 +250,11 @@ defmodule Lexical.RemoteControl.Completion.Candidate do MixTask.new(elixir_sense_map) end + # elixir_sense suggests test cases as functions, which need to be filtered. + def from_elixir_sense(%{type: :function, name: "test " <> _} = _elixir_sense_map) do + nil + end + def from_elixir_sense(%{type: :function} = elixir_sense_map) do Function.new(elixir_sense_map) end diff --git a/apps/remote_control/lib/lexical/remote_control/dispatch.ex b/apps/remote_control/lib/lexical/remote_control/dispatch.ex index e5eaed119..9cc3b3e87 100644 --- a/apps/remote_control/lib/lexical/remote_control/dispatch.ex +++ b/apps/remote_control/lib/lexical/remote_control/dispatch.ex @@ -7,10 +7,11 @@ defmodule Lexical.RemoteControl.Dispatch do `Lexical.RemoteControl.Dispatch.Handler` behaviour and add the module to the @handlers module attribute. 
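 For example, a minimal handler might look like this (a sketch modeled on the
 handlers in this app; `MyHandler` and its choice of event are made up):

     defmodule MyHandler do
       alias Lexical.RemoteControl.Dispatch
       import Lexical.RemoteControl.Api.Messages

       require Logger

       use Dispatch.Handler, [project_progress()]

       def on_event(project_progress(label: label), state) do
         Logger.info("saw progress: " <> label)
         {:ok, state}
       end
     end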
""" alias Lexical.RemoteControl + alias Lexical.RemoteControl.Dispatch.Handlers alias Lexical.RemoteControl.Dispatch.PubSub import Lexical.RemoteControl.Api.Messages - @handlers [PubSub] + @handlers [PubSub, Handlers.Indexing] # public API @@ -72,7 +73,7 @@ defmodule Lexical.RemoteControl.Dispatch do end defp register_progress_listener do - register_listener(progress_pid(), [project_progress()]) + register_listener(progress_pid(), [project_progress(), percent_progress()]) end defp progress_pid do diff --git a/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex b/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex index 0eda49eca..f20ec4bd3 100644 --- a/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex +++ b/apps/remote_control/lib/lexical/remote_control/dispatch/handlers/indexing.ex @@ -1,17 +1,17 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do alias Lexical.Document alias Lexical.RemoteControl.Api.Messages + alias Lexical.RemoteControl.Commands alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.Search - alias Lexical.RemoteControl.Search.Indexer require Logger import Messages - use Dispatch.Handler, [file_quoted(), filesystem_event()] + use Dispatch.Handler, [file_compile_requested(), filesystem_event()] - def on_event(file_quoted(document: document, quoted_ast: quoted_ast), state) do - reindex(document, quoted_ast) + def on_event(file_compile_requested(uri: uri), state) do + reindex(uri) {:ok, state} end @@ -24,11 +24,8 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do {:ok, state} end - defp reindex(%Document{} = document, quoted_ast) do - with :ok <- ensure_latest_version(document), - {:ok, entries} <- Indexer.Quoted.index(document, quoted_ast) do - Search.Store.update_async(document.path, entries) - end + defp reindex(uri) do + Commands.Reindex.uri(uri) end def delete_path(uri) do @@ -36,14 +33,4 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.Indexing do |> Document.Path.ensure_path() |> Search.Store.clear() end - - defp ensure_latest_version(%Document{version: version, uri: uri}) do - case Document.Store.fetch(uri) do - {:ok, %Document{version: ^version}} -> - :ok - - _ -> - {:error, :version_mismatch} - end - end end diff --git a/apps/remote_control/lib/lexical/remote_control/mix.ex b/apps/remote_control/lib/lexical/remote_control/mix.ex index 6b378132b..e64f52306 100644 --- a/apps/remote_control/lib/lexical/remote_control/mix.ex +++ b/apps/remote_control/lib/lexical/remote_control/mix.ex @@ -1,7 +1,6 @@ defmodule Lexical.RemoteControl.Mix do alias Lexical.Project alias Lexical.RemoteControl - alias Lexical.RemoteControl.Build def in_project(fun) do if RemoteControl.project_node?() do @@ -17,11 +16,11 @@ defmodule Lexical.RemoteControl.Mix do old_cwd = File.cwd!() - Build.with_lock(fn -> + with_lock(fn -> try do Mix.ProjectStack.post_config(prune_code_paths: false) - build_path = Project.build_path(project) + build_path = RemoteControl.Build.path(project) project_root = Project.root_path(project) project @@ -48,4 +47,8 @@ defmodule Lexical.RemoteControl.Mix do end end) end + + defp with_lock(fun) do + RemoteControl.with_lock(__MODULE__, fun) + end end diff --git a/apps/remote_control/lib/lexical/remote_control/mix.tasks.deps.safe_compile.ex b/apps/remote_control/lib/lexical/remote_control/mix.tasks.deps.safe_compile.ex index 9b5074fe3..51d871cc8 100644 --- a/apps/remote_control/lib/lexical/remote_control/mix.tasks.deps.safe_compile.ex +++ 
b/apps/remote_control/lib/lexical/remote_control/mix.tasks.deps.safe_compile.ex @@ -48,7 +48,7 @@ defmodule Mix.Tasks.Deps.SafeCompile do @impl true def run(args) do - if Version.match?(System.version(), ">= 1.15.0") do + if Elixir.Features.compile_keeps_current_directory?() do Mix.Tasks.Deps.Compile.run(args) else unless "--no-archives-check" in args do diff --git a/apps/remote_control/lib/lexical/remote_control/module/loader.ex b/apps/remote_control/lib/lexical/remote_control/module/loader.ex new file mode 100644 index 000000000..149c3d069 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/module/loader.ex @@ -0,0 +1,43 @@ +defmodule Lexical.RemoteControl.Module.Loader do + @moduledoc """ + Apparently, Code.ensure_loaded?/1 is pretty slow. I'm guessing because it has to do a + round trip to the code server for each check. This in turn slows down indexing, so the thought + is that having a cache will improve performance + """ + + alias Future.Code + use Agent + + def start_link(_) do + initialize = fn -> + Map.new(:code.all_loaded(), fn {name, _} -> {:module, name} end) + end + + Agent.start_link(initialize, name: __MODULE__) + end + + def ensure_loaded(module_name) do + Agent.get_and_update(__MODULE__, fn + %{^module_name => result} = state -> + {result, state} + + state -> + result = Code.ensure_loaded(module_name) + {result, Map.put(state, module_name, result)} + end) + end + + def ensure_loaded?(module_name) do + match?({:module, ^module_name}, ensure_loaded(module_name)) + end + + def loaded?(module_name) do + Agent.get(__MODULE__, fn + %{^module_name => {:module, _}} -> + true + + _ -> + false + end) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/module_mappings.ex b/apps/remote_control/lib/lexical/remote_control/module_mappings.ex index 7bb6e61d5..bcb8ce71e 100644 --- a/apps/remote_control/lib/lexical/remote_control/module_mappings.ex +++ b/apps/remote_control/lib/lexical/remote_control/module_mappings.ex @@ -39,8 +39,8 @@ defmodule Lexical.RemoteControl.ModuleMappings do end end + alias Lexical.RemoteControl alias Lexical.RemoteControl.Api.Messages - alias Lexical.RemoteControl.Dispatch use GenServer @@ -59,15 +59,11 @@ defmodule Lexical.RemoteControl.ModuleMappings do GenServer.call(__MODULE__, {:modules_in_file, file_path}) end - def update(module, file_path) do - GenServer.cast(__MODULE__, {:update, module, file_path}) - end - # GenServer callbacks @impl GenServer def init(_) do - Dispatch.register_listener(self(), [module_updated()]) + RemoteControl.register_listener(self(), [module_updated()]) {:ok, State.new()} end @@ -81,12 +77,6 @@ defmodule Lexical.RemoteControl.ModuleMappings do {:reply, State.file_for_module(state, module), state} end - @impl GenServer - def handle_cast({:update, module, file_path}, %State{} = state) do - new_state = State.update(state, module, file_path) - {:noreply, new_state} - end - @impl GenServer def handle_info(module_updated(name: module_name, file: file_path), %State{} = state) do new_state = State.update(state, module_name, file_path) diff --git a/apps/remote_control/lib/lexical/remote_control/modules.ex b/apps/remote_control/lib/lexical/remote_control/modules.ex index c71027cc8..3b32e8719 100644 --- a/apps/remote_control/lib/lexical/remote_control/modules.ex +++ b/apps/remote_control/lib/lexical/remote_control/modules.ex @@ -1,54 +1,10 @@ defmodule Lexical.RemoteControl.Modules do - defmodule Predicate.Syntax do - @moduledoc """ - Syntax helpers for the predicate syntax - """ - defmacro __using__(_) do 
- quote do - import unquote(__MODULE__), only: [predicate: 1] - end - end - - defmacro predicate(call) do - predicate_mfa = - case call do - {:&, _, [{{:., _, [{:__aliases__, _, module}, fn_name]}, _, args}]} -> - # This represents the syntax of &Kernel.foo(&1, :a) - {Module.concat(module), fn_name, capture_to_placeholder(args)} - - {:&, _, [{fn_name, _, args}]} -> - # This represents foo(:a, :b) - {Kernel, fn_name, capture_to_placeholder(args)} - - _ -> - message = """ - Invalid predicate. - - Predicates should look like function captures, i.e. - predicate(&Module.function(&1, :other)). - - Instead, I got predicate(#{Macro.to_string(call)}) - """ - - raise CompileError, description: message, file: __CALLER__.file, line: __CALLER__.line - end - - Macro.escape(predicate_mfa) - end - - defp capture_to_placeholder(args) do - Enum.map(args, fn - {:&, _, [1]} -> :"$1" - arg -> arg - end) - end - end - @moduledoc """ Utilities for dealing with modules on the remote control node """ alias Future.Code.Typespec + alias Lexical.RemoteControl.Module.Loader @typedoc "Module documentation record as defined by EEP-48" @type docs_v1 :: tuple() @@ -166,6 +122,28 @@ defmodule Lexical.RemoteControl.Modules do end end + @doc """ + Get a list of a module's functions + + This function will attempt to load the given module using our module cache, and + if it's found, will return a keyword list of function names and arities. + """ + @spec fetch_functions(module()) :: {:ok, [{function(), arity()}]} | :error + def fetch_functions(module) when is_atom(module) do + with {:module, module} <- Loader.ensure_loaded(module) do + cond do + function_exported?(module, :__info__, 1) -> + {:ok, module.__info__(:functions)} + + function_exported?(module, :module_info, 1) -> + {:ok, module.module_info(:functions)} + + true -> + :error + end + end + end + defp format_definition(quoted) do quoted |> Future.Code.quoted_to_algebra() @@ -179,14 +157,18 @@ defmodule Lexical.RemoteControl.Modules do `with_prefix` returns all modules on the node on which it runs that start with the given prefix. It's worth noting that it will return _all modules_ regardless if they have been loaded or not. - You can optionally pass a predicate function to further select which modules are returned, but + You can optionally pass a predicate MFA to further select which modules are returned, but it's important to understand that the predicate can only be a function reference to a function that exists on the `remote_control` node. I.e. you CANNOT pass anonymous functions to this module. - To ease things, there is a syntax helper in the `Predicate.Syntax` module that allows you to specify - predicates via a syntax that looks like function captures. 
+ Each module will be added as the first argument to the given list of args in the predicate, + for example: + + iex> Modules.with_prefix("Gen", {Kernel, :macro_exported?, [:__using__, 1]}) + [GenEvent, GenServer] + """ - def with_prefix(prefix_module, predicate_mfa \\ {Function, :identity, [:"$1"]}) + def with_prefix(prefix_module, predicate_mfa \\ {Function, :identity, []}) def with_prefix(prefix_module, mfa) when is_atom(prefix_module) do prefix_module @@ -216,20 +198,11 @@ defmodule Lexical.RemoteControl.Modules do end defp apply_predicate(module_arg, {invoked_module, function, args}) do - args = - Enum.map(args, fn - :"$1" -> - module_arg - - other -> - other - end) - - apply(invoked_module, function, args) + apply(invoked_module, function, [module_arg | args]) end defp ensure_loaded?(_, true), do: true - defp ensure_loaded?(module, _), do: Code.ensure_loaded?(module) + defp ensure_loaded?(module, _), do: Loader.ensure_loaded?(module) defp mark_loaded(modules) when is_list(modules) do newly_loaded = Map.new(modules, &{&1, true}) diff --git a/apps/remote_control/lib/lexical/remote_control/plugin.ex b/apps/remote_control/lib/lexical/remote_control/plugin.ex index d9507aa6b..f99fdba5f 100644 --- a/apps/remote_control/lib/lexical/remote_control/plugin.ex +++ b/apps/remote_control/lib/lexical/remote_control/plugin.ex @@ -1,8 +1,8 @@ defmodule Lexical.RemoteControl.Plugin do alias Lexical.Document alias Lexical.Project + alias Lexical.RemoteControl alias Lexical.RemoteControl.Api.Messages - alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.Plugin.Runner import Messages @@ -20,7 +20,7 @@ defmodule Lexical.RemoteControl.Plugin do diagnostics: diagnostics ) - Dispatch.broadcast(message) + RemoteControl.broadcast(message) end Runner.diagnose(project, on_complete) @@ -40,7 +40,7 @@ defmodule Lexical.RemoteControl.Plugin do diagnostics: diagnostics ) - Dispatch.broadcast(message) + RemoteControl.broadcast(message) end Runner.diagnose(document, on_complete) diff --git a/apps/remote_control/lib/lexical/remote_control/plugin/discovery.ex b/apps/remote_control/lib/lexical/remote_control/plugin/discovery.ex index 4798e78bb..4633218c3 100644 --- a/apps/remote_control/lib/lexical/remote_control/plugin/discovery.ex +++ b/apps/remote_control/lib/lexical/remote_control/plugin/discovery.ex @@ -10,6 +10,7 @@ defmodule Lexical.RemoteControl.Plugin.Discovery do and the plugin will crash. """ + alias Lexical.RemoteControl.Module.Loader alias Lexical.RemoteControl.Plugin.Runner alias Mix.Tasks.Namespace @@ -41,7 +42,7 @@ defmodule Lexical.RemoteControl.Plugin.Discovery do Enum.each(modules, fn module -> namespace_module(module) unload_module(module) - Code.ensure_loaded(module) + Loader.ensure_loaded?(module) end) end @@ -67,6 +68,6 @@ defmodule Lexical.RemoteControl.Plugin.Discovery do end defp namespaced? 
do - Code.ensure_loaded?(@namespaced_document_module) + Loader.ensure_loaded?(@namespaced_document_module) end end diff --git a/apps/remote_control/lib/lexical/remote_control/plugin/runner.ex b/apps/remote_control/lib/lexical/remote_control/plugin/runner.ex index c1bc8cd40..23af365d4 100644 --- a/apps/remote_control/lib/lexical/remote_control/plugin/runner.ex +++ b/apps/remote_control/lib/lexical/remote_control/plugin/runner.ex @@ -3,10 +3,14 @@ defmodule Lexical.RemoteControl.Plugin.Runner do alias Lexical.Document alias Lexical.Project + alias Lexical.RemoteControl.Module.Loader alias Lexical.RemoteControl.Plugin.Runner require Logger + @project_level_plugin_timeout_ms :timer.seconds(3) + @file_level_plugin_timeout_ms 50 + @doc false def clear_config do :persistent_term.erase(__MODULE__) @@ -31,12 +35,17 @@ defmodule Lexical.RemoteControl.Plugin.Runner do @doc false def diagnose(%Project{} = project, on_complete) do - Runner.Coordinator.run_all(project, :diagnostic, on_complete, 2000) + Runner.Coordinator.run_all( + project, + :diagnostic, + on_complete, + @project_level_plugin_timeout_ms + ) end @doc false def diagnose(%Document{} = document, on_complete) do - Runner.Coordinator.run_all(document, :diagnostic, on_complete, 50) + Runner.Coordinator.run_all(document, :diagnostic, on_complete, @file_level_plugin_timeout_ms) end @doc false @@ -51,7 +60,7 @@ defmodule Lexical.RemoteControl.Plugin.Runner do @doc false def register_all(modules) when is_list(modules) do for module <- modules, - Code.ensure_loaded?(module), + Loader.ensure_loaded?(module), plugin_module?(module) do register(module) end diff --git a/apps/remote_control/lib/lexical/remote_control/progress.ex b/apps/remote_control/lib/lexical/remote_control/progress.ex new file mode 100644 index 000000000..3448609c0 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/progress.ex @@ -0,0 +1,68 @@ +defmodule Lexical.RemoteControl.Progress do + alias Lexical.RemoteControl + + import Lexical.RemoteControl.Api.Messages + + @type label :: String.t() + @type message :: String.t() + + @type delta :: pos_integer() + @type on_complete_callback :: (-> any()) + @type report_progress_callback :: (delta(), message() -> any()) + + defmacro __using__(_) do + quote do + import unquote(__MODULE__), only: [with_progress: 2] + end + end + + @spec with_progress(label(), (-> any())) :: any() + def with_progress(label, func) when is_function(func, 0) do + on_complete = begin_progress(label) + + try do + func.() + after + on_complete.() + end + end + + @spec with_percent_progress(label(), pos_integer(), (report_progress_callback() -> any())) :: + any() + def with_percent_progress(label, max, func) when is_function(func, 1) do + {report_progress, on_complete} = begin_percent(label, max) + + try do + func.(report_progress) + after + on_complete.() + end + end + + @spec begin_progress(label :: label()) :: on_complete_callback() + def begin_progress(label) do + RemoteControl.broadcast(project_progress(label: label, stage: :begin)) + + fn -> + RemoteControl.broadcast(project_progress(label: label, stage: :complete)) + end + end + + @spec begin_percent(label(), pos_integer()) :: + {report_progress_callback(), on_complete_callback()} + def begin_percent(label, max) do + RemoteControl.broadcast(percent_progress(label: label, max: max, stage: :begin)) + + report_progress = fn delta, message -> + RemoteControl.broadcast( + percent_progress(label: label, message: message, delta: delta, stage: :report) + ) + end + + complete = fn -> + 
RemoteControl.broadcast(percent_progress(label: label, stage: :complete)) + end + + {report_progress, complete} + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/project_node.ex b/apps/remote_control/lib/lexical/remote_control/project_node.ex index 094e15207..e79e288f6 100644 --- a/apps/remote_control/lib/lexical/remote_control/project_node.ex +++ b/apps/remote_control/lib/lexical/remote_control/project_node.ex @@ -69,13 +69,20 @@ defmodule Lexical.RemoteControl.ProjectNode do def on_nodedown(%__MODULE__{} = state, node_name) do if node_name == Project.node_name(state.project) do - GenServer.reply(state.stopped_by, :ok) + maybe_reply_to_stopper(state) {:shutdown, %{state | status: :stopped}} else :continue end end + def maybe_reply_to_stopper(%State{stopped_by: stopped_by} = state) + when is_tuple(stopped_by) do + GenServer.reply(state.stopped_by, :ok) + end + + def maybe_reply_to_stopper(%State{}), do: :ok + def on_monitored_dead(%__MODULE__{} = state) do if project_rpc(state, Node, :alive?) do halt(state) @@ -97,20 +104,17 @@ defmodule Lexical.RemoteControl.ProjectNode do end end + alias Lexical.Document alias Lexical.RemoteControl.ProjectNodeSupervisor use GenServer def start(project, paths) do node_name = Project.node_name(project) - remote_control_config = Application.get_all_env(:remote_control) + bootstrap_args = [project, Document.Store.entropy(), all_app_configs()] with {:ok, node_pid} <- ProjectNodeSupervisor.start_project_node(project), :ok <- start_node(project, paths), - :ok <- - :rpc.call(node_name, RemoteControl.Bootstrap, :init, [ - project, - remote_control_config - ]) do + :ok <- :rpc.call(node_name, RemoteControl.Bootstrap, :init, bootstrap_args) do {:ok, node_pid} end end @@ -212,7 +216,7 @@ defmodule Lexical.RemoteControl.ProjectNode do @impl true def handle_info(:timeout, %State{} = state) do state = State.halt(state) - GenServer.reply(state.stopped_by, :ok) + State.maybe_reply_to_stopper(state) {:stop, :shutdown, state} end @@ -230,4 +234,11 @@ defmodule Lexical.RemoteControl.ProjectNode do def name(%Project{} = project) do :"#{Project.name(project)}::node_process" end + + @deps_apps Mix.Project.deps_apps() + defp all_app_configs do + Enum.map(@deps_apps, fn app_name -> + {app_name, Application.get_all_env(app_name)} + end) + end end diff --git a/apps/remote_control/lib/lexical/remote_control/project_node/launcher.ex b/apps/remote_control/lib/lexical/remote_control/project_node/launcher.ex deleted file mode 100644 index a2a675e77..000000000 --- a/apps/remote_control/lib/lexical/remote_control/project_node/launcher.ex +++ /dev/null @@ -1,19 +0,0 @@ -defmodule Lexical.RemoteControl.ProjectNode.Launcher do - @moduledoc """ - A module that provides the path of an executable to launch another - erlang node via ports. - """ - def path do - path(:os.type()) - end - - def path({:unix, _}) do - with :non_existing <- :code.where_is_file(~c"port_wrapper.sh") do - :remote_control - |> :code.priv_dir() - |> Path.join("port_wrapper.sh") - |> Path.expand() - end - |> to_string() - end -end diff --git a/apps/remote_control/lib/lexical/remote_control/search/fuzzy.ex b/apps/remote_control/lib/lexical/remote_control/search/fuzzy.ex index d5d385cc1..436818284 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/fuzzy.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/fuzzy.ex @@ -11,15 +11,27 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do returned. 
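  A sketch of typical use (the ids are illustrative; `match/2` returns the
  mapped values, entry ids by default, whose subjects fuzzily match):

      fuzzy = Fuzzy.from_entries(entries)
      Fuzzy.match(fuzzy, "enum")
      #=> [104, 23]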
""" + alias Lexical.Project + alias Lexical.RemoteControl alias Lexical.RemoteControl.Search.Fuzzy.Scorer alias Lexical.RemoteControl.Search.Indexer.Entry + import Record defstruct subject_to_values: %{}, grouping_key_to_values: %{}, preprocessed_subjects: %{}, mapper: nil, + filter_fn: nil, subject_converter: nil + defrecordp :mapped, + application: nil, + grouping_key: nil, + subject: nil, + subtype: nil, + type: nil, + value: nil + @type subject :: String.t() @type extracted_subject :: term() @type grouping_key :: term() @@ -38,11 +50,21 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do @spec from_entries([Entry.t()]) :: t def from_entries(entries) do - mapper = fn %Entry{} = entry -> - {entry.subject, entry.path, entry.ref} - end + mapper = default_mapper() + + new(entries, mapper, &stringify/1, true) + end + + def from_backend(backend) do + mapper = default_mapper() + + mapped_items = + backend.reduce([], fn + %Entry{subtype: :definition} = entry, acc -> [mapper.(entry) | acc] + _, acc -> acc + end) - new(entries, mapper, &stringify/1) + new(mapped_items, mapper, &stringify/1, false) end @doc """ @@ -52,17 +74,27 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do For each tuple returned, the first element will then have the subject converter applied. This will produce a subject, which is what the `match/2` function uses for fuzzy matching. """ - @spec new(Enumerable.t(), mapper(), subject_converter()) :: t - def new(items, mapper, subject_converter) do - subject_grouping_key_values = Enum.map(items, mapper) + @spec new(Enumerable.t(), mapper(), subject_converter(), boolean()) :: t + def new(items, mapper, subject_converter, map_items?) do + filter_fun = build_filter_fn() + + mapped_items = + if map_items? do + items + |> Stream.map(mapper) + |> Enum.filter(filter_fun) + else + Enum.filter(items, filter_fun) + end - extract_and_fix_subject = fn {subject, _, _} -> subject_converter.(subject) end + extract_and_fix_subject = fn mapped() = mapped -> subject_converter.(mapped) end + extract_value = fn mapped(value: value) -> value end - subject_to_values = - Enum.group_by(subject_grouping_key_values, extract_and_fix_subject, &elem(&1, 2)) + subject_to_values = Enum.group_by(mapped_items, extract_and_fix_subject, extract_value) - grouping_key_to_values = - Enum.group_by(subject_grouping_key_values, &elem(&1, 1), &elem(&1, 2)) + extract_grouping_key = fn mapped(grouping_key: grouping_key) -> grouping_key end + + grouping_key_to_values = Enum.group_by(mapped_items, extract_grouping_key, extract_value) preprocessed_subjects = subject_to_values @@ -70,11 +102,12 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do |> Map.new(fn subject -> {subject, Scorer.preprocess(subject)} end) %__MODULE__{ - subject_to_values: subject_to_values, + filter_fn: filter_fun, grouping_key_to_values: grouping_key_to_values, - preprocessed_subjects: preprocessed_subjects, mapper: mapper, - subject_converter: subject_converter + preprocessed_subjects: preprocessed_subjects, + subject_converter: subject_converter, + subject_to_values: subject_to_values } end @@ -85,13 +118,13 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do in descending order of the match relevance. Items at the beginning of the list will have a higher score than items at the end. 
""" - @spec match(t(), String.t()) :: [reference()] + @spec match(t(), String.t()) :: [Entry.entry_id()] def match(%__MODULE__{} = fuzzy, pattern) do fuzzy.subject_to_values - |> Stream.map(fn {subject, references} -> + |> Stream.map(fn {subject, ids} -> case score(fuzzy, subject, pattern) do {:ok, score} -> - {score, references} + {score, ids} :error -> nil @@ -113,30 +146,36 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do end def add(%__MODULE__{} = fuzzy, item) do - {extracted_subject, grouping_key, value} = fuzzy.mapper.(item) - subject = fuzzy.subject_converter.(extracted_subject) - - updated_grouping_key_to_values = - Map.update(fuzzy.grouping_key_to_values, grouping_key, [value], fn old_refs -> - [value | old_refs] - end) - - updated_subject_to_values = - Map.update(fuzzy.subject_to_values, subject, [value], fn old_refs -> - [value | old_refs] - end) - - updated_preprocessed_subjects = - Map.put_new_lazy(fuzzy.preprocessed_subjects, subject, fn -> - Scorer.preprocess(subject) - end) - - %__MODULE__{ + mapped_item = fuzzy.mapper.(item) + + if fuzzy.filter_fn.(mapped_item) do + subject = fuzzy.subject_converter.(mapped_item) + mapped(grouping_key: grouping_key, value: value) = mapped_item + + updated_grouping_key_to_values = + Map.update(fuzzy.grouping_key_to_values, grouping_key, [value], fn old_ids -> + [value | old_ids] + end) + + updated_subject_to_values = + Map.update(fuzzy.subject_to_values, subject, [value], fn old_ids -> + [value | old_ids] + end) + + updated_preprocessed_subjects = + Map.put_new_lazy(fuzzy.preprocessed_subjects, subject, fn -> + Scorer.preprocess(subject) + end) + + %__MODULE__{ + fuzzy + | grouping_key_to_values: updated_grouping_key_to_values, + subject_to_values: updated_subject_to_values, + preprocessed_subjects: updated_preprocessed_subjects + } + else fuzzy - | grouping_key_to_values: updated_grouping_key_to_values, - subject_to_values: updated_subject_to_values, - preprocessed_subjects: updated_preprocessed_subjects - } + end end @doc """ @@ -241,11 +280,107 @@ defmodule Lexical.RemoteControl.Search.Fuzzy do end end - defp stringify(string) when is_binary(string) do + defp stringify(mapped(type: {:function, _}, subject: subject)) do + subject + |> String.split(".") + |> List.last() + |> String.split("/") + |> List.first() + end + + defp stringify(mapped(type: :module, subject: module_name)) do + Lexical.Formats.module(module_name) + end + + defp stringify(mapped(subject: string)) when is_binary(string) do string end + defp stringify(mapped(subject: thing)) do + inspect(thing) + end + + defp stringify(thing) when is_binary(thing) do + thing + end + + defp stringify(atom) when is_atom(atom) do + cond do + function_exported?(atom, :__info__, 1) -> + Lexical.Formats.module(atom) + + function_exported?(atom, :module_info, 0) -> + Lexical.Formats.module(atom) + + true -> + inspect(atom) + end + end + defp stringify(thing) do inspect(thing) end + + defp default_mapper do + fn %Entry{} = entry -> + mapped( + application: entry.application, + grouping_key: entry.path, + subject: entry.subject, + subtype: entry.subtype, + type: entry.type, + value: entry.id + ) + end + end + + defp build_filter_fn do + deps_directories = + if Mix.Project.get() do + deps_roots() + else + {:ok, deps_roots} = + RemoteControl.Mix.in_project(fn _ -> + deps_roots() + end) + + deps_roots + end + + fn + mapped(subtype: :definition, grouping_key: path) -> + # if we don't have an app name, just make sure we're not + # in what looks like a deps directory + not 
Enum.any?(deps_directories, &String.starts_with?(path, &1)) + + _ -> + false + end + end + + defp deps_roots do + deps_roots(RemoteControl.get_project()) + end + + defp deps_roots(%Project{mix_project?: true} = project) do + # Note: This function assumes that the deps directory for all + # found projects is `deps`. Projects may override this directory + # and Lexical won't understand this. This was done because loading + # each sub-project is expensive and changes our global directory. + + [Project.root_path(project), "**", "mix.exs"] + |> Path.join() + |> Path.wildcard() + |> Enum.map(fn relative_mix_path -> + relative_mix_path + |> Path.absname() + |> Path.dirname() + |> Path.join("deps") + end) + |> Enum.filter(&File.exists?/1) + end + + defp deps_roots(_) do + [] + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/fuzzy/scorer.ex b/apps/remote_control/lib/lexical/remote_control/search/fuzzy/scorer.ex index d81d2ec0f..03b1903ee 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/fuzzy/scorer.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/fuzzy/scorer.ex @@ -13,6 +13,7 @@ defmodule Lexical.RemoteControl.Search.Fuzzy.Scorer do 2. Patterns that match more consecutive characters 3. Patterns that match the beginning of the subject 4. Patterns that match the case of the subject + 5. Patterns that match the tail of a subject starting at the last period Based loosely on https://medium.com/@Srekel/implementing-a-fuzzy-search-algorithm-for-the-debuginator-cacc349e6c55 """ @@ -22,7 +23,7 @@ import Record - defrecord :subject, original: nil, graphemes: nil, normalized: nil + defrecord :subject, graphemes: nil, normalized: nil, period_positions: [-1] @typedoc "A match score. Higher numbers mean a more relevant match." @type score :: integer @@ -30,8 +31,8 @@ @type subject :: term() @type pattern :: String.t() @type preprocessed :: - record(:subject, original: String.t(), graphemes: tuple(), normalized: String.t()) - @non_match_score -500 + record(:subject, graphemes: tuple(), normalized: String.t()) + @non_match_score -5000 @doc """ Pre-processes a subject into separate parts that will be helpful during the search phase.
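  For example (a sketch; the `subject` record fields are the ones defined
  above), preprocessing downcases the subject, explodes it into a grapheme
  tuple, and records where the periods sit so the tail-match boost can find
  the last segment:

      subject(graphemes: graphemes, normalized: normalized) = Scorer.preprocess("Enum.FooBar")
      tuple_size(graphemes) #=> 11
      normalized #=> "enum.foobar"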
@@ -45,7 +46,13 @@ defmodule Lexical.RemoteControl.Search.Fuzzy.Scorer do |> String.graphemes() |> List.to_tuple() - subject(original: subject, graphemes: graphemes, normalized: normalize(subject)) + normalized = normalize(subject) + + subject( + graphemes: graphemes, + normalized: normalized, + period_positions: period_positions(normalized) + ) end def preprocess(subject) do @@ -69,23 +76,59 @@ defmodule Lexical.RemoteControl.Search.Fuzzy.Scorer do end def score(subject(normalized: normalized) = subject, pattern) do - %__MODULE__{} = - score = - normalized - |> do_score(normalize(pattern), %__MODULE__{}) - |> Map.update!(:matched_character_positions, &Enum.reverse/1) + normalized_pattern = normalize(pattern) + + case collect_scores(normalized, normalized_pattern) do + [] -> + {false, @non_match_score} + + elems -> + max_score = + elems + |> Enum.map(&calculate_score(&1, subject, pattern)) + |> Enum.max() - {score.match?, calculate_score(score, subject, pattern)} + {true, max_score} + end + end + + defp collect_scores(normalized, normalized_pattern, starting_index \\ 0, acc \\ []) + + defp collect_scores(normalized_subject, normalized_pattern, starting_index, scores) do + # we collect scores because it's possible that a better match occurs later + # in the subject, and if we start peeling off characters greedily, we'll miss + # it. This is more expensive, but it's still pretty quick. + + initial_score = %__MODULE__{index: starting_index} + + case do_score(normalized_subject, normalized_pattern, initial_score) do + %__MODULE__{match?: true, matched_character_positions: [pos | _]} = score -> + slice_start = pos + 1 + next_index = starting_index + slice_start + subject_substring = String.slice(normalized_subject, slice_start..-1//1) + scores = [score | scores] + collect_scores(subject_substring, normalized_pattern, next_index, scores) + + _ -> + scores + end end # out of pattern, we have a match. defp do_score(_, <<>>, %__MODULE__{} = score) do - %__MODULE__{score | match?: true} + %__MODULE__{ + score + | match?: true, + matched_character_positions: Enum.reverse(score.matched_character_positions) + } end # we're out of subject, but we still have pattern, no match defp do_score(<<>>, _, %__MODULE__{} = score) do - score + %__MODULE__{ + score + | matched_character_positions: Enum.reverse(score.matched_character_positions) + } end defp do_score( @@ -119,70 +162,149 @@ defmodule Lexical.RemoteControl.Search.Fuzzy.Scorer do @non_match_score end - defp calculate_score(%__MODULE__{} = score, subject() = subject, pattern) do - subject(graphemes: graphemes) = subject - match_amount_boost = 0 - (tuple_size(graphemes) - length(score.matched_character_positions)) + defp calculate_score(%__MODULE__{} = score, subject(graphemes: graphemes) = subject, pattern) do + pattern_length = String.length(pattern) - [first_match_position | _] = score.matched_character_positions + {consecutive_count, consecutive_bonus} = + consecutive_match_boost(score.matched_character_positions) + + match_amount_boost = consecutive_count * pattern_length - pattern_length_boost = String.length(pattern) + match_boost = tail_match_boost(score, subject, pattern_length) - consecutive_bonus = consecutive_match_bonus(score.matched_character_positions) + camel_case_boost = camel_case_boost(score.matched_character_positions, subject) - # penalize first matches further in the string by making them negative. 
- first_match_bonus = 0 - first_match_position + mismatched_penalty = mismatched_penalty(score.matched_character_positions) - case_match_boost = case_match_boost(pattern, score.matched_character_positions, subject) + incompleteness_penalty = tuple_size(graphemes) - length(score.matched_character_positions) - pattern_length_boost + consecutive_bonus + first_match_bonus + case_match_boost + - match_amount_boost + consecutive_bonus + match_boost + camel_case_boost + + match_amount_boost - mismatched_penalty - incompleteness_penalty end defp normalize(string) do String.downcase(string) end - @consecutive_character_bonus 5 + @tail_match_boost 55 + + defp tail_match_boost( + %__MODULE__{} = score, + subject(graphemes: graphemes, period_positions: period_positions), + pattern_length + ) do + [first_match_position | _] = score.matched_character_positions + + match_end = first_match_position + pattern_length + subject_length = tuple_size(graphemes) + + if MapSet.member?(period_positions, first_match_position - 1) and match_end == subject_length do + # reward a complete match at the end of the last period. This is likely a module + # and the pattern matches the most local parts + @tail_match_boost + else + 0 + end + end + + @consecutive_character_bonus 15 - defp consecutive_match_bonus(matched_positions) do + def consecutive_match_boost(matched_positions) do # This function checks for consecutive matched characters, and # makes matches with more consecutive matched characters worth more. # This means if I type En, it will match Enum more than it will match # Something - matched_positions - |> Enum.chunk_every(2, 1) - |> Enum.reduce(0, fn - [last, current], acc when current == last + 1 -> - acc + @consecutive_character_bonus + max_streak = + matched_positions + |> Enum.reduce([[]], fn + current, [[last | streak] | rest] when last == current - 1 -> + [[current, last | streak] | rest] - _, acc -> - acc - end) - end + current, acc -> + [[current] | acc] + end) + |> Enum.max_by(&length/1) - defp case_match_boost(pattern, matched_positions, subject(graphemes: graphemes)) do - do_case_match_boost(pattern, matched_positions, graphemes, 0) + streak_length = length(max_streak) + {streak_length, @consecutive_character_bonus * streak_length} end - # iterate over the matches, find the character in the subject with that index, and compare it - # to the one in the pattern, boost if they're the same. 
- defp do_case_match_boost(_, [], _, boost), do: boost + @mismatched_character_penalty 5 + + def mismatched_penalty(matched_positions) do + {penalty, _} = + matched_positions + |> Enum.reduce({0, -1}, fn + matched_position, {0, _} -> + # only start counting the penalty after the first match, + # otherwise we will inadvertently penalize matches deeper in the string + {0, matched_position} + + matched_position, {penalty, last_match} -> + distance = matched_position - last_match + + {penalty + distance * @mismatched_character_penalty, matched_position} + end) + + penalty + end - defp do_case_match_boost(<<char::utf8, pattern_rest::binary>>, [index | rest], graphemes, boost) do - boost = - if grapheme_to_utf8(graphemes, index) == char do - boost + 1 + @camel_case_boost 5 + defp camel_case_boost(matched_positions, subject(graphemes: graphemes)) do + graphemes + |> Tuple.to_list() + |> camel_positions() + |> Enum.reduce(0, fn position, score -> + if position in matched_positions do + score + @camel_case_boost else - boost + score end + end) + end - do_case_match_boost(pattern_rest, rest, graphemes, boost) + defp camel_positions(graphemes) do + camel_positions(graphemes, {nil, :lower}, 0, []) end - defp grapheme_to_utf8(graphemes, position) do - <<c::utf8>> = elem(graphemes, position) + defp camel_positions([], _, _, positions) do + Enum.reverse(positions) + end + + defp camel_positions([grapheme | rest], {_last_char, :lower}, position, positions) do + case case_of(grapheme) do + :lower -> + camel_positions(rest, {grapheme, :lower}, position + 1, positions) + + :upper -> + camel_positions(rest, {grapheme, :upper}, position + 1, [position | positions]) + end + end + + defp camel_positions([grapheme | rest], {_last_char, :upper}, position, positions) do + camel_positions(rest, {grapheme, case_of(grapheme)}, position + 1, positions) + end + + defp case_of(grapheme) do + if String.downcase(grapheme) == grapheme do + :lower + else + :upper + end + end + + defp period_positions(string) do + period_positions(string, 0, [-1]) + end + + defp period_positions(<<>>, _, positions), do: MapSet.new(positions) + + defp period_positions(<<".", rest::binary>>, position, positions) do + period_positions(rest, position + 1, [position | positions]) + end - c + defp period_positions(<<_::utf8, rest::binary>>, position, positions) do + period_positions(rest, position + 1, positions) end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer.ex index 216395c8f..c5dddaed6 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer.ex @@ -1,40 +1,60 @@ defmodule Lexical.RemoteControl.Search.Indexer do + alias Lexical.Identifier + alias Lexical.ProcessCache alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Progress alias Lexical.RemoteControl.Search.Indexer + alias Lexical.RemoteControl.Search.Indexer.Entry + + require ProcessCache @indexable_extensions "*.{ex,exs}" def create_index(%Project{} = project) do - entries = - project - |> indexable_files() - |> async_chunks(&index_path/1) - |> List.flatten() + ProcessCache.with_cleanup do + deps_dir = deps_dir() + + entries = + project + |> indexable_files() + |> async_chunks(&index_path(&1, deps_dir)) + + {:ok, entries} + end + end - {:ok, entries} + def update_index(%Project{} = project, backend) do + ProcessCache.with_cleanup do + do_update_index(project, backend) + end end - def update_index(%Project{} = 
project, existing_entries) do - path_to_last_index_at = - existing_entries - |> Enum.group_by(& &1.path, & &1.updated_at) - |> Map.new(fn {k, v} -> {k, Enum.max(v)} end) + defp do_update_index(%Project{} = project, backend) do + path_to_ids = + backend.reduce(%{}, fn + %Entry{path: path} = entry, path_to_ids when is_integer(entry.id) -> + Map.update(path_to_ids, path, entry.id, &max(&1, entry.id)) + + _entry, path_to_ids -> + path_to_ids + end) project_files = project - |> indexable_files + |> indexable_files() |> MapSet.new() - previously_indexed_paths = MapSet.new(path_to_last_index_at, fn {path, _} -> path end) + previously_indexed_paths = MapSet.new(path_to_ids, fn {path, _} -> path end) new_paths = MapSet.difference(project_files, previously_indexed_paths) {paths_to_examine, paths_to_delete} = - Enum.split_with(path_to_last_index_at, fn {path, _} -> File.regular?(path) end) + Enum.split_with(path_to_ids, fn {path, _} -> File.regular?(path) end) changed_paths = - for {path, updated_at_timestamp} <- paths_to_examine, - newer_than?(path, updated_at_timestamp) do + for {path, id} <- paths_to_examine, + newer_than?(path, id) do path end @@ -42,18 +62,21 @@ defmodule Lexical.RemoteControl.Search.Indexer do paths_to_reindex = changed_paths ++ Enum.to_list(new_paths) - entries = - paths_to_reindex - |> async_chunks(&index_path/1) - |> List.flatten() + entries = async_chunks(paths_to_reindex, &index_path(&1, deps_dir())) {:ok, entries, paths_to_delete} end - defp index_path(path) do + defp index_path(path, deps_dir) do with {:ok, contents} <- File.read(path), {:ok, entries} <- Indexer.Source.index(path, contents) do - entries + Enum.filter(entries, fn entry -> + if contained_in?(path, deps_dir) do + entry.subtype == :definition + else + true + end + end) else _ -> [] @@ -67,6 +90,19 @@ defmodule Lexical.RemoteControl.Search.Indexer do # async stream by making each chunk emitted by the initial stream to # be roughly equivalent + # Shuffling the results helps speed in some projects, as larger files tend to clump + # together, like when there are auto-generated elixir modules. + paths_to_sizes = + file_paths + |> path_to_sizes() + |> Enum.shuffle() + + path_to_size_map = Map.new(paths_to_sizes) + + total_bytes = paths_to_sizes |> Enum.map(&elem(&1, 1)) |> Enum.sum() + + {on_update_progress, on_complete} = Progress.begin_percent("Indexing source code", total_bytes) + initial_state = {0, []} chunk_fn = fn {path, file_size}, {block_size, paths} -> @@ -88,17 +124,34 @@ defmodule Lexical.RemoteControl.Search.Indexer do {:cont, paths, []} end - # Shuffling the results helps speed in some projects, as larger files tend to clump - # together, like when there are auto-generated elixir modules. 
- file_paths - |> path_to_sizes() - |> Enum.shuffle() + paths_to_sizes |> Stream.chunk_while(initial_state, chunk_fn, after_fn) - |> Task.async_stream(&Enum.map(&1, processor), timeout: timeout) - |> Enum.flat_map(fn + |> Task.async_stream( + fn chunk -> + block_bytes = chunk |> Enum.map(&Map.get(path_to_size_map, &1)) |> Enum.sum() + result = Enum.map(chunk, processor) + on_update_progress.(block_bytes, "Indexing") + result + end, + timeout: timeout + ) + |> Stream.flat_map(fn {:ok, entry_chunks} -> entry_chunks _ -> [] end) + # The next bit is the only way I could figure out how to + # call complete once the stream was realized + |> Stream.transform( + fn -> nil end, + fn chunk_items, acc -> + # By returning the chunk items list directly, each chunk is + # flattened into the resulting stream + {chunk_items, acc} + end, + fn _acc -> + on_complete.() + end + ) end defp path_to_sizes(paths) do @@ -113,10 +166,10 @@ defmodule Lexical.RemoteControl.Search.Indexer do end) end - defp newer_than?(path, timestamp) do + defp newer_than?(path, entry_id) do case stat(path) do {:ok, %File.Stat{} = stat} -> - stat.mtime > timestamp + stat.mtime > Identifier.to_erl(entry_id) _ -> false @@ -125,14 +178,34 @@ def indexable_files(%Project{} = project) do root_dir = Project.root_path(project) + build_dir = build_dir() [root_dir, "**", @indexable_extensions] |> Path.join() |> Path.wildcard() + |> Enum.reject(&contained_in?(&1, build_dir)) end # stat(path) is here for testing so it can be mocked defp stat(path) do File.stat(path) end + + defp contained_in?(file_path, possible_parent) do + String.starts_with?(file_path, possible_parent) + end + + defp deps_dir do + case RemoteControl.Mix.in_project(&Mix.Project.deps_path/0) do + {:ok, path} -> path + _ -> Mix.Project.deps_path() + end + end + + defp build_dir do + case RemoteControl.Mix.in_project(&Mix.Project.build_path/0) do + {:ok, path} -> path + _ -> Mix.Project.build_path() + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/entry.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/entry.ex index 666da9cbb..cb17f9f0c 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/entry.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/entry.ex @@ -1,70 +1,155 @@ defmodule Lexical.RemoteControl.Search.Indexer.Entry do - @type entry_type :: :module + @type function_type :: :public | :private | :delegated | :usage + @type protocol_type :: :implementation | :definition + + @type entry_type :: + :ex_unit_describe + | :ex_unit_test + | :module + | :module_attribute + | :struct + | :variable + | {:protocol, protocol_type()} + | {:function, function_type()} + @type subject :: String.t() @type entry_subtype :: :reference | :definition @type version :: String.t() - @type entry_reference :: reference() | nil + @type entry_id :: pos_integer() | nil + @type block_id :: pos_integer() | :root + @type subject_query :: subject() | :_ + @type entry_type_query :: entry_type() | :_ + @type entry_subtype_query :: entry_subtype() | :_ + @type constraint :: {:type, entry_type_query()} | {:subtype, entry_subtype_query()} + @type constraints :: [constraint()] defstruct [ :application, - :elixir_version, - :erlang_version, - :parent, + :id, + :block_id, + :block_range, :path, :range, - :ref, :subject, :subtype, :type, - :updated_at + :metadata ] @type t :: %__MODULE__{ application: module(), - elixir_version: version(), - erlang_version: version(), 
          subject: subject(),
-          parent: entry_reference(),
+          block_id: block_id(),
+          block_range: Lexical.Document.Range.t() | nil,
           path: Path.t(),
           range: Lexical.Document.Range.t(),
-          ref: entry_reference(),
           subtype: entry_subtype(),
           type: entry_type(),
-          updated_at: :calendar.datetime()
+          metadata: nil | map()
         }

+  @type datetime_format :: :erl | :unix | :datetime
+  @type date_type :: :calendar.datetime() | integer() | DateTime.t()
+
+  alias Lexical.Identifier
+  alias Lexical.RemoteControl.Search.Indexer.Source.Block
   alias Lexical.StructAccess
-  alias Lexical.VM.Versions

   use StructAccess

-  def reference(path, ref, parent, subject, type, range, application) do
-    new(path, ref, parent, subject, type, :reference, range, application)
+  defguard is_structure(entry) when entry.type == :metadata and entry.subtype == :block_structure
+  defguard is_block(entry) when entry.id == entry.block_id
+
+  @doc """
+  Creates a new entry by copying the passed-in entry.
+
+  The returned entry will have the same fields set as the one passed in,
+  but a different id.
+  You can also pass in a keyword list of overrides, which will overwrite values in
+  the returned struct.
+  """
+  def copy(%__MODULE__{} = orig, overrides \\ []) when is_list(overrides) do
+    %__MODULE__{orig | id: Identifier.next_global!()}
+    |> struct(overrides)
+  end
+
+  def block_structure(path, structure) do
+    %__MODULE__{
+      path: path,
+      subject: structure,
+      type: :metadata,
+      subtype: :block_structure
+    }
+  end
+
+  def reference(path, %Block{} = block, subject, type, range, application) do
+    new(path, Identifier.next_global!(), block.id, subject, type, :reference, range, application)
+  end
+
+  def definition(path, %Block{} = block, subject, type, range, application) do
+    new(path, Identifier.next_global!(), block.id, subject, type, :definition, range, application)
   end

-  def definition(path, ref, parent, subject, type, range, application) do
-    new(path, ref, parent, subject, type, :definition, range, application)
+  def block_definition(
+        path,
+        %Block{} = block,
+        subject,
+        type,
+        block_range,
+        detail_range,
+        application
+      ) do
+    definition =
+      definition(
+        path,
+        block.id,
+        block.parent_id,
+        subject,
+        type,
+        detail_range,
+        application
+      )
+
+    %__MODULE__{definition | block_range: block_range}
   end

-  defp new(path, ref, parent, subject, type, subtype, range, application) do
-    versions = Versions.current()
+  defp definition(path, id, block_id, subject, type, range, application) do
+    new(path, id, block_id, subject, type, :definition, range, application)
+  end

+  defp new(path, id, block_id, subject, type, subtype, range, application) do
     %__MODULE__{
       application: application,
-      elixir_version: versions.elixir,
-      erlang_version: versions.erlang,
-      subject: subject,
-      parent: parent,
+      block_id: block_id,
+      id: id,
       path: path,
       range: range,
-      ref: ref,
+      subject: subject,
       subtype: subtype,
-      type: type,
-      updated_at: timestamp()
+      type: type
     }
   end

-  defp timestamp do
-    :calendar.universal_time()
+  def block?(%__MODULE__{} = entry) do
+    is_block(entry)
+  end
+
+  @spec updated_at(t()) :: date_type()
+  @spec updated_at(t(), datetime_format) :: date_type()
+  def updated_at(entry, format \\ :erl)
+
+  def updated_at(%__MODULE__{id: id} = entry, format) when is_integer(id) do
+    case format do
+      :erl -> Identifier.to_erl(entry.id)
+      :unix -> Identifier.to_unix(id)
+      :datetime -> Identifier.to_datetime(id)
+    end
+  end
+
+  def updated_at(%__MODULE__{}, _format) do
+    nil
+  end
+
+  def put_metadata(%__MODULE__{} = entry, metadata) do
+    %__MODULE__{entry |
metadata: metadata} end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ecto_schema.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ecto_schema.ex new file mode 100644 index 000000000..fd7ff34b0 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ecto_schema.ex @@ -0,0 +1,112 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.EctoSchema do + alias Lexical.Ast + alias Lexical.Document.Position + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Metadata + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + + def extract( + {:schema, meta, [{:__block__, _, [_source]} | _]} = schema_block, + %Reducer{} = reducer + ) do + case extract_schema_entry(schema_block, meta, reducer) do + {:ok, _} = success -> success + :error -> :ignored + end + end + + def extract( + {:embedded_schema, meta, _} = schema_block, + %Reducer{} = reducer + ) do + case extract_schema_entry(schema_block, meta, reducer) do + {:ok, _} = success -> success + :error -> :ignored + end + end + + @embeds [:embeds_one, :embeds_many] + def extract( + {embed_type, _, [_name, {:__aliases__, _, schema_segments} = schema_module | rest]} = + embed, + %Reducer{} = reducer + ) + when embed_type in @embeds do + document = reducer.analysis.document + position = Reducer.position(reducer) + + with true <- block_form?(rest), + {:ok, module} <- Analyzer.current_module(reducer.analysis, position), + {:ok, expanded} <- Analyzer.expand_alias(schema_segments, reducer.analysis, position), + {:ok, block_range} <- Ast.Range.fetch(embed, document), + {:ok, detail_range} <- Ast.Range.fetch(schema_module, document) do + struct_module = Module.concat(module, expanded) + + definition = + Entry.block_definition( + document.path, + Reducer.current_block(reducer), + struct_module, + :struct, + block_range, + detail_range, + Application.get_application(struct_module) + ) + + {:ok, definition} + else + _ -> + :ignored + end + end + + def extract(_ast, _reducer) do + :ignored + end + + defp extract_schema_entry(schema_block, meta, %Reducer{} = reducer) do + document = reducer.analysis.document + position = Reducer.position(reducer) + + with true <- defines_schema?(reducer, position), + {:ok, current_module} <- Analyzer.current_module(reducer.analysis, position), + {do_line, do_column} <- Metadata.position(meta, :do), + {:ok, range} <- Ast.Range.fetch(schema_block, document) do + detail_range = put_in(range.end, Position.new(document, do_line, do_column + 2)) + + definition_entry = + Entry.block_definition( + document.path, + Reducer.current_block(reducer), + current_module, + :struct, + range, + detail_range, + Application.get_application(current_module) + ) + + {:ok, definition_entry} + else + _ -> + :error + end + end + + defp defines_schema?(%Reducer{} = reducer, %Position{} = position) do + Ecto.Schema in Analyzer.uses_at(reducer.analysis, position) + end + + defp block_form?(ast) do + {_, result} = + Macro.prewalk(ast, false, fn + {:__block__, _, [:do]}, false -> + {nil, true} + + ast, acc -> + {ast, acc} + end) + + result + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ex_unit.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ex_unit.ex new file mode 100644 index 000000000..d36f73ab2 --- /dev/null +++ 
b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/ex_unit.ex @@ -0,0 +1,128 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ExUnit do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.Formats + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Metadata + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + + require Logger + + # setup block i.e. setup do... or setup arg do... + def extract({setup_fn, _, args} = setup, %Reducer{} = reducer) + when setup_fn in [:setup, :setup_all] and length(args) > 0 do + {:ok, module} = Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + arity = arity_for(args) + subject = Formats.mfa(module, setup_fn, arity) + setup_type = :"ex_unit_#{setup_fn}" + + entry = + case Metadata.location(setup) do + {:block, _, _, _} -> + block_entry(reducer, setup, setup_type, subject) + + {:expression, _} -> + expression_entry(reducer, setup, setup_type, subject) + end + + {:ok, entry} + end + + # Test block test "test name" do ... or test "test name", arg do + def extract({:test, _, [{_, _, [test_name]} | _] = args} = test, %Reducer{} = reducer) + when is_binary(test_name) do + {:ok, module} = Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + arity = arity_for(args) + module_name = Formats.module(module) + subject = "#{module_name}.[\"#{test_name}\"]/#{arity}" + + entry = + case Metadata.location(test) do + {:block, _, _, _} -> + # a test with a body + block_entry(reducer, test, :ex_unit_test, subject) + + {:expression, _} -> + # a pending test + expression_entry(reducer, test, :ex_unit_test, subject) + end + + {:ok, entry} + end + + # describe blocks + def extract({:describe, _, [{_, _, [describe_name]} | _] = args} = test, %Reducer{} = reducer) do + {:ok, module} = Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + arity = arity_for(args) + module_name = Formats.module(module) + subject = "#{module_name}[\"#{describe_name}\"]/#{arity}" + + entry = block_entry(reducer, test, :ex_unit_describe, subject) + + {:ok, entry} + end + + def extract(_ign, _) do + :ignored + end + + defp expression_entry(%Reducer{} = reducer, ast, type, subject) do + path = reducer.analysis.document.path + block = Reducer.current_block(reducer) + + {:ok, module} = Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + app = Application.get_application(module) + detail_range = detail_range(reducer.analysis, ast) + + Entry.definition(path, block, subject, type, detail_range, app) + end + + defp block_entry(%Reducer{} = reducer, ast, type, subject) do + path = reducer.analysis.document.path + block = Reducer.current_block(reducer) + + {:ok, module} = Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + app = Application.get_application(module) + detail_range = detail_range(reducer.analysis, ast) + block_range = block_range(reducer.analysis, ast) + Entry.block_definition(path, block, subject, type, block_range, detail_range, app) + end + + defp block_range(%Analysis{} = analysis, ast) do + case Ast.Range.fetch(ast, analysis.document) do + {:ok, range} -> range + _ -> nil + end + end + + defp detail_range(%Analysis{} = analysis, ast) do + case Metadata.location(ast) do + {:block, {start_line, start_column}, {do_line, do_column}, _} -> + Range.new( + Position.new(analysis.document, start_line, start_column), 
+ Position.new(analysis.document, do_line, do_column + 2) + ) + + {:expression, {start_line, start_column}} -> + %{end: [line: end_line, column: end_column]} = Sourceror.get_range(ast) + + Range.new( + Position.new(analysis.document, start_line, start_column), + Position.new(analysis.document, end_line, end_column) + ) + end + end + + defp arity_for([{:__block__, _meta, labels}]) do + length(labels) + end + + defp arity_for(args) when is_list(args) do + length(args) + end + + defp arity_for(_), do: 0 +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_definition.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_definition.ex new file mode 100644 index 000000000..39631474b --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_definition.ex @@ -0,0 +1,110 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.FunctionDefinition do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + alias Lexical.RemoteControl.Search.Subject + + @function_definitions [:def, :defp] + + def extract({definition, _, [{fn_name, _, args} = def_ast, body]} = ast, %Reducer{} = reducer) + when is_atom(fn_name) and definition in @function_definitions do + with {:ok, detail_range} <- Ast.Range.fetch(def_ast, reducer.analysis.document), + {:ok, module} <- Analyzer.current_module(reducer.analysis, detail_range.start), + {fun_name, arity} when is_atom(fun_name) <- fun_name_and_arity(def_ast) do + entry = + Entry.block_definition( + reducer.analysis.document.path, + Reducer.current_block(reducer), + Subject.mfa(module, fun_name, arity), + type(definition), + block_range(reducer.analysis, ast), + detail_range, + Application.get_application(module) + ) + + {:ok, entry, [args, body]} + else + _ -> + :ignored + end + end + + def extract({:defdelegate, _, [call, _]} = node, %Reducer{} = reducer) do + document = reducer.analysis.document + + with {:ok, detail_range} <- Ast.Range.fetch(call, document), + {:ok, module} <- Analyzer.current_module(reducer.analysis, detail_range.start), + {:ok, {delegated_module, delegated_name, _delegated_arity}} <- + fetch_delegated_mfa(node, reducer.analysis, detail_range.start) do + {delegate_name, args} = Macro.decompose_call(call) + arity = length(args) + metadata = %{original_mfa: Subject.mfa(delegated_module, delegated_name, arity)} + + entry = + Entry.definition( + document.path, + Reducer.current_block(reducer), + Subject.mfa(module, delegate_name, arity), + {:function, :delegate}, + detail_range, + Application.get_application(module) + ) + + {:ok, Entry.put_metadata(entry, metadata)} + else + _ -> + :ignored + end + end + + def extract(_ast, _reducer) do + :ignored + end + + def fetch_delegated_mfa({:defdelegate, _, [call | keywords]}, analysis, position) do + {_, keyword_args} = + Macro.prewalk(keywords, [], fn + {{:__block__, _, [:to]}, {:__aliases__, _, delegated_module}} = ast, acc -> + {ast, Keyword.put(acc, :to, delegated_module)} + + {{:__block__, _, [:as]}, {:__block__, _, [remote_fun_name]}} = ast, acc -> + {ast, Keyword.put(acc, :as, remote_fun_name)} + + ast, acc -> + {ast, acc} + end) + + with {function_name, args} <- Macro.decompose_call(call), + {:ok, module} <- Analyzer.expand_alias(keyword_args[:to], analysis, position) do + function_name = Keyword.get(keyword_args, :as, function_name) + 
{:ok, {module, function_name, length(args)}} + else + _ -> + :error + end + end + + defp type(:def), do: {:function, :public} + defp type(:defp), do: {:function, :private} + + defp fun_name_and_arity({:when, _, [{fun_name, _, fun_args} | _]}) do + # a function with guards + {fun_name, arity(fun_args)} + end + + defp fun_name_and_arity({fun_name, _, fun_args}) do + {fun_name, arity(fun_args)} + end + + defp arity(nil), do: 0 + defp arity(args) when is_list(args), do: length(args) + + defp block_range(%Analysis{} = analysis, def_ast) do + case Ast.Range.fetch(def_ast, analysis.document) do + {:ok, range} -> range + _ -> nil + end + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_reference.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_reference.ex new file mode 100644 index 000000000..aee640b28 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/function_reference.ex @@ -0,0 +1,276 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.FunctionReference do + alias Lexical.Ast + alias Lexical.Document.Position + alias Lexical.Document.Range + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Extractors.FunctionDefinition + alias Lexical.RemoteControl.Search.Indexer.Metadata + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + alias Lexical.RemoteControl.Search.Subject + + require Logger + + @excluded_functions_key {__MODULE__, :excluded_functions} + # Dynamic calls using apply apply(Module, :function, [1, 2]) + def extract( + {:apply, apply_meta, + [ + {:__aliases__, _, module}, + {:__block__, _, [function_name]}, + {:__block__, _, + [ + arg_list + ]} + ]}, + %Reducer{} = reducer + ) + when is_list(arg_list) and is_atom(function_name) do + entry = entry(reducer, apply_meta, apply_meta, module, function_name, arg_list) + {:ok, entry, nil} + end + + # Dynamic call via Kernel.apply Kernel.apply(Module, :function, [1, 2]) + def extract( + {{:., _, [{:__aliases__, start_metadata, [:Kernel]}, :apply]}, apply_meta, + [ + {:__aliases__, _, module}, + {:__block__, _, [function_name]}, + {:__block__, _, [arg_list]} + ]}, + %Reducer{} = reducer + ) + when is_list(arg_list) and is_atom(function_name) do + entry = entry(reducer, start_metadata, apply_meta, module, function_name, arg_list) + {:ok, entry, nil} + end + + # remote function OtherModule.foo(:arg), OtherModule.foo() or OtherModule.foo + def extract( + {{:., _, [{:__aliases__, start_metadata, module}, fn_name]}, end_metadata, args}, + %Reducer{} = reducer + ) + when is_atom(fn_name) do + entry = entry(reducer, start_metadata, end_metadata, module, fn_name, args) + + {:ok, entry} + end + + # local function capture &downcase/1 + def extract( + {:/, _, [{fn_name, end_metadata, nil}, {:__block__, arity_meta, [arity]}]}, + %Reducer{} = reducer + ) do + position = Reducer.position(reducer) + + {module, _, _} = + RemoteControl.Analyzer.resolve_local_call(reducer.analysis, position, fn_name, arity) + + entry = entry(reducer, end_metadata, arity_meta, module, fn_name, arity) + {:ok, entry, nil} + end + + # Function capture with arity: &OtherModule.foo/3 + def extract( + {:&, _, + [ + {:/, _, + [ + {{:., _, [{:__aliases__, start_metadata, module}, function_name]}, _, []}, + {:__block__, end_metadata, [arity]} + ]} + ]}, + %Reducer{} = reducer + ) do + entry = entry(reducer, start_metadata, end_metadata, module, function_name, arity) 
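The capture clause above builds its entry and then, as the comment that follows explains, returns `nil` as the walker's replacement node. The reason that prunes the subtree is a property of `Macro.prewalk/3`: it recurses into whatever node the callback returns, and `nil` is a leaf. A standalone illustration, with a quoted expression made up for the example:

```elixir
ast = quote do: &OtherModule.foo/3

{_new_ast, captures} =
  Macro.prewalk(ast, [], fn
    {:&, _, _} = capture, acc ->
      # Record the whole capture, then return nil (a leaf) so prewalk
      # never descends into it; the inner OtherModule.foo call is not
      # visited again and double-counted as a plain remote call.
      {nil, [capture | acc]}

    node, acc ->
      {node, acc}
  end)

length(captures)
#=> 1
```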
+ + # we return nil here to stop analysis from progressing down the syntax tree, + # because if it did, the function head that deals with normal calls will pick + # up the rest of the call and return a reference to MyModule.function/0, which + # is incorrect + {:ok, entry, nil} + end + + def extract({:|>, pipe_meta, [pipe_start, {fn_name, meta, args}]}, %Reducer{}) do + # we're in a pipeline. Skip this node by returning nil, but add a marker to the metadata + # that will be picked up by call_arity. + updated_meta = Keyword.put(meta, :pipeline?, true) + new_pipe = {:|>, pipe_meta, [pipe_start, {fn_name, updated_meta, args}]} + + {:ok, nil, new_pipe} + end + + def extract({:defdelegate, _, _} = ast, %Reducer{} = reducer) do + analysis = reducer.analysis + position = Reducer.position(reducer) + + case FunctionDefinition.fetch_delegated_mfa(ast, analysis, position) do + {:ok, {module, function_name, arity}} -> + entry = + Entry.reference( + analysis.document.path, + Reducer.current_block(reducer), + Lexical.Formats.mfa(module, function_name, arity), + {:function, :usage}, + Ast.Range.get(ast, analysis.document), + Application.get_application(module) + ) + + {:ok, entry, []} + + _ -> + :ignored + end + end + + # local function call foo() foo(arg) + def extract({fn_name, meta, args}, %Reducer{} = reducer) + when is_atom(fn_name) and is_list(args) do + if fn_name in excluded_functions() do + :ignored + else + arity = call_arity(args, meta) + position = Reducer.position(reducer) + + {module, _, _} = + RemoteControl.Analyzer.resolve_local_call(reducer.analysis, position, fn_name, arity) + + entry = entry(reducer, meta, meta, [module], fn_name, args) + + {:ok, entry} + end + end + + def extract(_ast, _reducer) do + :ignored + end + + defp entry( + %Reducer{} = reducer, + start_metadata, + end_metadata, + module, + function_name, + args_arity + ) do + arity = call_arity(args_arity, end_metadata) + block = Reducer.current_block(reducer) + + range = + get_reference_range(reducer.analysis.document, start_metadata, end_metadata, function_name) + + case RemoteControl.Analyzer.expand_alias(module, reducer.analysis, range.start) do + {:ok, module} -> + mfa = Subject.mfa(module, function_name, arity) + + Entry.reference( + reducer.analysis.document.path, + block, + mfa, + {:function, :usage}, + range, + Application.get_application(module) + ) + + _ -> + human_location = Reducer.human_location(reducer) + + Logger.warning( + "Could not expand #{inspect(module)} into an alias. Please report this. (at #{human_location})" + ) + + nil + end + end + + defp get_reference_range(document, start_metadata, end_metadata, function_name) do + {start_line, start_column} = start_position(start_metadata) + start_position = Position.new(document, start_line, start_column) + has_parens? = not Keyword.get(end_metadata, :no_parens, false) + + {end_line, end_column} = + case Metadata.position(end_metadata, :closing) do + {line, column} -> + if has_parens? do + {line, column + 1} + else + {line, column} + end + + nil -> + {line, column} = Metadata.position(end_metadata) + + if has_parens? 
do + {line, column + 1} + else + name_length = function_name |> Atom.to_string() |> String.length() + # without parens, the metadata points to the beginning of the call, so + # we need to add the length of the function name to be sure we have it + # all + {line, column + name_length} + end + end + + end_position = Position.new(document, end_line, end_column) + Range.new(start_position, end_position) + end + + defp start_position(metadata) do + Metadata.position(metadata) + end + + defp call_arity(args, metadata) when is_list(args) do + length(args) + pipeline_arity(metadata) + end + + defp call_arity(arity, metadata) when is_integer(arity) do + arity + pipeline_arity(metadata) + end + + defp call_arity(_, metadata), do: pipeline_arity(metadata) + + defp pipeline_arity(metadata) do + if Keyword.get(metadata, :pipeline?, false) do + 1 + else + 0 + end + end + + defp excluded_functions do + case :persistent_term.get(@excluded_functions_key, :not_found) do + :not_found -> + excluded_functions = build_excluded_functions() + :persistent_term.put(@excluded_functions_key, excluded_functions) + excluded_functions + + excluded_functions -> + excluded_functions + end + end + + defp build_excluded_functions do + excluded_kernel_macros = + for {macro_name, _arity} <- Kernel.__info__(:macros), + string_name = Atom.to_string(macro_name), + String.starts_with?(string_name, "def") do + macro_name + end + + # syntax specific functions to exclude from our matches + excluded_operators = + ~w[<- -> && ** ++ -- .. "..//" ! <> =~ @ |> | || * + - / != !== < <= == === > >=]a + + excluded_keywords = ~w[and if import in not or raise require try use]a + + excluded_special_forms = + :macros + |> Kernel.SpecialForms.__info__() + |> Keyword.keys() + + excluded_kernel_macros + |> Enum.concat(excluded_operators) + |> Enum.concat(excluded_special_forms) + |> Enum.concat(excluded_keywords) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex index 504edf3b7..e010c3c35 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module.ex @@ -4,43 +4,96 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do """ alias Lexical.Ast - alias Lexical.Document alias Lexical.Document.Position alias Lexical.Document.Range alias Lexical.ProcessCache + alias Lexical.RemoteControl alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Indexer.Metadata alias Lexical.RemoteControl.Search.Indexer.Source.Block alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + alias Lexical.RemoteControl.Search.Subject + + require Logger + + @definition_mappings %{ + defmodule: :module, + defprotocol: {:protocol, :definition} + } + @module_definitions Map.keys(@definition_mappings) # extract a module definition def extract( - {:defmodule, defmodule_meta, - [{:__aliases__, module_name_meta, module_name}, module_block]}, + {definition, defmodule_meta, + [{:__aliases__, module_name_meta, module_name}, module_block]} = defmodule_ast, + %Reducer{} = reducer + ) + when definition in @module_definitions do + %Block{} = block = Reducer.current_block(reducer) + + case resolve_alias(reducer, module_name) do + {:ok, aliased_module} -> + module_position = Metadata.position(module_name_meta) + detail_range = to_range(reducer, module_name, module_position) + + entry = + 
Entry.block_definition( + reducer.analysis.document.path, + block, + Subject.module(aliased_module), + @definition_mappings[definition], + block_range(reducer.analysis.document, defmodule_ast), + detail_range, + Application.get_application(aliased_module) + ) + + module_name_meta = Reducer.skip(module_name_meta) + + elem = + {:defmodule, defmodule_meta, + [{:__aliases__, module_name_meta, module_name}, module_block]} + + {:ok, entry, elem} + + _ -> + :ignored + end + end + + # defimpl MyProtocol, for: MyStruct do ... + def extract( + {:defimpl, _, [{:__aliases__, _, module_name}, [for_block], _impl_body]} = defimpl_ast, %Reducer{} = reducer ) do %Block{} = block = Reducer.current_block(reducer) - aliased_module = resolve_alias(reducer, module_name) - module_position = Metadata.position(module_name_meta) - range = to_range(reducer.document, module_name, module_position) - - entry = - Entry.definition( - reducer.document.path, - block.ref, - block.parent_ref, - aliased_module, - :module, - range, - Application.get_application(aliased_module) - ) - module_name_meta = Reducer.skip(module_name_meta) + with {:ok, protocol_module} <- resolve_alias(reducer, module_name), + {:ok, for_target} <- resolve_for_block(reducer, for_block) do + detail_range = defimpl_range(reducer, defimpl_ast) + implemented_module = Module.concat(protocol_module, for_target) + + implementation_entry = + Entry.block_definition( + reducer.analysis.document.path, + block, + Subject.module(protocol_module), + {:protocol, :implementation}, + block_range(reducer.analysis.document, defimpl_ast), + detail_range, + Application.get_application(protocol_module) + ) - elem = - {:defmodule, defmodule_meta, [{:__aliases__, module_name_meta, module_name}, module_block]} + module_entry = + Entry.copy(implementation_entry, + subject: Subject.module(implemented_module), + type: :module + ) - {:ok, entry, elem} + {:ok, [implementation_entry, module_entry]} + else + _ -> + :ignored + end end # This matches an elixir module reference @@ -49,20 +102,57 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do case module(reducer, maybe_module) do {:ok, module} -> start = Metadata.position(metadata) - range = to_range(reducer.document, maybe_module, start) + range = to_range(reducer, maybe_module, start) %Block{} = current_block = Reducer.current_block(reducer) entry = Entry.reference( - reducer.document.path, - make_ref(), - current_block.ref, - module, + reducer.analysis.document.path, + current_block, + Subject.module(module), :module, range, Application.get_application(module) ) + {:ok, entry, nil} + + _ -> + :ignored + end + end + + @module_length String.length("__MODULE__") + # This matches __MODULE__ references + def extract({:__MODULE__, metadata, _} = ast, %Reducer{} = reducer) do + line = Sourceror.get_line(ast) + pos = Position.new(reducer.analysis.document, line - 1, 1) + + case RemoteControl.Analyzer.current_module(reducer.analysis, pos) do + {:ok, current_module} -> + {start_line, start_col} = Metadata.position(metadata) + start_pos = Position.new(reducer.analysis.document, start_line, start_col) + + end_pos = + Position.new( + reducer.analysis.document, + start_line, + start_col + @module_length + ) + + range = Range.new(start_pos, end_pos) + %Block{} = current_block = Reducer.current_block(reducer) + + entry = + Entry.reference( + reducer.analysis.document.path, + current_block, + Subject.module(current_module), + :module, + range, + Application.get_application(current_module) + ) + {:ok, entry} _ -> @@ -77,14 
+167,13 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do {:ok, module} -> start = Metadata.position(metadata) %Block{} = current_block = Reducer.current_block(reducer) - range = to_range(reducer.document, module, start) + range = to_range(reducer, module, start) entry = Entry.reference( - reducer.document.path, - make_ref(), - current_block.ref, - module, + reducer.analysis.document.path, + current_block, + Subject.module(module), :module, range, Application.get_application(module) @@ -97,26 +186,86 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do end end + # Function capture with arity: &OtherModule.foo/3 + def extract( + {:&, _, + [ + {:/, _, + [ + {{:., _, [{:__aliases__, start_metadata, maybe_module}, _function_name]}, _, []}, + _ + ]} + ]}, + %Reducer{} = reducer + ) do + case module(reducer, maybe_module) do + {:ok, module} -> + start = Metadata.position(start_metadata) + range = to_range(reducer, maybe_module, start) + %Block{} = current_block = Reducer.current_block(reducer) + + entry = + Entry.reference( + reducer.analysis.document.path, + current_block, + Subject.module(module), + :module, + range, + Application.get_application(module) + ) + + {:ok, entry} + + _ -> + :ignored + end + end + def extract(_, _) do :ignored end - defp resolve_alias(%Reducer{} = reducer, unresolved_alias) do - {line, column} = reducer.position - position = Position.new(reducer.document, line, column) + defp defimpl_range(%Reducer{} = reducer, {_, protocol_meta, _} = protocol_ast) do + start = Sourceror.get_start_position(protocol_ast) + {finish_line, finish_column} = Metadata.position(protocol_meta, :do) + # add two to include the do + finish_column = finish_column + 2 + document = reducer.analysis.document - {:ok, expanded} = - Ast.expand_aliases(unresolved_alias, reducer.document, reducer.quoted_document, position) + Range.new( + Position.new(document, start[:line], start[:column]), + Position.new(document, finish_line, finish_column) + ) + end - expanded + defp resolve_for_block( + %Reducer{} = reducer, + {{:__block__, _, [:for]}, {:__aliases__, _, for_target}} + ) do + resolve_alias(reducer, for_target) + end + + defp resolve_for_block(_, _), do: :error + + defp resolve_alias(%Reducer{} = reducer, unresolved_alias) do + position = Reducer.position(reducer) + + RemoteControl.Analyzer.expand_alias(unresolved_alias, reducer.analysis, position) end defp module(%Reducer{} = reducer, maybe_module) when is_list(maybe_module) do - if Enum.all?(maybe_module, &module_part?/1) do - resolved = resolve_alias(reducer, maybe_module) + with true <- Enum.all?(maybe_module, &module_part?/1), + {:ok, resolved} <- resolve_alias(reducer, maybe_module) do {:ok, resolved} else - :error + _ -> + human_location = Reducer.human_location(reducer) + + Logger.warning( + "Could not expand module #{inspect(maybe_module)}. 
Please report this (at #{human_location})"
+        )
+
+        :error
     end
   end
@@ -130,11 +279,18 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do

   defp module(_, _), do: :error

+  @protocol_module_attribute_names [:protocol, :for]
+
   @starts_with_capital ~r/[A-Z]+/
   defp module_part?(part) when is_atom(part) do
     Regex.match?(@starts_with_capital, Atom.to_string(part))
   end

+  defp module_part?({:@, _, [{type, _, _} | _]}) when type in @protocol_module_attribute_names,
+    do: true
+
+  defp module_part?({:__MODULE__, _, context}) when is_atom(context), do: true
+
   defp module_part?(_), do: false

   defp available_module?(potential_module) do
@@ -149,7 +305,25 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do
     end)
   end

-  defp to_range(%Document{} = document, module_name, {line, column}) do
+  # handles @protocol and @for in defimpl blocks
+  defp to_range(%Reducer{} = reducer, [{:@, _, [{type, _, _} | _]} = attribute | segments], _)
+       when type in @protocol_module_attribute_names do
+    range = Sourceror.get_range(attribute)
+
+    document = reducer.analysis.document
+    module_length = segments |> Ast.Module.name() |> String.length()
+    # add one to account for the leading @ sign
+    end_column = range.end[:column] + module_length + 1
+
+    Range.new(
+      Position.new(document, range.start[:line], range.start[:column]),
+      Position.new(document, range.end[:line], end_column)
+    )
+  end
+
+  defp to_range(%Reducer{} = reducer, module_name, {line, column}) do
+    document = reducer.analysis.document
+
     module_length = module_name |> Ast.Module.name()
@@ -160,4 +334,11 @@ defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Module do
       Position.new(document, line, column + module_length)
     )
   end
+
+  defp block_range(document, ast) do
+    case Ast.Range.fetch(ast, document) do
+      {:ok, range} -> range
+      _ -> nil
+    end
+  end
 end
diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module_attribute.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module_attribute.ex
new file mode 100644
index 000000000..f182e832b
--- /dev/null
+++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/module_attribute.ex
@@ -0,0 +1,107 @@
+defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ModuleAttribute do
+  @moduledoc """
+  Extracts module attribute definitions and references from AST
+  """
+
+  alias Lexical.Document.Position
+  alias Lexical.Document.Range
+  alias Lexical.RemoteControl.Analyzer
+  alias Lexical.RemoteControl.Search.Indexer.Entry
+  alias Lexical.RemoteControl.Search.Indexer.Source.Reducer
+  alias Lexical.RemoteControl.Search.Subject
+
+  require Logger
+
+  # Finds module attribute usages
+  def extract({:@, _, [{attr_name, _, nil}]}, %Reducer{} = reducer) do
+    block = Reducer.current_block(reducer)
+
+    case Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) do
+      {:ok, current_module} ->
+        reference =
+          Entry.reference(
+            reducer.analysis.document.path,
+            block,
+            Subject.module_attribute(current_module, attr_name),
+            :module_attribute,
+            reference_range(reducer, attr_name),
+            Application.get_application(current_module)
+          )
+
+        {:ok, reference}
+
+      :error ->
+        :ignored
+    end
+  end
+
+  # an attribute being typed above an already existing attribute will have the name `@`, which we ignore
+  # example:
+  # @|
+  # @callback foo() :: :ok
+  def extract({:@, _, [{:@, _, _attr_value}]}, %Reducer{}) do
+    :ignored
+  end
+
+  # Finds module attribute definitions @foo 3
+  def extract({:@, _, [{attr_name, _,
_attr_value}]} = attr, %Reducer{} = reducer) do + block = Reducer.current_block(reducer) + + case Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) do + {:ok, current_module} -> + definition = + Entry.definition( + reducer.analysis.document.path, + block, + Subject.module_attribute(current_module, attr_name), + :module_attribute, + definition_range(reducer, attr), + Application.get_application(current_module) + ) + + {:ok, definition} + + _ -> + :ignored + end + end + + def extract(_, _) do + :ignored + end + + defp reference_range(%Reducer{} = reducer, attr_name) do + document = reducer.analysis.document + + name_length = + attr_name + |> Atom.to_string() + |> String.length() + + {start_line, start_column} = reducer.position + + # add 1 to include the @ character + end_column = start_column + name_length + 1 + + Range.new( + Position.new(document, start_line, start_column), + Position.new(document, start_line, end_column) + ) + end + + defp definition_range(%Reducer{} = reducer, attr_ast) do + document = reducer.analysis.document + + [line: start_line, column: start_column] = Sourceror.get_start_position(attr_ast) + + end_line = Sourceror.get_end_line(attr_ast) + {:ok, line_text} = Lexical.Document.fetch_text_at(document, end_line) + # add one because lsp positions are one-based + end_column = String.length(line_text) + 1 + + Range.new( + Position.new(document, start_line, start_column), + Position.new(document, end_line, end_column) + ) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_definition.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_definition.ex new file mode 100644 index 000000000..b8663a300 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_definition.ex @@ -0,0 +1,35 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.StructDefinition do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + + def extract({:defstruct, _, [_fields]} = definition, %Reducer{} = reducer) do + document = reducer.analysis.document + block = Reducer.current_block(reducer) + + case Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) do + {:ok, current_module} -> + range = Ast.Range.fetch!(definition, document) + + entry = + Entry.definition( + document.path, + block, + current_module, + :struct, + range, + Application.get_application(current_module) + ) + + {:ok, entry} + + _ -> + :ignored + end + end + + def extract(_, _) do + :ignored + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_reference.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_reference.ex new file mode 100644 index 000000000..46b770182 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/struct_reference.ex @@ -0,0 +1,93 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.StructReference do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + alias Lexical.RemoteControl.Search.Subject + + require Logger + + @struct_fn_names [:struct, :struct!] + + # Handles usages via an alias, e.g. 
x = %MyStruct{...} or %__MODULE__{...} + def extract( + {:%, _, [struct_alias, {:%{}, _, _struct_args}]} = reference, + %Reducer{} = reducer + ) do + case expand_alias(struct_alias, reducer) do + {:ok, struct_module} -> + {:ok, entry(reducer, struct_module, reference)} + + _ -> + :ignored + end + end + + # Call to Kernel.struct with a fully qualified module e.g. Kernel.struct(MyStruct, ...) + def extract( + {{:., _, [kernel_alias, struct_fn_name]}, _, [struct_alias | _]} = reference, + %Reducer{} = reducer + ) + when struct_fn_name in @struct_fn_names do + with {:ok, Kernel} <- expand_alias(kernel_alias, reducer), + {:ok, struct_module} <- expand_alias(struct_alias, reducer) do + {:ok, entry(reducer, struct_module, reference)} + else + _ -> + :ignored + end + end + + # handles calls to Kernel.struct e.g. struct(MyModule) or struct(MyModule, foo: 3) + def extract({struct_fn_name, _, [struct_alias | _] = args} = reference, %Reducer{} = reducer) + when struct_fn_name in @struct_fn_names do + reducer_position = Reducer.position(reducer) + imports = Analyzer.imports_at(reducer.analysis, reducer_position) + arity = length(args) + + with true <- Enum.member?(imports, {Kernel, struct_fn_name, arity}), + {:ok, struct_module} <- expand_alias(struct_alias, reducer) do + {:ok, entry(reducer, struct_module, reference)} + else + _ -> + :ignored + end + end + + def extract(_, _) do + :ignored + end + + defp entry(%Reducer{} = reducer, struct_module, reference) do + document = reducer.analysis.document + block = Reducer.current_block(reducer) + subject = Subject.module(struct_module) + + Entry.reference( + document.path, + block, + subject, + :struct, + Ast.Range.fetch!(reference, document), + Application.get_application(struct_module) + ) + end + + defp expand_alias({:__aliases__, _, struct_alias}, %Reducer{} = reducer) do + Analyzer.expand_alias(struct_alias, reducer.analysis, Reducer.position(reducer)) + end + + defp expand_alias({:__MODULE__, _, _}, %Reducer{} = reducer) do + Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) + end + + defp expand_alias(alias, %Reducer{} = reducer) do + {line, column} = reducer.position + + Logger.error( + "Could not expand alias: #{inspect(alias)} at #{reducer.analysis.document.path} #{line}:#{column}" + ) + + :error + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/variable.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/variable.ex new file mode 100644 index 000000000..bd433aeed --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/extractors/variable.ex @@ -0,0 +1,249 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.Variable do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Indexer.Source.Reducer + + @defs [:def, :defmacro, :defp, :defmacrop] + + def extract( + {def, _, [{:when, _, [{_fn_name, _, params} | when_args]}, body]}, + %Reducer{} = reducer + ) + when def in @defs do + entries = extract_definitions(params, reducer) ++ extract_references(when_args, reducer) + {:ok, entries, body} + end + + def extract({def, _, [{_fn_name, _, params}, body]}, %Reducer{} = reducer) + when def in @defs do + entries = extract_definitions(params, reducer) + + {:ok, entries, body} + end + + # Stab operator x -> body + def extract({:->, _, [params, body]}, %Reducer{} = reducer) do + entries = extract_definitions(params, reducer) ++ 
extract_in_definitions(params, reducer)
+
+    {:ok, entries, List.wrap(body)}
+  end
+
+  # with match operator, e.g. with {:ok, var} <- something()
+  def extract({:<-, _, [left, right]}, %Reducer{} = reducer) do
+    entries = extract_definitions(left, reducer)
+
+    {:ok, entries, List.wrap(right)}
+  end
+
+  # Match operator left = right
+  def extract({:=, _, [left, right]}, %Reducer{} = reducer) do
+    definitions = extract_definitions(left, reducer)
+
+    {:ok, definitions, List.wrap(right)}
+  end
+
+  # String interpolations "#{foo}"
+  def extract(
+        {:<<>>, _, [{:"::", _, [{{:., _, [Kernel, :to_string]}, _, body}, {:binary, _, _}]}]},
+        %Reducer{}
+      ) do
+    {:ok, [], body}
+  end
+
+  # Test declarations; match any literal test name rather than a hard-coded one
+  def extract(
+        {:test, _metadata,
+         [
+           {:__block__, _, [_test_name]},
+           args,
+           body
+         ]},
+        %Reducer{} = reducer
+      ) do
+    entries = extract_definitions(args, reducer)
+    {:ok, entries, body}
+  end
+
+  def extract({:binary, _, _}, %Reducer{}) do
+    :ignored
+  end
+
+  def extract({:@, _, _}, %Reducer{}) do
+    {:ok, nil, nil}
+  end
+
+  # Generic variable reference
+  def extract({var_name, _, _} = ast, %Reducer{} = reducer) when is_atom(var_name) do
+    case extract_reference(ast, reducer, get_current_app(reducer)) do
+      %Entry{} = entry -> {:ok, entry}
+      _ -> :ignored
+    end
+  end
+
+  # Pin operator ^pinned_variable
+  def extract({:^, _, [reference]}, %Reducer{} = reducer) do
+    reference = extract_reference(reference, reducer, get_current_app(reducer))
+
+    {:ok, reference, nil}
+  end
+
+  def extract(_ast, _reducer) do
+    :ignored
+  end
+
+  defp extract_definitions(ast, reducer) do
+    current_app = get_current_app(reducer)
+
+    {_ast, entries} =
+      Macro.prewalk(ast, [], fn ast, acc ->
+        case extract_definition(ast, reducer, current_app) do
+          %Entry{} = entry ->
+            {ast, [entry | acc]}

+          {%Entry{} = entry, ast} ->
+            {ast, [entry | acc]}
+
+          {entries, ast} when is_list(entries) ->
+            {ast, entries ++ acc}
+
+          {_, ast} ->
+            {ast, acc}
+
+          _ ->
+            {ast, acc}
+        end
+      end)
+
+    Enum.reverse(entries)
+  end
+
+  # the pin operator is always on the left side of a pattern match, but it's
+  # not defining a variable, just referencing one.
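The comment above states the distinction this extractor relies on, and the clause that follows implements it. It is easy to see in IEx: a bare name on the left of a match binds a new variable (a definition), while a pinned name only reads the existing binding (a reference):

```elixir
x = 1
# `^x` requires the right side to equal the existing value of x (a reference);
# `y` is freshly bound by this match (a definition).
{^x, y} = {1, 2}
y
#=> 2
```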
+ defp extract_definition({:^, _, [reference]}, %Reducer{} = reducer, current_app) do + reference = extract_reference(reference, reducer, current_app) + + {reference, nil} + end + + # unquote(expression) + defp extract_definition({:unquote, _, [expr]}, %Reducer{} = reducer, current_app) do + reference = extract_reference(expr, reducer, current_app) + {reference, nil} + end + + defp extract_definition({:@, _, _}, %Reducer{}, _current_app) do + {nil, []} + end + + # when clauses actually contain parameters and references + defp extract_definition({:when, _, when_args}, %Reducer{} = reducer, _current_app) do + {definitions, references} = + Enum.split_with(when_args, fn {_, _, context} -> is_atom(context) end) + + definitions = extract_definitions(definitions, reducer) + references = extract_references(references, reducer) + + {Enum.reverse(definitions ++ references), nil} + end + + # This is an effect of string interpolation + defp extract_definition({:binary, _metadata, nil}, _reducer, _current_app) do + nil + end + + defp extract_definition({var_name, _metadata, nil} = ast, reducer, current_app) do + if used_variable?(var_name) do + document = reducer.analysis.document + block = Reducer.current_block(reducer) + + Entry.definition( + document.path, + block, + var_name, + :variable, + Ast.Range.fetch!(ast, document), + current_app + ) + end + end + + defp extract_definition(_, _reducer, _current_app), do: nil + + defp extract_references(ast, reducer) do + current_app = get_current_app(reducer) + + {_ast, entries} = + Macro.prewalk(ast, [], fn ast, acc -> + case extract_reference(ast, reducer, current_app) do + %Entry{} = entry -> + {ast, [entry | acc]} + + _ -> + {ast, acc} + end + end) + + Enum.reverse(entries) + end + + defp extract_reference({var_name, _metadata, nil} = ast, reducer, current_app) do + if used_variable?(var_name) do + document = reducer.analysis.document + block = Reducer.current_block(reducer) + + Entry.reference( + document.path, + block, + var_name, + :variable, + Ast.Range.fetch!(ast, document), + current_app + ) + end + end + + defp extract_reference(_, _, _) do + nil + end + + # extracts definitions like e in SomeException -> + defp extract_in_definitions(ast, %Reducer{} = reducer) do + current_app = get_current_app(reducer) + + {_ast, entries} = + Macro.prewalk(ast, [], fn ast, acc -> + case extract_in_definition(ast, reducer, current_app) do + %Entry{} = entry -> + {ast, [entry | acc]} + + _ -> + {ast, acc} + end + end) + + Enum.reverse(entries) + end + + defp extract_in_definition( + [[{:in, _, [definition, _right]}], _body], + %Reducer{} = reducer, + current_app + ) do + extract_definition(definition, reducer, current_app) + end + + defp extract_in_definition(_ast, %Reducer{}, _current_app), do: nil + + defp get_current_app(%Reducer{} = reducer) do + with {:ok, module} <- Analyzer.current_module(reducer.analysis, Reducer.position(reducer)) do + Application.get_application(module) + end + end + + defp used_variable?(variable_name) do + not (variable_name + |> Atom.to_string() + |> String.starts_with?("_")) + end +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/metadata.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/metadata.ex index 602e0525c..e84532195 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/metadata.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/metadata.ex @@ -2,14 +2,14 @@ defmodule Lexical.RemoteControl.Search.Indexer.Metadata do @moduledoc """ 
Utilities for extracting location information from AST metadata nodes. """ - def location({_, metadata, _}) do + def location({_, metadata, _} = ast) do if Keyword.has_key?(metadata, :do) do position = position(metadata) block_start = position(metadata, :do) block_end = position(metadata, :end_of_expression) || position(metadata, :end) {:block, position, block_start, block_end} else - {:expression, position(metadata)} + maybe_handle_terse_function(ast) end end @@ -35,4 +35,40 @@ defmodule Lexical.RemoteControl.Search.Indexer.Metadata do |> Keyword.get(key, []) |> position() end + + @defines [:def, :defp, :defmacro, :defmacrop, :fn] + # a terse function is one without a do/end block + defp maybe_handle_terse_function({define, metadata, [_name_and_args | [[block | _]]]}) + when define in @defines do + block_location(block, metadata) + end + + defp maybe_handle_terse_function({:fn, metadata, _} = ast) do + block_location(ast, metadata) + end + + defp maybe_handle_terse_function({_, metadata, _}) do + {:expression, position(metadata)} + end + + defp block_location(block, metadata) do + case Sourceror.get_range(block) do + %{start: start_pos, end: end_pos} -> + position = position(metadata) + {:block, position, keyword_to_position(start_pos), keyword_to_position(end_pos)} + + _ -> + {:expression, position(metadata)} + end + end + + defp keyword_to_position(keyword) do + case Keyword.take(keyword, [:line, :column]) do + [line: line, column: column] when is_number(line) and is_number(column) -> + {line, column} + + _ -> + nil + end + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex index c15312324..18cac3885 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/quoted.ex @@ -1,14 +1,29 @@ defmodule Lexical.RemoteControl.Search.Indexer.Quoted do - alias Lexical.Document + alias Lexical.Ast.Analysis + alias Lexical.ProcessCache alias Lexical.RemoteControl.Search.Indexer.Source.Reducer - def index(%Document{} = document, quoted_ast) do + require ProcessCache + + def index_with_cleanup(%Analysis{} = analysis) do + ProcessCache.with_cleanup do + index(analysis) + end + end + + def index(analysis, extractors \\ nil) + + def index(%Analysis{valid?: true} = analysis, extractors) do {_, reducer} = - Macro.prewalk(quoted_ast, Reducer.new(document, quoted_ast), fn elem, reducer -> + Macro.prewalk(analysis.ast, Reducer.new(analysis, extractors), fn elem, reducer -> {reducer, elem} = Reducer.reduce(reducer, elem) {elem, reducer} end) {:ok, Reducer.entries(reducer)} end + + def index(%Analysis{valid?: false}, _extractors) do + {:ok, []} + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex index e858af656..cd21c0094 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/source.ex @@ -5,16 +5,15 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source do require Logger - def index(path, source) do - document = Document.new(path, source, 1) - - case Ast.from(document) do - {:ok, quoted} -> - Indexer.Quoted.index(document, quoted) + def index(path, source, extractors \\ nil) do + path + |> Document.new(source, 1) + |> index_document(extractors) + end - _ -> - Logger.error("Could not compile 
#{path} into AST for indexing") - :error - end + def index_document(%Document{} = document, extractors \\ nil) do + document + |> Ast.analyze() + |> Indexer.Quoted.index(extractors) end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/block.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/block.ex index 7257d3d6b..7cc5cd563 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/block.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/block.ex @@ -2,13 +2,16 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Block do @moduledoc """ A struct that represents a block of source code """ - defstruct [:starts_at, :ends_at, :ref, :parent_ref] + + defstruct [:starts_at, :ends_at, :id, :parent_id] + alias Lexical.Identifier def root do - %__MODULE__{ref: :root} + %__MODULE__{id: :root} end def new(starts_at, ends_at) do - %__MODULE__{starts_at: starts_at, ends_at: ends_at, ref: make_ref()} + id = Identifier.next_global!() + %__MODULE__{starts_at: starts_at, ends_at: ends_at, id: id} end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex index d6c2ba19c..9880f0303 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/indexer/source/reducer.ex @@ -5,28 +5,45 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do The reducer keeps track of blocks and parent / child relationships so extractors don't have to concern themselves with the AST's overall structure, and can focus on extracting content from it. """ - alias Lexical.Document + + alias Lexical.Ast.Analysis + alias Lexical.Document.Position alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Indexer.Extractors alias Lexical.RemoteControl.Search.Indexer.Metadata alias Lexical.RemoteControl.Search.Indexer.Source.Block - defstruct [:entries, :document, :quoted_document, :position, :ends_at, :blocks] + defstruct [:analysis, :entries, :position, :blocks, :block_hierarchy, extractors: []] - @extractors [Extractors.Module] + @extractors [ + Extractors.Module, + Extractors.ModuleAttribute, + Extractors.FunctionDefinition, + Extractors.FunctionReference, + Extractors.StructDefinition, + Extractors.StructReference, + Extractors.EctoSchema + ] - def new(%Document{} = document, quoted_document) do + def new(%Analysis{} = analysis, extractors \\ nil) do %__MODULE__{ - document: document, - quoted_document: quoted_document, + analysis: analysis, + block_hierarchy: %{root: %{}}, + blocks: [Block.root()], entries: [], - position: {0, 0}, - blocks: [Block.root()] + extractors: extractors || @extractors, + position: {0, 0} } end + def human_location(%__MODULE__{} = reducer) do + {line, column} = reducer.position + path = reducer.analysis.document.path + "#{path} #{line}:#{column}" + end + def entries(%__MODULE__{} = reducer) do - Enum.reverse(reducer.entries) + [hierarchy(reducer) | Enum.reverse(reducer.entries)] end def skip(meta) do @@ -47,6 +64,15 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do end end + def position(%__MODULE__{} = reducer) do + {line, column} = reducer.position + Position.new(reducer.analysis.document, line, column) + end + + defp hierarchy(%__MODULE__{} = reducer) do + Entry.block_structure(reducer.analysis.document.path, 
reducer.block_hierarchy) + end + defp do_reduce(%__MODULE__{} = reducer, element) do case Metadata.location(element) do {:block, position, block_start, block_end} -> @@ -61,12 +87,13 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do {:expression, position} -> reducer |> update_position(position) + |> maybe_pop_block() |> apply_extractors(element) end end defp apply_extractors(%__MODULE__{} = reducer, element) do - Enum.reduce(@extractors, {reducer, element}, fn detector_module, {reducer, element} -> + Enum.reduce(reducer.extractors, {reducer, element}, fn detector_module, {reducer, element} -> case detector_module.extract(element, reducer) do {:ok, entry} -> reducer = push_entry(reducer, entry) @@ -96,13 +123,20 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do end defp push_block(%__MODULE__{} = reducer, %Block{} = block) do - parent = List.first(reducer.blocks) - block = %Block{block | parent_ref: parent.ref} - %__MODULE__{reducer | blocks: [block | reducer.blocks]} + parent = current_block(reducer) + block = %Block{block | parent_id: parent.id} + id_path = Enum.reduce(reducer.blocks, [], fn block, acc -> [block.id | acc] end) + + hierarchy = + update_in(reducer.block_hierarchy, id_path, fn current -> + Map.put(current, block.id, %{}) + end) + + %__MODULE__{reducer | blocks: [block | reducer.blocks], block_hierarchy: hierarchy} end # you never pop the root block in a document - defp pop_block(%__MODULE__{blocks: [%Block{ref: :root}]} = reducer), do: reducer + defp pop_block(%__MODULE__{blocks: [%Block{id: :root}]} = reducer), do: reducer defp pop_block(%__MODULE__{} = reducer) do [_ | rest] = reducer.blocks @@ -110,7 +144,7 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do end # The root block in the document goes on forever - defp block_ended?(%__MODULE__{blocks: [%Block{ref: :root}]}), do: false + defp block_ended?(%__MODULE__{blocks: [%Block{id: :root}]}), do: false defp block_ended?(%__MODULE__{} = reducer) do %Block{} = block = current_block(reducer) @@ -131,13 +165,21 @@ defmodule Lexical.RemoteControl.Search.Indexer.Source.Reducer do end end + defp push_entry(%__MODULE__{} = reducer, entries) when is_list(entries) do + Enum.reduce(entries, reducer, &push_entry(&2, &1)) + end + defp push_entry(%__MODULE__{} = reducer, %Entry{} = entry) do %__MODULE__{reducer | entries: [entry | reducer.entries]} end + defp push_entry(%__MODULE__{} = reducer, _), do: reducer + defp maybe_pop_block(%__MODULE__{} = reducer) do if block_ended?(reducer) do - pop_block(reducer) + reducer + |> pop_block() + |> maybe_pop_block() else reducer end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store.ex b/apps/remote_control/lib/lexical/remote_control/search/store.ex index 5c492a873..0ad31fb2f 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store.ex @@ -4,6 +4,8 @@ defmodule Lexical.RemoteControl.Search.Store do """ alias Lexical.Project + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Store alias Lexical.RemoteControl.Search.Store.State @@ -28,19 +30,20 @@ defmodule Lexical.RemoteControl.Search.Store do {:ok, new_entries, paths_to_delete} | {:error, term()}) @backend Application.compile_env(:remote_control, :search_store_backend, Store.Backends.Ets) + @flush_interval_ms Application.compile_env( + :remote_control, + :search_store_quiescent_period_ms, + 
2500 + ) + import Api.Messages use GenServer require Logger def stop do - GenServer.call(__MODULE__, :drop) GenServer.stop(__MODULE__) end - def all do - GenServer.call(__MODULE__, :all) - end - def loaded? do GenServer.call(__MODULE__, :loaded?) end @@ -49,12 +52,29 @@ defmodule Lexical.RemoteControl.Search.Store do GenServer.call(__MODULE__, {:replace, entries}) end + @spec exact(Entry.subject_query(), Entry.constraints()) :: {:ok, [Entry.t()]} | {:error, term()} def exact(subject \\ :_, constraints) do - GenServer.call(__MODULE__, {:exact, subject, constraints}) + call_or_default({:exact, subject, constraints}, []) + end + + @spec prefix(String.t(), Entry.constraints()) :: {:ok, [Entry.t()]} | {:error, term()} + def prefix(prefix, constraints) do + call_or_default({:prefix, prefix, constraints}, []) + end + + @spec parent(Entry.t()) :: {:ok, Entry.t()} | {:error, term()} + def parent(%Entry{} = entry) do + call_or_default({:parent, entry}, nil) + end + + @spec siblings(Entry.t()) :: {:ok, [Entry.t()]} | {:error, term()} + def siblings(%Entry{} = entry) do + call_or_default({:siblings, entry}, []) end + @spec fuzzy(Entry.subject(), Entry.constraints()) :: {:ok, [Entry.t()]} | {:error, term()} def fuzzy(subject, constraints) do - GenServer.call(__MODULE__, {:fuzzy, subject, constraints}) + call_or_default({:fuzzy, subject, constraints}, []) end def clear(path) do @@ -65,14 +85,14 @@ defmodule Lexical.RemoteControl.Search.Store do GenServer.call(__MODULE__, {:update, path, entries}) end - def update_async(path, entries) do - GenServer.cast(__MODULE__, {:update, path, entries}) - end - def destroy do GenServer.call(__MODULE__, :destroy) end + def enable do + GenServer.call(__MODULE__, :enable) + end + @spec start_link(Project.t(), create_index, refresh_index, module()) :: GenServer.on_start() def start_link(%Project{} = project, create_index, refresh_index, backend) do GenServer.start_link(__MODULE__, [project, create_index, refresh_index, backend], @@ -100,84 +120,175 @@ defmodule Lexical.RemoteControl.Search.Store do args end + @impl GenServer def init([%Project{} = project, create_index, update_index, backend]) do - state = - project - |> State.new(create_index, update_index, backend) - |> State.async_load() - + Process.flag(:fullsweep_after, 5) + schedule_gc() + # I've found that if indexing happens before the first compile, for some reason + # the compilation is 4x slower than if indexing happens after it. I was + # unable to figure out why this is the case, and I looked extensively, so instead + # we have this bandaid. We wait for the first compilation to complete, and then + # the search store enables itself, at which point we index the code. 
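An aside on the `@flush_interval_ms` setting above: it is read at compile time via `Application.compile_env/3`, so the quiescent period between an index update arriving and the buffered updates being flushed can be tuned per environment. A hypothetical override (the file and the 100 ms value are illustrative; only the `:search_store_quiescent_period_ms` key and its 2500 ms default come from the code above):

```elixir
# config/test.exs (hypothetical)
import Config

# Flush buffered search-index updates after 100ms of quiet instead of 2.5s.
config :remote_control, search_store_quiescent_period_ms: 100
```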
+ + RemoteControl.register_listener(self(), project_compiled()) + state = State.new(project, create_index, update_index, backend) {:ok, state} end + @impl GenServer + # enable ourselves when the project is force compiled + def handle_info(project_compiled(), %State{} = state) do + {:noreply, enable(state)} + end + + def handle_info(project_compiled(), {_, _} = state) do + # we're already enabled, no need to do anything + {:noreply, state} + end + # handle the result from `State.async_load/1` - def handle_info({ref, result}, %State{async_load_ref: ref} = state) do - {:noreply, State.async_load_complete(state, result)} + def handle_info({ref, result}, {update_ref, %State{async_load_ref: ref} = state}) do + {:noreply, {update_ref, State.async_load_complete(state, result)}} + end + + def handle_info(:flush_updates, {_, %State{} = state}) do + {:ok, state} = State.flush_buffered_updates(state) + ref = schedule_flush() + {:noreply, {ref, state}} + end + + def handle_info(:gc, state) do + :erlang.garbage_collect() + schedule_gc() + {:noreply, state} end def handle_info(_, state) do {:noreply, state} end - def handle_call({:replace, entities}, _from, %State{} = state) do + @impl GenServer + def handle_call(:enable, _from, %State{} = state) do + {:reply, :ok, enable(state)} + end + + def handle_call(:enable, _from, state) do + {:reply, :ok, state} + end + + def handle_call({:replace, entities}, _from, {ref, %State{} = state}) do {reply, new_state} = case State.replace(state, entities) do {:ok, new_state} -> - {:ok, new_state} + {:ok, State.drop_buffered_updates(new_state)} {:error, _} = error -> {error, state} end - {:reply, reply, new_state} + {:reply, reply, {ref, new_state}} + end + + def handle_call({:exact, subject, constraints}, _from, {ref, %State{} = state}) do + {:reply, State.exact(state, subject, constraints), {ref, state}} end - def handle_call({:exact, subject, constraints}, _from, %State{} = state) do - {:reply, State.exact(state, subject, constraints), state} + def handle_call({:prefix, prefix, constraints}, _from, {ref, %State{} = state}) do + {:reply, State.prefix(state, prefix, constraints), {ref, state}} end - def handle_call({:fuzzy, subject, constraints}, _from, %State{} = state) do - {:reply, State.fuzzy(state, subject, constraints), state} + def handle_call({:fuzzy, subject, constraints}, _from, {ref, %State{} = state}) do + {:reply, State.fuzzy(state, subject, constraints), {ref, state}} end - def handle_call(:all, _from, %State{} = state) do - {:reply, State.all(state), state} + def handle_call({:update, path, entries}, _from, {ref, %State{} = state}) do + {reply, new_ref, new_state} = do_update(state, ref, path, entries) + + {:reply, reply, {new_ref, new_state}} end - def handle_call({:update, path, entries}, _from, %State{} = state) do - {reply, new_state} = do_update(state, path, entries) - {:reply, reply, new_state} + def handle_call({:parent, entry}, _from, {_, %State{} = state} = orig_state) do + parent = State.parent(state, entry) + {:reply, parent, orig_state} end - def handle_call(:drop, _, %State{} = state) do + def handle_call({:siblings, entry}, _from, {_, %State{} = state} = orig_state) do + siblings = State.siblings(state, entry) + {:reply, siblings, orig_state} + end + + def handle_call(:on_stop, _, {ref, %State{} = state}) do + {:ok, state} = State.flush_buffered_updates(state) + State.drop(state) - {:reply, :ok, state} + {:reply, :ok, {ref, state}} + end + + def handle_call(:loaded?, _, {ref, %State{loaded?: loaded?} = state}) do + {:reply, loaded?, {ref, 
state}} end def handle_call(:loaded?, _, %State{loaded?: loaded?} = state) do + # We're not enabled yet, but we can still reply to the query {:reply, loaded?, state} end - def handle_call(:destroy, _, %State{} = state) do + def handle_call(:destroy, _, {ref, %State{} = state}) do new_state = State.destroy(state) - {:reply, :ok, new_state} + {:reply, :ok, {ref, new_state}} end - def handle_cast({:update, path, entries}, %State{} = state) do - {_reply, new_state} = do_update(state, path, entries) - {:noreply, new_state} + def handle_call(message, _from, %State{} = state) do + Logger.warning("Received #{inspect(message)}, but the search store isn't enabled yet.") + {:reply, {:error, {:not_enabled, message}}, state} + end + + @impl GenServer + def terminate(_reason, {_, state}) do + {:ok, state} = State.flush_buffered_updates(state) + {:noreply, state} end defp backend do @backend end - defp do_update(state, path, entries) do - case State.update(state, path, entries) do - {:ok, new_state} -> - {:ok, new_state} + defp do_update(state, old_ref, path, entries) do + {:ok, schedule_flush(old_ref), State.buffer_updates(state, path, entries)} + end + + defp schedule_flush(ref) when is_reference(ref) do + Process.cancel_timer(ref) + schedule_flush() + end + + defp schedule_flush(_) do + schedule_flush() + end + + defp schedule_flush do + Process.send_after(self(), :flush_updates, @flush_interval_ms) + end - {:error, _} = error -> - {error, state} + defp enable(%State{} = state) do + state = State.async_load(state) + :persistent_term.put({__MODULE__, :enabled?}, true) + {nil, state} + end + + defp schedule_gc do + Process.send_after(self(), :gc, :timer.seconds(5)) + end + + defp call_or_default(call, default) do + if enabled?() do + GenServer.call(__MODULE__, call) + else + default end end + + defp enabled? do + :persistent_term.get({__MODULE__, :enabled?}, false) + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backend.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backend.ex index b6f67fb02..042f4942c 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store/backend.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backend.ex @@ -20,6 +20,10 @@ defmodule Lexical.RemoteControl.Search.Store.Backend do @type subject_query :: Entry.subject() | wildcard() @type type_query :: Entry.entry_type() | wildcard() @type subtype_query :: Entry.entry_subtype() | wildcard() + @type block_structure :: %{Entry.block_id() => block_structure()} | %{} + @type path_structures :: %{Path.t() => block_structure()} + @type accumulator :: any() + @type reducer_fun :: (Entry.t(), accumulator() -> accumulator()) @doc """ Create a new backend. 
@@ -35,7 +39,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backend do
   @callback prepare(priv_state()) :: {:ok, load_state()}

   @doc """
-  Syncs the backend to the file system (optional)
+  Synchronizes the backend to the file system (optional)
   """
   @callback sync(Project.t()) :: :ok | {:error, any()}

@@ -55,9 +59,9 @@ defmodule Lexical.RemoteControl.Search.Store.Backend do
   @callback destroy(Project.t()) :: :ok

   @doc """
-  Returns all entries currently residing in the backend
+  Applies a reducer function to the backend's entries
   """
-  @callback select_all :: [Entry.t()]
+  @callback reduce(accumulator(), reducer_fun()) :: accumulator()

   @doc """
   Replaces all the entries in the store with those passed in
@@ -67,17 +71,39 @@ defmodule Lexical.RemoteControl.Search.Store.Backend do
   @doc """
   Deletes all entries whose path is equal to the one passed in.
   """
-  @callback delete_by_path(Path.t()) :: {:ok, [reference()]} | {:error, any()}
+  @callback delete_by_path(Path.t()) :: {:ok, [Entry.entry_id()]} | {:error, any()}
+
+  @doc """
+  Returns the block structure for the given path
+  """
+  @callback structure_for_path(Path.t()) :: {:ok, block_structure()} | :error

   @doc """
-  Finds all entries
+  Finds all entries matching the given subject, type and subtype
   """
   @callback find_by_subject(subject_query(), type_query(), subtype_query()) :: [Entry.t()]

+  @doc """
+  Finds all entries whose subject starts with the given prefix
+  """
+  @callback find_by_prefix(subject_query(), type_query(), subtype_query()) :: [Entry.t()]
+
   @doc """
-  Finds entries whose ref attribute is in the given list
+  Finds entries whose id is in the given list
   """
-  @callback find_by_refs([reference()], type_query(), subtype_query()) :: [Entry.t()]
+  @callback find_by_ids([Entry.entry_id()], type_query(), subtype_query()) :: [Entry.t()]
+
+  @doc """
+  Returns all the sibling elements of the given element.
+
+  Elements are returned in the order they appear in the source
+  """
+  @callback siblings(Entry.t()) :: [Entry.t()]
+
+  @doc """
+  Returns the parent block of the given entry, or :error if there is no parent
+  """
+  @callback parent(Entry.t()) :: {:ok, Entry.t()} | :error

   @optional_callbacks sync: 1
 end
diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets.ex
index bb3b381bb..144fa2eba 100644
--- a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets.ex
+++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets.ex
@@ -1,18 +1,17 @@
 defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do
   alias Lexical.Project
   alias Lexical.RemoteControl
+  alias Lexical.RemoteControl.Search.Indexer.Entry
   alias Lexical.RemoteControl.Search.Store.Backend
   alias Lexical.RemoteControl.Search.Store.Backends.Ets.State

   use GenServer

-  @sync_interval_ms 5000
-
   @behaviour Backend

   @impl Backend
-  def new(%Project{} = project) do
-    start_link(project)
+  def new(%Project{}) do
+    {:ok, Process.whereis(__MODULE__)}
   end

   @impl Backend
@@ -20,15 +19,6 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do
     GenServer.call(pid, :prepare, :infinity)
   end

-  @impl Backend
-  def sync(%Project{}) do
-    GenServer.call(genserver_name(), :sync)
-  end
-
-  def force_sync(%Project{}) do
-    GenServer.call(genserver_name(), :force_sync)
-  end
-
   @impl Backend
   def insert(entries) do
     GenServer.call(genserver_name(), {:insert, [entries]}, :infinity)
@@ -43,20 +33,20 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do
   def destroy(%Project{} = project) do
     name = genserver_name(project)

-    case :global.whereis_name(name) do
-      pid when is_pid(pid) ->
-
GenServer.call(name, {:destroy, []}) - - _ -> - State.destroy(project) + if pid = GenServer.whereis(name) do + GenServer.call(pid, {:destroy, []}) end :ok end + def destroy_all(%Project{} = project) do + State.destroy_all(project) + end + @impl Backend - def select_all do - GenServer.call(genserver_name(), {:select_all, []}) + def reduce(acc, reducer_fun) do + GenServer.call(genserver_name(), {:reduce, [acc, reducer_fun]}, :infinity) end @impl Backend @@ -75,16 +65,49 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do end @impl Backend - def find_by_refs(references, type, subtype) do - GenServer.call(genserver_name(), {:find_by_references, [references, type, subtype]}) + def find_by_prefix(prefix, type, subtype) do + GenServer.call(genserver_name(), {:find_by_prefix, [prefix, type, subtype]}) + end + + @impl Backend + def find_by_ids(ids, type, subtype) do + GenServer.call(genserver_name(), {:find_by_ids, [ids, type, subtype]}) + end + + @impl Backend + def structure_for_path(path) do + GenServer.call(genserver_name(), {:structure_for_path, [path]}) + end + + @impl Backend + def siblings(%Entry{} = entry) do + GenServer.call(genserver_name(), {:siblings, [entry]}) + end + + @impl Backend + def parent(%Entry{} = entry) do + GenServer.call(genserver_name(), {:parent, [entry]}) end def start_link(%Project{} = project) do - GenServer.start_link(__MODULE__, [project]) + GenServer.start_link(__MODULE__, [project], name: __MODULE__) + end + + def start_link do + start_link(RemoteControl.get_project()) + end + + def child_spec([%Project{}] = init_args) do + %{id: __MODULE__, start: {__MODULE__, :start_link, init_args}} + end + + def child_spec(_) do + child_spec([RemoteControl.get_project()]) end @impl GenServer def init([%Project{} = project]) do + Process.flag(:fullsweep_after, 5) :ok = connect_to_project_nodes(project) {:ok, project, {:continue, :try_for_leader}} end @@ -95,7 +118,6 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do with :undefined <- :global.whereis_name(leader_name), :ok <- become_leader(project) do - schedule_sync() {:noreply, create_leader(project)} else _ -> @@ -105,9 +127,13 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do @impl GenServer def handle_call(:prepare, _from, %State{} = state) do - {reply, new_state} = State.prepare(state) + case State.prepare(state) do + {:error, :not_leader} = error -> + {:stop, :normal, error, state} - {:reply, reply, new_state} + {reply, new_state} -> + {:reply, reply, new_state} + end end def handle_call({function_name, arguments}, _from, %State{} = state) do @@ -116,16 +142,6 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do {:reply, reply, state} end - def handle_call(:sync, _from, %State{} = state) do - state = State.sync(state) - {:reply, :ok, state} - end - - def handle_call(:force_sync, _from, %State{} = state) do - new_state = State.do_sync(state) - {:reply, :ok, new_state} - end - @impl GenServer def handle_info({:EXIT, _, _reason}, %State{leader?: true} = state) do # ets died, and we own it. 
Restart it @@ -144,10 +160,16 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do {:noreply, new_state} end - def handle_info(:do_sync, %State{} = state) do - new_state = State.do_sync(state) - schedule_sync() - {:noreply, new_state} + def handle_info(:gc, %State{} = state) do + :erlang.garbage_collect() + schedule_gc() + {:noreply, state} + end + + @impl GenServer + def terminate(_reason, %State{} = state) do + State.terminate(state) + state end # Private @@ -218,7 +240,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets do end end - def schedule_sync do - Process.send_after(self(), :do_sync, @sync_interval_ms) + defp schedule_gc do + Process.send_after(self(), :gc, :timer.seconds(5)) end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schema.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schema.ex index 30947a476..0c9e7ad71 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schema.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schema.ex @@ -14,6 +14,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schema do version = Keyword.fetch!(opts, :version) quote do + @behaviour unquote(__MODULE__) @version unquote(version) alias Lexical.Project import unquote(__MODULE__), only: [defkey: 2] @@ -42,6 +43,29 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schema do end end + alias Lexical.Project + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Wal + + import Wal, only: :macros + + @type write_concurrency_alternative :: boolean() | :auto + @type table_tweak :: + :compressed + | {:write_concurrency, write_concurrency_alternative()} + | {:read_concurrency, boolean()} + | {:decentralized_counters, boolean()} + + @type table_option :: :ets.table_type() | table_tweak() + + @type key :: tuple() + @type row :: {key, tuple()} + + @callback index_file_name() :: String.t() + @callback table_options() :: [table_option()] + @callback to_rows(Entry.t()) :: [row()] + @callback migrate([Entry.t()]) :: {:ok, [row()]} | {:error, term()} + defmacro defkey(name, fields) do query_keys = Enum.map(fields, fn name -> {name, :_} end) query_record_name = :"query_#{name}" @@ -54,75 +78,95 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schema do end end - alias Lexical.Project + @spec entries_to_rows(Enumerable.t(Entry.t()), module()) :: [tuple()] + def entries_to_rows(entries, schema_module) do + entries + |> Stream.flat_map(&schema_module.to_rows(&1)) + |> Enum.reduce(%{}, fn {key, value}, acc -> + Map.update(acc, key, [value], fn old_values -> [value | old_values] end) + end) + |> Enum.to_list() + end def load(%Project{} = project, schema_order) do ensure_unique_versions(schema_order) - ensure_index_directory_exists(project) - - case upgrade_chain(project, schema_order) do - {:ok, [[schema_module]]} -> - # this case is that there are no migrations to perform - # we get a single entry, which is the schema module - {table_name, entries} = load_initial(project, schema_module) - {:ok, table_name, load_status(entries)} - {:ok, [[initial, _] | _] = chain} -> - {table_name, entries} = load_initial(project, initial) - - case apply_migrations(project, chain, entries) do - {:ok, schema_module, entries} -> - dest_table_name = populate_schema_table(schema_module, entries) - :ets.delete(table_name) - {:ok, dest_table_name, load_status(entries)} - - error -> - error - end - - 
:error -> + with {:ok, initial_schema, chain} <- upgrade_chain(project, schema_order), + {:ok, wal, table_name, entries} <- load_initial_schema(project, initial_schema) do + handle_upgrade_chain(chain, project, wal, table_name, entries) + else + _ -> schema_module = List.last(schema_order) table_name = schema_module.table_name() ensure_schema_table_exists(table_name, schema_module.table_options()) - {:ok, table_name, :empty} + + {:ok, new_wal} = Wal.load(project, schema_module.version(), schema_module.table_name()) + + {:ok, new_wal, table_name, :empty} end end - def index_root(%Project{} = project) do - Project.workspace_path(project, "indexes") + defp load_status([]), do: :empty + defp load_status(_entries), do: :stale + + defp handle_upgrade_chain([_schema_module], _project, wal, table_name, entries) do + # this case is that there are no migrations to perform + # we get a single entry, which is the schema module + + {:ok, wal, table_name, load_status(entries)} end - def index_file_path(%Project{} = project, schema) do - project - |> index_root() - |> Path.join(schema.index_file_name()) + defp handle_upgrade_chain(chain, project, _wal, _table_name, entries) do + with {:ok, schema_module, entries} <- apply_migrations(project, chain, entries), + {:ok, new_wal, dest_table_name} <- populate_schema_table(project, schema_module, entries) do + {:ok, new_wal, dest_table_name, load_status(entries)} + end end - defp load_status([]), do: :empty - defp load_status(_entries), do: :stale + defp apply_migrations(_project, [initial], entries) do + {:ok, initial, entries} + end - defp apply_migrations(%Project{} = project, chain, entries) do + defp apply_migrations(project, chain, entries) do Enum.reduce_while(chain, {:ok, nil, entries}, fn - [current], {:ok, _, entries} -> - {:halt, {:ok, current, entries}} - - [from, to], {:ok, _, entries} -> - with {:ok, new_entries} <- to.migrate(entries), - :ok <- remove_old_schema_file(project, from) do - {:cont, {:ok, to, new_entries}} - else + initial, {:ok, nil, entries} -> + Wal.destroy(project, initial.version()) + {:cont, {:ok, initial, entries}} + + to, {:ok, _, entries} -> + case to.migrate(entries) do + {:ok, new_entries} -> + Wal.destroy(project, to.version()) + {:cont, {:ok, to, new_entries}} + error -> {:halt, error} end end) end - defp populate_schema_table(schema_module, entries) do + defp populate_schema_table(%Project{} = project, schema_module, entries) do dest_table_name = schema_module.table_name() ensure_schema_table_exists(dest_table_name, schema_module.table_options()) - :ets.delete_all_objects(dest_table_name) - :ets.insert(dest_table_name, entries) - dest_table_name + + with {:ok, wal} <- Wal.load(project, schema_module.version(), dest_table_name), + {:ok, new_wal_state} <- do_populate_schema(wal, dest_table_name, entries), + {:ok, checkpoint_wal} <- Wal.checkpoint(new_wal_state) do + {:ok, checkpoint_wal, dest_table_name} + end + end + + defp do_populate_schema(%Wal{} = wal, table_name, entries) do + result = + with_wal wal do + :ets.delete_all_objects(table_name) + :ets.insert(table_name, entries) + end + + case result do + {:ok, new_wal_state, _} -> {:ok, new_wal_state} + error -> error + end end defp ensure_schema_table_exists(table_name, table_options) do @@ -132,61 +176,40 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schema do end end - defp load_initial(%Project{} = project, schema_module) do - filename = - project - |> index_file_path(schema_module) - |> String.to_charlist() - + defp load_initial_schema(%Project{} = 
project, schema_module) do table_name = schema_module.table_name() + ensure_schema_table_exists(table_name, schema_module.table_options()) - entries = - case :ets.file2tab(filename) do - {:ok, ^table_name} -> - :ets.tab2list(table_name) - - {:ok, other_name} -> - # the data file loaded was saved from some other module - # likely due to namespacing. We delete the table and create - # another one with the correct name. - entries = :ets.tab2list(other_name) - :ets.delete(other_name) - ensure_schema_table_exists(table_name, schema_module.table_options()) - :ets.insert(table_name, entries) - entries - end - - {table_name, entries} + case Wal.load(project, schema_module.version(), table_name) do + {:ok, wal} -> {:ok, wal, table_name, :ets.tab2list(table_name)} + error -> error + end end defp upgrade_chain(%Project{} = project, schema_order) do - filtered = + {_, initial_schema, schemas} = schema_order - |> Enum.chunk_every(2, 1) - |> Enum.filter(fn - [schema_module | _] -> - File.exists?(index_file_path(project, schema_module)) + |> Enum.reduce({:not_found, nil, []}, fn + schema_module, {:not_found, nil, _} -> + if Wal.exists?(project, schema_module.version()) do + {:found, schema_module, [schema_module]} + else + {:not_found, nil, []} + end + + schema_module, {:found, initial_schema, chain} -> + {:found, initial_schema, [schema_module | chain]} end) - case filtered do + case Enum.reverse(schemas) do [] -> :error other -> - {:ok, other} + {:ok, initial_schema, other} end end - defp remove_old_schema_file(%Project{} = project, schema_module) do - File.rm(index_file_path(project, schema_module)) - end - - defp ensure_index_directory_exists(%Project{} = project) do - project - |> index_root() - |> File.mkdir_p!() - end - defp ensure_unique_versions(schemas) do Enum.reduce(schemas, %{}, fn schema, seen_versions -> schema_version = schema.version() diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/legacy_v0.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/legacy_v0.ex index 09239e137..572532443 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/legacy_v0.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/legacy_v0.ex @@ -12,4 +12,8 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.LegacyV0 do def index_file_name do "source.index.ets" end + + def to_rows(_) do + [] + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v1.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v1.ex index 34595078b..5653a3ec2 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v1.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v1.ex @@ -5,7 +5,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V1 do It does this by storing data under three different key types. The first type groups references to ids by path, and is accessible via the `by_path` utility macros. The second, stores data by subject, type, subtype, path and the elixir and erlang versions. This is what powers exact matching. - Finally, entries are stored by their reference, which powers direct lookups, which are used in fuzzy matching. + Finally, entries are stored by their id, which powers direct lookups, which are used in fuzzy matching. 
""" @@ -14,14 +14,12 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V1 do use Schema, version: 1 - defkey :by_id, [:id, :type, :subtype, :elixir_version, :erlang_version] + defkey :by_id, [:id, :type, :subtype] defkey :by_subject, [ :subject, :type, :subtype, - :elixir_version, - :erlang_version, :path ] @@ -31,31 +29,18 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V1 do migrated = entries |> Stream.filter(fn - {_, %_{elixir_version: _, erlang_version: _, type: _, subtype: _, ref: _}} -> true + {_, %_{type: _, subtype: _, id: _}} -> true _ -> false end) |> Stream.map(fn {_, entry} -> entry end) - |> entries_to_rows() + |> Schema.entries_to_rows(__MODULE__) {:ok, migrated} end - @spec entries_to_rows(Enumerable.t(Entry.t())) :: [tuple()] - def entries_to_rows(entries) do - entries - |> Stream.uniq_by(& &1.ref) - |> Stream.flat_map(&to_rows(&1)) - |> Enum.reduce(%{}, fn {key, value}, acc -> - Map.update(acc, key, [value], fn old_values -> [value | old_values] end) - end) - |> Enum.to_list() - end - def to_rows(%Entry{} = entry) do subject_key = by_subject( - elixir_version: entry.elixir_version, - erlang_version: entry.erlang_version, subject: to_subject(entry.subject), type: entry.type, subtype: entry.subtype, @@ -64,20 +49,18 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V1 do id_key = by_id( - id: entry.ref, + id: entry.id, type: entry.type, - subtype: entry.subtype, - elixir_version: entry.elixir_version, - erlang_version: entry.erlang_version + subtype: entry.subtype ) path_key = by_path(path: entry.path) - [{subject_key, entry}, {id_key, entry}, {path_key, id_key}] + [{subject_key, id_key}, {id_key, entry}, {path_key, id_key}] end # This case will handle any namespaced entries - def to_rows(%{elixir_version: _, erlang_version: _, type: _, subtype: _, ref: _} = entry) do + def to_rows(%{type: _, subtype: _, id: _} = entry) do map = Map.delete(entry, :__struct__) Entry diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v2.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v2.ex new file mode 100644 index 000000000..ce326b658 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v2.ex @@ -0,0 +1,69 @@ +defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V2 do + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schema + + require Entry + use Schema, version: 2 + + defkey :by_id, [:id, :type, :subtype] + + defkey :by_subject, [ + :subject, + :type, + :subtype, + :path + ] + + defkey :by_path, [:path] + defkey :by_block_id, [:block_id, :path] + defkey :structure, [:path] + + def migrate(_) do + {:ok, []} + end + + def to_rows(%Entry{} = entry) when Entry.is_structure(entry) do + structure_key = structure(path: entry.path) + [{structure_key, entry.subject}] + end + + def to_rows(%Entry{} = entry) do + subject_key = + by_subject( + subject: to_subject(entry.subject), + type: entry.type, + subtype: entry.subtype, + path: entry.path + ) + + id_key = + by_id( + id: entry.id, + type: entry.type, + subtype: entry.subtype + ) + + path_key = by_path(path: entry.path) + block_key = by_block_id(path: entry.path, block_id: entry.block_id) + + [{id_key, entry}, {subject_key, id_key}, {path_key, id_key}, {block_key, id_key}] + end + + # This case will handle any namespaced entries + def to_rows(%{type: _, subtype: _, id: _} = entry) do + map = 
Map.delete(entry, :__struct__) + + Entry + |> struct(map) + |> to_rows() + end + + def table_options do + [:named_table, :ordered_set, :compressed] + end + + defp to_subject(binary) when is_binary(binary), do: binary + defp to_subject(:_), do: :_ + defp to_subject(atom) when is_atom(atom), do: inspect(atom) + defp to_subject(other), do: to_string(other) +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v3.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v3.ex new file mode 100644 index 000000000..98c36f773 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/schemas/v3.ex @@ -0,0 +1,82 @@ +defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas.V3 do + alias Lexical.RemoteControl.Search.Indexer.Entry + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schema + + require Entry + use Schema, version: 3 + + defkey :by_id, [:id, :type, :subtype] + + defkey :by_subject, [ + :subject, + :type, + :subtype, + :path + ] + + defkey :by_path, [:path] + defkey :by_block_id, [:block_id, :path] + defkey :structure, [:path] + + def migrate(entries) do + migrated = + entries + |> Stream.filter(fn + {query_by_subject(), %Entry{}} -> true + _ -> false + end) + |> Stream.map(fn {_, entry} -> entry end) + |> Schema.entries_to_rows(__MODULE__) + + {:ok, migrated} + end + + def to_rows(%Entry{} = entry) when Entry.is_structure(entry) do + structure_key = structure(path: entry.path) + [{structure_key, entry.subject}] + end + + def to_rows(%Entry{} = entry) do + subject_key = + by_subject( + subject: to_subject(entry.subject), + type: entry.type, + subtype: entry.subtype, + path: entry.path + ) + + id_key = + by_id( + id: entry.id, + type: entry.type, + subtype: entry.subtype + ) + + path_key = by_path(path: entry.path) + block_key = by_block_id(path: entry.path, block_id: entry.block_id) + + [{id_key, entry}, {subject_key, id_key}, {path_key, id_key}, {block_key, id_key}] + end + + # This case will handle any namespaced entries + def to_rows(%{type: _, subtype: _, id: _} = entry) do + map = Map.delete(entry, :__struct__) + + Entry + |> struct(map) + |> to_rows() + end + + def table_options do + if Features.can_use_compressed_ets_table?() do + [:named_table, :ordered_set, :compressed] + else + [:named_table, :ordered_set] + end + end + + def to_subject(charlist) when is_list(charlist), do: charlist + def to_subject(:_), do: :_ + def to_subject(atom) when is_atom(atom), do: atom |> inspect() |> to_charlist() + def to_subject(other), do: to_charlist(other) +end diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/state.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/state.ex index 67aca8607..08891fa3d 100644 --- a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/state.ex +++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/state.ex @@ -6,27 +6,36 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.State do """ alias Lexical.Project + alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schema alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schemas - alias Lexical.VM.Versions + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Wal @schema_order [ Schemas.LegacyV0, - Schemas.V1 + Schemas.V1, + Schemas.V2, + Schemas.V3 ] - import Schemas.V1, + import Wal, only: :macros + import Entry, only: :macros + + 
import Schemas.V3,
    only: [
-      query_by_id: 0,
+      by_block_id: 1,
       query_by_id: 1,
       query_by_path: 1,
-      query_by_subject: 1
+      query_structure: 1,
+      query_by_subject: 1,
+      structure: 1,
+      to_subject: 1
     ]

-  defstruct [:project, :table_name, :leader?, :leader_pid, :needs_sync?]
+  defstruct [:project, :table_name, :leader?, :leader_pid, :wal_state]

   def new_leader(%Project{} = project) do
-    %__MODULE__{project: project, leader?: true, leader_pid: self(), needs_sync?: false}
+    %__MODULE__{project: project, leader?: true, leader_pid: self()}
   end

   def new_follower(%Project{} = project, leader_pid) do
@@ -34,9 +43,9 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.State do
   end

   def prepare(%__MODULE__{leader?: true} = state) do
-    {:ok, table_name, result} = Schema.load(state.project, @schema_order)
-    :ets.info(table_name)
-    {{:ok, result}, %__MODULE__{state | table_name: table_name}}
+    {:ok, wal, table_name, result} = Schema.load(state.project, @schema_order)
+
+    {{:ok, result}, %__MODULE__{state | table_name: table_name, wal_state: wal}}
   end

   def prepare(%__MODULE__{leader?: false}) do
@@ -44,44 +53,124 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.State do
   end

   def drop(%__MODULE__{leader?: true} = state) do
+    Wal.truncate(state.wal_state)
     :ets.delete_all_objects(state.table_name)
   end

   def insert(%__MODULE__{leader?: true} = state, entries) do
-    rows = Schemas.V1.entries_to_rows(entries)
-    true = :ets.insert(state.table_name, rows)
+    rows = Schema.entries_to_rows(entries, current_schema())
+
+    with_wal state.wal_state do
+      true = :ets.insert(state.table_name, rows)
+    end
+
     :ok
   end

-  def select_all(%__MODULE__{} = state) do
-    state.table_name
-    |> :ets.match({query_by_id(), :"$1"})
-    |> List.flatten()
+  def reduce(%__MODULE__{} = state, acc, reducer_fun) do
+    ets_reducer = fn
+      {{:by_id, _, _, _}, entries}, acc when is_list(entries) ->
+        Enum.reduce(entries, acc, reducer_fun)
+
+      {{:by_id, _, _, _}, %Entry{} = entry}, acc ->
+        reducer_fun.(entry, acc)
+
+      _, acc ->
+        acc
+    end
+
+    :ets.foldl(ets_reducer, acc, state.table_name)
   end

   def find_by_subject(%__MODULE__{} = state, subject, type, subtype) do
-    versions = Versions.current()
-
     match_pattern =
       query_by_subject(
         subject: to_subject(subject),
         type: type,
-        subtype: subtype,
-        elixir_version: versions.elixir,
-        erlang_version: versions.erlang
+        subtype: subtype
       )

     state.table_name
     |> :ets.match_object({match_pattern, :_})
-    |> Enum.flat_map(fn {_, match} -> match end)
+    |> Enum.flat_map(fn {_, id_keys} ->
+      id_keys
+    end)
+    |> MapSet.new()
+    |> Enum.flat_map(&:ets.lookup_element(state.table_name, &1, 2))
+  end
+
+  def find_by_prefix(%__MODULE__{} = state, subject, type, subtype) do
+    match_pattern =
+      query_by_subject(
+        subject: to_prefix(subject),
+        type: type,
+        subtype: subtype
+      )
+
+    state.table_name
+    |> :ets.select([{{match_pattern, :_}, [], [:"$_"]}])
+    |> Stream.flat_map(fn {_, id_keys} -> id_keys end)
+    |> Stream.uniq()
+    |> Enum.flat_map(&:ets.lookup_element(state.table_name, &1, 2))
+  end
+
+  @dialyzer {:nowarn_function, to_prefix: 1}
+
+  defp to_prefix(prefix) when is_binary(prefix) do
+    # What we really want to do here is convert the prefix to an improper list,
+    # like this: `'abc' -> [97, 98, 99 | :_]`. Note that this differs from
+    # `'abc' ++ [:_]`, which builds a proper list; the improper form is the
+    # format required by the `:ets.select` match specification.
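The comment above is easier to see against a toy table. A minimal, self-contained sketch (the `:prefix_demo` table and its rows are hypothetical, not part of Lexical):

```elixir
# In a match-spec head, :_ as the *tail* of an improper list matches the rest
# of any charlist key that begins with the listed characters. By contrast,
# ~c"abc" ++ [:_] is a proper list whose fourth element is a wildcard, so it
# only matches keys of exactly length 4.
table = :ets.new(:prefix_demo, [:ordered_set])
:ets.insert(table, [{~c"abcdef", 1}, {~c"abc", 2}, {~c"abx", 3}])

pattern = [?a, ?b, ?c | :_]
:ets.select(table, [{{pattern, :"$1"}, [], [:"$1"]}])
#=> [2, 1]  (~c"abx" fails to match on its third character)
```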
+ {last_char, others} = prefix |> String.to_charlist() |> List.pop_at(-1) + others ++ [last_char | :_] end - def find_by_references(%__MODULE__{} = state, references, type, subtype) - when is_list(references) do - versions = Versions.current() + def siblings(%__MODULE__{} = state, %Entry{} = entry) do + key = by_block_id(block_id: entry.block_id, path: entry.path) + + siblings = + state.table_name + |> :ets.lookup_element(key, 2) + |> Enum.map(&:ets.lookup_element(state.table_name, &1, 2)) + |> List.flatten() + |> Enum.filter(fn sibling -> + case {is_block(entry), is_block(sibling)} do + {same, same} -> true + _ -> false + end + end) + |> Enum.sort_by(& &1.id) + |> Enum.uniq() + + {:ok, siblings} + rescue + ArgumentError -> + :error + end - for reference <- references, - match_pattern = match_id_key(reference, versions, type, subtype), + def parent(%__MODULE__{} = state, %Entry{} = entry) do + with {:ok, structure} <- structure_for_path(state, entry.path), + {:ok, child_path} <- child_path(structure, entry.block_id) do + child_path = + if is_block(entry) do + # if we're a block, finding the first block will find us, so pop + # our id off the path. + tl(child_path) + else + child_path + end + + find_first_by_block_id(state, child_path) + end + end + + def parent(%__MODULE__{}, :root) do + :error + end + + def find_by_ids(%__MODULE__{} = state, ids, type, subtype) + when is_list(ids) do + for id <- ids, + match_pattern = match_id_key(id, type, subtype), {_key, entry} <- :ets.match_object(state.table_name, match_pattern) do entry end @@ -89,12 +178,19 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.State do end def replace_all(%__MODULE__{leader?: true} = state, entries) do - rows = Schemas.V1.entries_to_rows(entries) + rows = Schema.entries_to_rows(entries, current_schema()) - with true <- :ets.delete_all_objects(state.table_name), - true <- :ets.insert(state.table_name, rows) do - :ok - end + {:ok, _, result} = + with_wal state.wal_state do + true = :ets.delete_all_objects(state.table_name) + true = :ets.insert(state.table_name, rows) + :ok + end + + # When we replace everything, the old checkpoint is invalidated + # so it makes sense to force a new one. 
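For intuition about the forced checkpoint here: as the Wal module later in this diff shows, `Wal.checkpoint/1` snapshots the whole table into a fresh checkpoint file and truncates the update log. A hedged sketch of the resulting invariant (variable names reused from the surrounding function; the size check is only illustrative):

```elixir
# Checkpointing returns a new %Wal{} whose update log has been truncated, so
# a crash right after this point would replay zero operations on reload.
{:ok, checkpointed} = Wal.checkpoint(state.wal_state)
Wal.size(checkpointed)
#=> {:ok, 0}
```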
+ Wal.checkpoint(state.wal_state) + result end def delete_by_path(%__MODULE__{leader?: true} = state, path) do @@ -103,54 +199,97 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.State do |> :ets.match({query_by_path(path: path), :"$0"}) |> List.flatten() - :ets.match_delete(state.table_name, {query_by_subject(path: path), :_}) - :ets.match_delete(state.table_name, {query_by_path(path: path), :_}) - Enum.each(ids_to_delete, &:ets.delete(state.table_name, &1)) + with_wal state.wal_state do + :ets.match_delete(state.table_name, {query_by_subject(path: path), :_}) + :ets.match_delete(state.table_name, {query_by_path(path: path), :_}) + :ets.match_delete(state.table_name, {query_structure(path: path), :_}) + end + + Enum.each(ids_to_delete, fn id -> + with_wal state.wal_state do + :ets.delete(state.table_name, id) + end + end) + {:ok, ids_to_delete} end - def destroy(%__MODULE__{leader?: true} = state) do - destroy(state.project) + def destroy_all(%Project{} = project) do + Wal.destroy_all(project) end - def destroy(%Project{} = project) do - project - |> Schema.index_root() - |> File.rm_rf() + def destroy(%__MODULE__{leader?: true, wal_state: %Wal{}} = state) do + Wal.destroy(state.wal_state) end - def sync(%__MODULE__{leader?: true} = state) do - %__MODULE__{state | needs_sync?: true} + def destroy(%__MODULE__{leader?: true}) do + :ok end - def do_sync(%__MODULE__{leader?: true, needs_sync?: true} = state) do - file_path_charlist = - state.project - |> Schema.index_file_path(current_schema()) - |> String.to_charlist() + def terminate(%__MODULE__{wal_state: %Wal{}} = state) do + Wal.close(state.wal_state) + end - :ets.tab2file(state.table_name, file_path_charlist) - %__MODULE__{state | needs_sync?: false} + def terminate(%__MODULE__{}) do + :ok end - def do_sync(%__MODULE__{} = state) do - %__MODULE__{state | needs_sync?: false} + defp child_path(structure, child_id) do + path = + Enum.reduce_while(structure, [], fn + {^child_id, _children}, children -> + {:halt, [child_id | children]} + + {_, children}, path when map_size(children) == 0 -> + {:cont, path} + + {current_id, children}, path -> + case child_path(children, child_id) do + {:ok, child_path} -> {:halt, [current_id | path] ++ Enum.reverse(child_path)} + :error -> {:cont, path} + end + end) + + case path do + [] -> :error + path -> {:ok, Enum.reverse(path)} + end end - defp match_id_key(reference, versions, type, subtype) do - {query_by_id( - id: reference, - type: type, - subtype: subtype, - elixir_version: versions.elixir, - erlang_version: versions.erlang - ), :_} + defp find_first_by_block_id(%__MODULE__{} = state, block_ids) do + Enum.reduce_while(block_ids, :error, fn block_id, failure -> + case find_entry_by_id(state, block_id) do + {:ok, _} = success -> + {:halt, success} + + _ -> + {:cont, failure} + end + end) end - defp to_subject(binary) when is_binary(binary), do: binary - defp to_subject(:_), do: :_ - defp to_subject(atom) when is_atom(atom), do: inspect(atom) - defp to_subject(other), do: to_string(other) + def find_entry_by_id(%__MODULE__{} = state, id) do + case find_by_ids(state, [id], :_, :_) do + [entry] -> {:ok, entry} + _ -> :error + end + end + + def structure_for_path(%__MODULE__{} = state, path) do + key = structure(path: path) + + case :ets.lookup_element(state.table_name, key, 2) do + [structure] -> {:ok, structure} + _ -> :error + end + rescue + ArgumentError -> + :error + end + + defp match_id_key(id, type, subtype) do + {query_by_id(id: id, type: type, subtype: subtype), :_} + end defp 
current_schema do
    List.last(@schema_order)
diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/wal.ex b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/wal.ex
new file mode 100644
index 000000000..ef0928380
--- /dev/null
+++ b/apps/remote_control/lib/lexical/remote_control/search/store/backends/ets/wal.ex
@@ -0,0 +1,423 @@
+defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.Wal do
+  @moduledoc """
+  A (hopefully) simple write-ahead log
+  """
+  alias Lexical.Identifier
+  alias Lexical.Project
+  alias Lexical.VM.Versions
+
+  import Record
+
+  defrecord :operation, id: nil, function: nil, args: nil
+
+  defstruct [
+    :checkpoint_version,
+    :ets_table,
+    :max_wal_operations,
+    :project,
+    :schema_version,
+    :update_log,
+    :update_log_name
+  ]
+
+  @write_functions [
+    :delete,
+    :delete_all_objects,
+    :delete_object,
+    :insert,
+    :insert_new,
+    :match_delete,
+    :select_delete,
+    :select_replace,
+    :update_counter,
+    :update_element
+  ]
+
+  @no_checkpoint_id 0
+  @chunk_size 10_000
+  @checkpoint_int_length 20
+  @default_max_operations 50_000
+
+  defmacro with_wal(wal_state, do: block) do
+    {_, write_calls} =
+      Macro.prewalk(block, [], fn ast, acc ->
+        {ast, collect_ets_writes(ast, acc)}
+      end)
+
+    operations =
+      write_calls
+      |> Enum.reverse()
+      |> Enum.map(&to_operation/1)
+
+    quote do
+      case unquote(__MODULE__).append(unquote(wal_state), unquote(operations)) do
+        {:ok, wal_state} ->
+          result = unquote(block)
+          {:ok, wal_state, result}
+
+        error ->
+          error
+      end
+    end
+  end
+
+  def load(%Project{} = project, schema_version, ets_table, options \\ []) do
+    max_wal_operations = Keyword.get(options, :max_wal_operations, @default_max_operations)
+
+    wal = %__MODULE__{
+      ets_table: ets_table,
+      max_wal_operations: max_wal_operations,
+      project: project,
+      schema_version: to_string(schema_version)
+    }
+
+    ensure_wal_directory_exists(wal)
+
+    with {:ok, checkpoint_id} <- load_latest_checkpoint(wal),
+         {:ok, new_wal} <- open_update_wal(wal, checkpoint_id),
+         :ok <- apply_updates(new_wal) do
+      {:ok, new_wal}
+    end
+  end
+
+  def exists?(%__MODULE__{} = wal) do
+    exists?(wal.project, wal.schema_version)
+  end
+
+  def exists?(%Project{} = project, schema_version) do
+    case File.ls(wal_directory(project, schema_version)) do
+      {:ok, [_]} -> true
+      {:ok, [_ | _]} -> true
+      _ -> false
+    end
+  end
+
+  def append(%__MODULE__{} = wal, operations) do
+    case :disk_log.log_terms(wal.update_log, operations) do
+      :ok ->
+        maybe_checkpoint(wal)
+
+      error ->
+        error
+    end
+  end
+
+  def close(%__MODULE__{} = wal) do
+    case wal.update_log do
+      nil ->
+        :ok
+
+      log ->
+        :disk_log.sync(log)
+        :disk_log.close(log)
+    end
+  end
+
+  def truncate(%__MODULE__{} = wal) do
+    :disk_log.truncate(wal.update_log)
+  end
+
+  def destroy(%__MODULE__{} = wal) do
+    close(wal)
+    destroy(wal.project, wal.schema_version)
+  end
+
+  def destroy(%Project{} = project, schema_version) do
+    project
+    |> wal_directory(schema_version)
+    |> File.rm_rf!()
+  end
+
+  def destroy_all(%Project{} = project) do
+    project
+    |> root_path()
+    |> File.rm_rf!()
+  end
+
+  def checkpoint(%__MODULE__{} = wal) do
+    case :ets.info(wal.ets_table) do
+      :undefined ->
+        {:error, :no_table}
+
+      _ ->
+        do_checkpoint(wal)
+    end
+  end
+
+  def size(%__MODULE__{update_log: nil}) do
+    {:error, :not_loaded}
+  end
+
+  def size(%__MODULE__{update_log: update_log}) do
+    with info when is_list(info) <- :disk_log.info(update_log),
+         {:ok, size} <- Keyword.fetch(info, :items) do
+      {:ok, size}
+    else
+      _ ->
+        {:error,
:not_loaded} + end + end + + def root_path(%Project{} = project) do + Project.workspace_path(project, ["indexes", "ets"]) + end + + # Private + + defp collect_ets_writes({{:., _, [:ets, function_name]}, _, args}, acc) + when function_name in @write_functions do + [{:ets, function_name, args} | acc] + end + + defp collect_ets_writes(_, acc), do: acc + + defp to_operation({:ets, call_name, args}) do + quote do + operation(id: Identifier.next_global!(), function: unquote(call_name), args: unquote(args)) + end + end + + defp ensure_wal_directory_exists(%__MODULE__{} = wal) do + wal |> wal_directory() |> File.mkdir_p!() + end + + defp wal_directory(%__MODULE__{} = wal) do + wal_directory(wal.project, wal.schema_version) + end + + defp wal_directory(%Project{} = project, schema_version) do + versions = Versions.current() + Path.join([root_path(project), versions.erlang, versions.elixir, to_string(schema_version)]) + end + + defp open_update_wal(%__MODULE__{} = wal, checkpoint_version) do + wal_path = update_wal_path(wal) + wal_name = update_wal_name(wal) + + case :disk_log.open(name: wal_name, file: String.to_charlist(wal_path)) do + {:ok, log} -> + new_wal = %__MODULE__{ + wal + | update_log: log, + update_log_name: wal_name, + checkpoint_version: checkpoint_version + } + + {:ok, new_wal} + + {:repaired, log, {:recovered, _}, _bad} -> + new_wal = %__MODULE__{ + wal + | update_log: log, + update_log_name: wal_name, + checkpoint_version: checkpoint_version + } + + {:ok, new_wal} + + error -> + error + end + end + + defp update_wal_name(%__MODULE__{} = wal) do + :"updates_for_#{Project.name(wal.project)}_v#{wal.schema_version}" + end + + # Updates + defp apply_updates(%__MODULE__{} = wal) do + stream_updates(wal, wal.update_log, :start) + end + + defp stream_updates(%__MODULE__{} = wal, log, continuation) do + case :disk_log.chunk(log, continuation, @chunk_size) do + {continuation, items} when is_list(items) -> + apply_relevant_items(wal, items) + stream_updates(wal, log, continuation) + + {continuation, items, _bad_bytes} -> + apply_relevant_items(wal, items) + stream_updates(wal, log, continuation) + + :eof -> + :ok + + {:error, _} = error -> + error + end + end + + defp apply_relevant_items(%__MODULE__{} = wal, items) do + checkpoint_version = wal.checkpoint_version + + items + |> Stream.filter(fn operation(id: id) -> id >= checkpoint_version end) + |> Enum.each(fn operation(function: function, args: args) -> + apply(:ets, function, args) + end) + end + + defp get_wal_operations(%__MODULE__{} = wal) do + stats = :disk_log.info(wal.update_log) + Keyword.get(stats, :items, 0) + end + + # Checkpoints + defp needs_checkpoint?(%__MODULE__{} = wal) do + get_wal_operations(wal) >= wal.max_wal_operations + end + + defp maybe_checkpoint(%__MODULE__{} = wal) do + with true <- needs_checkpoint?(wal), + {:ok, new_wal} <- checkpoint(wal) do + {:ok, new_wal} + else + _ -> + {:ok, wal} + end + end + + defp do_checkpoint(%__MODULE__{} = wal) do + checkpoint_version = Identifier.next_global!() + checkpoint_file_name = checkpoint_file_name(checkpoint_version) + + log_path = wal |> wal_directory() |> Path.join(checkpoint_file_name) + log_name = checkpoint_log_name(wal.project) + + with {:ok, log} <- :disk_log.open(name: log_name, file: String.to_charlist(log_path)), + :ok <- checkpoint_ets_table(wal, log), + :ok <- :disk_log.close(log), + :ok <- :disk_log.truncate(wal.update_log) do + new_wal = %__MODULE__{wal | checkpoint_version: checkpoint_version} + delete_old_checkpoints(new_wal) + {:ok, new_wal} + else + 
error ->
+        # Checkpoint creation failed. Give up and start over
+        delete_old_checkpoints(wal)
+        error
+    end
+  end
+
+  defp checkpoint_ets_table(%__MODULE__{} = wal, log) do
+    log_chunks = fn
+      item, {@chunk_size, items} ->
+        :disk_log.log_terms(log, Enum.reverse(items))
+        {1, [item]}
+
+      item, {count, items} ->
+        {count + 1, [item | items]}
+    end
+
+    {_count, items} = :ets.foldl(log_chunks, {0, []}, wal.ets_table)
+    :disk_log.log_terms(log, Enum.reverse(items))
+  end
+
+  defp load_latest_checkpoint(%__MODULE__{} = wal) do
+    with {:ok, checkpoint_file} <- find_latest_checkpoint(wal),
+         {:ok, checkpoint_version} <- extract_checkpoint_version(checkpoint_file),
+         :ok <- load_checkpoint(wal, checkpoint_file) do
+      {:ok, checkpoint_version}
+    else
+      _ ->
+        # There's no checkpoint, or our checkpoint is invalid. Start from scratch.
+        {:ok, @no_checkpoint_id}
+    end
+  end
+
+  defp load_checkpoint(%__MODULE__{} = wal, checkpoint_file) do
+    log_name = checkpoint_log_name(wal.project)
+
+    case :disk_log.open(name: log_name, file: String.to_charlist(checkpoint_file)) do
+      {:ok, log} ->
+        stream_checkpoint(wal, log, :start)
+
+      {:repaired, log, _recovered, _bad_bytes} ->
+        stream_checkpoint(wal, log, :start)
+
+      error ->
+        error
+    end
+  end
+
+  defp delete_old_checkpoints(%__MODULE__{} = wal) do
+    current_checkpoint_file_name = checkpoint_file_name(wal.checkpoint_version)
+
+    [wal_directory(wal), "*.checkpoint"]
+    |> Path.join()
+    |> Path.wildcard()
+    |> Enum.each(fn checkpoint ->
+      if Path.basename(checkpoint) != current_checkpoint_file_name do
+        File.rm(checkpoint)
+      end
+    end)
+  end
+
+  defp checkpoint_file_name(checkpoint_id) when is_integer(checkpoint_id) do
+    checkpoint_id
+    |> Integer.to_string(10)
+    |> checkpoint_file_name()
+  end
+
+  defp checkpoint_file_name(checkpoint_id) when is_binary(checkpoint_id) do
+    String.pad_leading(checkpoint_id, @checkpoint_int_length, "0") <> ".checkpoint"
+  end
+
+  defp checkpoint_log_name(%Project{} = project) do
+    :"checkpoint_log_#{Project.name(project)}"
+  end
+
+  defp stream_checkpoint(%__MODULE__{} = wal, log, continuation) do
+    case :disk_log.chunk(log, continuation, @chunk_size) do
+      {continuation, items} when is_list(items) ->
+        :ets.insert(wal.ets_table, items)
+        stream_checkpoint(wal, log, continuation)
+
+      {continuation, items, _bad_bytes} ->
+        :ets.insert(wal.ets_table, items)
+        stream_checkpoint(wal, log, continuation)
+
+      :eof ->
+        :disk_log.close(log)
+        :ok
+
+      {:error, _} = error ->
+        :disk_log.close(log)
+        error
+    end
+  end
+
+  defp find_latest_checkpoint(%__MODULE__{} = wal) do
+    checkpoints =
+      [wal_directory(wal), "*.checkpoint"]
+      |> Path.join()
+      |> Path.wildcard()
+      |> Enum.sort(:desc)
+
+    case checkpoints do
+      [checkpoint | _] ->
+        {:ok, checkpoint}
+
+      _ ->
+        {:error, :no_checkpoint}
+    end
+  end
+
+  defp extract_checkpoint_version(checkpoint_path) do
+    file_name = Path.basename(checkpoint_path)
+
+    with [id_string, _] <- String.split(file_name, "."),
+         {id, ""} <- Integer.parse(id_string, 10) do
+      {:ok, id}
+    else
+      _ ->
+        :error
+    end
+  end
+
+  defp update_wal_path(%__MODULE__{} = wal) do
+    wal
+    |> wal_directory()
+    |> Path.join("updates.wal")
+  end
+end
diff --git a/apps/remote_control/lib/lexical/remote_control/search/store/state.ex b/apps/remote_control/lib/lexical/remote_control/search/store/state.ex
index 498c35f08..9742282f2 100644
--- a/apps/remote_control/lib/lexical/remote_control/search/store/state.ex
+++ b/apps/remote_control/lib/lexical/remote_control/search/store/state.ex
@@ -1,8 +1,12 @@
defmodule
Lexical.RemoteControl.Search.Store.State do alias Lexical.Project + alias Lexical.RemoteControl.Api.Messages + alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.Search.Fuzzy + alias Lexical.RemoteControl.Search.Indexer.Entry require Logger + import Messages defstruct [ :project, @@ -11,7 +15,8 @@ defmodule Lexical.RemoteControl.Search.Store.State do :update_index, :loaded?, :fuzzy, - :async_load_ref + :async_load_ref, + :update_buffer ] def new(%Project{} = project, create_index, update_index, backend) do @@ -20,7 +25,9 @@ defmodule Lexical.RemoteControl.Search.Store.State do create_index: create_index, project: project, loaded?: false, - update_index: update_index + update_index: update_index, + update_buffer: %{}, + fuzzy: Fuzzy.from_entries([]) } end @@ -53,27 +60,50 @@ defmodule Lexical.RemoteControl.Search.Store.State do def async_load_complete(%__MODULE__{} = state, result) do new_state = %__MODULE__{state | loaded?: true, async_load_ref: nil} - case result do - {:create_index, result} -> - create_index_complete(new_state, result) + response = + case result do + {:create_index, result} -> + create_index_complete(new_state, result) - {:update_index, result} -> - update_index_complete(new_state, result) - end + {:update_index, result} -> + update_index_complete(new_state, result) + + :initialize_fuzzy -> + initialize_fuzzy(new_state) + end + + Dispatch.broadcast(project_index_ready(project: state.project)) + response end def replace(%__MODULE__{} = state, entries) do with :ok <- state.backend.replace_all(entries), :ok <- maybe_sync(state) do - {:ok, %__MODULE__{state | fuzzy: Fuzzy.from_entries(entries)}} + {:ok, %__MODULE__{state | fuzzy: Fuzzy.from_backend(state.backend)}} end end def exact(%__MODULE__{} = state, subject, constraints) do type = Keyword.get(constraints, :type, :_) subtype = Keyword.get(constraints, :subtype, :_) - results = state.backend.find_by_subject(subject, type, subtype) - {:ok, results} + + case state.backend.find_by_subject(subject, type, subtype) do + l when is_list(l) -> {:ok, l} + error -> error + end + end + + def prefix(%__MODULE__{} = state, prefix, constraints) do + type = Keyword.get(constraints, :type, :_) + subtype = Keyword.get(constraints, :subtype, :_) + + case state.backend.find_by_prefix(prefix, type, subtype) do + l when is_list(l) -> + {:ok, l} + + error -> + error + end end def fuzzy(%__MODULE__{} = state, subject, constraints) do @@ -81,30 +111,69 @@ defmodule Lexical.RemoteControl.Search.Store.State do [] -> {:ok, []} - refs -> + ids -> type = Keyword.get(constraints, :type, :_) subtype = Keyword.get(constraints, :subtype, :_) - {:ok, state.backend.find_by_refs(refs, type, subtype)} + + case state.backend.find_by_ids(ids, type, subtype) do + l when is_list(l) -> {:ok, l} + error -> error + end end end - def all(%__MODULE__{} = state) do - state.backend.select_all() + def siblings(%__MODULE__{} = state, entry) do + case state.backend.siblings(entry) do + l when is_list(l) -> {:ok, l} + error -> error + end + end + + def parent(%__MODULE__{} = state, entry) do + case state.backend.parent(entry) do + %Entry{} = entry -> {:ok, entry} + error -> error + end end - def update(%__MODULE__{} = state, path, entries) do - with {:ok, state} <- update_nosync(state, path, entries), + def buffer_updates(%__MODULE__{} = state, path, entries) do + %__MODULE__{state | update_buffer: Map.put(state.update_buffer, path, entries)} + end + + def drop_buffered_updates(%__MODULE__{} = state) do + %__MODULE__{state | update_buffer: %{}} + end + + def 
flush_buffered_updates(%__MODULE__{update_buffer: buffer} = state) + when map_size(buffer) == 0 do + maybe_sync(state) + {:ok, state} + end + + def flush_buffered_updates(%__MODULE__{} = state) do + result = + Enum.reduce_while(state.update_buffer, state, fn {path, entries}, state -> + case update_nosync(state, path, entries) do + {:ok, new_state} -> + {:cont, new_state} + + error -> + {:halt, error} + end + end) + + with %__MODULE__{} = state <- result, :ok <- maybe_sync(state) do - {:ok, state} + {:ok, drop_buffered_updates(state)} end end def update_nosync(%__MODULE__{} = state, path, entries) do - with {:ok, deleted_refs} <- state.backend.delete_by_path(path), + with {:ok, deleted_ids} <- state.backend.delete_by_path(path), :ok <- state.backend.insert(entries) do fuzzy = state.fuzzy - |> Fuzzy.drop_values(deleted_refs) + |> Fuzzy.drop_values(deleted_ids) |> Fuzzy.add(entries) {:ok, %__MODULE__{state | fuzzy: fuzzy}} @@ -123,7 +192,10 @@ defmodule Lexical.RemoteControl.Search.Store.State do {:ok, :stale} -> Logger.info("backend reports stale") - {:update_index, state.update_index.(state.project, all(state))} + {:update_index, state.update_index.(state.project, state.backend)} + + {:error, :not_leader} -> + :initialize_fuzzy error -> Logger.error("Could not initialize index due to #{inspect(error)}") @@ -151,12 +223,7 @@ defmodule Lexical.RemoteControl.Search.Store.State do end defp update_index_complete(%__MODULE__{} = state, {:ok, updated_entries, deleted_paths}) do - fuzzy = - state - |> all() - |> Fuzzy.from_entries() - - starting_state = %__MODULE__{state | fuzzy: fuzzy, loaded?: true} + starting_state = initialize_fuzzy(%__MODULE__{state | loaded?: true}) new_state = updated_entries @@ -184,4 +251,10 @@ defmodule Lexical.RemoteControl.Search.Store.State do :ok end end + + defp initialize_fuzzy(%__MODULE__{} = state) do + fuzzy = Fuzzy.from_backend(state.backend) + + %__MODULE__{state | fuzzy: fuzzy} + end end diff --git a/apps/remote_control/lib/lexical/remote_control/search/subject.ex b/apps/remote_control/lib/lexical/remote_control/search/subject.ex new file mode 100644 index 000000000..1e7010426 --- /dev/null +++ b/apps/remote_control/lib/lexical/remote_control/search/subject.ex @@ -0,0 +1,18 @@ +defmodule Lexical.RemoteControl.Search.Subject do + @moduledoc """ + Functions for converting to a search entry's subject field + """ + alias Lexical.Formats + + def module(module) do + module + end + + def module_attribute(_module, attribute_name) do + "@#{attribute_name}" + end + + def mfa(module, function, arity) do + Formats.mfa(module, function, arity) + end +end diff --git a/apps/remote_control/lib/mix/tasks/namespace.ex b/apps/remote_control/lib/mix/tasks/namespace.ex index 774e52cfb..df3c33c66 100644 --- a/apps/remote_control/lib/mix/tasks/namespace.ex +++ b/apps/remote_control/lib/mix/tasks/namespace.ex @@ -10,6 +10,7 @@ defmodule Mix.Tasks.Namespace do This task takes a single argument, which is the full path to the release. 
""" + alias Lexical.Ast alias Mix.Tasks.Namespace.Transform use Mix.Task @@ -96,11 +97,9 @@ defmodule Mix.Tasks.Namespace do end defp safe_split_module(module) do - module_string = Atom.to_string(module) - - case String.split(module_string, ".") do - ["Elixir" | rest] -> rest - _ -> [] + case Ast.Module.safe_split(module) do + {:elixir, segments} -> segments + {:erlang, _} -> [] end end diff --git a/apps/remote_control/lib/mix/tasks/namespace/module.ex b/apps/remote_control/lib/mix/tasks/namespace/module.ex index 58fba401e..54833af6d 100644 --- a/apps/remote_control/lib/mix/tasks/namespace/module.ex +++ b/apps/remote_control/lib/mix/tasks/namespace/module.ex @@ -33,6 +33,10 @@ defmodule Mix.Tasks.Namespace.Module do def prefixed?("lx_" <> _), do: true + def prefixed?([?l, ?x, ?_ | _]), do: true + def prefixed?([?E, ?l, ?i, ?x, ?i, ?r, ?., ?L, ?X | _]), do: true + def prefixed?([?L, ?X | _]), do: true + def prefixed?(_), do: false diff --git a/apps/remote_control/lib/mix/tasks/namespace/transform/apps.ex b/apps/remote_control/lib/mix/tasks/namespace/transform/apps.ex index 42fc407f4..c6de667fa 100644 --- a/apps/remote_control/lib/mix/tasks/namespace/transform/apps.ex +++ b/apps/remote_control/lib/mix/tasks/namespace/transform/apps.ex @@ -66,7 +66,7 @@ defmodule Mix.Tasks.Namespace.Transform.Apps do end defp visit({:description, desc}) do - {:description, desc ++ ' namespaced by lexical.'} + {:description, desc ++ ~c" namespaced by lexical."} end defp visit({:mod, {module_name, args}}) do diff --git a/apps/remote_control/lib/mix/tasks/namespace/transform/configs.ex b/apps/remote_control/lib/mix/tasks/namespace/transform/configs.ex new file mode 100644 index 000000000..2c455a1ff --- /dev/null +++ b/apps/remote_control/lib/mix/tasks/namespace/transform/configs.ex @@ -0,0 +1,41 @@ +defmodule Mix.Tasks.Namespace.Transform.Configs do + alias Mix.Tasks.Namespace + + def apply_to_all(base_directory) do + base_directory + |> Path.join("**") + |> Path.wildcard() + |> Enum.map(&Path.absname/1) + |> tap(fn paths -> + Mix.Shell.IO.info("Rewriting #{length(paths)} config scripts.") + end) + |> Enum.each(&apply/1) + end + + def apply(path) do + namespaced = + path + |> File.read!() + |> Code.string_to_quoted!() + |> Macro.postwalk(fn + {:__aliases__, meta, alias} -> + namespaced_alias = + alias + |> Module.concat() + |> Namespace.Module.apply() + |> Module.split() + |> Enum.map(&String.to_atom/1) + + {:__aliases__, meta, namespaced_alias} + + atom when is_atom(atom) -> + Namespace.Module.apply(atom) + + ast -> + ast + end) + |> Macro.to_string() + + File.write!(path, namespaced) + end +end diff --git a/apps/remote_control/lib/mix/tasks/namespace/transform/erlang.ex b/apps/remote_control/lib/mix/tasks/namespace/transform/erlang.ex index f55e32575..77fa59c5f 100644 --- a/apps/remote_control/lib/mix/tasks/namespace/transform/erlang.ex +++ b/apps/remote_control/lib/mix/tasks/namespace/transform/erlang.ex @@ -19,7 +19,7 @@ defmodule Mix.Tasks.Namespace.Transform.Erlang do end def term_to_string(term) do - '~p.~n' + ~c"~p.~n" |> :io_lib.format([term]) |> :lists.flatten() |> List.to_string() diff --git a/apps/remote_control/mix.exs b/apps/remote_control/mix.exs index 1816ea609..cd2c9c7c6 100644 --- a/apps/remote_control/mix.exs +++ b/apps/remote_control/mix.exs @@ -4,7 +4,7 @@ defmodule Lexical.RemoteControl.MixProject do def project do [ app: :remote_control, - version: "0.3.0", + version: "0.5.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", @@ -44,14 +44,15 @@ 
defmodule Lexical.RemoteControl.MixProject do [ {:benchee, "~> 1.1", only: :test}, {:common, in_umbrella: true}, - {:elixir_sense, git: "https://github.com/elixir-lsp/elixir_sense.git"}, + {:elixir_sense, github: "elixir-lsp/elixir_sense"}, {:lexical_plugin, path: "../../projects/lexical_plugin"}, {:lexical_shared, path: "../../projects/lexical_shared"}, {:lexical_test, path: "../../projects/lexical_test", only: :test}, {:patch, "~> 0.12", only: [:dev, :test], optional: true, runtime: false}, {:path_glob, "~> 0.2", optional: true}, - {:sourceror, "~> 0.14.0"}, - {:phoenix_live_view, "~> 0.19.5", only: [:test], optional: true, runtime: false} + {:phoenix_live_view, "~> 0.19.5", only: [:test], optional: true, runtime: false}, + {:snowflake, "~> 1.0"}, + {:sourceror, "~> 1.4"} ] end diff --git a/apps/remote_control/test/fixtures/compilation_callback_errors/.formatter.exs b/apps/remote_control/test/fixtures/compilation_callback_errors/.formatter.exs new file mode 100644 index 000000000..d2cda26ed --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_callback_errors/.formatter.exs @@ -0,0 +1,4 @@ +# Used by "mix format" +[ + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] +] diff --git a/apps/remote_control/test/fixtures/compilation_callback_errors/.gitignore b/apps/remote_control/test/fixtures/compilation_callback_errors/.gitignore new file mode 100644 index 000000000..5b5619e04 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_callback_errors/.gitignore @@ -0,0 +1,26 @@ +# The directory Mix will write compiled artifacts to. +/_build/ + +# If you run "mix test --cover", coverage assets end up here. +/cover/ + +# The directory Mix downloads your dependencies sources to. +/deps/ + +# Where third-party dependencies like ExDoc output generated docs. +/doc/ + +# Ignore .fetch files in case you like to edit your project deps locally. +/.fetch + +# If the VM crashes, it generates a dump, let's ignore it too. +erl_crash.dump + +# Also ignore archive artifacts (built via "mix archive.build"). +*.ez + +# Ignore package tarball (built via "mix hex.build"). +compilation_callback_errors-*.tar + +# Temporary files, for example, from tests.
+/tmp/ diff --git a/apps/remote_control/test/fixtures/compilation_callback_errors/lib/compile_callback_error.ex b/apps/remote_control/test/fixtures/compilation_callback_errors/lib/compile_callback_error.ex new file mode 100644 index 000000000..de7ffd508 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_callback_errors/lib/compile_callback_error.ex @@ -0,0 +1,6 @@ +defmodule CompileCallbackError do + @after_verify __MODULE__ + def __after_verify__(_) do + raise "boom" + end +end diff --git a/apps/remote_control/test/fixtures/compilation_callback_errors/mix.exs b/apps/remote_control/test/fixtures/compilation_callback_errors/mix.exs new file mode 100644 index 000000000..bda846f67 --- /dev/null +++ b/apps/remote_control/test/fixtures/compilation_callback_errors/mix.exs @@ -0,0 +1,25 @@ +defmodule CompilationCallbackErrors.MixProject do + use Mix.Project + + def project do + Code.put_compiler_option(:ignore_module_conflict, true) + + [ + app: :compilation_callback_errors, + version: "0.1.0", + elixir: "~> 1.13", + start_permanent: Mix.env() == :prod, + deps: deps() + ] + end + + def application do + [ + extra_applications: [:logger] + ] + end + + defp deps do + [] + end +end diff --git a/apps/remote_control/test/fixtures/navigations/lib/macro_struct.ex b/apps/remote_control/test/fixtures/navigations/lib/macro_struct.ex new file mode 100644 index 000000000..5b5c1643d --- /dev/null +++ b/apps/remote_control/test/fixtures/navigations/lib/macro_struct.ex @@ -0,0 +1,32 @@ +defmodule MacroStruct do + defmacro __using__(_) do + Module.register_attribute(__CALLER__.module, :field_names, accumulate: true) + + quote do + import(unquote(__MODULE__), only: [field: 2, typedstruct: 2]) + end + end + + defmacro __before_compile__(_) do + fields = __CALLER__.module |> Module.get_attribute(:field_names) |> List.wrap() + + quote do + defstruct unquote(fields) + end + end + + defmacro typedstruct(opts, do: body) do + Module.put_attribute(__CALLER__.module, :opts, opts) + + quote do + defmodule unquote(opts[:module]) do + @before_compile unquote(__MODULE__) + unquote(body) + end + end + end + + defmacro field(name, _type) do + Module.put_attribute(__CALLER__.module, :field_names, name) + end +end diff --git a/apps/remote_control/test/fixtures/navigations/lib/my_definition.ex b/apps/remote_control/test/fixtures/navigations/lib/my_definition.ex index 651982512..28d9d4e54 100644 --- a/apps/remote_control/test/fixtures/navigations/lib/my_definition.ex +++ b/apps/remote_control/test/fixtures/navigations/lib/my_definition.ex @@ -1,6 +1,8 @@ defmodule MyDefinition do @type result :: String.t() + defstruct [:field, another_field: nil] + defmacro __using__(_opts) do quote do import MyDefinition diff --git a/apps/remote_control/test/fixtures/navigations/lib/struct.ex b/apps/remote_control/test/fixtures/navigations/lib/struct.ex new file mode 100644 index 000000000..abb5e20c0 --- /dev/null +++ b/apps/remote_control/test/fixtures/navigations/lib/struct.ex @@ -0,0 +1,11 @@ +defmodule NormalStruct do + defstruct [:variant] +end + +defmodule TypedStructs do + use MacroStruct + + typedstruct enforce: true, module: MacroBasedStruct do + field(:contract_id, String.t()) + end +end diff --git a/apps/remote_control/test/fixtures/navigations/lib/uses.ex b/apps/remote_control/test/fixtures/navigations/lib/uses.ex index 40ce12083..808ef5535 100644 --- a/apps/remote_control/test/fixtures/navigations/lib/uses.ex +++ b/apps/remote_control/test/fixtures/navigations/lib/uses.ex @@ -4,4 +4,8 @@ defmodule Navigations.Uses do def 
my_function do MyDefinition.greet("world") end + + def other_function do + IO.puts("hi") + end end diff --git a/apps/remote_control/test/fixtures/project/.formatter.exs b/apps/remote_control/test/fixtures/project/.formatter.exs index d2cda26ed..3dfd7616d 100644 --- a/apps/remote_control/test/fixtures/project/.formatter.exs +++ b/apps/remote_control/test/fixtures/project/.formatter.exs @@ -1,4 +1,9 @@ # Used by "mix format" [ - inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] + inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"], + locals_without_parens: [ + macro_1_without_parens: 1, + macro_2_without_parens: :*, + fun_1_without_parens: 1 + ] ] diff --git a/apps/remote_control/test/fixtures/project/lib/behaviours.ex b/apps/remote_control/test/fixtures/project/lib/behaviours.ex new file mode 100644 index 000000000..05a310671 --- /dev/null +++ b/apps/remote_control/test/fixtures/project/lib/behaviours.ex @@ -0,0 +1,3 @@ +defmodule Unary do + @callback ary(any()) :: :un +end diff --git a/apps/remote_control/test/fixtures/project/lib/functions.ex b/apps/remote_control/test/fixtures/project/lib/functions.ex new file mode 100644 index 000000000..1a2605582 --- /dev/null +++ b/apps/remote_control/test/fixtures/project/lib/functions.ex @@ -0,0 +1,5 @@ +defmodule Project.Functions do + def fun_1_without_parens(arg) do + arg + end +end diff --git a/apps/remote_control/test/fixtures/project/lib/macros.ex b/apps/remote_control/test/fixtures/project/lib/macros.ex index 38a90406f..26bfbbeeb 100644 --- a/apps/remote_control/test/fixtures/project/lib/macros.ex +++ b/apps/remote_control/test/fixtures/project/lib/macros.ex @@ -11,4 +11,12 @@ defmodule Project.Macros do Logger.info("message is: #{unquote(inspect(message))}") end end + + defmacro macro_1_without_parens(arg) do + arg + end + + defmacro macro_2_without_parens(arg1, arg2, arg3, arg4) do + [arg1, arg2, arg3, arg4] + end end diff --git a/apps/remote_control/test/lexical/remote_control/analyzer/aliases_test.exs b/apps/remote_control/test/lexical/remote_control/analyzer/aliases_test.exs new file mode 100644 index 000000000..956d77d12 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/analyzer/aliases_test.exs @@ -0,0 +1,730 @@ +defmodule Lexical.RemoteControl.Analyzer.AliasesTest do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.RemoteControl.Analyzer + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + import Lexical.Test.RangeSupport + + use ExUnit.Case + + def aliases_at_cursor(text) do + {position, document} = pop_cursor(text, as: :document) + + document + |> Ast.analyze() + |> Analyzer.aliases_at(position) + end + + defp scope_aliases(text) do + {position, document} = pop_cursor(text, as: :document) + + aliases = + document + |> Ast.analyze() + |> Ast.Analysis.scopes_at(position) + |> Enum.flat_map(& &1.aliases) + |> Map.new(&{&1.as, &1}) + + {aliases, document} + end + + describe "top level aliases" do + test "a useless alias" do + aliases = + ~q[ + alias Foo + | + ] + |> aliases_at_cursor() + + assert aliases[:Foo] == Foo + end + + test "an alias outside of a module" do + aliases = + ~q[ + alias Foo.Bar.Baz + defmodule Parent do + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Baz] == Foo.Bar.Baz + end + + test "an alias inside the body of a module" do + aliases = + ~q[ + defmodule Basic do + alias Foo.Bar + | + end + ] + |> aliases_at_cursor() + + assert aliases == %{__MODULE__: Basic, Bar: Foo.Bar, Basic: Basic} + end + + test "an alias using as" 
do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar, as: FooBar + | + end + ] + |> aliases_at_cursor() + + assert aliases[:__MODULE__] == TopLevel + assert aliases[:FooBar] == Foo.Bar + end + + test "an alias using warn" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar, warn: false + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Bar] == Foo.Bar + end + + test "an alias using warn and as" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar, warn: false, as: FooBar + | + end + ] + |> aliases_at_cursor() + + assert aliases[:FooBar] == Foo.Bar + end + + test "multiple aliases off of single alias" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.{First, Second, Third.Fourth} + | + end + ] + |> aliases_at_cursor() + + assert aliases[:First] == Foo.First + assert aliases[:Second] == Foo.Second + assert aliases[:Fourth] == Foo.Third.Fourth + end + + test "multiple aliases off of nested alias" do + aliases = + ~q[ + defmodule TopLevel do + alias Foo.Bar.{First, Second, Third.Fourth} + | + end + ] + |> aliases_at_cursor() + + assert aliases[:First] == Foo.Bar.First + assert aliases[:Second] == Foo.Bar.Second + assert aliases[:Fourth] == Foo.Bar.Third.Fourth + end + + test "aliasing __MODULE__" do + aliases = + ~q[ + defmodule Something.Is.Nested do + alias __MODULE__| + + end + ] + |> aliases_at_cursor() + + assert aliases[:Nested] == Something.Is.Nested + end + + test "multiple aliases leading by current module" do + aliases = + ~q[ + defmodule TopLevel do + alias __MODULE__.{First, Second} + | + end + ] + |> aliases_at_cursor() + + assert aliases[:First] == TopLevel.First + assert aliases[:Second] == TopLevel.Second + end + + test "multiple aliases leading by current module's child" do + aliases = + ~q[ + defmodule TopLevel do + alias __MODULE__.Child.{First, Second} + | + end + ] + |> aliases_at_cursor() + + assert aliases[:First] == TopLevel.Child.First + assert aliases[:Second] == TopLevel.Child.Second + end + + test "aliases expanding other aliases" do + aliases = + ~q[ + alias Foo.Bar.Baz + alias Baz.Quux| + ] + |> aliases_at_cursor() + + assert aliases[:Baz] == Foo.Bar.Baz + assert aliases[:Quux] == Foo.Bar.Baz.Quux + end + + test "aliases expanding current module" do + aliases = ~q[ + defmodule TopLevel do + alias __MODULE__.Foo| + end + ] |> aliases_at_cursor() + + assert aliases[:Foo] == TopLevel.Foo + end + + test "aliases expanding current module using as" do + aliases = ~q[ + defmodule TopLevel do + alias __MODULE__.Foo, as: OtherAlias| + end + ] |> aliases_at_cursor() + + assert aliases[:OtherAlias] == TopLevel.Foo + end + + test "can be overridden" do + aliases = + ~q[ + alias Foo.Bar.Baz + alias Other.Baz + | + ] + |> aliases_at_cursor() + + assert aliases[:Baz] == Other.Baz + end + + test "can be accessed before being overridden" do + aliases = + ~q[ + alias Foo.Bar.Baz + | + alias Other.Baz + ] + |> aliases_at_cursor() + + assert aliases[:Baz] == Foo.Bar.Baz + end + + test "aliases used to define a module" do + aliases = + ~q[ + alias Something.Else + defmodule Else.Other do + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Else] == Something.Else + end + + test "in a protocol implementation" do + aliases = + ~q[ + defimpl MyProtocol, for: Atom do + | + end + ] + |> aliases_at_cursor() + + assert aliases[:"@protocol"] == MyProtocol + assert aliases[:"@for"] == Atom + end + end + + describe "alias ranges" do + test "for a simple alias" do + {aliases, doc} = + ~q[ + defmodule Parent do + alias Foo.Bar.Baz| + end + ] + |> 
scope_aliases() + + assert decorate(doc, aliases[:Baz].range) =~ " «alias Foo.Bar.Baz»" + end + + test "for a multiple alias on one line" do + {aliases, doc} = + ~q[ + defmodule Parent do + alias Foo.Bar.{Baz, Quux}| + end + ] + |> scope_aliases() + + assert decorate(doc, aliases[:Baz].range) =~ " «alias Foo.Bar.{Baz, Quux}»" + assert decorate(doc, aliases[:Quux].range) =~ " «alias Foo.Bar.{Baz, Quux}»" + end + + test "for a multiple alias on multiple lines" do + {aliases, doc} = + ~q[ + defmodule Parent do + alias Foo.Bar.{ + Baz, + Quux, + Other + }| + end + ] + |> scope_aliases() + + for name <- [:Baz, :Quux, :Other] do + assert decorate(doc, aliases[name].range) =~ + " «alias Foo.Bar.{\n Baz,\n Quux,\n Other\n}»" + end + end + + def column_after_do(%Document{} = doc, line) do + with {:ok, text} <- Document.fetch_text_at(doc, line), + {:ok, column} <- find_do_position(text, 0) do + column + 2 + else + _ -> + :not_found + end + end + + def find_do_position("do" <> _, position) do + {:ok, position} + end + + def find_do_position(<<_c::utf8, rest::binary>>, position) do + find_do_position(rest, position + 1) + end + + def find_do_position(<<>>, _) do + :not_found + end + + test "__MODULE__ implicit aliases don't have a visible range" do + {aliases, doc} = + ~q[ + defmodule MyModule do + | + end + ] + |> scope_aliases() + + module_range = aliases[:__MODULE__].range + + refute aliases[:__MODULE__].explicit? + assert module_range.start.line == 1 + assert module_range.start.character == column_after_do(doc, 1) + assert module_range.start == module_range.end + end + + test "implicit parent alias doesn't have a range" do + {aliases, doc} = + ~q[ + defmodule Parent do + defmodule Child do + | + end + end + ] + |> scope_aliases() + + parent_range = aliases[:Parent].range + + refute aliases[:Parent].explicit? + assert parent_range.start.line == 1 + assert parent_range.start.character == column_after_do(doc, 1) + assert parent_range.start == parent_range.end + end + + test "protocol implicit aliases doesn't have a visible range" do + {aliases, doc} = + ~q[ + defimpl MyThing, for: MyProtocol do + | + end + ] + |> scope_aliases() + + # the implicit aliases don't have any text in their range + + for_range = aliases[:"@for"].range + refute aliases[:"@for"].explicit? + assert for_range.start.line == 1 + assert for_range.start.character == column_after_do(doc, 1) + assert for_range.start == for_range.end + + protocol_range = aliases[:"@protocol"].range + refute aliases[:"@protocol"].explicit? 
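+ # like @for above, the implicit @protocol alias is expected to collapse to a zero-width range just after the `do`, which the assertions below verify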
+ assert protocol_range.start.line == 1 + assert protocol_range.start.character == column_after_do(doc, 1) + assert protocol_range.start == protocol_range.end + end + end + + describe "nested modules" do + test "no aliases are defined for modules with dots" do + aliases = + ~q[ + defmodule GrandParent.Parent.Child do + | + end + ] + |> aliases_at_cursor() + + refute Map.has_key?(aliases, :Child) + end + + test "with children get their parent's name" do + aliases = + ~q[ + defmodule Grandparent.Parent do + defmodule Child do + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:Child] == Grandparent.Parent.Child + assert aliases[:__MODULE__] == Grandparent.Parent.Child + end + + test "with a child that has an explicit parent" do + aliases = + ~q[ + defmodule Parent do + defmodule __MODULE__.Child do + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:__MODULE__] == Parent.Child + end + end + + describe "alias scopes" do + test "aliases are removed when leaving a module" do + aliases = + ~q[ + defmodule Basic do + alias Foo.Bar + end| + ] + |> aliases_at_cursor() + + assert aliases == %{Basic: Basic} + end + + test "aliases inside of nested modules" do + aliases = + ~q[ + defmodule Parent do + alias Foo.Grandparent + + defmodule Child do + alias Foo.Something + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:Grandparent] == Foo.Grandparent + assert aliases[:Something] == Foo.Something + assert aliases[:__MODULE__] == Parent.Child + assert aliases[:Child] == Parent.Child + end + + test "multiple nested modules are aliased after definition" do + aliases = + ~q[ + defmodule Parent do + alias Foo.Grandparent + + defmodule Child do + alias Foo.Something + end + + defmodule AnotherChild do + alias Foo.Something + end + | + end + ] + |> aliases_at_cursor() + + assert aliases[:AnotherChild] == Parent.AnotherChild + assert aliases[:Child] == Parent.Child + end + + test "an alias defined in a named function" do + aliases = + ~q[ + defmodule Parent do + def fun do + alias Foo.Parent + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:Parent] == Foo.Parent + end + + test "an alias defined in a named function doesn't leak" do + aliases = + ~q[ + defmodule Parent do + def fun do + alias Foo.Parent + end| + end + ] + |> aliases_at_cursor() + + assert aliases[:Parent] == Parent + end + + test "an alias defined in a private named function" do + aliases = + ~q[ + defmodule Parent do + defp fun do + alias Foo.Parent + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:Parent] == Foo.Parent + end + + test "an alias defined in a private named function doesn't leak" do + aliases = + ~q[ + defmodule Parent do + defp fun do + alias Foo.InFun + end| + end + ] + |> aliases_at_cursor() + + refute aliases[:InFun] + end + + test "an alias defined in a DSL" do + aliases = + ~q[ + defmodule Parent do + my_dsl do + alias Foo.Parent + | + end + end + ] + |> aliases_at_cursor() + + assert aliases[:Parent] == Foo.Parent + end + + test "an alias defined in a DSL does not leak" do + aliases = + ~q[ + defmodule Parent do + my_dsl do + alias Foo.InDSL + end + | + end + ] + |> aliases_at_cursor() + + refute aliases[:InDSL] + end + + test "an alias defined in an if statement" do + aliases = + ~q[ + if test() do + alias Foo.Something + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Something] + end + + test "an alias defined in an if statement does not leak" do + aliases = + ~q[ + if test() do + alias Foo.Something + end + | + ] + |> aliases_at_cursor() +
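+ # the cursor sits after the `if` block's closing `end`, so the alias defined inside it should already be out of scope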
refute aliases[:Something] + end + + test "an alias defined in a cond statement" do + aliases = + ~q[ + cond do + something() -> + alias Foo.Something + |Else + true -> + :ok + end + ] + |> aliases_at_cursor() + + assert aliases[:Something] + end + + test "an alias defined in a cond statement shouldn't leak" do + aliases = + ~q[ + cond do + something() -> + alias Foo.Something + true -> + | + :ok + end + ] + |> aliases_at_cursor() + + refute aliases[:Something] + + aliases = + ~q[ + cond do + something() -> + alias Foo.Something + true -> + :ok + end + | + ] + |> aliases_at_cursor() + + refute aliases[:Something] + end + + test "an alias defined in a with statement" do + aliases = + ~q[ + with {:ok, val} <- some_function() do + alias Foo.Something + | + end + ] + |> aliases_at_cursor() + + assert aliases[:Something] + end + + test "an alias defined in a with statement shouldn't leak" do + aliases = + ~q[ + with {:ok, val} <- some_function() do + alias Foo.Something + end + | + ] + |> aliases_at_cursor() + + refute aliases[:Something] + end + + test "sibling modules with nested blocks" do + aliases = + ~q[ + defmodule First do + defstuff do + field :x + end + end + + defmodule Second do + defstuff do + field :y + end + end + | + ] + |> aliases_at_cursor() + + assert aliases[:First] == First + assert aliases[:Second] == Second + end + + test "an alias defined in an anonymous function" do + aliases = + ~q[ + fn x -> + alias Foo.Bar + Bar| + end + ] + |> aliases_at_cursor() + + assert aliases[:Bar] == Foo.Bar + end + + test "an alias defined in an anonymous function doesn't leak" do + aliases = + ~q[ + fn + x -> + alias Foo.Bar + Bar.bar(x) + y -> + alias Baz.Buzz + |Buzz + end + ] + |> aliases_at_cursor() + + assert aliases[:Buzz] == Baz.Buzz + refute aliases[:Bar] + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/analyzer/imports_test.exs b/apps/remote_control/test/lexical/remote_control/analyzer/imports_test.exs new file mode 100644 index 000000000..7b84f6e3a --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/analyzer/imports_test.exs @@ -0,0 +1,495 @@ +defmodule Parent.Child.ImportedModule do + def _underscore do + end + + def function do + end + + def function(a) do + a + 1 + end + + def function(a, b) do + a + b + end + + defmacro macro(a) do + quote do + unquote(a) + 1 + end + end +end + +defmodule Override do + def function do + end +end + +defmodule WithStruct do + defstruct [:field] +end + +defmodule WithSigils do + def sigil_m(_) do + end + + def sigil_n(_, _) do + end +end + +defmodule Lexical.Ast.Analysis.ImportsTest do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + alias Parent.Child.ImportedModule + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use ExUnit.Case + + def imports_at_cursor(text) do + {position, document} = pop_cursor(text, as: :document) + + document + |> Ast.analyze() + |> Analyzer.imports_at(position) + end + + def assert_imported(imports, module) do + functions = module.__info__(:functions) + macros = module.__info__(:macros) + + for {function, arity} <- functions ++ macros, + function_name = Atom.to_string(function), + not String.starts_with?(function_name, "_") do + assert_imported(imports, module, function, arity) + end + end + + def assert_imported(imports, module, function, arity) do + module_imports = Enum.filter(imports, &match?({^module, _, _}, &1)) + + assert {module, function, arity} in module_imports + end + + def refute_imported(imports, module) do + functions =
module.__info__(:functions) + macros = module.__info__(:macros) + + for {function, arity} <- functions ++ macros do + refute_imported(imports, module, function, arity) + end + end + + def refute_imported(imports, module, function, arity) do + module_imports = Enum.filter(imports, &match?({^module, _, _}, &1)) + + refute {module, function, arity} in module_imports + end + + describe "top level imports" do + test "a top-level global import" do + imports = + ~q[ + import Parent.Child.ImportedModule + | + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "single underscore functions aren't imported by default" do + imports = + ~q[ + import Parent.Child.ImportedModule + ] + |> imports_at_cursor() + + refute_imported(imports, ImportedModule, :_underscore, 0) + end + + test "double underscore functions aren't selected by default" do + imports = + ~q[ + import WithStruct + | + ] + |> imports_at_cursor() + + refute_imported(imports, WithStruct, :__struct__, 0) + refute_imported(imports, WithStruct, :__struct__, 1) + end + + test "an import of an aliased module" do + imports = + ~q[ + alias Parent.Child.ImportedModule + import ImportedModule| + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "an import of a module aliased to a different name using as" do + imports = + ~q[ + alias Parent.Child.ImportedModule, as: OtherModule + import OtherModule| + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "an import outside of a module" do + imports = + ~q[ + import Parent.Child.ImportedModule + defmodule Parent do + | + end + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "an import inside the body of a module" do + imports = + ~q[ + defmodule Basic do + import Parent.Child.ImportedModule + | + end + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "import with a leading __MODULE__" do + imports = + ~q[ + defmodule Parent do + + import __MODULE__.Child.ImportedModule + | + end + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + + test "can be overridden" do + imports = + ~q[ + import Parent.Child.ImportedModule + import Override + | + ] + |> imports_at_cursor() + + assert_imported(imports, Override) + assert_imported(imports, ImportedModule, :function, 1) + assert_imported(imports, ImportedModule, :function, 2) + assert_imported(imports, ImportedModule, :macro, 1) + end + + test "can be accessed before being overridden" do + imports = + ~q[ + import Parent.Child.ImportedModule + | + import Override + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule) + end + end + + describe "nested modules" do + test "children get their parent's imports" do + imports = + ~q[ + defmodule GrandParent do + import Enum + defmodule Child do + | + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + + test "with a child that has an explicit parent" do + imports = + ~q[ + defmodule Parent do + import Enum + defmodule __MODULE__.Child do + | + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + end + + describe "selecting functions" do + test "it is possible to select all functions" do + imports = + ~q[ + import Parent.Child.ImportedModule, only: :functions + | + ] + |> imports_at_cursor() + + refute_imported(imports, ImportedModule, :macro, 1) + assert_imported(imports, ImportedModule, :function, 0) + assert_imported(imports,
ImportedModule, :function, 1) + assert_imported(imports, ImportedModule, :function, 2) + end + + test "it is possible to select all macros" do + imports = + ~q[ + import Parent.Child.ImportedModule, only: :macros + | + ] + |> imports_at_cursor() + + assert_imported(imports, ImportedModule, :macro, 1) + + refute_imported(imports, ImportedModule, :function, 0) + refute_imported(imports, ImportedModule, :function, 1) + refute_imported(imports, ImportedModule, :function, 2) + end + + test "it is possible to select all sigils" do + imports = + ~q[ + import WithSigils, only: :sigils + | + ] + |> imports_at_cursor() + + assert_imported(imports, WithSigils, :sigil_m, 1) + assert_imported(imports, WithSigils, :sigil_n, 2) + end + + test "it is possible to limit imports by name and arity with only" do + imports = + ~q{ + import Parent.Child.ImportedModule, only: [function: 0, function: 1] + | + } + |> imports_at_cursor() + + assert_imported(imports, ImportedModule, :function, 0) + assert_imported(imports, ImportedModule, :function, 1) + + refute_imported(imports, ImportedModule, :function, 2) + refute_imported(imports, ImportedModule, :macro, 1) + end + + test "it is possible to limit imports by name and arity with except" do + imports = + ~q{ + import Parent.Child.ImportedModule, except: [function: 0] + | + } + |> imports_at_cursor() + + refute_imported(imports, ImportedModule, :function, 0) + + assert_imported(imports, ImportedModule, :function, 1) + assert_imported(imports, ImportedModule, :function, 2) + assert_imported(imports, ImportedModule, :macro, 1) + end + + test "except only erases previous imports" do + # taken from https://hexdocs.pm/elixir/1.13.0/Kernel.SpecialForms.html#import/2-selector + imports = + ~q{ + import Parent.Child.ImportedModule, only: [function: 0, function: 1, function: 2] + import Parent.Child.ImportedModule, except: [function: 1] + | + } + |> imports_at_cursor() + + assert_imported(imports, ImportedModule, :function, 0) + assert_imported(imports, ImportedModule, :function, 2) + + refute_imported(imports, ImportedModule, :function, 1) + refute_imported(imports, ImportedModule, :macro, 1) + end + + test "import all by default when a syntax error occurs in the latter part" do + imports = ~q[ + import Parent.Child.ImportedModule, o + | + ] |> imports_at_cursor() + + assert_imported(imports, ImportedModule, :macro, 1) + + assert_imported(imports, ImportedModule, :function, 0) + assert_imported(imports, ImportedModule, :function, 1) + assert_imported(imports, ImportedModule, :function, 2) + end + + test "imports nothing when the only part is incomplete" do + imports = + ~q( + defmodule New do + import Parent.Child.ImportedModule, only: [wi|] + end + ) + |> imports_at_cursor() + + refute_imported(imports, ImportedModule) + end + end + + describe "import scopes" do + test "an import defined in a named function" do + imports = + ~q[ + defmodule Parent do + def fun do + import Enum + | + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + + test "an import defined in a named function doesn't leak" do + imports = + ~q[ + defmodule Parent do + def fun do + import Enum + end| + end + ] + |> imports_at_cursor() + + refute_imported(imports, Enum) + end + + test "an import defined in a private named function" do + imports = + ~q[ + defmodule Parent do + defp fun do + import Enum + | + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + + test "an import defined in a private named function doesn't leak" do + imports = + ~q[ 
+ defmodule Parent do + defp fun do + import Enum + end| + end + ] + |> imports_at_cursor() + + refute_imported(imports, Enum) + end + + test "an import defined in a DSL" do + imports = + ~q[ + defmodule Parent do + my_dsl do + import Enum + | + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + + test "an import defined in a DSL does not leak" do + imports = + ~q[ + defmodule Parent do + my_dsl do + import Enum + end + | + end + ] + |> imports_at_cursor() + + refute_imported(imports, Enum) + end + + test "an import defined in an anonymous function" do + imports = + ~q[ + fn x -> + import Enum + |Enum + end + ] + |> imports_at_cursor() + + assert_imported(imports, Enum) + end + + test "an import defined in an anonymous function doesn't leak" do + imports = + ~q[ + fn + x -> + import Enum + Bar.bar(x) + y -> + nil| + end + ] + |> imports_at_cursor() + + refute_imported(imports, Enum) + end + + test "imports to the current module work in a quote block" do + imports = + ~q[ + defmodule Parent do + defmacro __using__(_) do + quote do + import unquote(__MODULE__).Child.ImportedModule + | + end + end + end + ] + |> imports_at_cursor() + + assert_imported(imports, Parent.Child.ImportedModule) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/analyzer/requires_test.exs b/apps/remote_control/test/lexical/remote_control/analyzer/requires_test.exs new file mode 100644 index 000000000..b19a07a77 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/analyzer/requires_test.exs @@ -0,0 +1,107 @@ +defmodule Lexical.RemoteControl.Analyzer.RequiresTest do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use ExUnit.Case + + def requires_at_cursor(text) do + {position, document} = pop_cursor(text, as: :document) + + document + |> Ast.analyze() + |> Analyzer.requires_at(position) + end + + describe "requires at the top level" do + test "are not present before the require statement" do + requires = requires_at_cursor("|require OtherModule") + + assert Enum.empty?(requires) + end + + test "work for a single require" do + requires = requires_at_cursor("require OtherModule|") + + assert requires == [OtherModule] + end + + test "handles aliased modules" do + requires = + ~q[ + alias Other.MyModule + require MyModule| + ] + |> requires_at_cursor() + + assert requires == [Other.MyModule] + end + + test "handles as" do + requires = + ~q[ + require Other.Module, as: ReqMod + | + ] + |> requires_at_cursor() + + assert requires == [Other.Module] + end + + test "work for multiple requires" do + requires = + ~q[ + require First + require Second + require Third + | + ] + |> requires_at_cursor() + + assert requires == [First, Second, Third] + end + end + + describe "in modules" do + test "begin after the require statement" do + requires = + ~q[ + defmodule Outer do + require Required| + end + ] + |> requires_at_cursor() + + assert requires == [Required] + end + + test "end after the module" do + requires = + ~q[ + defmodule Outer do + require Required + end| + ] + |> requires_at_cursor() + + assert requires == [] + end + + test "carry over to nested modules" do + requires = + ~q[ + defmodule Outer do + require Required + defmodule Inner do + | + end + end + ] + |> requires_at_cursor() + + assert requires == [Required] + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/analyzer/uses_test.exs
b/apps/remote_control/test/lexical/remote_control/analyzer/uses_test.exs new file mode 100644 index 000000000..19f342b73 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/analyzer/uses_test.exs @@ -0,0 +1,122 @@ +defmodule Lexical.RemoteControl.Analyzer.UsesTest do + alias Lexical.Ast + alias Lexical.RemoteControl.Analyzer + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use ExUnit.Case + + def uses_at_cursor(text) do + {position, document} = pop_cursor(text, as: :document) + + document + |> Ast.analyze() + |> Analyzer.uses_at(position) + end + + describe "uses at the top level" do + test "are not present before the use statement" do + uses = uses_at_cursor("|use OtherModule") + + assert Enum.empty?(uses) + end + + test "are present after the use statement" do + uses = uses_at_cursor("use OtherModule |") + + assert uses == [OtherModule] + end + + test "handles aliased modules" do + uses = + ~q[ + alias Other.MyModule + use MyModule + | + ] + |> uses_at_cursor() + + assert uses == [Other.MyModule] + end + + test "handles options" do + uses = + ~q[ + use Other.Module, key: :value, other_key: :other_value + | + ] + |> uses_at_cursor() + + assert uses == [Other.Module] + end + + test "handles multiple uses" do + uses = + ~q[ + use FirstModule + use SecondModule + ] + |> uses_at_cursor() + + assert uses == [FirstModule, SecondModule] + end + end + + describe "in modules" do + test "begin after the use statement" do + uses = + ~q[ + defmodule Outer do + use Used| + end + ] + |> uses_at_cursor() + + assert uses == [Used] + end + + test "end after the module" do + uses = + ~q[ + defmodule Outer do + use Used + end| + ] + |> uses_at_cursor() + + assert uses == [] + end + + test "are available in their module's functions" do + uses = + ~q[ + defmodule Outer do + use Used + + def my_function(a, b) do + | + end + end + ] + |> uses_at_cursor() + + assert uses == [Used] + end + + test "are not available in submodules" do + uses = + ~q[ + defmodule Outer do + use Used + defmodule Inner do + | + end + end + ] + |> uses_at_cursor() + + assert uses == [] + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/analyzer_test.exs b/apps/remote_control/test/lexical/remote_control/analyzer_test.exs new file mode 100644 index 000000000..4b7ed6ff2 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/analyzer_test.exs @@ -0,0 +1,249 @@ +defmodule Lexical.RemoteControl.AnalyzerTest do + alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.RemoteControl.Analyzer + + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + + use ExUnit.Case, async: true + + describe "current_module/2" do + test "fails if there is no __MODULE__ defined" do + {position, document} = + ~q[x + |defmodule Outer do + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert :error = Analyzer.current_module(analysis, position) + end + + test "fails in a defmodule call if there is no containing module" do + {position, document} = + ~q[ + defmodule| Outer do + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert :error = Analyzer.current_module(analysis, position) + end + + test "returns the current module right after the do" do + {position, document} = + ~q[ + defmodule Outer do| + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert {:ok, Outer} = Analyzer.current_module(analysis, position) + end + + test "returns the parent module in the child's
defmodule" do + {position, document} = + ~q[ + defmodule Parent do + defmodule Child| do + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert {:ok, Parent} = Analyzer.current_module(analysis, position) + end + + test "returns a nested module in the child's module" do + {position, document} = + ~q[ + defmodule Parent do + defmodule Child do| + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert {:ok, Parent.Child} = Analyzer.current_module(analysis, position) + end + + test "works in a protocol definition" do + {position, document} = + ~q[ + defprotocol MyProtocol do + def something(data)| + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert {:ok, MyProtocol} = Analyzer.current_module(analysis, position) + end + end + + describe "expand_alias/4" do + test "works with aliased modules" do + {position, document} = + ~q[ + defmodule Parent do + alias Foo.Bar.Something + | + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + + assert {:ok, Foo.Bar.Something.Baz} = + Analyzer.expand_alias([:Something, :Baz], analysis, position) + end + + test "works with protocol definitions nested in a module" do + {position, document} = + ~q[ + defmodule Parent do + alias Foo.Bar.Something + alias Foo.Bar.Protocol + defimpl |Protocol, for: Something do + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + + assert {:ok, Foo.Bar.Protocol} = Analyzer.expand_alias([:Protocol], analysis, position) + assert {:ok, Foo.Bar.Something} = Analyzer.expand_alias([:Something], analysis, position) + assert {:ok, Parent} = Analyzer.expand_alias([:__MODULE__], analysis, position) + end + + test "works with protocol with aliased protocol and target" do + {position, document} = + ~q[ + alias Foo.Bar.Something + alias Foo.Bar.Protocol + defimpl Protocol, for: Something do + @test true| + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + + assert {:ok, Foo.Bar.Protocol} = Analyzer.expand_alias([:Protocol], analysis, position) + + assert {:ok, Foo.Bar.Protocol} = + Analyzer.expand_alias(quote(do: [@protocol]), analysis, position) + + assert {:ok, Foo.Bar.Something} = Analyzer.expand_alias([:Something], analysis, position) + + assert {:ok, Foo.Bar.Something} = + Analyzer.expand_alias(quote(do: [@for]), analysis, position) + + assert {:ok, Foo.Bar.Protocol.Foo.Bar.Something} = + Analyzer.expand_alias([:__MODULE__], analysis, position) + end + + test "works with __MODULE__ aliases" do + {position, document} = + ~q[ + defmodule Parent do + defmodule __MODULE__.Child do + | + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + + assert {:ok, Parent.Child} = + Analyzer.expand_alias([quote(do: __MODULE__), nil], analysis, position) + end + + test "works with @protocol in a protocol" do + {position, document} = + ~q[ + defimpl MyProtocol, for: Atom do + + def pack(atom) do + | + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + + assert {:ok, MyProtocol} = Analyzer.expand_alias([quote(do: @protocol)], analysis, position) + + assert {:ok, MyProtocol.BitString} = + Analyzer.expand_alias([quote(do: @protocol.BitString)], analysis, position) + + assert {:ok, Atom} = Analyzer.expand_alias([quote(do: @for)], analysis, position) + + assert {:ok, Atom.Something} = + Analyzer.expand_alias([quote(do: @for.Something)], analysis, position) + + assert {:ok, MyProtocol.BitString} = + 
Analyzer.expand_alias( + [ + {:@, [line: 9, column: 8], [{:protocol, [line: 9, column: 9], nil}]}, + :BitString + ], + analysis, + position + ) + end + + test "identifies the module in a protocol implementation" do + {position, document} = + ~q[ + defimpl MyProtocol, for: Atom do + + def pack(atom) do + | + end + end + ] + |> pop_cursor(as: :document) + + analysis = Ast.analyze(document) + assert {:ok, MyProtocol.Atom} == Analyzer.current_module(analysis, position) + end + end + + describe "reanalyze_to/2" do + test "is a no-op if the analysis is already valid" do + {position, document} = + ~q[ + defmodule Valid do + | + end + ] + |> pop_cursor(as: :document) + + assert %Analysis{valid?: true} = analysis = Ast.analyze(document) + assert analysis == Ast.reanalyze_to(analysis, position) + end + + test "returns a valid analysis if fragment can be parsed" do + {position, document} = + ~q[ + defmodule Invalid do + | + ] + |> pop_cursor(as: :document) + + assert %Analysis{valid?: false} = analysis = Ast.analyze(document) + assert %Analysis{valid?: true} = analysis = Ast.reanalyze_to(analysis, position) + assert {:ok, Invalid} = Analyzer.current_module(analysis, position) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/api/proxy/buffering_state_test.exs b/apps/remote_control/test/lexical/remote_control/api/proxy/buffering_state_test.exs new file mode 100644 index 000000000..851a7a942 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/api/proxy/buffering_state_test.exs @@ -0,0 +1,206 @@ +defmodule Lexical.RemoteControl.Api.Proxy.BufferingStateTest do + alias Lexical.Document + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api.Messages + alias Lexical.RemoteControl.Api.Proxy + alias Lexical.RemoteControl.Api.Proxy.BufferingState + alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.Commands + + import Lexical.Test.Fixtures + import Messages + import Proxy.Records + + use ExUnit.Case + + setup do + start_supervised!(Document.Store) + {:ok, project: project()} + end + + @default_uri "file:///file.ex" + + def document(uri \\ @default_uri) do + Document.new(uri, "", 1) + end + + def open_document(uri \\ @default_uri) do + :ok = Document.Store.open(uri, "", 1) + {:ok, document} = Document.Store.fetch(uri) + document + end + + def add_to_state_and_flush(messages) do + messages + |> Enum.reduce(BufferingState.new(self()), &BufferingState.add_mfa(&2, &1)) + |> BufferingState.flush() + end + + describe "command collapse" do + test "multiple project compilations are collapsed", %{project: project} do + flushed_messages = + [ + to_mfa(Build.schedule_compile(project)), + to_mfa(Build.schedule_compile(project)) + ] + |> add_to_state_and_flush() + + assert [{:mfa, Build, :schedule_compile, [^project], _}] = flushed_messages + end + + test "force project compilation takes precedence", %{project: project} do + flushed_messages = + [ + to_mfa(Build.schedule_compile(project)), + to_mfa(Build.schedule_compile(project, true)), + to_mfa(Build.schedule_compile(project)) + ] + |> add_to_state_and_flush() + + assert [{:mfa, Build, :schedule_compile, [^project, true], _}] = flushed_messages + end + + test "a project compilation removes all document compilations", %{project: project} do + flushed_messages = + [ + to_mfa(Build.compile_document(project, document())), + to_mfa(Build.schedule_compile(project)) + ] + |> add_to_state_and_flush() + + assert [{:mfa, Build, :schedule_compile, [^project], _}] = flushed_messages + end + + test "documents that
aren't open are removed", %{project: project} do + document = document() + + flushed_messages = + [ + to_mfa(Build.compile_document(project, document)) + ] + |> add_to_state_and_flush() + + assert Enum.empty?(flushed_messages) + end + + test "document compiles for a single uri are collapsed", %{project: project} do + document = open_document() + + flushed_messages = + [ + to_mfa(Build.compile_document(project, document)), + to_mfa(Build.compile_document(project, document)), + to_mfa(Build.compile_document(project, document)) + ] + |> add_to_state_and_flush() + + assert [{:mfa, Build, :compile_document, [^project, ^document], _}] = flushed_messages + end + + test "there can only be one reindex", %{project: project} do + flushed_messages = + [ + to_mfa(Commands.Reindex.perform()), + to_mfa(Commands.Reindex.perform(project)) + ] + |> add_to_state_and_flush() + + assert [{:mfa, Commands.Reindex, :perform, [^project], _}] = flushed_messages + end + + test "a reindex is the last thing", %{project: project} do + other = open_document("file:///other.uri") + third = open_document("file:///third.uri") + + flushed_messages = + [ + to_mfa(Commands.Reindex.perform()), + to_mfa(Build.compile_document(project, other)), + to_mfa(Build.compile_document(project, third)) + ] + |> add_to_state_and_flush() + + assert [ + {:mfa, Build, :compile_document, [^project, ^other], _}, + {:mfa, Build, :compile_document, [^project, ^third], _}, + {:mfa, Commands.Reindex, :perform, [], _} + ] = flushed_messages + end + end + + defp wrap_broadcasts(messages) do + Enum.map(messages, fn + mfa() = mfa -> + mfa + + message -> + mfa(module: RemoteControl.Dispatch, function: :broadcast, arguments: [message]) + end) + end + + describe "message collapse" do + test "document-centric messages are discarded if their document isn't open" do + flushed_messages = + [ + filesystem_event(uri: @default_uri), + file_changed(uri: @default_uri), + file_compile_requested(uri: @default_uri), + file_compiled(uri: @default_uri), + file_deleted(uri: @default_uri) + ] + |> wrap_broadcasts() + |> add_to_state_and_flush() + + assert flushed_messages == [] + end + + test "document-centric messages are kept if their document is open" do + uri = open_document().uri + + orig_messages = + [ + filesystem_event(uri: uri), + file_changed(uri: uri), + file_compiled(uri: uri), + file_deleted(uri: uri) + ] + |> wrap_broadcasts() + + flushed_messages = add_to_state_and_flush(orig_messages) + + assert flushed_messages == orig_messages + end + + test "file diagnostics are removed if there's a document compile for that uri", %{ + project: project + } do + document = open_document() + + flushed_messages = + [ + to_mfa(Build.compile_document(project, document)), + file_diagnostics(uri: @default_uri) + ] + |> wrap_broadcasts() + |> add_to_state_and_flush() + + assert [{:mfa, Build, :compile_document, [^project, ^document], _}] = flushed_messages + end + + test "file compiles are removed" do + document = open_document() + + assert [] == + [file_compile_requested(uri: document.uri)] + |> wrap_broadcasts() + |> add_to_state_and_flush() + end + + test "project compiles are removed" do + assert [] == + [project_compile_requested()] + |> wrap_broadcasts() + |> add_to_state_and_flush() + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/api/proxy_test.exs b/apps/remote_control/test/lexical/remote_control/api/proxy_test.exs new file mode 100644 index 000000000..51a6f770e --- /dev/null +++ 
b/apps/remote_control/test/lexical/remote_control/api/proxy_test.exs @@ -0,0 +1,283 @@ +defmodule Lexical.RemoteControl.Api.ProxyTest do + alias Lexical.Document + alias Lexical.Document.Changes + alias Lexical.RemoteControl + alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.Api.Proxy + alias Lexical.RemoteControl.Api.Proxy.BufferingState + alias Lexical.RemoteControl.Api.Proxy.DrainingState + alias Lexical.RemoteControl.Build + alias Lexical.RemoteControl.CodeMod + alias Lexical.RemoteControl.Commands + alias Lexical.RemoteControl.Dispatch + + use ExUnit.Case + use Patch + + import Api.Messages + import Lexical.Test.Fixtures + + setup do + start_supervised!(Api.Proxy) + project = project() + RemoteControl.set_project(project) + + {:ok, project: project} + end + + describe "proxy mode" do + test "proxies broadcasts" do + patch(Dispatch, :broadcast, :ok) + assert :ok = Proxy.broadcast(:hello) + + assert_called(Dispatch.broadcast(:hello)) + end + + test "proxies broadcasts of progress messages" do + patch(Dispatch, :broadcast, :ok) + assert :ok = Proxy.broadcast(percent_progress()) + + assert_called(Dispatch.broadcast(percent_progress())) + end + + test "schedule compile is proxied", %{project: project} do + patch(Build, :schedule_compile, :ok) + assert :ok = Proxy.schedule_compile(true) + assert_called(Build.schedule_compile(^project, true)) + + assert :ok = Proxy.schedule_compile() + assert_called(Build.schedule_compile(^project, false)) + end + + test "compile document is proxied", %{project: project} do + document = %Document{} + patch(Build, :compile_document, :ok) + + assert :ok = Proxy.compile_document(document) + assert_called(Build.compile_document(^project, ^document)) + end + + test "reindex is proxied" do + patch(Commands.Reindex, :perform, :ok) + patch(Commands.Reindex, :running?, false) + + refute Proxy.index_running?() + assert :ok = Proxy.reindex() + assert_called(Commands.Reindex.perform()) + assert_called(Commands.Reindex.running?()) + end + + test "formatting is proxied" do + document = %Document{} + patch(CodeMod.Format, :edits, {:ok, Changes.new(document, [])}) + + assert {:ok, %Changes{}} = Proxy.format(document) + assert_called(CodeMod.Format.edits(^document)) + end + end + + def with_draining_mode(ctx) do + patch(Commands.Reindex, :perform, fn -> + Process.sleep(100) + :ok + end) + + me = self() + + spawn_link(fn -> + send(me, :ready) + result = Proxy.reindex() + send(me, {:proxy_result, result}) + end) + + assert_receive :ready + Process.sleep(50) + + with_buffer_mode(ctx) + end + + describe "draining mode" do + setup [:with_draining_mode] + + test "handles in-flight calls" do + assert {:draining, %DrainingState{}} = :sys.get_state(Proxy) + assert_receive {:proxy_result, :ok} + assert {:buffering, %BufferingState{}} = :sys.get_state(Proxy) + end + + test "buffers subsequent calls" do + me = self() + patch(Dispatch, :broadcast, fn message -> send(me, {:broadcast, message}) end) + assert :ok = Proxy.broadcast(:hello) + assert :ok = Proxy.broadcast(:goodbye) + + refute_receive {:broadcast, _} + end + + test "ends when in-flight requests end", %{stop_buffering: stop_buffering} do + patch(Build, :schedule_compile, callable(fn _ -> :ok end)) + + assert :ok = Proxy.schedule_compile() + refute_called(Build.schedule_compile(_, _)) + assert_receive {:proxy_result, :ok} + stop_buffering.() + assert_called(Build.schedule_compile(_, _)) + end + end + + def with_buffer_mode(_) do + buffer_proc = + spawn_link(fn -> + receive do + :continue -> + :ok + end + end) + + 
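+ # buffering lasts for the lifetime of buffer_proc: sending it :continue lets it exit, which ends buffering and flushes whatever was collected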
Proxy.start_buffering(buffer_proc) + + stop_buffering = fn -> + send(buffer_proc, :continue) + Process.sleep(50) + end + + {:ok, stop_buffering: stop_buffering} + end + + describe "buffer mode" do + setup [:with_buffer_mode] + + test "start_buffering can't be called twice" do + assert {:error, {:already_buffering, _}} = Proxy.start_buffering() + end + + test "proxies broadcasts of progress messages" do + patch(Dispatch, :broadcast, :ok) + assert :ok = Proxy.broadcast(percent_progress()) + + assert_called(Dispatch.broadcast(percent_progress())) + end + + test "buffers broadcasts" do + assert :ok = Proxy.broadcast(file_compile_requested()) + refute_any_call(Dispatch.broadcast()) + end + + test "buffers schedule compile" do + patch(Build, :schedule_compile, :ok) + assert :ok = Proxy.schedule_compile(true) + refute_any_call(Build.schedule_compile()) + + assert :ok = Proxy.schedule_compile() + refute_any_call(Build.schedule_compile()) + end + + test "buffers compile document" do + document = %Document{} + patch(Build, :compile_document, :ok) + + assert :ok = Proxy.compile_document(document) + refute_any_call(Build.compile_document()) + end + + test "buffers reindex" do + patch(Commands.Reindex, :perform, :ok) + patch(Commands.Reindex, :running?, false) + + refute Proxy.index_running?() + assert :ok = Proxy.reindex() + refute_any_call(Commands.Reindex.perform()) + refute_any_call(Commands.Reindex.running?()) + end + + test "buffers formatting" do + document = %Document{} + patch(CodeMod.Format, :edits, {:ok, Changes.new(document, [])}) + + assert {:ok, %Changes{}} = Proxy.format(document) + refute_any_call(CodeMod.Format.edits()) + end + end + + describe "flushing after buffered mode" do + setup [:with_buffer_mode] + + test "buffered messages are sent", %{stop_buffering: stop_buffering} do + patch(Dispatch, :broadcast, :ok) + + Proxy.broadcast(module_updated()) + Proxy.broadcast(project_diagnostics()) + + refute_any_call(Dispatch.broadcast()) + stop_buffering.() + + assert_called(Dispatch.broadcast(module_updated())) + assert_called(Dispatch.broadcast(project_diagnostics())) + end + + test "formats are dropped", %{stop_buffering: stop_buffering} do + document = %Document{} + patch(CodeMod.Format, :edits, {:ok, Changes.new(document, [])}) + + Proxy.format(document) + stop_buffering.() + refute_any_call(CodeMod.Format.edits()) + end + + test "a single compile is scheduled", %{project: project, stop_buffering: stop_buffering} do + patch(Build, :schedule_compile, :ok) + + Proxy.schedule_compile() + Proxy.schedule_compile() + + refute_any_call(Build.schedule_compile()) + + stop_buffering.() + + assert_called(Build.schedule_compile(^project, _), 1) + end + + test "document compilations are buffered", %{project: project, stop_buffering: stop_buffering} do + doc = %Document{} + patch(Document.Store, :open?, true) + patch(Build, :compile_document, :ok) + + Proxy.compile_document(doc) + Proxy.compile_document(doc) + + refute_any_call(Build.compile_document()) + + stop_buffering.() + + assert_called(Build.compile_document(^project, ^doc), 1) + end + + test "reindex calls are buffered", %{stop_buffering: stop_buffering} do + patch(Commands.Reindex, :perform, :ok) + + Proxy.reindex() + Proxy.reindex() + Proxy.reindex() + + refute_any_call(Commands.Reindex.perform()) + + stop_buffering.() + + assert_called(Commands.Reindex.perform()) + end + + test "calls to Reindex.running?() are dropped", %{stop_buffering: stop_buffering} do + patch(Commands.Reindex, :running?, false) + + Proxy.index_running?() +
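+ # while buffering, these queries never reach Commands.Reindex.running?/0, and unlike reindex commands they are not replayed after the flush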
Proxy.index_running?() + Proxy.index_running?() + + refute_any_call(Commands.Reindex.running?()) + + stop_buffering.() + + refute_any_call(Commands.Reindex.running?()) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/build/document/compilers/config_test.exs b/apps/remote_control/test/lexical/remote_control/build/document/compilers/config_test.exs index 5337119ee..d42cb8446 100644 --- a/apps/remote_control/test/lexical/remote_control/build/document/compilers/config_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build/document/compilers/config_test.exs @@ -36,21 +36,21 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.ConfigTest do end describe "recognizes/1" do - test "files in the config directory are recognized" do + test "files in the config directory are detected" do assert recognizes?(document_with_path(config_dir(), "test.exs")) assert recognizes?(document_with_path(config_dir(), "foo.exs")) assert recognizes?(document_with_path([config_dir(), "other", "foo.exs"])) end - test "files in the config directory with relative paths are recognized" do + test "files in the config directory with relative paths are detected" do assert recognizes?(document_with_path("../../config/test.exs")) end - test "files outside the config directory are not recognized" do + test "files outside the config directory are not detected" do refute recognizes?(document_with_path(__ENV__.file)) end - test "only .exs files are recognized" do + test "only .exs files are detected" do refute recognizes?(document_with_path(config_dir(), "foo.ex")) refute recognizes?(document_with_path(config_dir(), "foo.yaml")) refute recognizes?(document_with_path(config_dir(), "foo.eex")) @@ -93,11 +93,53 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.ConfigTest do |> compile() assert result.message =~ "missing terminator" - assert result.position == {1, 12} + assert result.position in [{1, 12}, {1, 1}] assert result.severity == :error assert result.source == "Elixir" end + test "it produces diagnostics even in the `config_env` block" do + assert {:error, [result]} = + ~q[ + import Config + + if config_env() == :product do + f + end + ] + |> document() + |> compile() + + if Features.with_diagnostics?() do + assert result.message =~ ~s[undefined variable "f"] + else + assert result.message =~ ~s[undefined function f/0] + end + + assert result.position == 4 + assert result.severity == :error + assert result.source == "Elixir" + end + + test "it produces diagnostics for arbitrary exceptions" do + assert {:error, [result]} = + ~q[ + import Config + + System.fetch_env!("_LEXICAL_NON_EXISTING_ENV_VAR_") + ] + |> document() + |> compile() + + assert result.message =~ "could not fetch environment variable" + assert result.severity == :error + assert result.source == "Elixir" + + if Features.with_diagnostics?() do + assert result.position == 3 + end + end + test "it produces no diagnostics on success" do assert {:ok, []} = ~q[ diff --git a/apps/remote_control/test/lexical/remote_control/build/document/compilers/eex_test.exs b/apps/remote_control/test/lexical/remote_control/build/document/compilers/eex_test.exs index c9466fb82..08e45da8f 100644 --- a/apps/remote_control/test/lexical/remote_control/build/document/compilers/eex_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build/document/compilers/eex_test.exs @@ -9,8 +9,11 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do use ExUnit.Case - import Compilers.EEx + import Compilers.EEx, only: 
[recognizes?: 1] + import Lexical.Test.Quiet import Lexical.Test.CodeSigil + import Lexical.Test.DiagnosticSupport + import Lexical.Test.RangeSupport def with_capture_server(_) do start_supervised!(CaptureServer) @@ -27,6 +30,10 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do Document.new("file:///file.eex", content, 0) end + def compile(document) do + quiet(:stderr, fn -> Compilers.EEx.compile(document) end) + end + setup_all do prev_compiler_options = Code.compiler_options() Build.State.set_compiler_options() @@ -80,6 +87,8 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do describe "compile_quoted/2" do setup [:with_capture_server] + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "handles unused variables" do assert {:ok, [%Result{} = result]} = ~q[ @@ -94,6 +103,24 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do assert result.source == "EEx" assert result.uri =~ "file:///file.eex" end + + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles unused variables when #{inspect(@feature_condition)}" do + document = + ~q[ + <%= something = 6 %> + ] + |> document_with_content() + + assert {:ok, [%Result{} = result]} = compile(document) + + assert result.message == ~s[variable "something" is unused] + assert decorate(document, result.position) == "<%= «something» = 6 %>" + assert result.severity == :warning + assert result.source == "EEx" + assert result.uri =~ "file:///file.eex" + end end describe "eval_quoted/2" do @@ -110,7 +137,8 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do assert result.uri =~ "file:///file.eex" end - @tag :with_diagnostics + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "handles undefinied variable" do document = document_with_content(~q[ <%= thing %> @@ -129,5 +157,21 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.EExTest do assert result.source == "EEx" assert result.uri =~ "file:///file.eex" end + + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles undefined variable when #{inspect(@feature_condition)}" do + document = document_with_content(~q[ + <%= thing %> + ]) + + assert {:error, [%Result{} = result]} = compile(document) + + assert result.message == "undefined variable \"thing\"" + assert decorate(document, result.position) == "<%= «thing» %>" + assert result.severity == :error + assert result.source == "EEx" + assert result.uri =~ "file:///file.eex" + end end end diff --git a/apps/remote_control/test/lexical/remote_control/build/document/compilers/heex_test.exs b/apps/remote_control/test/lexical/remote_control/build/document/compilers/heex_test.exs index 6efa6d56e..54e86376e 100644 --- a/apps/remote_control/test/lexical/remote_control/build/document/compilers/heex_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build/document/compilers/heex_test.exs @@ -8,8 +8,7 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.HeexTest do alias Lexical.RemoteControl.ModuleMappings import Lexical.Test.CodeSigil - import Compilers.HEEx, only: [compile: 1] - + import Lexical.Test.Quiet use ExUnit.Case def with_capture_server(_) do @@ -32,6 +31,12 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.HeexTest do end) end + def compile(document) do + quiet(:stderr, fn -> + Compilers.HEEx.compile(document) + end) + end + describe "compile/1" do setup [:with_capture_server]
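# Note: `quiet/2` used in `compile/1` above comes from Lexical.Test.Quiet,
# whose implementation is not shown in this diff. A minimal sketch of the
# assumed behavior (run a function while discarding whatever it writes to
# the given device), for illustration only:
defmodule Lexical.Test.Quiet do
  import ExUnit.CaptureIO, only: [with_io: 2]

  # Runs `fun`, swallowing anything written to `device` (:stdio or
  # :stderr), and returns the function's result, not the captured output.
  def quiet(device, fun) do
    {result, _captured} = with_io(device, fun)
    result
  end
end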
@@ -39,7 +44,7 @@ defmodule Lexical.RemoteControl.Build.Document.Compilers.HeexTest do document = document_with_content(~q[
<div>thing</div>
]) - assert {:ok, []} = compile(document) + assert {:ok, _} = compile(document) end test "ignore undefinied assigns" do @@ -50,6 +55,44 @@ assert {:error, []} = compile(document) end + + test "handles undefined variables" do + document = document_with_content(~q[ +
<%= thing %>
+ ]) + + assert {:error, [%Result{} = result]} = compile(document) + + if Features.with_diagnostics?() do + assert result.message =~ ~S[undefined variable "thing"] + else + assert result.message =~ "undefined function thing/0" + assert result.position in [1, {1, 10}] + assert result.severity == :error + assert result.source == "EEx" + end + end + + test "shouldn't report errors if the variable is defined with :let" do + document = document_with_content(~q| + <.form + :let={f} + phx-change="change_name" + > + <.inputs_for :let={f_nested} field={f[:nested]}> + <.input type="text" field={f_nested[:name]} /> + <%= f_nested %> + </.inputs_for> + </.form> + |) + + if Features.with_diagnostics?() do + assert {:error, []} = compile(document) + else + # we don't want to ignore the undefined function error when Elixir < 1.15 + assert {:error, [_]} = compile(document) + end + end + + test "returns error when there are unclosed tags" do + document = document_with_content(~q[
thing diff --git a/apps/remote_control/test/lexical/remote_control/build/document/compilers/quoted_test.exs b/apps/remote_control/test/lexical/remote_control/build/document/compilers/quoted_test.exs new file mode 100644 index 000000000..b11c2c6f9 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/build/document/compilers/quoted_test.exs @@ -0,0 +1,137 @@ +defmodule Lexical.RemoteControl.Build.Document.Compilers.QuotedTest do + alias Lexical.RemoteControl.Build.Document.Compilers.Quoted + + import Lexical.Test.CodeSigil + + use ExUnit.Case, async: true + + defp parse!(code) do + Code.string_to_quoted!(code, columns: true, token_metadata: true) + end + + describe "wrap_top_level_forms/1" do + test "chunks and wraps unsafe top-level forms" do + quoted = + ~q[ + foo = 1 + bar = foo + 1 + + import Something + + defmodule MyModule do + :ok + end + + baz = bar + foo + ] + |> parse!() + + assert quoted |> Quoted.wrap_top_level_forms() |> Macro.to_string() == """ + defmodule :lexical_wrapper_0 do + def __lexical_wrapper__([]) do + foo = 1 + _ = foo + 1 + end + end + + import Something + + defmodule MyModule do + :ok + end + + defmodule :lexical_wrapper_2 do + def __lexical_wrapper__([foo, bar]) do + _ = bar + foo + end + end\ + """ + end + end + + describe "suppress_and_extract_vars/1" do + test "suppresses and extracts unused vars" do + quoted = + ~q[ + foo = 1 + bar = 2 + ] + |> parse!() + + assert {suppressed, [{:foo, _, nil}, {:bar, _, nil}]} = + Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == """ + _ = 1 + _ = 2\ + """ + end + + test "suppresses and extracts unused vars in nested assignments" do + quoted = + ~q[ + foo = bar = 1 + baz = qux = 2 + ] + |> parse!() + + assert {suppressed, [{:foo, _, nil}, {:bar, _, nil}, {:baz, _, nil}, {:qux, _, nil}]} = + Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == """ + _ = _ = 1 + _ = _ = 2\ + """ + end + + test "suppresses vars only referenced in RHS" do + quoted = ~q[foo = foo + 1] |> parse!() + + assert {suppressed, [{:foo, _, nil}]} = Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == "_ = foo + 1" + end + + test "suppresses deeply nested vars" do + quoted = ~q[{foo, {bar, %{baz: baz}}} = call()] |> parse!() + + assert {suppressed, [{:baz, _, nil}, {:bar, _, nil}, {:foo, _, nil}]} = + Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == "{_, {_, %{baz: _}}} = call()" + end + + test "does not suppress vars referenced in a later expression" do + quoted = + ~q[ + foo = 1 + bar = foo + 1 + ] + |> parse!() + + assert {suppressed, [{:foo, _, nil}, {:bar, _, nil}]} = + Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == """ + foo = 1 + _ = foo + 1\ + """ + end + + test "does not suppress vars referenced with pin operator in a later assignment" do + quoted = + ~q[ + foo = 1 + %{^foo => 2} = call() + ] + |> parse!() + + assert {suppressed, [{:foo, _, nil}]} = Quoted.suppress_and_extract_vars(quoted) + + assert Macro.to_string(suppressed) == """ + foo = 1 + %{^foo => 2} = call()\ + """ + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/build/error/parse_test.exs b/apps/remote_control/test/lexical/remote_control/build/error/parse_test.exs new file mode 100644 index 000000000..e10987673 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/build/error/parse_test.exs @@ -0,0 +1,362 @@ +defmodule Lexical.RemoteControl.Build.Error.ParseTest do + 
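+  # The @feature_condition / @tag execute_if(...) pairs used throughout
+  # this module come from Lexical.Test.DiagnosticSupport and appear to
+  # skip a test unless the running compiler exposes the named diagnostic
+  # features, so one file can assert version-specific messages. A sketch
+  # of such a helper (names assumed, not the actual implementation):
+  #
+  #     def execute_if(conditions) do
+  #       met? = Enum.all?(conditions, fn {feature, expected} ->
+  #         apply(Features, feature, []) == expected
+  #       end)
+  #
+  #       [skip: not met?]
+  #     end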
alias Lexical.Document + alias Lexical.Plugin.V1.Diagnostic + alias Lexical.RemoteControl.Build + + alias Lexical.RemoteControl.Build.CaptureServer + alias Lexical.RemoteControl.Dispatch + import Lexical.Test.RangeSupport + import Lexical.Test.DiagnosticSupport + + use ExUnit.Case, async: true + + setup do + start_supervised!(Dispatch) + start_supervised!(CaptureServer) + :ok + end + + def compile(source) do + doc = Document.new("file:///unknown.ex", source, 0) + Build.Document.compile(doc) + end + + def diagnostics({:error, diagnostics}) do + diagnostics + end + + def diagnostic({:error, [diagnostic]}) do + diagnostic + end + + describe "handling parse errors" do + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) + test "handles token missing errors" do + assert diagnostics = + ~s[%{foo: 3] + |> compile() + |> diagnostics() + + [diagnostic] = diagnostics + assert diagnostic.message =~ ~s[missing terminator: }] + assert diagnostic.position == {1, 9} + end + + @feature_condition details_in_context?: true, contains_set_theoretic_types?: false + @tag execute_if(@feature_condition) + test "handles token missing errors when #{inspect(@feature_condition)}" do + document_text = ~s[%{foo: 3] + + assert [start_diagnostic, end_diagnostic] = + document_text + |> compile() + |> diagnostics() + + assert start_diagnostic.message == + ~s[The `{` here is missing terminator `}`] + + assert decorate(document_text, start_diagnostic.position) == ~S[%«{»foo: 3] + + assert end_diagnostic.message == ~s[missing terminator: }] + assert end_diagnostic.position == {1, 9} + end + + @feature_condition contains_set_theoretic_types?: true + @tag execute_if(@feature_condition) + test "handles token missing errors when #{inspect(@feature_condition)}" do + document_text = ~s[%{foo: 3] + + assert [start_diagnostic, end_diagnostic] = + document_text + |> compile() + |> diagnostics() + + assert start_diagnostic.message == + ~s[The `{` here is missing terminator `}`] + + assert decorate(document_text, start_diagnostic.position) == ~S[«%»{foo: 3] + + assert end_diagnostic.message == ~s[missing terminator: }] + assert end_diagnostic.position == {1, 9} + end + + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) + test "returns both the error and the detail when provided" do + errors = + ~S[ + def handle_info(file_diagnostics(uri: uri, diagnostics: diagnostics), %State{} = state) do + state = State.clear(state, uri) + state = Enum.reduce(diagnostics, state, fn diagnostic, state -> + case State.add(diagnostic, state, uri) do + {:ok, new_state} -> + new_state + {:error, reason} -> + Logger.error("Could not add diagnostic #{inspect(diagnostic)} because #{inspect(error)}") + state + end + end + + publish_diagnostics(state) + end + ] + |> compile() + |> diagnostics() + + assert [start_diagnostic, end_diagnostic] = errors + + assert end_diagnostic.message =~ "unexpected reserved word: end" + assert end_diagnostic.position == {15, 9} + + assert String.downcase(start_diagnostic.message) =~ + ~S[the "(" here is missing terminator ")"] + + assert start_diagnostic.position == 4 + end + + @feature_condition details_in_context?: true + @tag execute_if(@feature_condition) + test "returns both the error and the detail when provided and #{inspect(@feature_condition)}" do + document_text = ~S[ + def handle_info(file_diagnostics(uri: uri, diagnostics: diagnostics), %State{} = state) do + state = State.clear(state, uri) + state = Enum.reduce(diagnostics, state, fn diagnostic, state 
-> + case State.add(diagnostic, state, uri) do + {:ok, new_state} -> + new_state + {:error, reason} -> + Logger.error("Could not add diagnostic #{inspect(diagnostic)} because #{inspect(error)}") + state + end + end + + publish_diagnostics(state) + end + ] + + errors = + document_text + |> compile() + |> diagnostics() + + assert [start_diagnostic, end_diagnostic] = errors + + assert start_diagnostic.message == ~S[The `(` here is missing terminator `)`] + + assert decorate(document_text, start_diagnostic.position) =~ + ~S[Enum.reduce«(»diagnostics, state, fn diagnostic, state ->] + + assert end_diagnostic.message =~ "unexpected reserved word: end" + assert end_diagnostic.position == {15, 9} + end + + @feature_condition details_in_context?: false, with_diagnostics?: false + @tag execute_if(@feature_condition) + test "returns multiple diagnostics on the same line" do + [end_diagnostic] = + ~S{Keywor.get([], fn x -> )} + |> compile() + |> diagnostics() + + assert end_diagnostic.message =~ ~s[The \"fn\" here is missing terminator \"end\"] + assert end_diagnostic.position == 1 + end + + @feature_condition details_in_context?: false, with_diagnostics?: true + @tag execute_if(@feature_condition) + test "returns multiple diagnostics on the same line when #{inspect(@feature_condition)}" do + [end_diagnostic] = + ~S{Keywor.get([], fn x -> )} + |> compile() + |> diagnostics() + + assert end_diagnostic.message =~ ~S[unexpected token: )] + assert end_diagnostic.position == {1, 24} + end + + @feature_condition details_in_context?: true + @tag execute_if(@feature_condition) + test "returns multiple diagnostics on the same line when #{inspect(@feature_condition)}" do + document_text = ~S{Keywor.get([], fn x -> )} + + [start_diagnostic, end_diagnostic] = + document_text + |> compile() + |> diagnostics() + + assert end_diagnostic.message == ~S[unexpected token: ), expected `end`] + assert end_diagnostic.position == {1, 24} + + assert start_diagnostic.message == ~S[The `fn` here is missing terminator `end`] + assert decorate(document_text, start_diagnostic.position) =~ ~S/Keywor.get([], «fn» x -> )/ + end + + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) + test "returns two diagnostics when missing end at the real end" do + errors = + ~S[ + defmodule Foo do + def bar do + :ok + end] + |> compile() + |> diagnostics() + + assert [start_diagnostic, end_diagnostic] = errors + + assert %Diagnostic.Result{} = end_diagnostic + assert end_diagnostic.message =~ "missing terminator: end" + assert end_diagnostic.position == {5, 12} + + assert %Diagnostic.Result{} = start_diagnostic + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert start_diagnostic.position == 2 + end + + @feature_condition details_in_context?: true + @tag execute_if(@feature_condition) + test "returns two diagnostics when missing end at the real end and #{inspect(@feature_condition)}" do + document_text = ~S[ + defmodule Foo do + def bar do + :ok + end] + + errors = + document_text + |> compile() + |> diagnostics() + + assert [start_diagnostic, end_diagnostic] = errors + + assert %Diagnostic.Result{} = end_diagnostic + assert end_diagnostic.message == "missing terminator: end" + assert end_diagnostic.position == {5, 12} + + assert %Diagnostic.Result{} = start_diagnostic + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert decorate(document_text, start_diagnostic.position) =~ ~S/defmodule Foo «do»/ + end + + test "returns the token in the 
message when encountering the `syntax error`" do + diagnostic = ~S[1 + * 3] |> compile() |> diagnostic() + assert diagnostic.message == "syntax error before: '*'" + assert diagnostic.position == {1, 5} + end + + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) + test "returns the approximate correct location when there is a hint." do + diagnostics = ~S[ + defmodule Foo do + def bar_missing_end do + :ok + + def bar do + :ok + end + end] |> compile() |> diagnostics() + + [start_diagnostic, hint_diagnostic, end_diagnostic] = diagnostics + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert start_diagnostic.position == 2 + assert end_diagnostic.message == ~S[missing terminator: end (for "do" starting at line 2)] + assert end_diagnostic.position == {9, 12} + + assert hint_diagnostic.message == + ~S[HINT: it looks like the "do" here does not have a matching "end"] + + assert hint_diagnostic.position == 3 + end + + @feature_condition details_in_context?: true + @tag execute_if(@feature_condition) + test "returns the approximate correct location when there is a hint and #{inspect(@feature_condition)}" do + document_text = ~S[ + defmodule Foo do + def bar_missing_end do + :ok + + def bar do + :ok + end + end] + + diagnostics = document_text |> compile() |> diagnostics() + + [start_diagnostic, hint_diagnostic, end_diagnostic] = diagnostics + + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert decorate(document_text, start_diagnostic.position) =~ ~S/defmodule Foo «do»/ + + assert hint_diagnostic.message == + ~S[HINT: it looks like the "do" here does not have a matching "end"] + + assert hint_diagnostic.position == 3 + + assert end_diagnostic.message == ~S[missing terminator: end] + assert end_diagnostic.position == {9, 12} + end + + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) + test "returns the last approximate correct location when there are multiple missing" do + diagnostics = ~S[ + defmodule Foo do + def bar_missing_end do + :ok + + def bar_missing_end2 do + + def bar do + :ok + end + end] |> compile() |> diagnostics() + + [start_diagnostic, hint_diagnostic, end_diagnostic] = diagnostics + + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert start_diagnostic.position == 3 + + assert hint_diagnostic.message == + ~S[HINT: it looks like the "do" here does not have a matching "end"] + + assert hint_diagnostic.position == 6 + + assert end_diagnostic.message == ~S[missing terminator: end (for "do" starting at line 3)] + assert end_diagnostic.position == {11, 12} + end + + @feature_condition details_in_context?: true + @tag execute_if(@feature_condition) + test "returns the last approximate correct location when there are multiple missing and #{inspect(@feature_condition)}" do + document_text = ~S[ + defmodule Foo do + def bar_missing_end do + :ok + + def bar_missing_end2 do + + def bar do + :ok + end + end] + + [start_diagnostic, hint_diagnostic, end_diagnostic] = + document_text |> compile() |> diagnostics() + + assert start_diagnostic.message == ~S[The `do` here is missing terminator `end`] + assert decorate(document_text, start_diagnostic.position) =~ "def bar_missing_end «do»" + + assert hint_diagnostic.message == + ~S[HINT: it looks like the "do" here does not have a matching "end"] + + assert hint_diagnostic.position == 6 + + assert end_diagnostic.message == ~S[missing terminator: end] + assert 
end_diagnostic.position == {11, 12} + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/build/error_test.exs b/apps/remote_control/test/lexical/remote_control/build/error_test.exs index 007a3037a..e14ba8ffb 100644 --- a/apps/remote_control/test/lexical/remote_control/build/error_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build/error_test.exs @@ -1,11 +1,12 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do alias Lexical.Document - alias Lexical.Plugin.V1.Diagnostic alias Lexical.RemoteControl.Build alias Lexical.RemoteControl.Build.CaptureServer alias Lexical.RemoteControl.ModuleMappings require Logger + import Lexical.Test.DiagnosticSupport + import Lexical.Test.RangeSupport use ExUnit.Case use Patch @@ -28,6 +29,10 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do diagnostic end + def diagnostic({:ok, [diagnostic]}) do + diagnostic + end + describe "refine_diagnostics/1" do test "normalizes the message when its a iodata" do diagnostic = %Mix.Task.Compiler.Diagnostic{ @@ -53,147 +58,136 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end - describe "handling parse errors" do - test "handles token missing errors" do - assert diagnostic = - ~s[%{foo: 3] - |> compile() - |> diagnostic() - - assert diagnostic.message =~ ~s[missing terminator: } (for "{" starting at line 1)] + describe "diagnostic/3" do + setup do + patch(ModuleMappings, :modules_in_file, fn _ -> [] end) + :ok end - test "returns both the error and the detail when provided" do - errors = - ~S[ - def handle_info(file_diagnostics(uri: uri, diagnostics: diagnostics), %State{} = state) do - state = State.clear(state, uri) - state = Enum.reduce(diagnostics, state, fn diagnostic, state -> - case State.add(diagnostic, state, uri) do - {:ok, new_state} -> - new_state - {:error, reason} -> - Logger.error("Could not add diagnostic #{inspect(diagnostic)} because #{inspect(error)}") - state + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) + test "handles undefined variable" do + document_text = ~S[ + defmodule Foo do + def bar do + a end end + ] - publish_diagnostics(state) - end - ] - |> compile() - |> diagnostics() - - assert [detail, error] = errors - - assert error.message =~ "unexpected reserved word: end" - assert error.position == {15, 9} - - assert String.downcase(detail.message) =~ ~S[the "(" here is missing terminator ")"] - assert detail.position == 4 - end - - test "return the more precise one when there are multiple diagnostics on the same line" do diagnostic = - ~S{Keywor.get([], fn x -> )} + document_text |> compile() |> diagnostic() - assert diagnostic.message =~ - ~S[unexpected token: )] - - assert diagnostic.position == {1, 24} + assert diagnostic.message in [~s[undefined variable "a"], ~s[undefined function a/0]] + assert decorate(document_text, diagnostic.position) =~ "«a\n»" end - test "returns two diagnostics when missing end at the real end" do - errors = - ~S[ + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles undefined variable when #{inspect(@feature_condition)}" do + document_text = ~S[ defmodule Foo do def bar do - :ok - end] - |> compile() - |> diagnostics() - - assert [end_diagnostic, start_diagnostic] = errors - - assert %Diagnostic.Result{} = end_diagnostic - assert end_diagnostic.message == "missing terminator: end (for \"do\" starting at line 2)" - assert end_diagnostic.position == {5, 12} + a + end + end + ] - assert %Diagnostic.Result{} = start_diagnostic 
- assert start_diagnostic.message == ~S[The "do" here is missing a terminator: "end"] - assert start_diagnostic.position == 2 - end + diagnostic = + document_text + |> compile() + |> diagnostic() - test "returns the token in the message when there is a token" do - end_diagnostic = ~S[1 + * 3] |> compile() |> diagnostic() - assert end_diagnostic.message == "syntax error before: '*'" - assert end_diagnostic.position == {1, 5} + assert diagnostic.message == ~s[undefined variable "a"] + assert decorate(document_text, diagnostic.position) =~ "«a»" end - test "returns the approximate correct location when there is a hint." do - diagnostics = ~S[ + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) + test "handles unused variable warning" do + document_text = ~S[ defmodule Foo do - def bar_missing_end do - :ok - def bar do - :ok + a = 1 end - end] |> compile() |> diagnostics() - - [end_message, start_message, hint_message] = diagnostics - - assert end_message.message == ~S[missing terminator: end (for "do" starting at line 2)] - assert end_message.position == {9, 12} - - assert start_message.message == ~S[The "do" here is missing a terminator: "end"] - assert start_message.position == 2 + end + ] - assert hint_message.message == - ~S[HINT: it looks like the "do" here does not have a matching "end"] + diagnostic = + document_text + |> compile() + |> diagnostic() - assert hint_message.position == 3 + assert diagnostic.message =~ ~s[variable "a" is unused] + assert decorate(document_text, diagnostic.position) =~ "«a = 1\n»" end - test "returns the last approximate correct location when there are multiple missing" do - diagnostics = ~S[ + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles unused variable warning when #{inspect(@feature_condition)}" do + document_text = ~S[ defmodule Foo do - def bar_missing_end do - :ok - - def bar_missing_end2 do - def bar do - :ok + a = 1 end - end] |> compile() |> diagnostics() + end + ] - [end_message, start_message, hint_message] = diagnostics + diagnostic = + document_text + |> compile() + |> diagnostic() - assert end_message.message == ~S[missing terminator: end (for "do" starting at line 3)] - assert end_message.position == {11, 12} + assert diagnostic.message == ~s[variable "a" is unused] + assert decorate(document_text, diagnostic.position) =~ "«a»" + end - assert start_message.message == ~S[The "do" here is missing a terminator: "end"] - assert start_message.position == 3 + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) + test "handles unused function warning" do + document_text = ~S[ + defmodule UnusedDefp do + defp unused do + end + end + ] - assert hint_message.message == - ~S[HINT: it looks like the "do" here does not have a matching "end"] + diagnostic = + document_text + |> compile() + |> diagnostic() - assert hint_message.position == 6 + assert diagnostic.uri + assert diagnostic.severity == :warning + assert diagnostic.message =~ ~S[function unused/0 is unused] + assert decorate(document_text, diagnostic.position) =~ "«defp unused do\n»" end - end - describe "diagnostic/3" do - setup do - patch(ModuleMappings, :modules_in_file, fn _ -> [] end) - :ok + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles unused function warning when #{inspect(@feature_condition)}" do + document_text = ~S[ + defmodule UnusedDefp do + defp unused do + end + end + ] + + diagnostic = + document_text + |> 
compile() + |> diagnostic() + + assert diagnostic.uri + assert diagnostic.severity == :warning + assert diagnostic.message =~ ~S[function unused/0 is unused] + assert decorate(document_text, diagnostic.position) =~ "«unused do\n»" end test "handles FunctionClauseError" do - diagnostic = - ~S[ + document_text = ~S[ defmodule Foo do def add(a, b) when is_integer(a) and is_integer(b) do a + b @@ -202,72 +196,157 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do Foo.add("1", "2") ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[no function clause matching in Foo.add/2] - assert diagnostic.position == 3 + + assert decorate(document_text, diagnostic.position) =~ + "«def add(a, b) when is_integer(a) and is_integer(b) do\n»" end test "handles UndefinedError for erlang moudle" do - diagnostic = - ~S[ + document_text = ~S[ defmodule Foo do :slave.stop end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[function :slave.stop/0 is undefined or private.] - assert diagnostic.position == {3, 17} + assert decorate(document_text, diagnostic.position) =~ ":slave.«stop\n»" end test "handles UndefinedError for erlang function without defined module" do - diagnostic = - ~S[ + document_text = ~S[ :slave.stop(:name, :name) ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[function :slave.stop/2 is undefined or private.] + assert decorate(document_text, diagnostic.position) =~ ":slave.«stop(:name, :name)\n»" assert diagnostic.position == {3, 17} end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "handles UndefinedError" do + document_text = ~S[ + defmodule Foo do + def bar do + print(:bar) + end + end + ] + diagnostic = - ~S[ + document_text + |> compile() + |> diagnostic() + + assert diagnostic.message =~ + ~s[undefined function print/1] + + assert decorate(document_text, diagnostic.position) =~ "«print(:bar)\n»" + end + + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles UndefinedError when #{inspect(@feature_condition)}" do + document_text = ~S[ defmodule Foo do def bar do print(:bar) end end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[undefined function print/1] - # NOTE: main is {4, 13} - assert diagnostic.position == 4 + assert decorate(document_text, diagnostic.position) =~ "«print»(:bar)" end - test "handles UndefinedError without moudle" do + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) + test "handles multiple UndefinedError in one line" do + document_text = ~S/ + defmodule Foo do + def bar do + [print(:bar), a, b] + end + end + / + diagnostic = - ~S[ + document_text + |> compile() + |> diagnostic() + + assert diagnostic.message in [~s[undefined function print/1], ~s[undefined function a/0]] + assert decorate(document_text, diagnostic.position) =~ "«[print(:bar), a, b]\n»" + end + + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles multiple UndefinedError in one line when #{inspect(@feature_condition)}" do + document_text = ~S/ + defmodule Foo do + def bar do + [print(:bar), a, b] + end + end + / + + [func_diagnostic, b, a] = + document_text + |> compile() + |> diagnostics() + + assert a.message == ~s[undefined variable "a"] + assert decorate(document_text, a.position) =~ "«a»" + + assert b.message == ~s[undefined variable "b"] + 
assert decorate(document_text, b.position) =~ "«b»" + + assert func_diagnostic.message == ~s[undefined function print/1] + assert decorate(document_text, func_diagnostic.position) =~ "«print»(:bar)" + end + + test "handles UndefinedError without module" do + document_text = ~S[ IO.ins ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[function IO.ins/0 is undefined or private] - assert diagnostic.position == {3, 14} + assert decorate(document_text, diagnostic.position) =~ "IO.«ins\n»" end + @feature_condition with_diagnostics?: false + @tag execute_if(@feature_condition) test "handles ArgumentError" do diagnostics = ~s[String.to_integer ""] @@ -276,41 +355,59 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do [diagnostic | _] = diagnostics + assert diagnostic.message =~ + "the call to String.to_integer/1 will fail with ArgumentError" + end + + @feature_condition with_diagnostics?: true + @tag execute_if(@feature_condition) + test "handles ArgumentError when #{inspect(@feature_condition)}" do + diagnostics = + ~s[String.to_integer ""] + |> compile() + |> diagnostics() + + [diagnostic | _] = diagnostics + assert diagnostic.message =~ ~s[errors were found at the given arguments:] end test "handles ArgumentError when in module" do - diagnostic = - ~s[ + document_text = ~s[ defmodule Foo do :a |> {1, 2} end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[cannot pipe :a into {1, 2}, can only pipe into local calls foo()] - assert diagnostic.position == 3 + assert decorate(document_text, diagnostic.position) =~ "«:a |> {1, 2}\n»" end test "handles ArgumentError when in function" do - diagnostic = - ~s[ + document_text = ~s[ defmodule Foo do def foo do :a |> {1, 2} end end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[cannot pipe :a into {1, 2}, can only pipe into local calls foo()] - assert diagnostic.position == 4 + assert decorate(document_text, diagnostic.position) =~ "«:a |> {1, 2}\n»" end test "can't find right line when use macro" do @@ -331,49 +428,53 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end test "handles Protocol.UndefinedError for comprehension" do - diagnostic = - ~S[ + document_text = ~S[ defmodule Foo do for i <- 1, do: i end] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[protocol Enumerable not implemented for 1 of type Integer] - assert diagnostic.position == 3 + assert decorate(document_text, diagnostic.position) =~ "«for i <- 1, do: i\n»" end test "handles Protocol.UndefinedError for comprehension when no module" do - diagnostic = - ~S[ + document_text = ~S[ for i <- 1, do: i ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[protocol Enumerable not implemented for 1 of type Integer] - assert diagnostic.position == 2 + assert decorate(document_text, diagnostic.position) =~ "«for i <- 1, do: i\n»" end test "handles RuntimeError" do - diagnostic = - ~S[ - defmodule Foo do + document_text = ~S[defmodule Foo do raise RuntimeError.exception("This is a runtime error") end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[This is a runtime error] - assert diagnostic.position == 1 + assert decorate(document_text, diagnostic.position) =~ "«defmodule Foo do\n»" end test "handles ExUnit.DuplicateTestError" do - diagnostic = - ~s[ + document_text = ~s[ defmodule FooTest do use ExUnit.Case, async: 
true @@ -386,16 +487,19 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[\"test foo\" is already defined in FooTest] - assert diagnostic.position == 9 + assert decorate(document_text, diagnostic.position) =~ "«test \"foo\" do\n»" end test "handles ExUnit.DuplicateDescribeError" do - diagnostic = - ~s[ + document_text = ~s[ + defmodule FooTest do use ExUnit.Case, async: true @@ -412,16 +516,18 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end ] + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ ~s[describe \"foo\" is already defined in FooTest] - assert diagnostic.position == 11 + assert decorate(document_text, diagnostic.position) =~ "«describe \"foo\" do\n»" end test "handles struct `KeyError` when is in a function block" do - diagnostic = - ~s( + document_text = ~s( defmodule Foo do defstruct [:a, :b] end @@ -432,16 +538,20 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end ) + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ "key :c not found" - assert diagnostic.position == 8 + assert decorate(document_text, diagnostic.position) =~ "«%Foo{c: :value}\n»" end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "handles struct `CompileError` when is in a function params" do - diagnostic = - ~s/ + document_text = ~S/ defmodule Foo do defstruct [:a, :b] end @@ -451,21 +561,50 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end / + + diagnostics = + document_text |> compile() - |> diagnostic() + |> diagnostics() + [diagnostic] = diagnostics assert diagnostic.message =~ "unknown key :c for struct Foo" if Features.with_diagnostics?() do - assert diagnostic.position == {7, 19} + assert decorate(document_text, diagnostic.position) =~ "def bar(«%Foo{c: c}) do\n»" else - assert diagnostic.position == 7 + assert decorate(document_text, diagnostic.position) =~ "«def bar(%Foo{c: c}) do\n»" end end + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "handles struct `CompileError` when is in a function params and #{inspect(@feature_condition)}" do + document_text = ~S/ + defmodule Foo do + defstruct [:a, :b] + end + + defmodule Bar do + def bar(%Foo{c: c}) do + end + end + / + + [undefined, unknown] = + document_text + |> compile() + |> diagnostics() + + assert unknown.message == "unknown key :c for struct Foo" + assert decorate(document_text, unknown.position) =~ "def bar(«%Foo{c: c}) do\n»" + + assert undefined.message == "variable \"c\" is unused" + assert decorate(document_text, undefined.position) =~ "def bar(%Foo{c: «c»}) do" + end + test "handles struct enforce key error" do - diagnostic = - ~s( + document_text = ~s( defmodule Foo do @enforce_keys [:a, :b] defstruct [:a, :b] @@ -477,18 +616,20 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end ) + + diagnostic = + document_text |> compile() |> diagnostic() assert diagnostic.message =~ "the following keys must also be given when building struct Foo: [:a, :b]" - assert diagnostic.position == 9 + assert decorate(document_text, diagnostic.position) =~ "«%Foo{}\n»" end test "handles record missing key's error" do - diagnostic = - ~s[ + document_text = ~s[ defmodule Bar do import Record defrecord :user, name: nil, age: nil @@ -498,13 +639,17 @@ defmodule Lexical.RemoteControl.Build.ErrorTest do end end ] + + diagnostic = + document_text |> 
compile() |> diagnostic() assert diagnostic.message =~ "record :user does not have the key: :email" - assert diagnostic.position == 7 + assert decorate(document_text, diagnostic.position) =~ + "«u = user(name: \"John\", email: \"\")\n»" end end end diff --git a/apps/remote_control/test/lexical/remote_control/build/state_test.exs b/apps/remote_control/test/lexical/remote_control/build/state_test.exs index 3fc5426ea..d4894e9b8 100644 --- a/apps/remote_control/test/lexical/remote_control/build/state_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build/state_test.exs @@ -6,7 +6,6 @@ defmodule Lexical.RemoteControl.Build.StateTest do alias Lexical.RemoteControl.Build.State alias Lexical.RemoteControl.Plugin - import Lexical.Test.EventualAssertions import Lexical.Test.Fixtures use ExUnit.Case, async: false @@ -14,6 +13,7 @@ defmodule Lexical.RemoteControl.Build.StateTest do setup do start_supervised!(RemoteControl.Dispatch) + start_supervised!(RemoteControl.Api.Proxy) start_supervised!(Build.CaptureServer) start_supervised!(RemoteControl.ModuleMappings) start_supervised!(Plugin.Runner.Coordinator) @@ -66,22 +66,88 @@ defmodule Lexical.RemoteControl.Build.StateTest do {:ok, document: document} end - describe "throttled compilation" do - setup [:with_metadata_project, :with_a_valid_document] + def with_patched_compilation(_) do + patch(Build.Document, :compile, :ok) + patch(Build.Project, :compile, :ok) + :ok + end + + describe "throttled document compilation" do + setup [:with_metadata_project, :with_a_valid_document, :with_patched_compilation] test "it doesn't compile immediately", %{state: state, document: document} do - new_state = - state - |> State.on_file_compile(document) - |> State.on_tick() + State.on_file_compile(state, document) + + refute_called(Build.Document.compile(document)) + refute_called(Build.Project.compile(_, _)) + end + + test "it compiles files when on_timeout is called", %{state: state, document: document} do + state + |> State.on_file_compile(document) + |> State.on_timeout() + + assert_called(Build.Document.compile(document)) + refute_called(Build.Project.compile(_, _)) + end + end + + describe "throttled project compilation" do + setup [:with_metadata_project, :with_a_valid_document, :with_patched_compilation] + + test "doesn't compile immediately if forced", %{state: state} do + State.on_project_compile(state, true) + refute_called(Build.Project.compile(_, _)) + end + + test "doesn't compile immediately", %{state: state} do + State.on_project_compile(state, false) + refute_called(Build.Project.compile(_, _)) + end - assert State.compile_scheduled?(new_state, document.uri) + test "compiles if force is true after on_timeout is called", %{state: state} do + state + |> State.on_project_compile(true) + |> State.on_timeout() + + assert_called(Build.Project.compile(_, true)) end - test "it compiles after a timeout", %{state: state, document: document} do - state = State.on_file_compile(state, document) + test "compiles after on_timeout is called", %{state: state} do + state + |> State.on_project_compile(false) + |> State.on_timeout() + + assert_called(Build.Project.compile(_, false)) + end + end + + describe "mixed compilation" do + setup [:with_metadata_project, :with_a_valid_document, :with_patched_compilation] + + test "doesn't compile if both documents and projects are added", %{ + state: state, + document: document + } do + state + |> State.on_project_compile(false) + |> State.on_file_compile(document) + + refute_called(Build.Document.compile(_)) + 
refute_called(Build.Project.compile(_, _)) + end - refute_eventually(State.compile_scheduled?(State.on_tick(state), document.uri), 500) + test "compiles when on_timeout is called if both documents and projects are added", %{ + state: state, + document: document + } do + state + |> State.on_project_compile(false) + |> State.on_file_compile(document) + |> State.on_timeout() + + assert_called(Build.Document.compile(_)) + assert_called(Build.Project.compile(_, _)) end end end diff --git a/apps/remote_control/test/lexical/remote_control/build_test.exs b/apps/remote_control/test/lexical/remote_control/build_test.exs index 648ea3410..5e35c7d22 100644 --- a/apps/remote_control/test/lexical/remote_control/build_test.exs +++ b/apps/remote_control/test/lexical/remote_control/build_test.exs @@ -10,6 +10,7 @@ defmodule Lexical.BuildTest do import Messages import Lexical.Test.Fixtures + import Lexical.Test.DiagnosticSupport use ExUnit.Case use Patch @@ -23,12 +24,12 @@ defmodule Lexical.BuildTest do project |> Project.root_path() |> Path.join(to_string(sequence)) - |> Path.join("file.exs") + |> Path.join("file.ex") |> Document.Path.to_uri() end source = Document.new(uri, source_code, 0) - Build.force_compile_document(project, source) + RemoteControl.call(project, Build, :force_compile_document, [source]) end def with_project(project_name) do @@ -36,6 +37,10 @@ defmodule Lexical.BuildTest do fixture_dir = Path.join(fixtures_path(), project_name) project = Project.new("file://#{fixture_dir}") + project + |> Project.workspace_path() + |> File.rm_rf() + {:ok, _} = start_supervised({ProjectNodeSupervisor, project}) {:ok, _, _} = RemoteControl.start_link(project) RemoteControl.Api.register_listener(project, self(), [:all]) @@ -50,7 +55,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, module) - assert_receive file_compiled(), 500 + assert_receive file_compiled() :ok end @@ -67,18 +72,18 @@ defmodule Lexical.BuildTest do describe "compiling a project" do test "sends a message when complete " do {:ok, project} = with_project(:project_metadata) - Build.schedule_compile(project, true) + RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(status: :success), 500 - assert_receive project_progress(label: "Building " <> project_name), 500 + assert_receive project_compiled(status: :success) + assert_receive project_progress(label: "Building " <> project_name) assert project_name == "project_metadata" end test "receives metadata about the defined modules" do {:ok, project} = with_project(:project_metadata) - Build.schedule_compile(project, true) - assert_receive module_updated(name: ProjectMetadata, functions: functions), 500 + RemoteControl.Api.schedule_compile(project, true) + assert_receive module_updated(name: ProjectMetadata, functions: functions) assert {:zero_arity, 0} in functions assert {:one_arity, 1} in functions @@ -89,24 +94,24 @@ defmodule Lexical.BuildTest do describe "compiling an umbrella project" do test "it sends a message when compilation is complete" do {:ok, project} = with_project(:umbrella) - Build.schedule_compile(project, true) + RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(status: :success), 500 - assert_receive project_diagnostics(diagnostics: []), 500 + assert_receive project_compiled(status: :success) + assert_receive project_diagnostics(diagnostics: []) - assert_receive module_updated(name: Umbrella.First, functions: functions), 500 + assert_receive module_updated(name: Umbrella.First, functions: 
functions) assert {:arity_0, 0} in functions assert {:arity_1, 1} in functions assert {:arity_2, 2} in functions - assert_receive module_updated(name: Umbrella.Second, functions: functions), 500 + assert_receive module_updated(name: Umbrella.Second, functions: functions) assert {:arity_0, 0} in functions assert {:arity_1, 1} in functions assert {:arity_2, 2} in functions - assert_receive project_progress(label: "Building " <> project_name), 500 + assert_receive project_progress(label: "Building " <> project_name) assert project_name == "umbrella" end end @@ -114,34 +119,51 @@ defmodule Lexical.BuildTest do describe "compiling a project that has errors" do test "it reports the errors" do {:ok, project} = with_project(:compilation_errors) - Build.schedule_compile(project, true) + RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(status: :error), 500 - assert_receive project_diagnostics(diagnostics: [%Diagnostic.Result{}]), 500 + assert_receive project_compiled(status: :error) + assert_receive project_diagnostics(diagnostics: [%Diagnostic.Result{}]) end end describe "compilng a project with parse errors" do setup :with_parse_errors_project + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "stuff", %{project: project} do - Build.schedule_compile(project, true) + RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(status: :error), 500 - assert_receive project_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]), 500 + assert_receive project_compiled(status: :error) + assert_receive project_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]) assert diagnostic.uri assert diagnostic.message =~ "SyntaxError" end + + @feature_condition span_in_diagnostic?: true + @tag execute_if(@feature_condition) + test "stuff when #{inspect(@feature_condition)}", %{project: project} do + RemoteControl.Api.schedule_compile(project, true) + + assert_receive project_compiled(status: :error) + assert_receive project_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]) + + assert diagnostic.uri + assert diagnostic.position == {4, 24} + + assert diagnostic.message =~ + "** (MismatchedDelimiterError) mismatched delimiter found on lib/parse_errors.ex:15" + end end describe "when compiling a project that has warnings" do test "it reports them" do {:ok, project} = with_project(:compilation_warnings) - Build.schedule_compile(project, true) + RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(status: :success), 500 - assert_receive project_diagnostics(diagnostics: diagnostics), 500 + assert_receive project_compiled(status: :success) + assert_receive project_diagnostics(diagnostics: diagnostics) assert [%Diagnostic.Result{}, %Diagnostic.Result{}] = diagnostics @@ -170,8 +192,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert %Diagnostic.Result{} = diagnostic assert diagnostic.uri @@ -180,12 +202,14 @@ defmodule Lexical.BuildTest do assert diagnostic.position == {4, 15} end + @feature_condition details_in_context?: false + @tag execute_if(@feature_condition) test "handles missing token errors", %{project: project} do source = ~S[%{foo: 3] compile_document(project, source) - assert_receive file_compiled(status: 
:error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert %Diagnostic.Result{} = diagnostic assert diagnostic.uri @@ -200,8 +224,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert %Diagnostic.Result{} = diagnostic assert diagnostic.uri @@ -222,8 +246,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert %Diagnostic.Result{} = diagnostic assert diagnostic.uri @@ -239,14 +263,16 @@ defmodule Lexical.BuildTest do ] compile_document(project, "my_test.ex", source) - assert_receive file_compiled(status: :error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert diagnostic.severity == :error assert diagnostic.uri =~ "my_test.ex" assert diagnostic.message =~ "function IO.ins/0 is undefined or private" assert diagnostic.position == {3, 12} end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "reports unused variables", %{project: project} do source = ~S[ defmodule WithWarnings do @@ -257,8 +283,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 - assert_receive file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]), 500 + assert_receive file_compiled(status: :success) + assert_receive file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]) assert diagnostic.uri assert diagnostic.severity == :warning @@ -272,6 +298,8 @@ defmodule Lexical.BuildTest do end end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "reports missing parens", %{project: project} do source = ~S[ defmodule WithWarnings do @@ -286,7 +314,7 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic | _]), 500 + assert_receive file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic | _]) assert diagnostic.uri if Features.with_diagnostics?() do @@ -297,7 +325,7 @@ defmodule Lexical.BuildTest do assert diagnostic.position == {4, 13} else - assert_receive file_compiled(status: :success), 500 + assert_receive file_compiled(status: :success) assert diagnostic.severity == :warning assert diagnostic.details == {WithWarnings, :error, 0} assert diagnostic.position == 4 @@ -307,6 +335,8 @@ defmodule Lexical.BuildTest do end end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "reports unused defp functions", %{project: project} do source = ~S[ defmodule UnusedDefp do @@ -316,8 +346,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 - assert_receive file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]), 500 + assert_receive file_compiled(status: :success) + assert_receive 
file_diagnostics(diagnostics: [%Diagnostic.Result{} = diagnostic]) assert diagnostic.uri assert diagnostic.severity == :warning @@ -326,6 +356,8 @@ defmodule Lexical.BuildTest do assert diagnostic.details == nil end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "handles undefined usages", %{project: project} do source = ~S[ defmodule WithUndefinedFunction do @@ -336,8 +368,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive file_compiled(status: :error), 500 - assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) assert diagnostic.uri assert diagnostic.severity == :error @@ -346,6 +378,8 @@ defmodule Lexical.BuildTest do assert diagnostic.details == nil end + @feature_condition span_in_diagnostic?: false + @tag execute_if(@feature_condition) test "reports multiple errors", %{project: project} do source = ~S[ defmodule WithFiveErrors do @@ -358,15 +392,10 @@ defmodule Lexical.BuildTest do compile_document(project, source) - assert_receive file_compiled(status: :error), 500 + assert_receive file_compiled(status: :error) - if Features.with_diagnostics?() do - assert_receive file_diagnostics(diagnostics: [_, _, _] = diagnostics), 500 - assert length(diagnostics) == 3 - else - assert_receive file_diagnostics(diagnostics: [_, _, _, _, _] = diagnostics), 500 - assert length(diagnostics) == 5 - end + assert_receive file_diagnostics(diagnostics: [_, _, _] = diagnostics) + assert length(diagnostics) == 3 end test "adding a new module notifies the listener", %{project: project} do @@ -376,7 +405,7 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive module_updated(name: NewModule, functions: []), 500 + assert_receive module_updated(name: NewModule, functions: []) end test "adding a non-loaded module notifies the listener", %{project: project} do @@ -388,7 +417,7 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive module_updated(name: NotLoaded, struct: fields), 500 + assert_receive module_updated(name: NotLoaded, struct: fields) assert [%{field: :loaded, required?: true}] = fields end @@ -405,9 +434,9 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive module_updated(name: FirstModule), 500 - assert_receive module_updated(name: SecondModule), 500 - assert_receive module_updated(name: ThirdModule), 500 + assert_receive module_updated(name: FirstModule) + assert_receive module_updated(name: SecondModule) + assert_receive module_updated(name: ThirdModule) end test "adding a function notifies the listener", %{project: project} do @@ -420,7 +449,7 @@ defmodule Lexical.BuildTest do ] compile_document(project, source) - assert_receive module_updated(name: UnderTest, functions: [added_function: 2]), 500 + assert_receive module_updated(name: UnderTest, functions: [added_function: 2]) end test "removing a function notifies the listener", %{project: project} do @@ -437,10 +466,10 @@ defmodule Lexical.BuildTest do ] compile_document(project, initial) - assert_receive module_updated(), 500 + assert_receive module_updated() compile_document(project, removed) - assert_receive module_updated(name: Remove, functions: []), 500 + assert_receive module_updated(name: Remove, functions: []) end test "changing a function's arity notifies the listener", %{project: project} do @@ -451,7 +480,7 @@ defmodule Lexical.BuildTest do 
end ] compile_document(project, initial) - assert_receive module_updated(name: ArityChange, functions: [arity: 1]), 500 + assert_receive module_updated(name: ArityChange, functions: [arity: 1]) changed = ~S[ defmodule ArityChange do @@ -460,7 +489,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, changed) - assert_receive module_updated(name: ArityChange, functions: [arity: 2]), 500 + assert_receive module_updated(name: ArityChange, functions: [arity: 2]) end test "adding a macro notifies the listener", %{project: project} do @@ -474,7 +503,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, changed) - assert_receive module_updated(name: UnderTest, macros: [something: 1]), 500 + assert_receive module_updated(name: UnderTest, macros: [something: 1]) end test "removing a macro notifies the listener", %{project: project} do @@ -491,10 +520,10 @@ defmodule Lexical.BuildTest do ] compile_document(project, initial) - assert_receive module_updated(), 500 + assert_receive module_updated() compile_document(project, removed) - assert_receive module_updated(name: RemoveMacro, macros: []), 500 + assert_receive module_updated(name: RemoveMacro, macros: []) end test "changing a macro's arity notifies the listener", %{project: project} do @@ -505,7 +534,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, initial) - assert_receive module_updated(name: ArityChange, macros: [arity: 1]), 500 + assert_receive module_updated(name: ArityChange, macros: [arity: 1]) changed = ~S[ defmodule ArityChange do @@ -514,7 +543,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, changed) - assert_receive module_updated(name: ArityChange, macros: [arity: 2]), 500 + assert_receive module_updated(name: ArityChange, macros: [arity: 2]) end end @@ -530,8 +559,8 @@ defmodule Lexical.BuildTest do ] compile_document(project, initial) - assert_receive file_compile_requested(uri: file_uri), 500 - assert_receive file_diagnostics(uri: ^file_uri, diagnostics: []), 500 + assert_receive file_compile_requested(uri: file_uri) + assert_receive file_diagnostics(uri: ^file_uri, diagnostics: []) end end @@ -555,7 +584,7 @@ defmodule Lexical.BuildTest do module_name = Module.concat(["Module", submodule, "Submodule"]) do compile_document(project, __ENV__.file, source) - assert_receive module_updated(name: ^module_name), 500 + assert_receive module_updated(name: ^module_name) end refute loaded?(project, Module.S.Submodule) @@ -569,7 +598,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 + assert_receive file_compiled(status: :success) assert loaded?(project, EmptyModule) end @@ -580,7 +609,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 + assert_receive file_compiled(status: :success) assert loaded?(project, WithAFunction) end @@ -591,7 +620,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 + assert_receive file_compiled(status: :success) assert loaded?(project, WithAMacro) end @@ -602,7 +631,7 @@ defmodule Lexical.BuildTest do end ] compile_document(project, source) - assert_receive file_compiled(status: :success), 500 + assert_receive file_compiled(status: :success) assert loaded?(project, WithAStruct) end @@ -613,8 +642,70 @@ defmodule Lexical.BuildTest do end ] compile_document(project, source) - assert_receive file_compiled(status: 
:success), 500 + assert_receive file_compiled(status: :success) assert loaded?(project, WithAType) end end + + describe ".exs files" do + setup do + start_supervised!(RemoteControl.Dispatch) + start_supervised!(RemoteControl.ModuleMappings) + start_supervised!(Build.CaptureServer) + :ok + end + + test "should not run top-level forms" do + source = ~S[ + IO.puts("fail") + ] + + doc = Document.new("file:///file.exs", source, 0) + + captured = + ExUnit.CaptureIO.capture_io(fn -> + Build.Document.compile(doc) + end) + + refute captured =~ "fail" + end + end + + if Features.after_verify?() do + describe "exceptions during compilation" do + test "compiling a project with callback errors" do + {:ok, project} = with_project(:compilation_callback_errors) + RemoteControl.Api.schedule_compile(project, false) + assert_receive project_compiled(status: :error) + assert_receive project_diagnostics(diagnostics: [diagnostic]) + + file_name = + diagnostic.uri + |> Document.Path.ensure_path() + |> Path.basename() + + assert file_name == "compile_callback_error.ex" + assert diagnostic.position == 4 + assert diagnostic.severity == :error + assert diagnostic.message == "boom" + end + + test "compiling a file with callback errors" do + document = ~S[ + defmodule WithCallbackErrors do + @after_verify __MODULE__ + def __after_verify__(_) do + raise "boom" + end + end + ] + {:ok, project} = with_project(:project) + compile_document(project, document) + assert_receive file_compiled(status: :error) + assert_receive file_diagnostics(diagnostics: [diagnostic]) + assert diagnostic.message == "boom" + assert diagnostic.position == 1 + end + end + end end diff --git a/apps/remote_control/test/lexical/remote_control/code_action/handlers/add_alias_test.exs b/apps/remote_control/test/lexical/remote_control/code_action/handlers/add_alias_test.exs new file mode 100644 index 000000000..a0eb1c406 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_action/handlers/add_alias_test.exs @@ -0,0 +1,307 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.AddAliasTest do + alias Lexical.Ast.Analysis.Scope + alias Lexical.CodeUnit + alias Lexical.Document + alias Lexical.Document.Line + alias Lexical.Document.Range + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeAction.Handlers.AddAlias + alias Lexical.RemoteControl.Search.Store + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use Lexical.Test.CodeMod.Case, enable_ast_conversion: false + use Patch + + setup do + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) + :ok + end + + def apply_code_mod(text, _ast, options) do + range = options[:range] + uri = "file:///file.ex" + :ok = Document.Store.open(uri, text, 1) + {:ok, document} = Document.Store.fetch(uri) + + edits = + case AddAlias.actions(document, range, []) do + [action] -> action.changes.edits + _ -> [] + end + + {:ok, edits} + end + + def add_alias(original_text, modules_to_return) do + {position, stripped_text} = pop_cursor(original_text) + patch_fuzzy_search(modules_to_return) + range = Range.new(position, position) + modify(stripped_text, range: range) + end + + def patch_fuzzy_search(modules_to_return) do + all_modules = + Enum.map(modules_to_return, fn module -> + {Atom.to_charlist(module), :code.which(module), :code.is_loaded(module)} + end) + + patch(AddAlias, :all_modules, all_modules) + end + + describe "in an existing module with no aliases" do + test "aliases are added at the top of the module" do + patch(RemoteControl, 
:get_project, %Lexical.Project{}) + + {:ok, added} = + ~q[ + defmodule MyModule do + def my_fn do + Line| + end + end + ] + |> add_alias([Line]) + + expected = ~q[ + defmodule MyModule do + alias Lexical.Document.Line + def my_fn do + Line + end + end + ]t + assert added =~ expected + end + end + + describe "in an existing module" do + end + + describe "in the root context" do + end + + describe "adding an alias" do + test "does nothing on an invalid document" do + {:ok, added} = add_alias("%Lexical.RemoteControl.Search.", [Lexical.RemoteControl.Search]) + + assert added == "%Lexical.RemoteControl.Search." + end + + test "outside of a module with aliases" do + {:ok, added} = + ~q[ + alias ZZ.XX.YY + Line| + ] + |> add_alias([Line]) + + expected = ~q[ + alias Lexical.Document.Line + alias ZZ.XX.YY + Line + ]t + + assert added == expected + end + + test "when a full module name is given" do + {:ok, added} = + ~q[ + Lexical.RemoteControl.Search.Store.Backend| + ] + |> add_alias([Store.Backend]) + + expected = ~q[ + alias Lexical.RemoteControl.Search.Store.Backend + Backend + ]t + + assert added == expected + end + + test "when a full module name is given in a module function" do + patch(RemoteControl, :get_project, %Lexical.Project{}) + + {:ok, added} = + ~q[ + defmodule MyModule do + def my_fun do + result = Lexical.RemoteControl.Search.Store| + end + end + ] + |> add_alias([Store]) + + expected = ~q[ + defmodule MyModule do + alias Lexical.RemoteControl.Search.Store + def my_fun do + result = Store + end + end + ]t + + assert added =~ expected + end + + test "outside of a module with no aliases" do + {:ok, added} = + ~q[Line|] + |> add_alias([Line]) + + expected = ~q[ + alias Lexical.Document.Line + Line + ]t + + assert added == expected + end + + test "in a module with no aliases" do + patch(RemoteControl, :get_project, %Lexical.Project{}) + + {:ok, added} = + ~q[ + defmodule MyModule do + def my_fun do + Line| + end + end + ] + |> add_alias([Line]) + + expected = ~q[ + defmodule MyModule do + alias Lexical.Document.Line + def my_fun do + Line + end + end + ]t + + assert added =~ expected + end + + test "outside of functions" do + {:ok, added} = + ~q[ + defmodule MyModule do + alias Something.Else + Line| + end + ] + |> add_alias([Line]) + + expected = ~q[ + defmodule MyModule do + alias Lexical.Document.Line + alias Something.Else + Line + end + ] + + assert expected =~ added + end + + test "inside a function" do + {:ok, added} = + ~q[ + defmodule MyModule do + alias Something.Else + def my_fn do + Line| + end + end + ] + |> add_alias([Line]) + + expected = ~q[ + defmodule MyModule do + alias Lexical.Document.Line + alias Something.Else + def my_fn do + Line + end + end + ] + assert expected =~ added + end + + test "inside a nested module" do + {:ok, added} = + ~q[ + defmodule Parent do + alias Top.Level + defmodule Child do + alias Some.Other + Line| + end + end + ] + |> add_alias([Line]) + + expected = ~q[ + defmodule Parent do + alias Top.Level + defmodule Child do + alias Lexical.Document.Line + alias Some.Other + Line + end + end + ]t + + assert added =~ expected + end + + test "aliases for struct references don't include non-struct modules" do + {:ok, added} = add_alias("%Scope|{}", [Lexical.Ast, Scope]) + + expected = ~q[ + alias Lexical.Ast.Analysis.Scope + %Scope + ]t + + assert added =~ expected + end + + test "only modules with a similarly named function will be included in aliases" do + {:ok, added} = add_alias("Document.fetch|", [Document, RemoteControl]) + + expected = ~q[ + 
alias Lexical.Document + Document.fetch + ]t + + assert added =~ expected + end + + test "protocols are excluded" do + {:ok, added} = add_alias("Co|", [Collectable, CodeUnit]) + expected = ~q[ + alias Lexical.CodeUnit + Co + ]t + + assert added =~ expected + end + + test "protocol implementations are excluded" do + {:ok, added} = + add_alias("Lin|", [Lexical.Document.Lines, Enumerable.Lexical.Document.Lines]) + + expected = ~q[ + alias Lexical.Document.Lines + Lin + ]t + assert added =~ expected + end + + test "erlang modules are excluded" do + {:ok, added} = add_alias(":ets|", [:ets]) + assert added =~ ":ets" + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_action/handlers/organize_aliases_test.exs b/apps/remote_control/test/lexical/remote_control/code_action/handlers/organize_aliases_test.exs new file mode 100644 index 000000000..344018896 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_action/handlers/organize_aliases_test.exs @@ -0,0 +1,323 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.OrganizeAliasesTest do + alias Lexical.Document + alias Lexical.Document.Range + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeAction.Handlers.OrganizeAliases + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use Lexical.Test.CodeMod.Case, enable_ast_conversion: false + use Patch + + setup do + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) + :ok + end + + def apply_code_mod(text, _ast, options) do + range = options[:range] + uri = "file:///file.ex" + :ok = Document.Store.open(uri, text, 1) + {:ok, document} = Document.Store.fetch(uri) + + edits = + case OrganizeAliases.actions(document, range, []) do + [action] -> action.changes.edits + _ -> [] + end + + {:ok, edits} + end + + def organize_aliases(original_text) do + {position, stripped_text} = pop_cursor(original_text) + range = Range.new(position, position) + modify(stripped_text, range: range) + end + + describe "outside of a module" do + test "aliases are sorted alphabetically" do + {:ok, organized} = + ~q[ + alias ZZ.XX.YY + alias AA.BB.CC| + ] + |> organize_aliases() + + expected = ~q[ + alias AA.BB.CC + alias ZZ.XX.YY + ]t + + assert expected == organized + end + + test "aliases are sorted in a case-insensitive way" do + {:ok, organized} = + ~q[ + defmodule Outer do + alias CSV + alias Common| + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Outer do + alias Common + alias CSV + end + ]t + + assert expected == organized + end + + test "nested aliases are flattened" do + {:ok, organized} = + ~q[ + alias A.B.{C, D, E}| + ] + |> organize_aliases() + + expected = ~q[ + alias A.B.C + alias A.B.D + alias A.B.E + ]t + + assert expected == organized + end + end + + describe "at the top of a module" do + test "does nothing if there are no aliases" do + patch(RemoteControl, :get_project, %Lexical.Project{}) + + {:ok, organized} = + ~q[ + defmodule Nothing do + @attr true| + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Nothing do + @attr true + end + ]t + + assert expected == organized + end + + test "aliases are sorted alphabetically" do + {:ok, organized} = + ~q[ + defmodule Simple do + alias Z.X.Y| + alias V.W.X, as: Unk + alias A.B.C + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Simple do + alias A.B.C + alias V.W.X, as: Unk + alias Z.X.Y + end + ]t + assert expected == organized + end + + test "duplicate aliases are removed" do + {:ok, organized} =
~q[ + defmodule Dupes do + alias Foo.Bar.Baz| + alias Other.Thing + alias Foo.Bar.Baz + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Dupes do + alias Foo.Bar.Baz + alias Other.Thing + end + ]t + assert expected == organized + end + + test "dependent aliases are honored" do + {:ok, organized} = + ~q[ + defmodule Deps do + alias First.Dep| + alias Dep.Action + alias Action.Third + alias Third.Fourth.{Fifth, Sixth} + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Deps do + alias First.Dep + alias First.Dep.Action + alias First.Dep.Action.Third + alias First.Dep.Action.Third.Fourth.Fifth + alias First.Dep.Action.Third.Fourth.Sixth + end + ]t + + assert expected == organized + end + + test "nested aliases are flattened" do + {:ok, organized} = + ~q[ + defmodule Nested do + alias Foo.Bar.|{ + Baz, + Quux, + Quux.Foorp + } + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Nested do + alias Foo.Bar.Baz + alias Foo.Bar.Quux + alias Foo.Bar.Quux.Foorp + end + ]t + assert expected == organized + end + + test "module attributes are kept" do + {:ok, organized} = + ~q[ + defmodule Simple do + alias First.Second| + @attr true + alias Second.Third + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Simple do + alias First.Second + alias First.Second.Third + @attr true + end + ]t + + assert expected == organized + end + + test "aliases in a given scope are pulled to the top" do + {:ok, organized} = + ~q[ + defmodule Scattered do + alias| My.Alias + def my_function do + end + alias Another.Alias + def other_function do + end + alias Yet.Another + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Scattered do + alias Another.Alias + alias My.Alias + alias Yet.Another + def my_function do + end + def other_function do + end + end + ]t + + assert expected == organized + end + + test "aliases in different scopes are left alone" do + {:ok, organized} = + ~q[ + defmodule Outer do + alias Foo.Bar| + alias A.B + + def my_fn do + alias Something.Else + 1 - Else.other(1) + end + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Outer do + alias A.B + alias Foo.Bar + + def my_fn do + alias Something.Else + 1 - Else.other(1) + end + end + ]t + + assert expected == organized + end + + test "aliases in a nested module are left alone" do + {:ok, organized} = + ~q[ + defmodule Outer do + alias Foo.Bar + alias A.B + + defmodule Nested do + alias Something.Else + alias AA.BB | + + def nested_fn do + end + alias BB.CC + end + end + ] + |> organize_aliases() + + expected = ~q[ + defmodule Outer do + alias Foo.Bar + alias A.B + + defmodule Nested do + alias AA.BB + alias AA.BB.CC + alias Something.Else + + def nested_fn do + end + end + end + ]t + + assert expected == organized + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_action/handlers/remove_unused_alias_test.exs b/apps/remote_control/test/lexical/remote_control/code_action/handlers/remove_unused_alias_test.exs new file mode 100644 index 000000000..7c1f619c0 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_action/handlers/remove_unused_alias_test.exs @@ -0,0 +1,360 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.RemoveUnusedAliasTest do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Range + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Lexical.RemoteControl.CodeAction.Handlers.RemoveUnusedAlias + + import Lexical.Test.CursorSupport + import Lexical.Test.CodeSigil + + use
Lexical.Test.CodeMod.Case, enable_ast_conversion: false + + def apply_code_mod(original_text, _ast, options) do + Document.Store.open("file:///file.ex", original_text, 1) + {:ok, document} = Document.Store.fetch("file:///file.ex") + + cursor = Keyword.get(options, :cursor) + + start_pos = update_in(cursor.character, fn _ -> 1 end) + end_pos = update_in(start_pos.line, &(&1 + 1)) + + message = + case Keyword.get(options, :alias) do + nil -> + Keyword.get(options, :message, "warning: unused alias Foo") + + module -> + "warning: unused alias #{module}" + end + + range = Document.Range.new(cursor, cursor) + line_range = Range.new(start_pos, end_pos) + + diagnostic = Diagnostic.new(line_range, message, :elixir) + + edits = + document + |> RemoveUnusedAlias.actions(range, [diagnostic]) + |> Enum.flat_map(& &1.changes.edits) + + {:ok, edits} + end + + def remove_alias(orig_text, opts \\ []) do + {position, stripped} = pop_cursor(orig_text) + + opts = Keyword.merge(opts, cursor: position) + modify(stripped, opts) + end + + setup do + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) + :ok + end + + describe "at the top level" do + test "removes an alias" do + assert {:ok, ""} = remove_alias("alias Foo.Bar.Baz|", alias: "Baz") + end + + test "deletes the line completely" do + {:ok, doc} = + ~q[ + alias Foo.Bar.Baz| + Remote.function_call() + ] + |> remove_alias(alias: "Baz") + + assert "Remote.function_call()" == doc + end + + test "removes an alias in the middle of an alias block" do + {:ok, removed} = + ~q[ + alias Foo.Bar.Baz + alias Quux.Stuff| + alias Yet.More.Things + ] + |> remove_alias(alias: "Stuff") + + assert ~q[ + alias Foo.Bar.Baz + alias Yet.More.Things + ] =~ removed + end + + test "removes an alias at the end of an alias block" do + {:ok, removed} = + ~q[ + alias Foo.Bar.Baz + alias Quux.Stuff + alias Yet.More.Things| + ] + |> remove_alias(alias: "Things") + + assert ~q[ + alias Foo.Bar.Baz + alias Quux.Stuff + ] =~ removed + end + + test "works using as" do + {:ok, removed} = + ~q[ + alias Foo.Bar.Baz, as: Quux| + ] + |> remove_alias(alias: "Quux") + + assert "" == removed + end + + test "only deletes the alias on the cursor's line" do + {:ok, removed} = + ~q[ + alias Foo.Bar + alias Something.Else + alias Foo.Bar| + ] + |> remove_alias(alias: "Bar") + + assert ~q[ + alias Foo.Bar + alias Something.Else + ] =~ removed + end + + test "leaves things alone if the message is different" do + assert {:ok, "alias This.Is.Correct"} == + remove_alias("alias This.Is.Correct|", message: "ugly code") + end + end + + describe "in a module" do + test "removes an alias" do + {:ok, removed} = + ~q[ + defmodule MyModule do + alias Foo.Bar.Baz| + end + ] + |> remove_alias(alias: "Baz") + + assert "defmodule MyModule do\nend" =~ removed + end + + test "removes an alias in the middle of an alias block" do + {:ok, removed} = + ~q[ + defmodule MyModule do + alias Foo.Bar.Baz + alias Quux.Stuff| + alias Yet.More.Things + end + ] + |> remove_alias(alias: "Stuff") + + assert ~q[ + defmodule MyModule do + alias Foo.Bar.Baz + alias Yet.More.Things + end + ] =~ removed + end + + test "removes an alias at the end of an alias block" do + {:ok, removed} = + ~q[ + defmodule MyModule do + alias Foo.Bar.Baz + alias Quux.Stuff + alias Yet.More.Things| + end + ] + |> remove_alias(alias: "Things") + + assert ~q[ + defmodule MyModule do + alias Foo.Bar.Baz + alias Quux.Stuff + end + ] =~ removed + end + end + + describe "in deeply nested modules" do + test "aliases are removed completely" 
do + {:ok, removed} = + ~q[ + defmodule Grandparent do + defmodule Parent do + defmodule Child do + alias This.Goes.Bye| + end + end + end + ] + |> remove_alias(alias: "Bye") + + expected = ~q[ + defmodule Grandparent do + defmodule Parent do + defmodule Child do + end + end + end + ] + + assert expected =~ removed + end + end + + describe "multi-line alias block" do + test "the first alias can be removed" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.|{ + Child1, + Child2, + Child3 + } + ] + |> remove_alias(alias: "Child1") + + expected = ~q[ + alias Grandparent.Parent.{ + Child2, + Child3 + } + ] + + assert expected =~ removed + end + + test "the second alias can be removed" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.|{ + Child1, + Child2, + Child3 + } + ] + |> remove_alias(alias: "Child2") + + expected = ~q[ + alias Grandparent.Parent.{ + Child1, + Child3 + } + ] + + assert expected =~ removed + end + + test "the last alias can be removed" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.|{ + Child1, + Child2, + Child3 + } + ] + |> remove_alias(alias: "Child3") + + expected = ~q[ + alias Grandparent.Parent.{ + Child1, + Child2 + } + ] + + assert expected =~ removed + end + + test "the only alias can be removed" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.|{ + Child1 + } + ] + |> remove_alias(alias: "Child1") + + assert "" =~ removed + end + + test "when there are dotted aliases in the list" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.{ + Child.Stinky, + Child.Smelly|, + Other.Reeky + } + ] + |> remove_alias(alias: "Smelly") + + expected = ~q[ + alias Grandparent.Parent.{ + Child.Stinky, + Other.Reeky + } + ] + + assert expected =~ removed + end + end + + describe "single-line alias block" do + test "the first alias can be removed" do + {:ok, removed} = + ~q[alias Grandparent.Parent.|{Child1, Child2, Child3}] + |> remove_alias(alias: "Child1") + + expected = ~q[alias Grandparent.Parent.{Child2, Child3}] + + assert expected =~ removed + end + + test "the second alias can be removed" do + {:ok, removed} = + ~q[alias Grandparent.Parent.|{Child1, Child2, Child3}] + |> remove_alias(alias: "Child2") + + expected = ~q[ + alias Grandparent.Parent.{Child1, Child3} + ] + + assert expected =~ removed + end + + test "the last alias can be removed" do + {:ok, removed} = + ~q[ + alias Grandparent.Parent.|{Child1, Child2, Child3}] + |> remove_alias(alias: "Child3") + + expected = ~q[alias Grandparent.Parent.{Child1, Child2}] + + assert expected =~ removed + end + + test "the only alias can be removed" do + {:ok, removed} = + ~q[alias Grandparent.Parent.|{Child1}] + |> remove_alias(alias: "Child1") + + assert "" =~ removed + end + + test "when there are dotted aliases in the list" do + {:ok, removed} = + "alias Grandparent.Parent.{Child.Stinky, Child.Smelly, Other.Reeky}" + |> remove_alias(alias: "Smelly") + + assert "alias Grandparent.Parent.{Child.Stinky, Other.Reeky}" =~ removed + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_remote_function_test.exs b/apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_remote_function_test.exs new file mode 100644 index 000000000..5e5b51cde --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_remote_function_test.exs @@ -0,0 +1,271 @@ +defmodule Lexical.RemoteControl.CodeAction.Handlers.ReplaceRemoteFunctionTest do + alias Lexical.Document + alias Lexical.RemoteControl.CodeAction.Diagnostic +
alias Lexical.RemoteControl.CodeAction.Handlers.ReplaceRemoteFunction + + use Lexical.Test.CodeMod.Case + + setup do + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) + :ok + end + + @default_message """ + Enum.counts/1 is undefined or private. Did you mean: + + * concat/1 + * concat/2 + * count/1 + * count/2 + """ + + def apply_code_mod(original_text, _ast, options) do + line_number = Keyword.get(options, :line, 1) + message_body = Keyword.get(options, :message, @default_message) + message_prefix = Keyword.get(options, :message_prefix, "") + message = message_prefix <> message_body + + suggestion = + options + |> Keyword.get(:suggestion, :count) + |> Atom.to_string() + + :ok = Document.Store.open("file:///file.ex", original_text, 0) + {:ok, document} = Document.Store.fetch("file:///file.ex") + + range = + Document.Range.new( + Document.Position.new(document, line_number, 0), + Document.Position.new(document, line_number + 1, 0) + ) + + diagnostic = Diagnostic.new(range, message, nil) + + changes = + document + |> ReplaceRemoteFunction.actions(range, [diagnostic]) + |> Enum.flat_map(& &1.changes.edits) + |> Enum.filter(fn + %Lexical.Document.Edit{text: ^suggestion} -> true + _ -> false + end) + + {:ok, changes} + end + + for prefix <- ["", "warning: "] do + describe "fixes function call with message prefix \"#{prefix}\"" do + test "applied to a standalone call" do + {:ok, result} = + ~q{ + Enum.counts([1, 2, 3]) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "Enum.count([1, 2, 3])" + end + + test "applied to a variable match" do + {:ok, result} = + ~q{ + x = Enum.counts([1, 2, 3]) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "x = Enum.count([1, 2, 3])" + end + + test "applied to a variable match, preserves comments" do + {:ok, result} = + ~q{ + x = Enum.counts([1, 2, 3]) # TODO: Fix this + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "x = Enum.count([1, 2, 3]) # TODO: Fix this" + end + + test "not changing variable name" do + {:ok, result} = + ~q{ + counts = Enum.counts([1, 2, 3]) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "counts = Enum.count([1, 2, 3])" + end + + test "applied to a call after a pipe" do + {:ok, result} = + ~q{ + [1, 2, 3] |> Enum.counts() + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "[1, 2, 3] |> Enum.count()" + end + + test "changing only a function from provided possible modules" do + {:ok, result} = + ~q{ + Enumerable.counts([1, 2, 3]) + Enum.counts([3, 2, 1]) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "Enumerable.counts([1, 2, 3]) + Enum.count([3, 2, 1])" + end + + test "changing all occurrences of the function in the line" do + {:ok, result} = + ~q{ + Enum.counts([1, 2, 3]) + Enum.counts([3, 2, 1]) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "Enum.count([1, 2, 3]) + Enum.count([3, 2, 1])" + end + + test "applied in a comprehension" do + {:ok, result} = + ~q{ + for x <- Enum.counts([[1], [2], [3]]), do: x + } + |> modify(suggestion: :concat) + + assert result == "for x <- Enum.concat([[1], [2], [3]]), do: x" + end + + test "applied in a with block" do + {:ok, result} = + ~q{ + with x <- Enum.counts([1, 2, 3]), do: x + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "with x <- Enum.count([1, 2, 3]), do: x" + end + + test "preserving the leading indent" do + {:ok, result} = modify(" Enum.counts([1, 2, 3])", trim: false) + + assert result == " 
Enum.count([1, 2, 3])" + end + + test "handles erlang functions" do + message = """ + :ets.inserd/2 is undefined or private. Did you mean: + + * insert/2 + * insert_new/2 + + """ + + {:ok, result} = + modify(":ets.inserd(a, b)", + message: message, + message_prefix: unquote(prefix), + suggestion: :insert + ) + + assert result == ":ets.insert(a, b)" + end + end + + describe "fixes captured function with message prefix \"#{prefix}\"" do + test "applied to a standalone function" do + {:ok, result} = + ~q[ + &Enum.counts/1 + ] + |> modify(message_prefix: unquote(prefix)) + + assert result == "&Enum.count/1" + end + + test "applied to a variable match" do + {:ok, result} = + ~q[ + x = &Enum.counts/1 + ] + |> modify(message_prefix: unquote(prefix)) + + assert result == "x = &Enum.count/1" + end + + test "applied to a variable match, preserves comments" do + {:ok, result} = + ~q[ + x = &Enum.counts/1 # TODO: Fix this + ] + |> modify(message_prefix: unquote(prefix)) + + assert result == "x = &Enum.count/1 # TODO: Fix this" + end + + test "not changing variable name" do + {:ok, result} = + ~q{ + counts = &Enum.counts/1 + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "counts = &Enum.count/1" + end + + test "applied to an argument" do + {:ok, result} = + ~q{ + [[1, 2], [3, 4]] |> Enum.map(&Enum.counts/1) + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "[[1, 2], [3, 4]] |> Enum.map(&Enum.count/1)" + end + + test "changing only a function from provided possible modules" do + {:ok, result} = + ~q{ + [&Enumerable.counts/1, &Enum.counts/1] + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "[&Enumerable.counts/1, &Enum.count/1]" + end + + test "changing all occurrences of the function in the line" do + {:ok, result} = + ~q{ + [&Enum.counts/1, &Enum.counts/1] + } + |> modify(message_prefix: unquote(prefix)) + + assert result == "[&Enum.count/1, &Enum.count/1]" + end + + test "preserving the leading indent" do + {:ok, result} = modify(" &Enum.counts/1", trim: false) + + assert result == " &Enum.count/1" + end + + test "handles erlang functions" do + message = """ + :ets.inserd/2 is undefined or private. 
Did you mean: + + * insert/2 + * insert_new/2 + + """ + + {:ok, result} = + modify("&:ets.inserd/2", + message: message, + message_prefix: unquote(prefix), + suggestion: :insert + ) + + assert result == "&:ets.insert/2" + end + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_mod/replace_with_underscore_test.exs b/apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_with_underscore_test.exs similarity index 88% rename from apps/remote_control/test/lexical/remote_control/code_mod/replace_with_underscore_test.exs rename to apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_with_underscore_test.exs index 0893c91bb..d19b38563 100644 --- a/apps/remote_control/test/lexical/remote_control/code_mod/replace_with_underscore_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_action/handlers/replace_with_underscore_test.exs @@ -1,6 +1,7 @@ -defmodule Lexical.RemoteControl.CodeMod.ReplaceWithUnderscoreTest do +defmodule Lexical.RemoteControl.CodeAction.Handlers.ReplaceWithUnderscoreTest do alias Lexical.Document - alias Lexical.RemoteControl.CodeMod.ReplaceWithUnderscore + alias Lexical.RemoteControl.CodeAction.Diagnostic + alias Lexical.RemoteControl.CodeAction.Handlers.ReplaceWithUnderscore use Lexical.Test.CodeMod.Case @@ -9,9 +10,27 @@ defmodule Lexical.RemoteControl.CodeMod.ReplaceWithUnderscoreTest do line_number = Keyword.get(options, :line, 1) document = Document.new("file:///file.ex", original_text, 0) - with {:ok, document_edits} <- ReplaceWithUnderscore.edits(document, line_number, variable) do - {:ok, document_edits.edits} - end + message = + """ + warning: variable "#{variable}" is unused (if the variable is not meant to be used, prefix it with an underscore) + /file.ex:#{line_number} + """ + |> String.trim() + + range = + Document.Range.new( + Document.Position.new(document, line_number, 0), + Document.Position.new(document, line_number + 1, 0) + ) + + diagnostic = Diagnostic.new(range, message, nil) + + changes = + document + |> ReplaceWithUnderscore.actions(range, [diagnostic]) + |> Enum.flat_map(& &1.changes.edits) + + {:ok, changes} end describe "fixes in parameters" do diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/definition_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/definition_test.exs index b6ca597cd..fc286bf75 100644 --- a/apps/remote_control/test/lexical/remote_control/code_intelligence/definition_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/definition_test.exs @@ -1,10 +1,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do - use ExUnit.Case, async: true - alias Lexical.Document - alias Lexical.Document.Location alias Lexical.RemoteControl alias Lexical.RemoteControl.ProjectNodeSupervisor + alias Lexical.RemoteControl.Search import Lexical.RemoteControl.Api.Messages import Lexical.Test.CodeSigil @@ -20,6 +18,12 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do |> file_path(Path.join("lib", "my_definition.ex")) |> Document.Path.ensure_uri() + {:ok, _document} = Document.Store.open_temporary(uri) + + on_exit(fn -> + :ok = Document.Store.close(uri) + end) + %{uri: uri} end @@ -38,15 +42,16 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do end setup_all do - start_supervised!(Lexical.Document.Store) - project = project(:navigations) + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) {:ok, _} = 
start_supervised({ProjectNodeSupervisor, project}) {:ok, _, _} = RemoteControl.start_link(project) RemoteControl.Api.register_listener(project, self(), [:all]) RemoteControl.Api.schedule_compile(project, true) + assert_receive project_compiled(), 5000 + assert_receive project_index_ready(), 5000 %{project: project} end @@ -58,6 +63,8 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do on_exit(fn -> :ok = Document.Store.close(uri) end) + + %{subject_uri: uri} end describe "definition/2 when making remote call by alias" do @@ -69,13 +76,15 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do alias MyDefinition def uses_greet() do - MyDefinition.greet|("World") + MyDefinition.gree|t("World") end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) - assert definition_line == ~S[ def «greet»(name) do] + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + + assert definition_line == ~S[ def «greet(name)» do] end test "find the definition of the module", %{project: project, uri: referenced_uri} do @@ -84,63 +93,66 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do alias MyDefinition def uses_greet() do - MyDefinition|.greet("World") + MyDefinitio|n.greet("World") end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + assert definition_line == ~S[defmodule «MyDefinition» do] end + test "find the definition of a struct", %{project: project, uri: referenced_uri} do + subject_module = ~q[ + defmodule UsesRemoteStruct do + alias MyDefinition + + def uses_struct() do + %|MyDefinition{} + end + end + ] + + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + + assert definition_line == " «defstruct [:field, another_field: nil]»" + end + test "find the macro definition", %{project: project, uri: referenced_uri} do subject_module = ~q[ defmodule UsesRemoteFunction do - alias MyDefinition + require MyDefinition def uses_macro() do - MyDefinition.print_hello|() + MyDefinition.print_hel|lo() end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + assert definition_line == ~S[ defmacro «print_hello» do] end - @doc """ - This is a limitation of the ElixirSense. - It doesn't support finding the multiple arity definition when making remote call - currently, it will always return the first definition. 
- - ## Example - - iex> defmodule MultiArity do - ...> def sum(a, b) do - ...> a + b - ...> end - ...> - ...> def sum(a, b, c) do - ...> a + b + c - ...> end - ...> end - - When we want to jump to the definition of `MultiArity.sum/3`, - we will always go to the `MultiArity.sum/2` - """ - @tag :skip - test "find the right arity function definition", %{project: project} do + test "find the right arity function definition", %{ + project: project, + uri: referenced_uri + } do subject_module = ~q[ defmodule UsesRemoteFunction do alias MultiArity def uses_multiple_arity_fun() do - MultiArity.sum|(1, 2, 3) + MultiArity.su|m(1, 2, 3) end end ] - {:ok, referenced_uri, definition_line} = definition(project, subject_module) + {:ok, referenced_uri, definition_line} = definition(project, subject_module, referenced_uri) assert definition_line == ~S[ def «sum»(a, b, c) do] assert referenced_uri =~ "navigations/lib/multi_arity.ex" @@ -156,13 +168,15 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do import MyDefinition def uses_greet() do - greet|("World") + gree|t("World") end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) - assert definition_line == ~S[ def «greet»(name) do] + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + + assert definition_line == ~S[ def «greet(name)» do] end test "find the definition of a remote macro call", @@ -172,12 +186,14 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do import MyDefinition def uses_macro() do - print_hello|() + print_hell|o() end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + assert definition_line == ~S[ defmacro «print_hello» do] end end @@ -191,12 +207,14 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do use MyDefinition def uses_greet() do - greet|("World") + gree|t("World") end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + assert definition_line == ~S[ def «greet»(name) do] end @@ -216,36 +234,107 @@ defmodule Lexical.RemoteControl.CodeIntelligence.DefinitionTest do use MyDefinition def uses_hello_defined_in_using_quote() do - hello_func_in_using|() + hello_func_in_usin|g() end end ] - assert {:ok, ^referenced_uri, definition_line} = definition(project, subject_module) + assert {:ok, ^referenced_uri, definition_line} = + definition(project, subject_module, referenced_uri) + assert definition_line == ~S[ def «hello_func_in_using» do] end end describe "definition/2 when making local call" do - test "find the function definition", %{project: project} do + test "find multiple locations when the module is defined in multiple places", %{ + project: project, + subject_uri: subject_uri + } do + subject_module = ~q[ + defmodule MyModule do # line 1 + end + + defmodule MyModule do # line 4 + end + + defmodule UsesMyModule do + |MyModule + end + ] + + {:ok, [{_, definition_line1}, {_, definition_line4}]} = + definition(project, subject_module, subject_uri) + + assert definition_line1 == ~S[defmodule «MyModule» do # line 1] + assert definition_line4 == ~S[defmodule «MyModule» do # line 4] + end + + test "find the function definition", %{project: project, subject_uri: subject_uri} do subject_module = ~q[ 
defmodule UsesOwnFunction do def greet do end def uses_greet do - greet|() + gree|t() end end ] {:ok, referenced_uri, definition_line} = definition(project, subject_module, subject_uri) assert definition_line == ~S[ def «greet» do] assert referenced_uri =~ "navigations/lib/my_module.ex" end - test "find the function definition when the function has `when` clause", %{ + test "find the function definition when the function has a `when` clause", %{ project: project, subject_uri: subject_uri } do subject_module = ~q[ defmodule UsesOwnFunction do def greet(name) when is_binary(name) do end def uses_greet do gree|t("World") end end ] {:ok, referenced_uri, definition_line} = definition(project, subject_module, subject_uri) assert definition_line == ~S[ def «greet(name) when is_binary(name)» do] assert referenced_uri =~ "navigations/lib/my_module.ex" end test "find only one function when there are multiple same-arity functions", %{ project: project, subject_uri: subject_uri } do subject_module = ~q[ defmodule UsesOwnFunction do def greet(name) when is_atom(name) do IO.inspect(name) end def greet(name) do name end def uses_greet do gree|t("World") end end ] {:ok, referenced_uri, definition_line} = definition(project, subject_module, subject_uri) assert definition_line == ~S[ def «greet(name) when is_atom(name)» do] assert referenced_uri == subject_uri end test "find the attribute", %{project: project, subject_uri: subject_uri} do subject_module = ~q[ defmodule UsesAttribute do @b 2 @@ -256,26 +345,26 @@ end ] - {:ok, referenced_uri, definition_line} = definition(project, subject_module) + {:ok, referenced_uri, definition_line} = definition(project, subject_module, subject_uri) assert definition_line =~ ~S[«@b» 2] assert referenced_uri =~ "navigations/lib/my_module.ex" end - test "find the variable", %{project: project} do + test "find the variable", %{project: project, subject_uri: subject_uri} do subject_module = ~q[ defmodule UsesVariable do def use_variable do a = 1 if true do - a| + |a end end end ] - {:ok, referenced_uri, definition_line} = definition(project, subject_module) + {:ok, referenced_uri, definition_line} = definition(project, subject_module, subject_uri) assert definition_line =~ ~S[«a» = 1] assert referenced_uri =~ "navigations/lib/my_module.ex" @@ -288,35 +377,106 @@ """ @tag :skip test "find the definition when calling a Elixir std module function", - %{project: project} do + %{project: project, subject_uri: subject_uri} do subject_module = ~q[ - String.to_integer|("1") + String.to_intege|r("1") ] - {:ok, uri, definition_line} = definition(project, subject_module) + {:ok, uri, definition_line} = definition(project, subject_module, subject_uri) assert uri =~ "lib/elixir/lib/string.ex" assert definition_line =~ ~S[ def «to_integer»(string) when is_binary(string) do] end - test "find the definition when calling a erlang module", %{project: project} do + test "find the definition when calling an erlang module", %{ + project: project, + subject_uri: subject_uri + } do subject_module = ~q[ - :erlang.binary_to_atom|("1") + :erlang.binary_to_ato|m("1") ] - {:ok, uri, definition_line} = definition(project, subject_module) + {:ok, uri, definition_line} = definition(project, subject_module, subject_uri) assert uri =~
"/src/erlang.erl" assert definition_line =~ ~S[«binary_to_atom»(Binary)] end end - defp definition(project, code) do + describe "definition/2 when making local call to a delegated function" do + setup [:with_referenced_file] + + test "find the definition of the delegated function", %{ + project: project, + uri: uri, + subject_uri: subject_uri + } do + subject_module = ~q[ + defmodule UsesDelegatedFunction do + defdelegate greet(name), to: MyDefinition + + def uses_greet do + gree|t("World") + end + end + ] + + {:ok, [location1, location2]} = definition(project, subject_module, [uri, subject_uri]) + + {referenced_uri, definition_line} = location1 + assert definition_line =~ ~S[ def «greet(name)» do] + assert referenced_uri == uri + + {referenced_uri, definition_line} = location2 + assert definition_line == ~S[ defdelegate «greet(name)», to: MyDefinition] + assert referenced_uri == subject_uri + end + end + + describe "edge cases" do + setup [:with_referenced_file] + + test "doesn't crash with structs defined with DSLs", %{project: project, uri: uri} do + subject_module = ~q[ + defmodule MyTest do + def my_test(%TypedStructs.MacroBased|Struct{}) do + end + end + ] + + assert {:ok, _file, _definition} = definition(project, subject_module, [uri]) + end + end + + defp definition(project, code, referenced_uri) do with {position, code} <- pop_cursor(code), {:ok, document} <- subject_module(project, code), - {:ok, %Location{} = location} <- + :ok <- index(project, referenced_uri), + {:ok, location} <- RemoteControl.Api.definition(project, document, position) do - {:ok, location.document.uri, decorate(location.document, location.range)} + if is_list(location) do + {:ok, Enum.map(location, &{&1.document.uri, decorate(&1.document, &1.range)})} + else + {:ok, location.document.uri, decorate(location.document, location.range)} + end + end + end + + defp index(project, referenced_uris) when is_list(referenced_uris) do + entries = Enum.flat_map(referenced_uris, &do_index/1) + RemoteControl.call(project, Search.Store, :replace, [entries]) + end + + defp index(project, referenced_uri) do + entries = do_index(referenced_uri) + RemoteControl.call(project, Search.Store, :replace, [entries]) + end + + defp do_index(referenced_uri) do + with {:ok, document} <- Document.Store.fetch(referenced_uri), + {:ok, entries} <- + Search.Indexer.Source.index(document.path, Document.to_string(document)) do + entries end end end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs index be339e447..b6d4a58b0 100644 --- a/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/entity_test.exs @@ -2,12 +2,14 @@ defmodule Lexical.RemoteControl.CodeIntelligence.EntityTest do alias Lexical.Document alias Lexical.RemoteControl.CodeIntelligence.Entity + import ExUnit.CaptureIO import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport import Lexical.Test.Fixtures import Lexical.Test.RangeSupport - use ExUnit.Case, async: true + use ExUnit.Case + use Patch describe "module resolve/2" do test "succeeds with trailing period" do @@ -178,6 +180,135 @@ defmodule Lexical.RemoteControl.CodeIntelligence.EntityTest do assert {:ok, {:module, Example.Nested}, resolved_range} = resolve(code) assert resolved_range =~ ~S[ «__MODULE__.Nested»] end + + test "works for erlang modules" do + code = ~q[:code|] + + assert {:ok, 
{:module, :code}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[:code] + end + + test "fails for plain old atoms" do + code = ~q[:not_a_module|] + assert {:error, {:unsupported, {:unquoted_atom, ~c"not_a_module"}}} = resolve(code) + end + + test "handles inline embeds_one" do + code = ~q[ + defmodule MyEcto do + use Ecto.Schema + + schema "user" do + embeds_one :address, Address| do + field :street, :string + end + end + end + ] + assert {:ok, {:module, MyEcto.Address}, resolved_range} = resolve(code) + assert resolved_range =~ ~s[Address] + end + + test "handles inline embeds_many" do + code = ~q[ + defmodule MyEcto do + use Ecto.Schema + + schema "user" do + embeds_many :addresses, Address| do + field :street, :string + end + end + end + ] + assert {:ok, {:module, MyEcto.Address}, resolved_range} = resolve(code) + assert resolved_range =~ ~s[Address] + end + end + + describe "controller module resolve/2 in the phoenix router" do + setup do + patch(Entity, :function_exists?, fn + FooWeb.FooController, :call, 2 -> true + FooWeb.FooController, :action, 2 -> true + end) + + :ok + end + + test "succeeds in the `get` block" do + code = ~q[ + scope "/foo", FooWeb do + get "/foo", |FooController, :index + end + ] + + assert {:ok, {:module, FooWeb.FooController}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[get "/foo", «FooController», :index] + end + + test "succeeds in the `post` block" do + code = ~q[ + scope "/foo", FooWeb do + post "/foo", |FooController, :create + end + ] + + assert {:ok, {:module, FooWeb.FooController}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[post "/foo", «FooController», :create] + end + + test "succeeds even when the scope module has multiple dots" do + patch(Entity, :function_exists?, fn + FooWeb.Bar.FooController, :call, 2 -> true + FooWeb.Bar.FooController, :action, 2 -> true + end) + + code = ~q[ + scope "/foo", FooWeb.Bar do + get "/foo", |FooController, :index + end + ] + + assert {:ok, {:module, FooWeb.Bar.FooController}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[get "/foo", «FooController», :index] + end + + test "succeeds in nested scopes" do + patch(Entity, :function_exists?, fn + FooWeb.Bar.FooController, :call, 2 -> true + FooWeb.Bar.FooController, :action, 2 -> true + end) + + code = ~q[ + scope "/", FooWeb do + scope "/bar", Bar do + get "/foo", |FooController, :index + end + end + ] + + assert {:ok, {:module, FooWeb.Bar.FooController}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[get "/foo", «FooController», :index] + end + end + + describe "liveview module resolve in the router" do + test "succeeds in the `live` block" do + patch(Entity, :function_exists?, fn + FooWeb.FooLive, :mount, 2 -> true + FooWeb.FooLive, :render, 1 -> true + end) + + code = ~q[ + scope "/foo", FooWeb do + live "/foo", |FooLive + end + ] + + assert {:ok, {:module, FooLive}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[live "/foo", «FooLive»] + end end describe "struct resolve/2" do @@ -263,7 +394,9 @@ defmodule Lexical.RemoteControl.CodeIntelligence.EntityTest do defstruct [] end - %|Inner{} + def make do + %|Inner{} + end end > @@ -411,6 +544,309 @@ assert {:ok, {:call, MyModule, :my_fun, 0}, resolved_range} = resolve(code) assert resolved_range =~ ~S[ «MyModule.my_fun»() :: MyModule.t()] end + + test "qualified call for an erlang function" do + code = ~q[ + :code.which|() + ] + assert {:ok, {:call, :code,
:which, 0}, resolved_range} = resolve(code) + assert resolved_range =~ "«:code.which»()" + end + + test "captured calls with arity" do + code = ~q[ + &MyModule.|my_fun/2 + ] + assert {:ok, {:call, MyModule, :my_fun, 2}, resolved_range} = resolve(code) + assert resolved_range =~ "«MyModule.my_fun»/2" + end + + test "captured calls with args" do + code = ~q[ + &MyModule.|my_fun(:foo, &1) + ] + assert {:ok, {:call, MyModule, :my_fun, 2}, resolved_range} = resolve(code) + assert resolved_range =~ "&«MyModule.my_fun»(:foo, &1)" + end + + test "defstruct call" do + code = ~q[ + defmodule MyModule do + defstruct| foo: nil + end + ] + assert {:ok, {:call, Kernel, :defstruct, 1}, resolved_range} = resolve(code) + assert resolved_range =~ " «defstruct» foo: nil" + end + + test "comments are ignored" do + code = ~q[ + defmodule Scratch do + def many_such_pipes() do + "pipe" + |> a_humble_pipe() + # |> another_|humble_pipe() + |> a_humble_pipe() + end + end + ] + + assert {:error, :not_found} = resolve(code) + end + end + + describe "local call" do + test "in function definition" do + code = ~q[ + defmodule Parent do + + def my_call|(a, b) + end + ] + assert {:ok, {:call, Parent, :my_call, 2}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[def «my_call»(a, b)] + end + + test "in zero arg function definition" do + code = ~q[ + defmodule Parent do + def zero_ar|g do + end + end + ] + + assert {:ok, {:call, Parent, :zero_arg, 0}, resolved_range} = resolve(code, evaluate: true) + assert resolved_range =~ " def «zero_arg» do" + end + + @tag skip: Version.match?(System.version(), "< 1.15.0") + test "in zero arg function call" do + code = ~q[ + defmodule Parent do + def zero_arg do + zero_ar|g + end + end + ] + + assert {:ok, {:call, Parent, :zero_arg, 0}, resolved_range} = resolve(code, evaluate: true) + assert resolved_range =~ " «zero_arg»" + end + + test "in private function definition" do + code = ~q[ + defmodule Parent do + + defp my_call|(a, b) + end + ] + assert {:ok, {:call, Parent, :my_call, 2}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[defp «my_call»(a, b)] + end + + test "in function definition without parens" do + code = ~q[ + defmodule Parent do + + def |my_call + end + ] + assert {:ok, {:call, Parent, :my_call, 0}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[def «my_call»] + end + + test "in private function definition without parens" do + code = ~q[ + defmodule Parent do + + defp |my_call + end + ] + assert {:ok, {:call, Parent, :my_call, 0}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[defp «my_call»] + end + + test "in function body with arity 0" do + code = ~q[ + defmodule Parent do + def function do + local_fn|() + end + end + ] + assert {:ok, {:call, Parent, :local_fn, 0}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ «local_fn»()] + end + + test "in function body with arity 2" do + code = ~q[ + defmodule Parent do + def function do + local_fn|(a, b) + end + end + ] + assert {:ok, {:call, Parent, :local_fn, 2}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ «local_fn»(a, b)] + end + + test "in a function capture" do + code = ~q[ + defmodule Parent do + def function do + &local_fn|/1 + end + end + ] + + assert {:ok, {:call, Parent, :local_fn, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ &«local_fn»/1] + end + + test "failed at the position of the numerator" do + code = ~q[ + defmodule Parent do + def function do + a = 1 + a|/1 + end + end + ] + + assert {:error,
:not_found} = resolve(code) + end + + test "failed at the position of the denominator" do + code = ~q[ + defmodule Parent do + def function do + a = 1 + a/|1 + end + end + ] + + assert {:error, :not_found} = resolve(code) + end + + test "in a function capture and the cursor is at the `ampersand`" do + code = ~q[ + defmodule Parent do + def function do + &|local_fn/1 + end + end + ] + + assert {:ok, {:call, Parent, :local_fn, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ &«local_fn»/1] + end + + test "in a function capture with arity 2" do + code = ~q[ + defmodule Parent do + def function do + &|local_fn/2 + end + end + ] + + assert {:ok, {:call, Parent, :local_fn, 2}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ &«local_fn»/2] + end + + test "in a function capture with params" do + code = ~q[ + defmodule Parent do + def function do + &local_fn|(&1, 1) + end + end + ] + assert {:ok, {:call, Parent, :local_fn, 2}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ &«local_fn»(&1, 1)] + end + + test "in another call" do + code = ~q[ + defmodule Parent do + def function do + Module.remote(local_call|(3)) + end + end + ] + assert {:ok, {:call, Parent, :local_call, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ Module.remote(«local_call»(3))] + end + + test "in a pipe" do + code = ~q[ + defmodule Parent do + def function do + something + |> Module.a() + |> local_call|() + end + end + ] + assert {:ok, {:call, Parent, :local_call, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[ |> «local_call»()] + end + + test "returns a nil module when outside of a module" do + code = ~q[ + local_call|() + ] + assert {:ok, {:call, nil, :local_call, 0}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[«local_call»()] + end + end + + describe "imported call" do + test "imported in module scope" do + code = ~q[ + defmodule Parent do + import Lexical.Ast + + def parse(doc), do: |from(doc) + end + ] + + assert {:ok, {:call, Lexical.Ast, :from, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[«from»(doc)] + end + + test "imported in function scope" do + code = ~q[ + defmodule Parent do + def parse(doc) do + import Lexical.Ast + |from(doc) + end + end + ] + + assert {:ok, {:call, Lexical.Ast, :from, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[«from»(doc)] + end + + test "imports in a different scope don't clobber local calls" do + code = ~q[ + defmodule Parent do + def parse(doc) do + import Lexical.Ast + from(doc) + end + + def parse2(doc) do + |from(doc) + end + end + ] + + assert {:ok, {:call, Parent, :from, 1}, resolved_range} = resolve(code) + assert resolved_range =~ ~S[«from»(doc)] + end end describe "type resolve/2" do @@ -453,6 +889,76 @@ end end + describe "module attribute resolve" do + test "in a scalar definition" do + code = ~q[ + defmodule Parent do + @attribut|e 3 + end + ] + + assert {:ok, {:module_attribute, Parent, :attribute}, resolved_range} = resolve(code) + assert resolved_range =~ "«@attribute» 3" + end + + test "in a nested reference" do + code = ~q[ + defmodule Parent do + @foo 3 + @ba|r @foo + 1 + end + ] + + assert {:ok, {:module_attribute, Parent, :bar}, resolved_range} = resolve(code) + assert resolved_range =~ "«@bar» @foo + 1" + end + + test "in a function definition" do + code = ~q[ + defmodule Parent do + + def my_fun(@fo|o), do: 3 + end + ] + + assert {:ok, {:module_attribute, Parent,
:foo}, resolved_range} = resolve(code) + assert resolved_range =~ "def my_fun(«@foo»), do: 3" + end + + test "in map keys" do + code = ~q[ + defmodule Parent do + + def my_fun(_), do: %{@fo|o => 3} + end + ] + + assert {:ok, {:module_attribute, Parent, :foo}, resolved_range} = resolve(code) + assert resolved_range =~ "%{«@foo» => 3}" + end + + test "in map values" do + code = ~q[ + defmodule Parent do + + def my_fun(_), do: %{foo: @fo|o} + end + ] + + assert {:ok, {:module_attribute, Parent, :foo}, resolved_range} = resolve(code) + assert resolved_range =~ "%{foo: «@foo»}" + end + + test "returns a nil module when you're not in a module context" do + code = ~q[ + @fo|o 3 + ] + + assert {:ok, {:module_attribute, nil, :foo}, resolved_range} = resolve(code) + assert resolved_range =~ "«@foo» 3" + end + end + defp subject_module_uri do project() |> file_path(Path.join("lib", "my_module.ex")) @@ -464,11 +970,25 @@ defmodule Lexical.RemoteControl.CodeIntelligence.EntityTest do Document.new(uri, content, 1) end - defp resolve(code) do + defp resolve(code, opts \\ []) do + evaluate? = Keyword.get(opts, :evaluate, false) + with {position, code} <- pop_cursor(code), + :ok <- maybe_evaluate(code, evaluate?), document = subject_module(code), - {:ok, resolved, range} <- Entity.resolve(document, position) do + analysis = Lexical.Ast.analyze(document), + {:ok, resolved, range} <- Entity.resolve(analysis, position) do {:ok, resolved, decorate(document, range)} end end + + defp maybe_evaluate(_code, false), do: :ok + + defp maybe_evaluate(code, true) do + capture_io(:stderr, fn -> + Code.compile_string(code) + end) + + :ok + end end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs new file mode 100644 index 000000000..06932111d --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/references_test.exs @@ -0,0 +1,324 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.ReferencesTest do + alias Lexical.Document + alias Lexical.Document.Location + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeIntelligence.References + alias Lexical.RemoteControl.Search + alias Lexical.RemoteControl.Search.Store.Backends + + use ExUnit.Case, async: false + + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.Fixtures + import Lexical.Test.RangeSupport + import Lexical.Test.EventualAssertions + + setup do + project = project() + + Backends.Ets.destroy_all(project) + RemoteControl.set_project(project) + + start_supervised!(Document.Store) + start_supervised!(RemoteControl.Dispatch) + start_supervised!(Backends.Ets) + + start_supervised!( + {Search.Store, [project, fn _ -> {:ok, []} end, fn _, _ -> {:ok, [], []} end, Backends.Ets]} + ) + + Search.Store.enable() + assert_eventually Search.Store.loaded?(), 1500 + + on_exit(fn -> + Backends.Ets.destroy_all(project) + end) + + {:ok, project: project} + end + + defp module_uri(project) do + project + |> file_path(Path.join("lib", "my_module.ex")) + |> Document.Path.ensure_uri() + end + + defp project_module(project, content) do + uri = module_uri(project) + + with :ok <- Document.Store.open(uri, content, 1) do + Document.Store.fetch(uri) + end + end + + describe "function references" do + test "are found inside public functions", %{project: project} do + code = ~q/ + defmodule Functions do + def func(x), do: Enum.map(x, & &1 + 1) + end + / + assert [%Location{} = location] 
= references(project, "Enum.map|(a, b)", code) + assert decorate(code, location.range) =~ "def func(x), do: «Enum.map(x, & &1 + 1)»" + end + + test "are found inside private functions", %{project: project} do + code = ~q/ + defmodule Functions do + defp func(x), do: Enum.map(x, & &1 + 1) + end + / + assert [%Location{} = location] = references(project, "Enum.map|(a, b)", code) + assert decorate(code, location.range) =~ "defp func(x), do: «Enum.map(x, & &1 + 1)»" + end + + test "are found in aliased functions", %{project: project} do + code = ~q/ + defmodule Functions do + alias Enum, as: E + defp func(x), do: E.map(x, & &1 + 1) + end + / + assert [%Location{} = location] = references(project, "Enum.map|(a, b)", code) + assert decorate(code, location.range) =~ "defp func(x), do: «E.map(x, & &1 + 1)»" + end + + test "are found in imported functions", %{project: project} do + code = ~q/ + defmodule Functions do + import Enum, only: [map: 2] + defp func(x), do: map(x, & &1 + 1) + end + / + assert [%Location{} = location] = references(project, "Enum.map|(a, b)", code) + assert decorate(code, location.range) =~ "defp func(x), do: «map(x, & &1 + 1)»" + end + + test "are found in local functions", %{project: project} do + code = ~q/ + defmodule Functions do + def do_map(a, b), do: Enum.map(a, b) + + def func(x), do: do_map(x, & &1 + 1) + + end + / + assert [%Location{} = location] = references(project, "Functions.do_map|(a, b)", code) + assert decorate(code, location.range) =~ "def func(x), do: «do_map(x, & &1 + 1)»" + end + + test "are found in function definitions with optional arguments", %{project: project} do + referenced = ~q/ + defmodule Functions do + def do_map|(a, b, c \\ 3), do: {a, b, c} + def func(x), do: do_map(x, 3, 5) + def func2(x), do: do_map(x, 3) + end + / + + {_, code} = pop_cursor(referenced) + + references = references(project, referenced, code) + assert [first, second] = Enum.sort_by(references, & &1.range.start.line) + assert decorate(code, first.range) =~ " def func(x), do: «do_map(x, 3, 5)»" + assert decorate(code, second.range) =~ " def func2(x), do: «do_map(x, 3)»" + end + end + + describe "module references" do + # Note: These tests aren't exhaustive, as that is covered by Search.StoreTest. 
+ test "are found in an alias", %{project: project} do + code = ~q[ + defmodule ReferencesInAlias do + alias ReferencedModule + end + ] + + assert [%Location{} = location] = references(project, "ReferencedModule|", code) + assert decorate(code, location.range) =~ ~s[alias «ReferencedModule»] + end + + test "are found in a module attribute", %{project: project} do + code = ~q[ + defmodule ReferenceInAttribute do + @attr ReferencedModule + end + ] + + assert [%Location{} = location] = references(project, "ReferencedModule|", code) + assert decorate(code, location.range) =~ ~s[@attr «ReferencedModule»] + end + + test "are found in a variable", %{project: project} do + code = ~q[ + some_module = ReferencedModule + ] + + assert [%Location{} = location] = references(project, "ReferencedModule|", code) + assert decorate(code, location.range) =~ ~s[some_module = «ReferencedModule»] + end + + test "are found in a function's parameters", %{project: project} do + code = ~q[ + def some_fn(ReferencedModule) do + end + ] + + assert [%Location{} = location] = references(project, "ReferencedModule|", code) + assert decorate(code, location.range) =~ ~s[def some_fn(«ReferencedModule») do] + end + + test "includes struct definitions", %{project: project} do + code = ~q[ + %ReferencedModule{} = something_else + ] + + assert [%Location{} = location] = references(project, "ReferencedModule|", code) + assert decorate(code, location.range) =~ ~s[%«ReferencedModule»{} = something_else] + end + + test "includes definitions if the parameter is true", %{project: project} do + code = ~q[ + defmodule DefinedModule do + end + + defmodule OtherModule do + @attr DefinedModule + end + ] + + assert [location_1, location_2] = references(project, "DefinedModule|", code, true) + assert decorate(code, location_1.range) =~ ~s[defmodule «DefinedModule» do] + assert decorate(code, location_2.range) =~ ~s[@attr «DefinedModule»] + end + end + + describe "struct references" do + test "includes their definition if the parameter is true", %{project: project} do + code = ~q( + defmodule Struct do + defstruct [:field] + end + ) + + assert [location] = references(project, "%Struct|{}", code, true) + assert decorate(code, location.range) =~ "«defstruct [:field]»" + end + + test "includes references if defstruct is selected", %{project: project} do + code = ~q[ + defmodule UsesStruct do + def something(%Struct{}) do + end + end + ] + + selector = ~q( + defmodule Struct do + defstruc|t [:name, :value] + end + ) + assert [location] = references(project, selector, code) + assert decorate(code, location.range) =~ "def something(«%Struct{}») do" + end + + test "excludes their definition", %{project: project} do + code = ~q( + defmodule Struct do + defstruct [:field] + end + ) + + assert [] = references(project, "%Struct|{}", code) + end + end + + describe "module attribute references" do + test "are found in a module", %{project: project} do + code = ~q[ + defmodule Refs do + @attr 3 + + def fun(@attr), do: true + end + ] + + query = ~q[ + defmodule Refs do + @att|r 3 + end + + ] + + assert [reference] = references(project, query, code) + assert decorate(code, reference.range) =~ " def fun(«@attr»), do: true" + end + + test "includes definitions if the parameter is true", %{project: project} do + code = ~q[ + defmodule Refs do + @attr 3 + + def fun(@attr), do: true + end + ] + + query = ~q[ + defmodule Refs do + @att|r 3 + end + + ] + + assert [definition, reference] = references(project, query, code, true) + assert decorate(code, 
definition.range) =~ "«@attr 3»" + assert decorate(code, reference.range) =~ " def fun(«@attr»), do: true" + end + end + + describe "variable references" do + test "are found in a function body", %{project: project} do + query = ~S[ + def my_fun do + first| = 4 + y = first * 2 + z = y * 3 + first + end + ] + + {_, code} = pop_cursor(query) + + assert [ref_1, ref_2] = references(project, query, code) + + assert decorate(code, ref_1.range) =~ " y = «first» * 2" + assert decorate(code, ref_2.range) =~ " z = y * 3 + «first»" + end + + test "can include definitions", %{project: project} do + query = ~S[ + def my_fun do + first = 4 + y = first| * 2 + z = y * 3 + first + end + ] + + {_, code} = pop_cursor(query) + + assert [definition, _ref_1, _ref_2] = references(project, query, code, true) + assert decorate(code, definition.range) =~ " «first» = 4" + end + end + + defp references(project, referenced, code, include_definitions? \\ false) do + with {position, referenced} <- pop_cursor(referenced, as: :document), + {:ok, document} <- project_module(project, code), + {:ok, entries} <- Search.Indexer.Source.index(document.path, code), + :ok <- Search.Store.replace(entries) do + referenced + |> Lexical.Ast.analyze() + |> References.references(position, include_definitions?) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/symbols_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/symbols_test.exs new file mode 100644 index 000000000..9150a6363 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/symbols_test.exs @@ -0,0 +1,625 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.SymbolsTest do + alias Lexical.Document + alias Lexical.RemoteControl.CodeIntelligence.Symbols + alias Lexical.RemoteControl.Search.Indexer.Extractors + alias Lexical.RemoteControl.Search.Indexer.Source + + use ExUnit.Case + use Patch + + import Lexical.Test.CodeSigil + import Lexical.Test.RangeSupport + + def document_symbols(code) do + doc = Document.new("file:///file.ex", code, 1) + symbols = Symbols.for_document(doc) + {symbols, doc} + end + + def workspace_symbols(code) do + doc = Document.new("file:///file.ex", code, 1) + + {:ok, entries} = + Source.index_document(doc, [ + Extractors.ExUnit, + Extractors.FunctionDefinition, + Extractors.FunctionReference, + Extractors.Module, + Extractors.ModuleAttribute, + Extractors.StructReference + ]) + + entries = Enum.reject(entries, &(&1.type == :metadata)) + patch(Lexical.RemoteControl.Search.Store, :fuzzy, {:ok, entries}) + symbols = Symbols.for_workspace("") + {symbols, doc} + end + + defp in_a_module(code) do + """ + defmodule Parent do + #{code} + end + """ + end + + describe "document symbols" do + test "a top level module is found" do + {[%Symbols.Document{} = module], doc} = + ~q[ + defmodule MyModule do + end + ] + |> document_symbols() + + assert decorate(doc, module.detail_range) =~ "defmodule «MyModule» do" + assert module.name == "MyModule" + assert module.type == :module + assert module.children == [] + end + + test "multiple top-level modules are found" do + {[first, second], doc} = + ~q[ + defmodule First do + end + + defmodule Second do + end + ] + |> document_symbols() + + assert decorate(doc, first.detail_range) =~ "defmodule «First» do" + assert first.name == "First" + assert first.type == :module + + assert decorate(doc, second.detail_range) =~ "defmodule «Second» do" + assert second.name == "Second" + assert second.type == :module + end + + test "nested 
modules are found" do + {[outer], doc} = + ~q[ + defmodule Outer do + defmodule Inner do + defmodule Innerinner do + end + end + end + ] + |> document_symbols() + + assert decorate(doc, outer.detail_range) =~ "defmodule «Outer» do" + assert outer.name == "Outer" + assert outer.type == :module + + assert [inner] = outer.children + assert decorate(doc, inner.detail_range) =~ "defmodule «Inner» do" + assert inner.name == "Outer.Inner" + assert inner.type == :module + + assert [inner_inner] = inner.children + assert decorate(doc, inner_inner.detail_range) =~ "defmodule «Innerinner» do" + assert inner_inner.name == "Outer.Inner.Innerinner" + assert inner_inner.type == :module + end + + test "module attribute definitions are found" do + {[module], doc} = + ~q[ + defmodule Module do + @first 3 + @second 4 + end + ] + |> document_symbols() + + assert [first, second] = module.children + assert decorate(doc, first.detail_range) =~ " «@first 3»" + assert first.name == "@first" + + assert decorate(doc, second.detail_range) =~ " «@second 4»" + assert second.name == "@second" + end + + test "in-progress module attributes are skipped" do + {[module], doc} = + ~q[ + defmodule Module do + @ + @callback foo() :: :ok + end + ] + |> document_symbols() + + assert module.type == :module + assert module.name == "Module" + + [callback] = module.children + + assert callback.type == :module_attribute + assert callback.name == "@callback" + assert callback.range == callback.detail_range + assert decorate(doc, callback.range) =~ "«@callback foo() :: :ok»" + end + + test "module attribute references are skipped" do + {[module], _doc} = + ~q[ + defmodule Parent do + @attr 3 + def my_fun() do + @attr + end + end + ] + |> document_symbols() + + [_attr_def, function_def] = module.children + [] = function_def.children + end + + test "public function definitions are found" do + {[module], doc} = + ~q[ + defmodule Module do + def my_fn do + end + end + ] + |> document_symbols() + + assert [function] = module.children + assert decorate(doc, function.detail_range) =~ " def «my_fn» do" + end + + test "public functions created with defdelegate are found" do + {[module], doc} = + ~q[ + defmodule Module do + defdelegate map(enumerable, func), to: Enum + end + ] + |> document_symbols() + + assert [function] = module.children + assert function.type == {:function, :delegate} + + assert decorate(doc, function.detail_range) =~ + " defdelegate «map(enumerable, func)», to: Enum" + end + + test "public functions created with defdelegate using as are found" do + {[module], doc} = + ~q[ + defmodule Module do + defdelegate collect(enumerable, func), to: Enum, as: :map + end + ] + |> document_symbols() + + assert [function] = module.children + + assert decorate(doc, function.detail_range) =~ + " defdelegate «collect(enumerable, func)», to: Enum, as: :map" + end + + test "private function definitions are found" do + {[module], doc} = + ~q[ + defmodule Module do + defp my_fn do + end + end + ] + |> document_symbols() + + assert [function] = module.children + assert decorate(doc, function.detail_range) =~ " defp «my_fn» do" + assert function.name == "defp my_fn" + end + + test "multiple arity functions are grouped" do + {[module], doc} = + ~q[ + defmodule Module do + def function_arity(:foo), do: :ok + def function_arity(:bar), do: :ok + def function_arity(:baz), do: :ok + end + ] + |> document_symbols() + + assert [parent] = module.children + assert parent.name == "def function_arity/1" + + expected_range = + """ + «def function_arity(:foo), 
do: :ok + def function_arity(:bar), do: :ok + def function_arity(:baz), do: :ok» + """ + |> String.trim_trailing() + + assert decorate(doc, parent.range) =~ expected_range + assert [first, second, third] = parent.children + + assert first.name == "function_arity(:foo)" + assert decorate(doc, first.range) =~ "«def function_arity(:foo), do: :ok»" + assert decorate(doc, first.detail_range) =~ "def «function_arity(:foo)», do: :ok" + + assert second.name == "function_arity(:bar)" + assert decorate(doc, second.range) =~ "«def function_arity(:bar), do: :ok»" + assert decorate(doc, second.detail_range) =~ "def «function_arity(:bar)», do: :ok" + + assert third.name == "function_arity(:baz)" + assert decorate(doc, third.range) =~ "«def function_arity(:baz), do: :ok»" + assert decorate(doc, third.detail_range) =~ "def «function_arity(:baz)», do: :ok" + end + + test "multiple arity private functions are grouped" do + {[module], doc} = + ~q[ + defmodule Module do + defp function_arity(:foo), do: :ok + defp function_arity(:bar), do: :ok + defp function_arity(:baz), do: :ok + end + ] + |> document_symbols() + + assert [parent] = module.children + assert parent.name == "defp function_arity/1" + + expected_range = + """ + «defp function_arity(:foo), do: :ok + defp function_arity(:bar), do: :ok + defp function_arity(:baz), do: :ok» + """ + |> String.trim_trailing() + + assert decorate(doc, parent.range) =~ expected_range + assert [first, second, third] = parent.children + + assert first.name == "function_arity(:foo)" + assert decorate(doc, first.range) =~ "«defp function_arity(:foo), do: :ok»" + assert decorate(doc, first.detail_range) =~ "defp «function_arity(:foo)», do: :ok" + + assert second.name == "function_arity(:bar)" + assert decorate(doc, second.range) =~ "«defp function_arity(:bar), do: :ok»" + assert decorate(doc, second.detail_range) =~ "defp «function_arity(:bar)», do: :ok" + + assert third.name == "function_arity(:baz)" + assert decorate(doc, third.range) =~ "«defp function_arity(:baz), do: :ok»" + assert decorate(doc, third.detail_range) =~ "defp «function_arity(:baz)», do: :ok" + end + + test "groups public and private functions separately" do + {[module], _doc} = + ~q[ + defmodule Module do + def fun_one(:foo), do: :ok + def fun_one(:bar), do: :ok + + defp fun_one(:foo, :bar), do: :ok + defp fun_one(:bar, :baz), do: :ok + end + ] + |> document_symbols() + + assert [first, second] = module.children + assert first.name == "def fun_one/1" + assert second.name == "defp fun_one/2" + end + + test "line breaks are stripped" do + {[module], _doc} = + ~q[ + defmodule Module do + def long_function( + arg_1, + arg_2, + arg_3) do + end + end + ] + |> document_symbols() + + assert [function] = module.children + assert function.name == "def long_function( arg_1, arg_2, arg_3)" + end + + test "line breaks are stripped for grouped functions" do + {[module], _doc} = + ~q[ + defmodule Module do + def long_function( + :foo, + arg_2, + arg_3) do + end + + def long_function( + :bar, + arg_2, + arg_3) do + end + def long_function( + :baz, + arg_2, + arg_3) do + end + + end + ] + |> document_symbols() + + assert [function] = module.children + assert function.name == "def long_function/3" + + assert [first, second, third] = function.children + assert first.name == "long_function( :foo, arg_2, arg_3)" + assert second.name == "long_function( :bar, arg_2, arg_3)" + assert third.name == "long_function( :baz, arg_2, arg_3)" + end + + test "struct definitions are found" do + {[module], doc} = + ~q{ + defmodule Module do 
+ defstruct [:name, :value] + end + } + |> document_symbols() + + assert [struct] = module.children + assert decorate(doc, struct.detail_range) =~ " «defstruct [:name, :value]»" + assert struct.name == "%Module{}" + assert struct.type == :struct + end + + test "struct references are skipped" do + assert {[], _doc} = + ~q[%OtherModule{}] + |> document_symbols() + end + + test "variable definitions are skipped" do + {[module], _doc} = + ~q[ + defmodule Module do + defp my_fn do + my_var = 3 + end + end + ] + |> document_symbols() + + assert [function] = module.children + assert [] = function.children + end + + test "variable references are skipped" do + {[module], doc} = + ~q[ + defmodule Module do + defp my_fn do + my_var = 3 + my_var + end + end + ] + |> document_symbols() + + [fun] = module.children + assert decorate(doc, fun.detail_range) =~ " defp «my_fn» do" + assert fun.type == {:function, :private} + assert fun.name == "defp my_fn" + assert [] == fun.children + end + + test "guards are shown in the name" do + {[module], doc} = + ~q[ + defmodule Module do + def my_fun(x) when x > 0 do + end + end + ] + |> document_symbols() + + [fun] = module.children + assert decorate(doc, fun.detail_range) =~ " def «my_fun(x) when x > 0» do" + assert fun.type == {:function, :public} + assert fun.name == "def my_fun(x) when x > 0" + assert [] == fun.children + end + + test "types show only their name" do + {[module], doc} = + ~q[ + @type something :: :ok + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + + [type] = module.children + assert decorate(doc, type.detail_range) =~ "«@type something :: :ok»" + assert type.name == "@type something" + assert type.type == :type + end + + test "specs are ignored" do + {[module], _doc} = + ~q[ + @spec my_fun(integer()) :: :ok + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + + test "docs are ignored" do + assert {[module], _doc} = + ~q[ + @doc """ + Hello + """ + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + + test "moduledocs are ignored" do + assert {[module], _doc} = + ~q[ + @moduledoc """ + Hello + """ + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + + test "derives are ignored" do + assert {[module], _doc} = + ~q[ + @derive {Something, other} + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + + test "impl declarations are ignored" do + assert {[module], _doc} = + ~q[ + @impl GenServer + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + + test "tags are ignored" do + assert {[module], _doc} = + ~q[ + @tag :skip + ] + |> in_a_module() + |> document_symbols() + + assert module.type == :module + assert module.children == [] + end + end + + describe "workspace symbols" do + test "converts a module entry" do + {[module], doc} = + ~q[ + defmodule Parent.Child do + end + ] + |> workspace_symbols() + + assert module.type == :module + assert module.name == "Parent.Child" + assert module.link.uri == "file:///file.ex" + refute module.container_name + + assert decorate(doc, module.link.range) =~ "«defmodule Parent.Child do\nend»" + assert decorate(doc, module.link.detail_range) =~ "defmodule «Parent.Child» do" + end + + test "converts a function entry with zero args" do + {[_module, public_function, 
private_function], doc} = + ~q[ + defmodule Parent.Child do + def my_fn do + end + + defp private_fun(a, b) do + end + end + ] + |> workspace_symbols() + + assert public_function.type == {:function, :public} + assert String.ends_with?(public_function.name, ".my_fn/0") + assert public_function.link.uri == "file:///file.ex" + refute public_function.container_name + + assert decorate(doc, public_function.link.range) =~ " «def my_fn do\n end»" + assert decorate(doc, public_function.link.detail_range) =~ " def «my_fn» do" + + assert private_function.type == {:function, :private} + assert private_function.name == "Parent.Child.private_fun/2" + assert private_function.link.uri == "file:///file.ex" + refute private_function.container_name + + assert decorate(doc, private_function.link.range) =~ " «defp private_fun(a, b) do\n end»" + assert decorate(doc, private_function.link.detail_range) =~ " defp «private_fun(a, b)» do" + end + + test "converts protocol implementations" do + {symbols, _doc} = + ~q[ + defimpl SomeProtocol, for: Atom do + def do_stuff(atom, opts) do + end + end + ] + |> workspace_symbols() + + [proto_impl, defined_module, protocol_module, proto_target, function] = symbols + + assert proto_impl.type == {:protocol, :implementation} + assert proto_impl.name == "SomeProtocol" + + assert defined_module.type == :module + assert defined_module.name == "SomeProtocol.Atom" + + assert protocol_module.type == :module + assert protocol_module.name == "SomeProtocol" + + assert proto_target.type == :module + assert proto_target.name == "Atom" + + assert function.type == {:function, :public} + assert function.name == "SomeProtocol.Atom.do_stuff/2" + end + + test "converts protocol definitions" do + {[protocol, function], _doc} = + ~q[ + defprotocol MyProto do + def do_stuff(something, other) + end + ] + |> workspace_symbols() + + assert protocol.type == {:protocol, :definition} + assert protocol.name == "MyProto" + + assert function.type == {:function, :usage} + assert function.name == "MyProto.do_stuff/2" + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_intelligence/variable_test.exs b/apps/remote_control/test/lexical/remote_control/code_intelligence/variable_test.exs new file mode 100644 index 000000000..4ede9e690 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_intelligence/variable_test.exs @@ -0,0 +1,490 @@ +defmodule Lexical.RemoteControl.CodeIntelligence.VariableTest do + alias Lexical.Ast + alias Lexical.RemoteControl.CodeIntelligence.Variable + + use ExUnit.Case + + import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.RangeSupport + + def find_definition(code) do + {position, document} = pop_cursor(code, as: :document) + analysis = Ast.analyze(document) + {:ok, {:local_or_var, var_name}} = Ast.cursor_context(analysis, position) + + case Variable.definition(analysis, position, List.to_atom(var_name)) do + {:ok, entry} -> {:ok, entry.range, document} + error -> error + end + end + + def find_references(code, include_definition? \\ false) do + {position, document} = pop_cursor(code, as: :document) + analysis = Ast.analyze(document) + {:ok, {:local_or_var, var_name}} = Ast.cursor_context(analysis, position) + + ranges = + analysis + |> Variable.references(position, List.to_atom(var_name), include_definition?) 
+ |> Enum.map(& &1.range) + + {:ok, ranges, document} + end + + describe "definitions in a single scope" do + test "are returned if it is selected" do + {:ok, range, doc} = + ~q[ + def foo(param|) do + param + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "def foo(«param») do" + end + + test "are found in a parameter" do + {:ok, range, doc} = + ~q[ + def foo(param) do + param| + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "def foo(«param») do" + end + + test "are found in a parameter list" do + {:ok, range, doc} = + ~q[ + def foo(other_param, param) do + param| + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "def foo(other_param, «param») do" + end + + test "are found when shadowed" do + {:ok, range, doc} = + ~q[ + def foo(param) do + param = param + 1 + param| + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "«param» = param + 1" + end + + test "are found when shadowing a parameter" do + {:ok, range, doc} = + ~q[ + def foo(param) do + param = param| + 1 + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "def foo(«param») do" + end + + test "when there are multiple definitions on one line" do + {:ok, range, doc} = + ~q[ + param = 3 + foo = param = param + 1 + param| + ] + |> find_definition() + + assert decorate(doc, range) =~ "= «param» = param + 1" + end + + test "when the definition is in a map key" do + {:ok, range, doc} = + ~q[ + %{key: value} = map + value| + ] + |> find_definition() + + assert decorate(doc, range) =~ "%{key: «value»} = map" + end + end + + describe "definitions across scopes" do + test "works in an if in a function" do + {:ok, range, doc} = + ~q[ + def my_fun do + foo = 3 + if something do + foo| + end + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "«foo» = 3" + end + + test "works for variables defined in a module" do + {:ok, range, doc} = + ~q[ + defmodule Parent do + x = 3 + def fun do + unquote(x|) + end + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "«x» = 3" + end + + test "works for variables defined outside module" do + {:ok, range, doc} = + ~q[ + x = 3 + defmodule Parent do + def fun do + unquote(x|) + end + end + ] + |> find_definition() + + assert decorate(doc, range) =~ "«x» = 3" + end + end + + describe "references" do + test "in a function parameter" do + {:ok, [range], doc} = + ~q[ + def something(param|) do + param + end + ] + |> find_references() + + assert decorate(doc, range) =~ "«param»" + end + + test "can include definitions" do + {:ok, [definition, reference], doc} = + ~q[ + def something(param|) do + param + end + ] + |> find_references(true) + + assert decorate(doc, definition) =~ "def something(«param») do" + assert decorate(doc, reference) =~ " «param»" + end + + test "can be found via a usage" do + {:ok, [first, second, third], doc} = + ~q[ + def something(param) do + y = param + 3 + z = param + 4 + param| + y + z + end + ] + |> find_references() + + assert decorate(doc, first) =~ " y = «param» + 3" + assert decorate(doc, second) =~ " z = «param» + 4" + assert decorate(doc, third) =~ " «param» + y + z" + end + + test "are found in a function body" do + {:ok, [first, second, third, fourth, fifth], doc} = + ~q[ + def something(param|) do + x = param + param + 3 + y = param + x + z = 10 + param + x + y + z + param + end + ] + |> find_references() + + assert decorate(doc, first) =~ " x = «param» + param + 3" + assert decorate(doc, second) =~ " x = param + «param» + 3" + assert decorate(doc, third) =~ " y 
= «param» + x" + assert decorate(doc, fourth) =~ " z = 10 + «param»" + assert decorate(doc, fifth) =~ " x + y + z + «param»" + end + + test "are constrained to their definition function" do + {:ok, [range], doc} = + ~q[ + def something(param|) do + param + end + + def other_fn(param) do + param + 1 + end + ] + |> find_references() + + assert decorate(doc, range) =~ "«param»" + end + + test "are visible across blocks" do + {:ok, [first, second], doc} = + ~q[ + def something(param|) do + if something() do + param + 1 + else + param + 2 + end + end + ] + |> find_references() + + assert decorate(doc, first) =~ " «param» + 1" + assert decorate(doc, second) =~ " «param» + 2" + end + + test "don't leak out of blocks" do + {:ok, [range], doc} = + ~q[ + def something(param) do + + if something() do + param| = 3 + param + 1 + end + param + 1 + end + ] + |> find_references() + + assert decorate(doc, range) =~ "«param»" + end + + test "are found in the head of a case statement" do + {:ok, [range], doc} = + ~q[ + def something(param|) do + case param do + _ -> :ok + end + end + ] + |> find_references() + + assert decorate(doc, range) =~ " case «param» do" + end + + test "are constrained to a single arm of a case statement" do + {:ok, [guard_range, usage_range], doc} = + ~q[ + def something(param) do + case param do + param| when is_number(param) -> param + 1 + param -> 0 + end + end + ] + |> find_references() + + assert decorate(doc, guard_range) =~ " param when is_number(«param») -> param + 1" + assert decorate(doc, usage_range) =~ " param when is_number(param) -> «param» + 1" + end + + test "are found in a module body" do + {:ok, [range], doc} = + ~q[ + defmodule Outer do + something| = 3 + def foo(unquote(something)) do + end + end + ] + |> find_references() + + assert decorate(doc, range) =~ "def foo(unquote(«something»)) do" + end + + test "are found in anonymous function parameters" do + {:ok, [first, second], doc} = + ~q[ + def outer do + fn param| -> + y = param + 1 + x = param + 2 + x + y + end + end + ] + |> find_references() + + assert decorate(doc, first) =~ "y = «param» + 1" + assert decorate(doc, second) =~ "x = «param» + 2" + end + + test "are found in a pin operator" do + {:ok, [ref], doc} = + ~q[ + def outer(param|) do + fn ^param -> + nil + end + end + ] + |> find_references() + + assert decorate(doc, ref) =~ "fn ^«param» ->" + end + + test "are found inside of string interpolation" do + {:ok, [ref], doc} = + ~S[ + name| = "Stinky" + "#{name} Stinkman" + ] + |> find_references() + + assert decorate(doc, ref) =~ "\#{«name»} Stinkman" + end + + # Note: This test needs to pass before we can implement renaming variables reliably + @tag :skip + test "works for variables defined outside of an if while being shadowed" do + {:ok, [first, second], doc} = + ~q{ + entries| = [1, 2, 3] + entries = + if something() do + [4 | entries] + else + entries + end + } + |> find_references() + + assert decorate(doc, first) =~ "[4 | «entries»]" + assert decorate(doc, second) =~ "«entries»" + end + + test "finds variables defined in anonymous function arms" do + {:ok, [first, second], doc} = + ~q" + shadowed? = false + fn + {:foo, entries|} -> + if shadowed? 
do + [1, entries] + else + entries + end + {:bar, entries} -> + entries + end + " + |> find_references() + + assert decorate(doc, first) =~ "[1, «entries»]" + assert decorate(doc, second) =~ "«entries»" + end + end + + describe "reference shadowing" do + test "on a single line" do + {:ok, [], _doc} = + ~q[ + def something(param) do + other = other = other| = param + end + ] + |> find_references() + end + + test "in a function body" do + {:ok, [], _doc} = + ~q[ + def something(param|) do + param = 3 + param + end + ] + |> find_references() + end + + test "in anonymous function arguments" do + {:ok, [], _doc} = + ~q[ + def something(param|) do + fn param -> + param + 1 + end + :ok + end + ] + |> find_references() + end + + test "inside of a block" do + {:ok, [range], doc} = + ~q[ + def something do + shadow| = 4 + if true do + shadow = shadow + 1 + shadow + end + end + ] + |> find_references() + + assert decorate(doc, range) == " shadow = «shadow» + 1" + end + + test "exiting a block" do + {:ok, [range], doc} = + ~q[ + def something do + shadow| = 4 + if true do + shadow = :ok + shadow + end + shadow + 1 + end + ] + |> find_references() + + assert decorate(doc, range) == " «shadow» + 1" + end + + test "exiting nested blocks" do + {:ok, [range], doc} = + ~q[ + def something(param| = arg) do + case arg do + param when is_number(n) -> + param + 4 + end + param + 5 + end + ] + |> find_references() + + assert decorate(doc, range) == " «param» + 5" + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_mod/aliases_test.exs b/apps/remote_control/test/lexical/remote_control/code_mod/aliases_test.exs new file mode 100644 index 000000000..9a8875fb4 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/code_mod/aliases_test.exs @@ -0,0 +1,204 @@ +defmodule Lexical.RemoteControl.CodeMod.AliasesTest do + alias Lexical.Ast + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeMod.Aliases + + import Lexical.Test.CursorSupport + use Lexical.Test.CodeMod.Case + use Patch + + setup do + patch(RemoteControl, :get_project, %Lexical.Project{}) + :ok + end + + def insert_position(orig) do + {cursor, document} = pop_cursor(orig, as: :document) + analysis = Ast.analyze(document) + {position, _trailer} = Aliases.insert_position(analysis, cursor) + + {:ok, document, position} + end + + describe "insert_position" do + test "is directly after a module's definition if there are no aliases present" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + end + ] + |> insert_position() + + assert decorate_cursor(document, position) =~ ~q[ + defmodule MyModule do + |end + ] + end + + test "is after the moduledoc if no aliases are present" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + @moduledoc """ + This is my funny moduledoc + """ + end + ] + |> insert_position() + + assert decorate_cursor(document, position) =~ ~q[ + defmodule MyModule do + @moduledoc """ + This is my funny moduledoc + """ + |end + ] + end + + test "is before use statements" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + use Something.That.Exists + end + ] + |> insert_position() + + expected = ~q[ + defmodule MyModule do + |use Something.That.Exists + end + ] + assert decorate_cursor(document, position) =~ expected + end + + test "is before require statements" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + require Something.That.Exists + end + ] + |> insert_position() + + expected = ~q[ + defmodule MyModule do + |require 
Something.That.Exists + end + ] + assert decorate_cursor(document, position) =~ expected + end + + test "is before import statements" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + import Something.That.Exists + end + ] + |> insert_position() + + expected = ~q[ + defmodule MyModule do + |import Something.That.Exists + end + ] + assert decorate_cursor(document, position) =~ expected + end + + test "is where existing aliases are" do + {:ok, document, position} = + ~q[ + defmodule MyModule do| + alias Something.That.Exists + end + ] + |> insert_position() + + expected = ~q[ + defmodule MyModule do + |alias Something.That.Exists + end + ] + assert decorate_cursor(document, position) =~ expected + end + + test "in nested empty modules" do + {:ok, document, position} = + ~q[ + defmodule Outer do + defmodule Inner do| + end + end + ] + |> insert_position() + + expected = ~q[ + defmodule Outer do + defmodule Inner do + |end + end + ]t + + assert decorate_cursor(document, position) =~ expected + end + + test "in nested modules that both have existing aliases" do + {:ok, document, position} = + ~q[ + defmodule Outer do + alias First.Thing + + defmodule Inner do| + alias Second.Person + end + end + ] + |> insert_position() + + expected = ~q[ + defmodule Outer do + alias First.Thing + + defmodule Inner do + |alias Second.Person + end + end + ]t + + assert decorate_cursor(document, position) =~ expected + end + + test "is after moduledocs in nested modules" do + {:ok, document, position} = + ~q[ + defmodule Outer do + alias First.Thing + + defmodule Inner do| + @moduledoc """ + This is my documentation, it + spans multiple lines + """ + end + end + ] + |> insert_position() + + expected = ~q[ + defmodule Outer do + alias First.Thing + + defmodule Inner do + @moduledoc """ + This is my documentation, it + spans multiple lines + """ + |end + end + ]t + + assert decorate_cursor(document, position) =~ expected + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/code_mod/diff_test.exs b/apps/remote_control/test/lexical/remote_control/code_mod/diff_test.exs index 2a2a237cb..598099216 100644 --- a/apps/remote_control/test/lexical/remote_control/code_mod/diff_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_mod/diff_test.exs @@ -39,7 +39,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "hello" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 1, 1, 3, "") + assert_normalized(edit == edit(1, 1, 1, 3, "")) assert_edited(orig, final) end @@ -48,7 +48,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "heyello" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 3, 1, 3, "ye") + assert_normalized(edit == edit(1, 3, 1, 3, "ye")) assert_edited(orig, final) end @@ -57,7 +57,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "heo" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 3, 1, 5, "") + assert_normalized(edit == edit(1, 3, 1, 5, "")) assert_edited(orig, final) end @@ -68,7 +68,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do # this is collapsed into a single edit of an # insert that spans the delete and the insert assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 4, 1, 6, "vetica went") + assert_normalized(edit == edit(1, 4, 1, 6, "vetica went")) assert_edited(orig, final) end @@ -77,8 +77,8 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "hellothe" assert [e1, e2] = diff(orig, final) - assert_normalized 
e1 == edit(1, 10, 1, 12, "") - assert_normalized e2 == edit(1, 6, 1, 7, "") + assert_normalized(e1 == edit(1, 10, 1, 12, "")) + assert_normalized(e2 == edit(1, 6, 1, 7, "")) end end @@ -104,7 +104,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "hello" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 1, 3, 1, "") + assert_normalized(edit == edit(1, 1, 3, 1, "")) assert_edited(orig, final) end @@ -113,7 +113,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "he\n\n ye\n\nllo" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 3, 1, 3, "\n\n ye\n\n") + assert_normalized(edit == edit(1, 3, 1, 3, "\n\n ye\n\n")) assert_edited(orig, final) end @@ -130,7 +130,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = "hellogoodbye" assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 6, 4, 1, "") + assert_normalized(edit == edit(1, 6, 4, 1, "")) assert_edited(orig, final) end @@ -169,7 +169,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do |> String.trim() assert [edit] = diff(orig, final) - assert_normalized edit == edit(3, 1, 5, 1, "") + assert_normalized(edit == edit(3, 1, 5, 1, "")) assert_edited(orig, final) end end @@ -180,7 +180,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = ~S[{"🎸", "after"}] assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 10, 1, 12, "") + assert_normalized(edit == edit(1, 10, 1, 12, "")) assert_edited(orig, final) end @@ -189,7 +189,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = ~S[🎸🎺🎸] assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 5, 1, 5, "🎺") + assert_normalized(edit == edit(1, 5, 1, 5, "🎺")) assert_edited(orig, final) end @@ -198,7 +198,7 @@ defmodule Lexical.RemoteControl.CodeMod.DiffTest do final = ~S[🎸🎸] assert [edit] = diff(orig, final) - assert_normalized edit == edit(1, 5, 1, 13, "") + assert_normalized(edit == edit(1, 5, 1, 13, "")) assert_edited(orig, final) end diff --git a/apps/remote_control/test/lexical/remote_control/code_mod/format_test.exs b/apps/remote_control/test/lexical/remote_control/code_mod/format_test.exs index d76b98a29..bf74c0df3 100644 --- a/apps/remote_control/test/lexical/remote_control/code_mod/format_test.exs +++ b/apps/remote_control/test/lexical/remote_control/code_mod/format_test.exs @@ -19,7 +19,7 @@ defmodule Lexical.RemoteControl.CodeMod.FormatTest do |> Keyword.get(:file_path, file_path(project)) |> maybe_uri() - with {:ok, document_edits} <- Format.edits(project, document(file_uri, text)) do + with {:ok, document_edits} <- Format.edits(document(file_uri, text)) do {:ok, document_edits.edits} end end @@ -67,6 +67,7 @@ defmodule Lexical.RemoteControl.CodeMod.FormatTest do setup do project = project() + RemoteControl.set_project(project) {:ok, project: project} end @@ -129,12 +130,14 @@ defmodule Lexical.RemoteControl.CodeMod.FormatTest do setup [:with_real_project] test "it should emit diagnostics when a syntax error occurs", %{project: project} do - assert {:error, _} = ~q[ + text = ~q[ def foo(a, ) do - end - ] |> modify(project: project) + end + ] + document = document("file:///file.ex", text) + RemoteControl.Api.format(project, document) - assert_receive file_diagnostics(diagnostics: [diagnostic]), 250 + assert_receive file_diagnostics(diagnostics: [diagnostic]), 500 assert diagnostic.message =~ "syntax error" end end diff --git a/apps/remote_control/test/lexical/remote_control/commands/reindex_test.exs 
b/apps/remote_control/test/lexical/remote_control/commands/reindex_test.exs new file mode 100644 index 000000000..c88b53667 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/commands/reindex_test.exs @@ -0,0 +1,89 @@ +defmodule Lexical.RemoteControl.Commands.ReindexTest do + alias Lexical.Document + alias Lexical.RemoteControl.Commands.Reindex + alias Lexical.RemoteControl.Search + + import Lexical.Test.EventualAssertions + import Lexical.Test.Fixtures + import Lexical.Test.Entry.Builder + + use ExUnit.Case + use Patch + + setup do + reindex_fun = fn _ -> + Process.sleep(20) + end + + start_supervised!({Reindex, reindex_fun: reindex_fun}) + + {:ok, project: project()} + end + + test "it should allow reindexing", %{project: project} do + assert :ok = Reindex.perform(project) + assert Reindex.running?() + end + + test "it fails if another index is running", %{project: project} do + assert :ok = Reindex.perform(project) + assert {:error, "Already Running"} = Reindex.perform(project) + end + + test "it eventually becomes available", %{project: project} do + assert :ok = Reindex.perform(project) + refute_eventually Reindex.running?() + end + + test "another reindex can be enqueued", %{project: project} do + assert :ok = Reindex.perform(project) + assert_eventually :ok = Reindex.perform(project) + end + + def put_entries(uri, entries) do + Process.put(uri, entries) + end + + describe "uri/1" do + setup do + test = self() + + patch(Reindex.State, :entries_for_uri, fn uri -> + entries = + test + |> Process.info() + |> get_in([:dictionary]) + |> Enum.find_value(fn + {^uri, value} -> value + _ -> nil + end) + + {:ok, Document.Path.ensure_path(uri), entries || []} + end) + + patch(Search.Store, :update, fn uri, entries -> + send(test, {:entries, uri, entries}) + end) + + :ok + end + + test "reindexes a specific uri" do + uri = "file:///file.ex" + entries = [reference()] + put_entries(uri, entries) + Reindex.uri(uri) + assert_receive {:entries, "/file.ex", ^entries} + end + + test "buffers updates if a reindex is in progress", %{project: project} do + uri = "file:///file.ex" + new_entries = [reference(), definition()] + put_entries(uri, new_entries) + Reindex.perform(project) + Reindex.uri(uri) + + assert_receive {:entries, "/file.ex", ^new_entries} + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/completion_test.exs b/apps/remote_control/test/lexical/remote_control/completion_test.exs index 483d03ef6..b4e35ee7a 100644 --- a/apps/remote_control/test/lexical/remote_control/completion_test.exs +++ b/apps/remote_control/test/lexical/remote_control/completion_test.exs @@ -1,11 +1,16 @@ defmodule Lexical.RemoteControl.CompletionTest do + alias Lexical.Ast + alias Lexical.Ast.Env alias Lexical.Document alias Lexical.RemoteControl.Completion - import Lexical.Test.CursorSupport import Lexical.Test.CodeSigil + import Lexical.Test.CursorSupport + import Lexical.Test.Fixtures + import Lexical.Test.Quiet use ExUnit.Case, async: true + use Patch describe "struct_fields/2" do test "returns the field completion for current module" do @@ -85,12 +90,136 @@ defmodule Lexical.RemoteControl.CompletionTest do end end + def expose_strip_struct_operator(_) do + Patch.expose(Completion, strip_struct_operator: 1) + :ok + end + + describe "strip_struct_operator/1" do + setup [:expose_strip_struct_operator] + + test "with a reference followed by __" do + {doc, _position} = + "%__" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "__" + end + + test 
"with a reference followed by a module name" do + {doc, _position} = + "%Module" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "Module" + end + + test "with a reference followed by a module and a dot" do + {doc, _position} = + "%Module." + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "Module." + end + + test "with a reference followed by a nested module" do + {doc, _position} = + "%Module.Sub" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "Module.Sub" + end + + test "with a reference followed by an alias" do + code = ~q[ + alias Something.Else + %El| + ]t + + {doc, _position} = + code + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "alias Something.Else\nEl" + end + + test "on a line with two references, replacing the first" do + {doc, _position} = + "%First{} = %Se" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "%First{} = Se" + end + + test "on a line with two references, replacing the second" do + {doc, _position} = + "%Fir| = %Second{}" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "Fir = %Second{}" + end + + test "with a plain module" do + env = new_env("Module") + {doc, _position} = private(Completion.strip_struct_operator(env)) + + assert doc == Document.to_string(env.document) + end + + test "with a plain module strip_struct_reference a dot" do + env = new_env("Module.") + {doc, _position} = private(Completion.strip_struct_operator(env)) + + assert doc == Document.to_string(env.document) + end + + test "leaves leading spaces in place" do + {doc, _position} = + " %Some" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == " Some" + end + + test "works in a function definition" do + {doc, _position} = + "def my_function(%Lo|)" + |> new_env() + |> private(Completion.strip_struct_operator()) + + assert doc == "def my_function(Lo)" + end + end + defp struct_fields(source) do {position, document} = pop_cursor(source, as: :document) - text = Document.to_string(document) - Code.compile_string(text) - Completion.struct_fields(document, position) + quiet(:stderr, fn -> + Code.compile_string(text) + end) + + analysis = + document + |> Ast.analyze() + |> Ast.reanalyze_to(position) + + Completion.struct_fields(analysis, position) + end + + def new_env(text) do + project = project() + {position, document} = pop_cursor(text, as: :document) + analysis = Ast.analyze(document) + {:ok, env} = Env.new(project, analysis, position) + env end end diff --git a/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs b/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs index 75abe3de0..60eed4e7e 100644 --- a/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs +++ b/apps/remote_control/test/lexical/remote_control/dispatch/handlers/indexer_test.exs @@ -1,8 +1,8 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do - alias Lexical.Ast alias Lexical.Document alias Lexical.RemoteControl alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.Commands alias Lexical.RemoteControl.Dispatch.Handlers.Indexing alias Lexical.RemoteControl.Search @@ -20,58 +20,73 @@ defmodule Lexical.RemoteControl.Dispatch.Handlers.IndexingTest do create_index = &Search.Indexer.create_index/1 update_index = &Search.Indexer.update_index/2 + start_supervised!(RemoteControl.Dispatch) + 
start_supervised!(Commands.Reindex) + start_supervised!(Search.Store.Backends.Ets) start_supervised!({Search.Store, [project, create_index, update_index]}) - start_supervised!(Document.Store) + start_supervised!({Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]}) + Search.Store.enable() assert_eventually(Search.Store.loaded?(), 1500) {:ok, state} = Indexing.init([]) {:ok, state: state, project: project} end - def quoted_document(source) do - doc = Document.new("file:///file.ex", source, 1) - Document.Store.open("file:///file.ex", source, 1) - {:ok, quoted} = Ast.from(doc) + def set_document!(source) do + uri = "file:///file.ex" - {doc, quoted} - end + :ok = + case Document.Store.fetch(uri) do + {:ok, _} -> + Document.Store.update(uri, fn doc -> + edit = Document.Edit.new(source) + Document.apply_content_changes(doc, doc.version + 1, [edit]) + end) + + {:error, :not_open} -> + Document.Store.open(uri, source, 1) + end - def file_quoted_event(document, quoted_ast) do - file_quoted(document: document, quoted_ast: quoted_ast) + {uri, source} end describe "handling file_quoted events" do test "should add new entries to the store", %{state: state} do - {doc, quoted} = + {uri, _source} = ~q[ defmodule NewModule do end ] - |> quoted_document() + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) - assert {:ok, [entry]} = Search.Store.exact("NewModule", []) + assert_eventually {:ok, [entry]} = Search.Store.exact("NewModule", []) assert entry.subject == NewModule end test "should update entries in the store", %{state: state} do - {old_doc, old_quoted} = quoted_document("defmodule OldModule do\nend") + {uri, source} = + ~q[ + defmodule OldModule do + end + ] + |> set_document!() - {:ok, _} = Search.Indexer.Quoted.index(old_doc, old_quoted) + {:ok, _} = Search.Indexer.Source.index(uri, source) - {doc, quoted} = + {^uri, _source} = ~q[ - defmodule UpdatedModule do - end - ] - |> quoted_document() + defmodule UpdatedModule do + end + ] + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) - assert {:ok, [entry]} = Search.Store.exact("UpdatedModule", []) + assert_eventually {:ok, [entry]} = Search.Store.exact("UpdatedModule", []) assert entry.subject == UpdatedModule assert {:ok, []} = Search.Store.exact("OldModule", []) end @@ -80,37 +95,38 @@ %{state: state} do Document.Store.open("file:///file.ex", "defmodule Newer do \nend", 3) - {doc, quoted} = + {uri, _source} = ~q[ - defmodule Stale do - end - ] - |> quoted_document() + defmodule Stale do + end + ] + |> set_document!() - assert {:ok, _} = Indexing.on_event(file_quoted_event(doc, quoted), state) + assert {:ok, _} = Indexing.on_event(file_compile_requested(uri: uri), state) assert {:ok, []} = Search.Store.exact("Stale", []) end end describe "a file is deleted" do test "its entries should be deleted", %{project: project, state: state} do - {doc, quoted} = + {uri, source} = ~q[ - defmodule ToDelete do - end - ] - |> quoted_document() + defmodule ToDelete do + end + ] + |> set_document!() + + {:ok, entries} = Search.Indexer.Source.index(uri, source) + Search.Store.update(uri, entries) - {:ok, entries} = Search.Indexer.Quoted.index(doc, quoted) - Search.Store.update(doc.path, entries) - assert {:ok, [_]} = Search.Store.exact("ToDelete", 
[]) + assert_eventually {:ok, [_]} = Search.Store.exact("ToDelete", []) Indexing.on_event( - filesystem_event(project: project, uri: doc.uri, event_type: :deleted), + filesystem_event(project: project, uri: uri, event_type: :deleted), state ) - assert {:ok, []} = Search.Store.exact("ToDelete", []) + assert_eventually {:ok, []} = Search.Store.exact("ToDelete", []) end end diff --git a/apps/remote_control/test/lexical/remote_control/module_mappings_test.exs b/apps/remote_control/test/lexical/remote_control/module_mappings_test.exs index 42c6843f1..df7b30e31 100644 --- a/apps/remote_control/test/lexical/remote_control/module_mappings_test.exs +++ b/apps/remote_control/test/lexical/remote_control/module_mappings_test.exs @@ -1,7 +1,11 @@ defmodule Lexical.RemoteControl.ModuleMappingsTest do alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.ModuleMappings + use ExUnit.Case + use Lexical.Test.EventualAssertions + + import Lexical.RemoteControl.Api.Messages setup do start_supervised!(Dispatch) @@ -10,10 +14,14 @@ end def with_a_populated_cache(_) do - ModuleMappings.update(__MODULE__, __ENV__.file) + update_module(__MODULE__, __ENV__.file) :ok end + def update_module(module, file) do + Dispatch.broadcast(module_updated(name: module, file: file)) + end + describe "modules_in_file/1" do test "returns an empty list if the file isn't in the cache" do assert ModuleMappings.modules_in_file("/does/not/exist.ex") == [] @@ -23,33 +31,31 @@ end describe "basic functions" do setup [:with_a_populated_cache] - test "it allows you to update a module" do - ModuleMappings.update(__MODULE__, "foo.text") + test "it updates the file for a module" do + update_module(__MODULE__, "foo.text") - assert ModuleMappings.file_for_module(__MODULE__) == "foo.text" + assert_eventually ModuleMappings.file_for_module(__MODULE__) == "foo.text" end test "it allows you to get the file of a module" do - assert ModuleMappings.file_for_module(__MODULE__) == __ENV__.file + assert_eventually ModuleMappings.file_for_module(__MODULE__) == __ENV__.file end test "it allows you to get the modules defined in a file" do - assert ModuleMappings.modules_in_file(__ENV__.file) == [__MODULE__] + assert_eventually ModuleMappings.modules_in_file(__ENV__.file) == [__MODULE__] end test "it allows you to get multiple modules defined in a file" do - assert ModuleMappings.update(FakeModule, __ENV__.file) + update_module(FakeModule, __ENV__.file) - modules_in_test = ModuleMappings.modules_in_file(__ENV__.file) - assert FakeModule in modules_in_test - assert __MODULE__ in modules_in_test + assert_eventually [FakeModule, __MODULE__] = ModuleMappings.modules_in_file(__ENV__.file) end test "it allows for modules to move from one file to another" do - ModuleMappings.update(__MODULE__, "other_file.exs") + update_module(__MODULE__, "other_file.exs") - assert ModuleMappings.file_for_module(__MODULE__) == "other_file.exs" - assert ModuleMappings.modules_in_file(__ENV__.file) == [] + assert_eventually ModuleMappings.file_for_module(__MODULE__) == "other_file.exs" + assert_eventually ModuleMappings.modules_in_file(__ENV__.file) == [] end end end diff --git a/apps/remote_control/test/lexical/remote_control/modules_test.exs b/apps/remote_control/test/lexical/remote_control/modules_test.exs index fe8a8ea66..62c959e4a 100644 --- a/apps/remote_control/test/lexical/remote_control/modules_test.exs +++ 
b/apps/remote_control/test/lexical/remote_control/modules_test.exs @@ -1,6 +1,5 @@ defmodule Lexical.RemoteControl.ModulesTest do alias Lexical.RemoteControl.Modules - use Modules.Predicate.Syntax use ExUnit.Case use Lexical.Test.EventualAssertions @@ -45,22 +44,9 @@ defmodule Lexical.RemoteControl.ModulesTest do end describe "using predicate descriptors" do - test "it should place the argument where you specify" do - assert [module] = - Modules.with_prefix("GenEvent", {Kernel, :macro_exported?, [:"$1", :__using__, 1]}) - - assert to_string(module) == "Elixir.GenEvent" - end - - test "it should work with the predicate syntax helpers" do - assert [GenServer] = - Modules.with_prefix("GenServer", predicate(¯o_exported?(&1, :__using__, 1))) - - assert [GenServer] = - Modules.with_prefix( - "GenServer", - predicate(&Kernel.macro_exported?(&1, :__using__, 1)) - ) + test "it places the module as the first argument" do + assert [GenEvent] = + Modules.with_prefix("GenEvent", {Kernel, :macro_exported?, [:__using__, 1]}) end end end diff --git a/apps/remote_control/test/lexical/remote_control/build/progress_test.exs b/apps/remote_control/test/lexical/remote_control/progress_test.exs similarity index 82% rename from apps/remote_control/test/lexical/remote_control/build/progress_test.exs rename to apps/remote_control/test/lexical/remote_control/progress_test.exs index 8512e9d28..a44c5e832 100644 --- a/apps/remote_control/test/lexical/remote_control/build/progress_test.exs +++ b/apps/remote_control/test/lexical/remote_control/progress_test.exs @@ -1,6 +1,6 @@ -defmodule Lexical.RemoteControl.Build.ProgressTest do +defmodule Lexical.RemoteControl.ProgressTest do alias Lexical.RemoteControl - alias Lexical.RemoteControl.Build.Progress + alias Lexical.RemoteControl.Progress import Lexical.RemoteControl.Api.Messages @@ -10,7 +10,7 @@ defmodule Lexical.RemoteControl.Build.ProgressTest do setup do test_pid = self() - patch(RemoteControl.Dispatch, :broadcast, &send(test_pid, &1)) + patch(RemoteControl.Api.Proxy, :broadcast, &send(test_pid, &1)) :ok end diff --git a/apps/remote_control/test/lexical/remote_control/search/fuzzy/scorer_test.exs b/apps/remote_control/test/lexical/remote_control/search/fuzzy/scorer_test.exs index 6c9260780..2faecc5e7 100644 --- a/apps/remote_control/test/lexical/remote_control/search/fuzzy/scorer_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/fuzzy/scorer_test.exs @@ -67,9 +67,31 @@ defmodule Lexical.RemoteControl.Search.Fuzzy.ScorerTest do assert results == ~w(abcd axbxcxdx axxbxxcxxdxx axxxbxxxcxxxdxxx) end - test "patterns that match the case are boosted" do - results = score_and_sort(~w(stinky stinkY StiNkY STINKY), "STINKY") - assert results == ~w(STINKY StiNkY stinkY stinky) + test "patterns that match camel case are boosted" do + results = + score_and_sort( + ~w(lotsofcamelcase LotsofcamelCase LotsofCamelCase LotsOfCamelCase), + "LotsOfCamelCase" + ) + + assert results == ~w(LotsOfCamelCase LotsofCamelCase LotsofcamelCase lotsofcamelcase) + end + + test "matches at the end of a module are boosted" do + results = + score_and_sort( + ~w(First.Third.Second Third.First.Second First.Second.Third), + "Third" + ) + + assert ["First.Second.Third" | _] = results + end + + test "tail matches are boosted" do + results = + score_and_sort(~w(create_user save_user Foo.Bar.Baz.Demo.Accounts.LiveDemo.User), "User") + + assert ["Foo.Bar.Baz.Demo.Accounts.LiveDemo.User" | _] = results end end end diff --git 
a/apps/remote_control/test/lexical/remote_control/search/fuzzy_test.exs b/apps/remote_control/test/lexical/remote_control/search/fuzzy_test.exs index 480eee1f7..a027b8667 100644 --- a/apps/remote_control/test/lexical/remote_control/search/fuzzy_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/fuzzy_test.exs @@ -5,25 +5,26 @@ defmodule Lexical.RemoteControl.Search.FuzzyTest do setup do entries = [ - reference(subject: Enum), - reference(subject: Foo.Bar), - reference(subject: Bar.Baz) + definition(subject: Enum), + definition(subject: Foo.Bar), + definition(subject: Bar.Baz) ] fuzzy = Fuzzy.from_entries(entries) {:ok, fuzzy: fuzzy, entries: entries} end - def lookup(entities, ref) do - Enum.find(entities, &(&1.ref == ref)) + def lookup(entities, id) do + Enum.find(entities, &(&1.id == id)) end describe "housekeeping" do test "it can add an entry", %{fuzzy: fuzzy} do refute Fuzzy.has_subject?(fuzzy, Other) - entry = reference(subject: Other) + entry = definition(subject: Other) fuzzy = Fuzzy.add(fuzzy, entry) + assert Fuzzy.has_subject?(fuzzy, Other) end @@ -32,8 +33,8 @@ defmodule Lexical.RemoteControl.Search.FuzzyTest do refute Fuzzy.has_subject?(fuzzy, Pants) entries = [ - reference(subject: Stinky), - reference(subject: Pants) + definition(subject: Stinky), + definition(subject: Pants) ] fuzzy = Fuzzy.add(fuzzy, entries) @@ -44,7 +45,7 @@ defmodule Lexical.RemoteControl.Search.FuzzyTest do test "a value can be removed", %{fuzzy: fuzzy, entries: [to_remove | _]} do assert Fuzzy.has_subject?(fuzzy, to_remove.subject) - fuzzy = Fuzzy.drop_values(fuzzy, [to_remove.ref]) + fuzzy = Fuzzy.drop_values(fuzzy, [to_remove.id]) refute Fuzzy.has_subject?(fuzzy, to_remove.subject) end @@ -74,27 +75,13 @@ defmodule Lexical.RemoteControl.Search.FuzzyTest do describe "match/2" do test "fuzzy searching can find prefixes", %{fuzzy: fuzzy, entries: entries} do - assert [ref] = Fuzzy.match(fuzzy, "Enum") - entry = lookup(entries, ref) + assert [id] = Fuzzy.match(fuzzy, "Enum") + entry = lookup(entries, id) assert entry.subject == Enum end test "fuzzy matching is applied", %{fuzzy: fuzzy} do assert [_, _] = Fuzzy.match(fuzzy, "br") end - - test "ordering" do - entries = [ - reference(ref: 1, subject: ZZZZZZZZZZZZZZZZZZZZZZZZ.ABCD), - reference(ref: 2, subject: ZZZZA.ZZZZZb.ZZZZc.ZZZZd), - reference(ref: 3, subject: A.B.C.D), - reference(ref: 4, subject: Abcd) - ] - - fuzzy = Fuzzy.from_entries(entries) - results = Fuzzy.match(fuzzy, "abcd") - - assert results == [4, 3, 2, 1] - end end end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ecto_schema_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ecto_schema_test.exs new file mode 100644 index 000000000..2fad53755 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ecto_schema_test.exs @@ -0,0 +1,258 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.EctoSchemaTest do + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, fn entry -> entry.type == :struct end) + end + + describe "finds the structs defined by schema" do + test "only finds if Ecto.Schema is used" do + {:ok, results, _} = + ~q[ + defmodule NotEcto do + schema "not ecto" do + field :ecto, :boolean, default: false + end + end + ] + |> index() + + assert results == [] + end + + test "if Ecto.Schema is aliased" do + {:ok, [struct], _doc} = + ~q[ + defmodule MySchema do + alias Ecto.Schema, as: SCM + use SCM + schema "my_schema" do
+ field :last_name, :string + end + end + ] + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + end + + test "consisting of a single field" do + {:ok, [struct], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + field :last_name, :string + end + end + ] + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + + expected = + ~q[ + schema "my_schema" do + field :last_name, :string + end + ] + |> String.trim() + + assert decorate(doc, struct.range) =~ ~q[«schema "my_schema" do»] + assert extract(doc, struct.block_range) =~ expected + end + + test "consisting of multiple fields" do + {:ok, [struct], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + field :first_name, :string + field :last_name, :string + end + end + ] + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + + expected = + ~q[ + schema "my_schema" do + field :first_name, :string + field :last_name, :string + end + ] + |> String.trim() + + assert decorate(doc, struct.range) =~ ~q[«schema "my_schema" do»] + assert extract(doc, struct.block_range) =~ expected + end + end + + describe "finds the structs defined by embedded_schema" do + test "only finds if Ecto.Schema is used" do + {:ok, results, _doc} = + ~q[ + defmodule NotEcto do + schema "not ecto" do + embedded_schema "also_not_ecto" do + field :very_much_like_ecto, :string + end + end + end + ] + |> index() + + assert [] == results + end + + test "consisting of a single field" do + {:ok, [struct], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + embedded_schema do + field :last_name, :string + end + end + ] + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + + expected = + ~q[ + embedded_schema do + field :last_name, :string + end + ] + |> String.trim() + + assert decorate(doc, struct.range) =~ ~q[«embedded_schema do»] + assert extract(doc, struct.block_range) =~ expected + end + + test "consisting of multiple fields" do + {:ok, [struct], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + embedded_schema "my_schema" do + field :first_name, :string + field :last_name, :string + end + end + ] + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + + expected = + ~q[ + embedded_schema "my_schema" do + field :first_name, :string + field :last_name, :string + end + ] + |> String.trim() + + assert decorate(doc, struct.range) =~ ~q[«embedded_schema "my_schema" do»] + assert extract(doc, struct.block_range) =~ expected + end + end + + describe "finds references to schemas defined with embeds_one" do + test "ignores a schema reference" do + {:ok, [struct_def], _doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + embeds_one :friend, Friend + end + end + ] + |> index() + + assert struct_def.subject == MySchema + end + + test "when defined inline" do + {:ok, [_struct_def, schema_definition], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + embeds_one :child, Child do + field :first_name, :string + end + end + end + ] + |> index() + + assert schema_definition.type == :struct + assert schema_definition.subtype == :definition + assert schema_definition.subject == MySchema.Child + + expected = ~q[ + embeds_one :child, Child do + field :first_name, :string + end + ]t + assert decorate(doc, schema_definition.range) =~ ~q[embeds_one :child, «Child» do] + assert extract(doc, schema_definition.block_range)
=~ expected + end + end + + describe "finds schemas defined with embeds_many" do + test "ignores a schema reference" do + {:ok, [struct_def], _doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + embeds_many :friend, Friend + end + end + ] + |> index() + + assert struct_def.subject == MySchema + end + + test "when defined inline" do + {:ok, [_struct_def, schema_definition], doc} = + ~q[ + defmodule MySchema do + use Ecto.Schema + schema "my_schema" do + embeds_many :child, Child do + field :first_name, :string + end + end + end + ] + |> index() + + assert schema_definition.type == :struct + assert schema_definition.subtype == :definition + assert schema_definition.subject == MySchema.Child + + expected = ~q[ + embeds_many :child, Child do + field :first_name, :string + end + ]t + assert decorate(doc, schema_definition.range) =~ ~q[embeds_many :child, «Child» do] + assert extract(doc, schema_definition.block_range) =~ expected + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ex_unit_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ex_unit_test.exs new file mode 100644 index 000000000..ea8b39585 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/ex_unit_test.exs @@ -0,0 +1,337 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ExUnitTest do + alias Lexical.RemoteControl.Search.Indexer.Extractors + + use Lexical.Test.ExtractorCase + import Lexical.Test.RangeSupport + + @test_types [ + :ex_unit_setup, + :ex_unit_setup_all, + :ex_unit_test, + :ex_unit_describe + ] + + def index_definitions(source) do + do_index(source, fn entry -> entry.type in @test_types and entry.subtype == :definition end, [ + Extractors.ExUnit + ]) + end + + def index_with_structure(source) do + do_index(source, fn entry -> entry.type != :metadata end, [ + Extractors.ExUnit, + Extractors.Module + ]) + end + + describe "finds setup" do + test "in blocks without an argument" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup do + :ok + end + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup + assert setup.subject == "SomeTest.setup/1" + assert decorate(doc, setup.range) =~ " «setup do»" + assert decorate(doc, setup.block_range) =~ " «setup do\n :ok\n end»" + end + + test "in blocks with an argument" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup arg do + :ok + end + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup + assert setup.subject == "SomeTest.setup/2" + assert decorate(doc, setup.range) =~ " «setup arg do»" + assert decorate(doc, setup.block_range) =~ " «setup arg do\n :ok\n end»" + end + + test "as an atom" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup :other_function + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup + assert setup.subject == "SomeTest.setup/1" + refute setup.block_range + assert decorate(doc, setup.range) =~ " «setup :other_function»" + end + + test "as a list of atoms" do + {:ok, [setup], doc} = + ~q{ + defmodule SomeTest do + setup [:other_function, :second_function] + end + } + |> index_definitions() + + assert setup.type == :ex_unit_setup + assert setup.subject == "SomeTest.setup/1" + refute setup.block_range + assert decorate(doc, setup.range) =~ " «setup [:other_function, :second_function]»" + end + + test "as an MF tuple" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup {OtherModule, :setup} +
end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup + assert setup.subject == "SomeTest.setup/1" + refute setup.block_range + assert decorate(doc, setup.range) =~ " «setup {OtherModule, :setup}»" + end + + test "unless setup is a variable" do + {:ok, [test], _doc} = + ~q[ + defmodule SomeTest do + test "something" do + setup = 3 + setup + end + end + ] + |> index_definitions() + + assert test.type == :ex_unit_test + end + end + + describe "finds setup_all" do + test "as a block without an argument" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup_all do + :ok + end + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup_all + assert setup.subject == "SomeTest.setup_all/1" + assert decorate(doc, setup.range) =~ " «setup_all do»" + assert decorate(doc, setup.block_range) =~ " «setup_all do\n :ok\n end»" + end + + test "as a block with an argument" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup_all arg do + :ok + end + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup_all + assert setup.subject == "SomeTest.setup_all/2" + assert decorate(doc, setup.range) =~ " «setup_all arg do»" + assert decorate(doc, setup.block_range) =~ " «setup_all arg do\n :ok\n end»" + end + + test "as an atom" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup_all :other_function + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup_all + assert setup.subject == "SomeTest.setup_all/1" + refute setup.block_range + + assert decorate(doc, setup.range) =~ " «setup_all :other_function»" + end + + test "as a list of atoms" do + {:ok, [setup], doc} = + ~q{ + defmodule SomeTest do + setup_all [:other_function, :second_function] + end + } + |> index_definitions() + + assert setup.type == :ex_unit_setup_all + assert setup.subject == "SomeTest.setup_all/1" + refute setup.block_range + + assert decorate(doc, setup.range) =~ " «setup_all [:other_function, :second_function]»" + end + + test "as an MF tuple" do + {:ok, [setup], doc} = + ~q[ + defmodule SomeTest do + setup_all {OtherModule, :setup} + end + ] + |> index_definitions() + + assert setup.type == :ex_unit_setup_all + assert setup.subject == "SomeTest.setup_all/1" + refute setup.block_range + + assert decorate(doc, setup.range) =~ " «setup_all {OtherModule, :setup}»" + end + end + + describe "finds describe blocks" do + test "with an empty block" do + {:ok, [describe], doc} = + ~q[ + defmodule SomeTest do + describe "something" do + end + end + ] + |> index_definitions() + + assert describe.type == :ex_unit_describe + assert describe.subtype == :definition + assert decorate(doc, describe.range) =~ " «describe \"something\" do»" + assert decorate(doc, describe.block_range) =~ " «describe \"something\" do\n end»" + end + + test "with tests" do + {:ok, [describe, _test], doc} = + ~q[ + defmodule SomeTest do + describe "something" do + test "something" + end + end + ] + |> index_definitions() + + assert describe.type == :ex_unit_describe + assert describe.subtype == :definition + + assert decorate(doc, describe.range) =~ " «describe \"something\" do»" + + assert decorate(doc, describe.block_range) =~ + " «describe \"something\" do\n test \"something\"\n end»" + end + end + + describe "finds tests" do + test "when pending" do + {:ok, [test], doc} = + ~q[ + defmodule SomeTest do + test "my test" + end + ] + |> index_definitions() + + assert test.type == :ex_unit_test + assert test.subject == "SomeTest.[\"my test\"]/1" + refute test.block_range + +
assert decorate(doc, test.range) =~ ~s[ «test "my test"»] + end + + test "when they only have a block" do + {:ok, [test], doc} = + ~q[ + defmodule SomeTest do + test "my test" do + end + end + ] + |> index_definitions() + + assert test.type == :ex_unit_test + assert test.subject == "SomeTest.[\"my test\"]/2" + + assert decorate(doc, test.range) =~ ~s[ «test "my test" do»] + assert decorate(doc, test.block_range) =~ ~s[ «test "my test" do\n end»] + end + + test "when they have a block and a context" do + {:ok, [test], doc} = + ~q[ + defmodule SomeTest do + test "my test", context do + end + end + ] + |> index_definitions() + + assert test.type == :ex_unit_test + assert test.subject =~ "SomeTest.[\"my test\"]/3" + + expected_detail = " «test \"my test\", context do»" + assert decorate(doc, test.range) =~ expected_detail + + expected_block = " «test \"my test\", context do\n end»" + assert decorate(doc, test.block_range) =~ expected_block + end + end + + describe "block structure" do + test "describe contains tests" do + {:ok, [module, describe, test], _} = + ~q[ + defmodule SomeTest do + describe "outer" do + test "my test", context do + end + end + end + ] + |> index_with_structure() + + assert module.type == :module + assert module.block_id == :root + + assert describe.type == :ex_unit_describe + assert describe.block_id == module.id + + assert test.type == :ex_unit_test + assert test.block_id == describe.id + end + end + + describe "things that it will miss" do + test "quoted test cases" do + {:ok, [], _} = + ~q[ + quote do + test unquote(test_name) do + end + end + ] + |> in_a_module() + |> index_definitions() + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_definition_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_definition_test.exs new file mode 100644 index 000000000..7b2dea8aa --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_definition_test.exs @@ -0,0 +1,442 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.FunctionDefinitionTest do + alias Lexical.RemoteControl.Search.Indexer.Entry + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, fn %Entry{type: type} = entry -> + match?({:function, _}, type) and entry.subtype == :definition + end) + end + + def index_functions(source) do + do_index(source, fn %Entry{type: type} -> + match?({:function, _}, type) + end) + end + + describe "indexing public function definitions" do + test "finds zero arity public functions (no parens)" do + code = + ~q[ + def zero_arity do + end + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :public} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity" == extract(code, zero_arity.range) + assert "def zero_arity do\nend" == extract(code, zero_arity.block_range) + end + + test "finds zero arity one line public functions (no parens)" do + code = + ~q[ + def zero_arity, do: true + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :public} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity" == extract(code, zero_arity.range) + assert "def zero_arity, do: true" == extract(code, zero_arity.block_range) + end + + test "finds zero arity public functions (with parens)" do + code =
~q[ + def zero_arity() do + end + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :public} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity()" == extract(code, zero_arity.range) + assert "def zero_arity() do\nend" == extract(code, zero_arity.block_range) + end + + test "finds one arity public function" do + code = + ~q[ + def one_arity(a) do + a + 1 + end + ] + |> in_a_module() + + {:ok, [one_arity], _} = index(code) + + assert one_arity.type == {:function, :public} + assert one_arity.subtype == :definition + assert one_arity.subject == "Parent.one_arity/1" + assert "one_arity(a)" == extract(code, one_arity.range) + assert "def one_arity(a) do\na + 1\nend" == extract(code, one_arity.block_range) + end + + test "finds one arity public function with a guard" do + code = + ~q[ + def one_arity(a) when is_integer(a) do + a + 1 + end + ] + |> in_a_module() + + {:ok, [one_arity], _} = index(code) + + assert one_arity.type == {:function, :public} + assert one_arity.subtype == :definition + assert one_arity.subject == "Parent.one_arity/1" + assert "one_arity(a) when is_integer(a)" == extract(code, one_arity.range) + + assert "def one_arity(a) when is_integer(a) do\na + 1\nend" == + extract(code, one_arity.block_range) + end + + test "finds multi arity public function" do + code = + ~q[ + def multi_arity(a, b, c, d) do + {a, b, c, d} + end + ] + |> in_a_module() + + {:ok, [multi_arity], _} = index(code) + + assert multi_arity.type == {:function, :public} + assert multi_arity.subtype == :definition + assert multi_arity.subject == "Parent.multi_arity/4" + assert "multi_arity(a, b, c, d)" == extract(code, multi_arity.range) + + assert "def multi_arity(a, b, c, d) do\n{a, b, c, d}\nend" == + extract(code, multi_arity.block_range) + end + + test "finds multi-line function definitions" do + code = + ~q[ + def multi_line(a, + b, + c, + d) do + end + ] + |> in_a_module() + + {:ok, [multi_line], _} = index(code) + + expected = + """ + multi_line(a, + b, + c, + d) + """ + |> String.trim() + + assert expected == extract(code, multi_line.range) + end + + test "finds functions defined in protocol implementations" do + {:ok, [function], doc} = + ~q[ + defimpl MyProtocol, for: Structs.Mystruct do + def do_proto(a, b) do + a + b + end + end + ] + |> index_functions() + + assert function.type == {:function, :public} + assert function.subject == "MyProtocol.Structs.Mystruct.do_proto/2" + assert "do_proto(a, b)" = extract(doc, function.range) + assert decorate(doc, function.block_range) =~ "«def do_proto(a, b) do\n a + b\n end»" + end + + test "finds functions defined with defdelegate" do + {:ok, [function | _], doc} = + ~q[ + defmodule MyModule do + defdelegate map(enumerable, other), to: Enum + end + ] + |> index_functions() + + assert function.type == {:function, :delegate} + assert function.subject == "MyModule.map/2" + assert "map(enumerable, other)" = extract(doc, function.range) + assert decorate(doc, function.range) =~ "defdelegate «map(enumerable, other)», to: Enum" + end + + test "finds functions defined with defdelegate with an aliased module" do + {:ok, [function | _], doc} = + ~q[ + defmodule MyModule do + alias Parent.Child + defdelegate map(enumerable, other), to: Child + end + ] + |> index_functions() + + assert function.type == {:function, :delegate} + assert function.subject == "MyModule.map/2" + assert "map(enumerable, other)" = extract(doc, function.range) + assert 
decorate(doc, function.range) =~ "defdelegate «map(enumerable, other)», to: Child" + end + + test "skip quoted function defined with defdelegate" do + assert {:ok, [], _doc} = + ~q[ + quote do + defdelegate unquote(symbol)(enumerable, other), to: Enum + end + ] |> index_functions() + end + + test "finds functions defined with defdelegate and as" do + {:ok, [function | _], doc} = + ~q[ + defmodule MyModule do + defdelegate collect(enumerable, other), to: Enum, as: :map + end + ] + |> index_functions() + + assert function.type == {:function, :delegate} + assert function.subject == "MyModule.collect/2" + assert "collect(enumerable, other)" = extract(doc, function.range) + + assert decorate(doc, function.range) =~ + " defdelegate «collect(enumerable, other)», to: Enum, as: :map" + end + + test "skips public functions defined in quote blocks" do + code = + ~q[ + def something(name) do + quote do + def unquote(name)() do + end + end + end + ] + |> in_a_module() + + {:ok, [something], _} = index(code) + assert "something(name)" = extract(code, something.range) + end + + test "returns no references" do + {:ok, [function_definition], doc} = + ~q[ + def my_fn(a, b) do + end + ] + |> in_a_module() + |> index_functions() + + assert function_definition.type == {:function, :public} + assert function_definition.subtype == :definition + assert "my_fn(a, b)" = extract(doc, function_definition.range) + end + end + + describe "indexing private function definitions" do + test "finds zero arity one-line private functions (no parens)" do + code = + ~q[ + defp zero_arity, do: true + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :private} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity" == extract(code, zero_arity.range) + assert "defp zero_arity, do: true" == extract(code, zero_arity.block_range) + end + + test "finds zero arity one-line private functions (with parens)" do + code = + ~q[ + defp zero_arity(), do: true + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :private} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity()" == extract(code, zero_arity.range) + assert "defp zero_arity(), do: true" == extract(code, zero_arity.block_range) + end + + test "finds zero arity private functions (no parens)" do + code = + ~q[ + defp zero_arity do + end + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :private} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity" == extract(code, zero_arity.range) + assert "defp zero_arity do\nend" == extract(code, zero_arity.block_range) + end + + test "finds zero arity private functions (with parens)" do + code = + ~q[ + defp zero_arity() do + end + ] + |> in_a_module() + + {:ok, [zero_arity], _} = index(code) + + assert zero_arity.type == {:function, :private} + assert zero_arity.subtype == :definition + assert zero_arity.subject == "Parent.zero_arity/0" + assert "zero_arity()" == extract(code, zero_arity.range) + assert "defp zero_arity() do\nend" == extract(code, zero_arity.block_range) + end + + test "finds one arity one-line private functions" do + code = + ~q[ + defp one_arity(a), do: a + 1 + ] + |> in_a_module() + + {:ok, [one_arity], _} = index(code) + + assert one_arity.type == {:function, :private} + assert 
one_arity.subtype == :definition + assert one_arity.subject == "Parent.one_arity/1" + assert "one_arity(a)" == extract(code, one_arity.range) + assert "defp one_arity(a), do: a + 1" == extract(code, one_arity.block_range) + end + + test "finds one arity private functions" do + code = + ~q[ + defp one_arity(a) do + a + 1 + end + ] + |> in_a_module() + + {:ok, [one_arity], _} = index(code) + + assert one_arity.type == {:function, :private} + assert one_arity.subtype == :definition + assert one_arity.subject == "Parent.one_arity/1" + assert "one_arity(a)" == extract(code, one_arity.range) + assert "defp one_arity(a) do\na + 1\nend" == extract(code, one_arity.block_range) + end + + test "finds multi-arity one-line private functions" do + code = + ~q[ + defp multi_arity(a, b, c), do: {a, b, c} + ] + |> in_a_module() + + {:ok, [multi_arity], _} = index(code) + + assert multi_arity.type == {:function, :private} + assert multi_arity.subtype == :definition + assert multi_arity.subject == "Parent.multi_arity/3" + assert "multi_arity(a, b, c)" == extract(code, multi_arity.range) + assert "defp multi_arity(a, b, c), do: {a, b, c}" = extract(code, multi_arity.block_range) + end + + test "finds multi arity private functions" do + code = + ~q[ + defp multi_arity(a, b, c, d) do + {a, b, c, d} + end + ] + |> in_a_module() + + {:ok, [multi_arity], _} = index(code) + + assert multi_arity.type == {:function, :private} + assert multi_arity.subtype == :definition + assert multi_arity.subject == "Parent.multi_arity/4" + assert "multi_arity(a, b, c, d)" == extract(code, multi_arity.range) + + assert "defp multi_arity(a, b, c, d) do\n{a, b, c, d}\nend" == + extract(code, multi_arity.block_range) + end + + test "skips private functions defined in quote blocks" do + code = + ~q[ + defp something(name) do + quote do + defp unquote(name)() do + + end + end + end + ] + |> in_a_module() + + {:ok, [something], _} = index(code) + assert something.type == {:function, :private} + assert "something(name)" = extract(code, something.range) + end + + test "handles macro calls that define functions" do + {:ok, [definition], doc} = + ~q[ + quote do + def rpc_call(pid, call = %Call{method: unquote(method_name)}), + do: GenServer.unquote(genserver_method)(pid, call) + end + ]x + |> in_a_module() + |> index() + + assert definition.type == {:function, :public} + + assert decorate(doc, definition.range) =~ + "def «rpc_call(pid, call = %Call{method: unquote(method_name)})»" + end + + test "returns no references" do + {:ok, [function_definition], doc} = + ~q[ + defp my_fn(a, b) do + end + ] + |> in_a_module() + |> index_functions() + + assert function_definition.type == {:function, :private} + assert function_definition.subtype == :definition + assert "my_fn(a, b)" = extract(doc, function_definition.range) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_reference_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_reference_test.exs new file mode 100644 index 000000000..e49e5c295 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/function_reference_test.exs @@ -0,0 +1,417 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.FunctionReferenceTest do + alias Lexical.Test.RangeSupport + + import Lexical.Test.CodeSigil + import RangeSupport + + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, fn entry -> + match?({:function, :usage}, entry.type) and
entry.subtype == :reference + end) + end + + describe "remote function references" do + test "calling a zero-arg remote function with parens" do + code = in_a_module_function("OtherModule.test()") + + {:ok, [reference], _} = index(code) + + assert reference.subject == "OtherModule.test/0" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "OtherModule.test()" = extract(code, reference.range) + end + + test "calling a zero-arg remote function without parens" do + code = in_a_module_function("OtherModule.test") + + {:ok, [reference], _} = index(code) + assert reference.subject == "OtherModule.test/0" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "OtherModule.test" = extract(code, reference.range) + end + + test "calling a one-arg remote function" do + code = in_a_module_function("OtherModule.test(:arg)") + + {:ok, [reference], _} = index(code) + assert reference.subject == "OtherModule.test/1" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "OtherModule.test(:arg)" = extract(code, reference.range) + end + + test "calling a remote function that spans multiple lines" do + code = + """ + OtherModule.test( + :first, + :second, + :third + ) + """ + |> in_a_module_function() + + {:ok, [multi_line], _} = index(code) + + expected = + """ + OtherModule.test( + :first, + :second, + :third + ) + """ + |> String.trim() + + assert multi_line.subject == "OtherModule.test/3" + assert multi_line.type == {:function, :usage} + assert multi_line.subtype == :reference + assert expected == extract(code, multi_line.range) + end + end + + describe "aliased remote calls" do + test "aliases are expanded" do + code = ~q[ + defmodule Parent do + alias Other.Long.Module + def func do + Module.function(a, b, c) + end + end + ] + + {:ok, [reference], _} = index(code) + + assert reference.subject == "Other.Long.Module.function/3" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "Module.function(a, b, c)" == extract(code, reference.range) + end + + test "aliases using as" do + code = ~q[ + defmodule Parent do + alias Other.Long.Module, as: Mod + def func do + Mod.function(a, b, c) + end + end + ] + + {:ok, [reference], _} = index(code) + + assert reference.subject == "Other.Long.Module.function/3" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "Mod.function(a, b, c)" == extract(code, reference.range) + end + end + + describe "defdelegate" do + test "creates a function reference" do + {:ok, [reference], doc} = + "defdelegate map(enumerable, func), to: Enum" + |> in_a_module() + |> index() + + assert reference.subject == "Enum.map/2" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "defdelegate map(enumerable, func), to: Enum" = extract(doc, reference.range) + end + + test "creates a function reference to an aliased module" do + {:ok, [reference], doc} = + ~q[ + alias Grandparent.Parent.Child + defdelegate map(enumerable, func), to: Child + ] + |> in_a_module() + |> index() + + assert reference.subject == "Grandparent.Parent.Child.map/2" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "defdelegate map(enumerable, func), to: Child" = extract(doc, reference.range) + end + + test "creates a function reference when as is used" do + {:ok, [reference], doc} = + "defdelegate collect(enumerable, 
func), to: Enum, as: :map" + |> in_a_module() + |> index() + + assert reference.subject == "Enum.map/2" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + + assert "defdelegate collect(enumerable, func), to: Enum, as: :map" = + extract(doc, reference.range) + end + end + + describe "function captures" do + test "with specified arity" do + code = in_a_module_function("&OtherModule.test/3") + {:ok, [reference], _} = index(code) + + assert reference.subject == "OtherModule.test/3" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "OtherModule.test/3" == extract(code, reference.range) + end + + test "with anonymous arguments" do + code = in_a_module_function("&OtherModule.test(arg, &1)") + + {:ok, [reference], _} = index(code) + + assert reference.subject == "OtherModule.test/2" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "OtherModule.test(arg, &1)" == extract(code, reference.range) + end + + test "recognizes calls in a capture" do + code = in_a_module("&OtherModule.test(to_string(&1))") + {:ok, [outer_ref, inner_ref], _} = index(code) + assert outer_ref.subject == "OtherModule.test/1" + assert inner_ref.subject == "Kernel.to_string/1" + end + end + + describe "local function references" do + test "finds a zero-arg local function on the right of a match" do + code = in_a_module_function("x = local()") + {:ok, [reference], _} = index(code) + + assert reference.subject == "Parent.local/0" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "local()" == extract(code, reference.range) + end + + test "finds zero-arg local function with parens" do + code = in_a_module_function("local()") + + {:ok, [reference], _} = index(code) + assert reference.subject == "Parent.local/0" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "local()" == extract(code, reference.range) + end + + test "finds multi-arg local function" do + code = in_a_module_function("local(a, b, c)") + + {:ok, [reference], _} = index(code) + assert reference.subject == "Parent.local/3" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "local(a, b, c)" == extract(code, reference.range) + end + + test "finds multi-arg local function across multiple lines" do + code = + """ + my_thing = local_fn( + first_arg, + second_arg, + third_arg + ) + """ + |> in_a_module_function() + + {:ok, [reference], _} = index(code) + assert reference.subject == "Parent.local_fn/3" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + + expected = ~q[ + local_fn( + first_arg, + second_arg, + third_arg + ) + ]t + + assert expected == extract(code, reference.range) + end + + test "find a captured local private function" do + code = in_a_module(~q[ + def public_fun(x), do: x + defp private_fun(x, y), do: {x, y} + + def use_private_fun(_l), do: &private_fun(&1, 1) + ]) + + {:ok, [reference], _} = index(code) + + assert reference.subject == "Parent.private_fun/2" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "private_fun(&1, 1)" == extract(code, reference.range) + end + + test "finds a captured local public function" do + code = in_a_module(~q[ + def public_fun(x), do: x + defp private_fun(x, y), do: {x, y} + + def use_public_fun(_l), do: &public_fun/1 + ]) + + {:ok, [reference], _} = index(code) + + assert 
reference.subject == "Parent.public_fun/1" + assert reference.type == {:function, :usage} + assert reference.subtype == :reference + assert "public_fun/1" == extract(code, reference.range) + end + end + + describe "imported function references" do + test "imported local functions remember their module" do + code = ~q{ + defmodule Imports do + import Enum, only: [each: 2] + + def function do + each([1, 2], fn elem -> elem + 1 end) + end + end + } + + assert {:ok, [reference], _} = index(code) + assert reference.subject == "Enum.each/2" + assert "each([1, 2], fn elem -> elem + 1 end)" = extract(code, reference.range) + end + + test "are found even when captured" do + code = ~q{ + import String, only: [downcase: 1] + + f = &downcase/1 + } + + assert {:ok, [downcase_reference], _} = index(code) + assert downcase_reference.subject == "String.downcase/1" + assert "downcase/1" == extract(code, downcase_reference.range) + end + + test "works with multiple imports" do + code = ~q{ + import Enum, only: [map: 2] + import String, only: [downcase: 1] + + map(l, fn i -> downcase(i) end) + } + + assert {:ok, [r1, r2], _} = index(code) + assert r1.subject == "Enum.map/2" + assert "map(l, fn i -> downcase(i) end)" = extract(code, r1.range) + assert r2.subject == "String.downcase/1" + assert "downcase(i)" = extract(code, r2.range) + end + end + + describe "dynamic invocations" do + test "apply/3 with static arguments" do + code = in_a_module_function("apply(OtherModule, :function_name, [1, 2, 3])") + + assert {:ok, [reference], _} = index(code) + assert reference.subject == "OtherModule.function_name/3" + assert "apply(OtherModule, :function_name, [1, 2, 3])" = extract(code, reference.range) + end + + test "Kernel.apply/3 with static arguments" do + code = in_a_module_function("Kernel.apply(OtherModule, :function_name, [1, 2, 3])") + + assert {:ok, [reference], _} = index(code) + assert reference.subject == "OtherModule.function_name/3" + + assert "Kernel.apply(OtherModule, :function_name, [1, 2, 3])" = + extract(code, reference.range) + end + end + + describe "pipelines" do + test "finds references in a pipeline" do + code = "3 |> Module.do_something()" + assert {:ok, [reference], _} = index(code) + assert reference.subject == "Module.do_something/1" + assert "Module.do_something()" = extract(code, reference.range) + end + + test "finds references in a multi-call pipeline" do + code = in_a_module_function("3 |> Module.first() |> second()") + assert {:ok, [first, second], _} = index(code) + assert first.subject == "Module.first/1" + assert "Module.first()" == extract(code, first.range) + + assert second.subject == "Parent.second/1" + assert "second()" == extract(code, second.range) + end + end + + describe "multiple references" do + test "finds multiple references in an if" do + code = + ~q[ + if String.length(s) > 3 or String.length(t) < 7 do + end + ] + |> in_a_module_function() + + assert {:ok, [first, second], _} = index(code) + assert "String.length(s)" == extract(code, first.range) + assert "String.length(t)" == extract(code, second.range) + end + end + + describe "exclusions" do + @defs [ + def: 2, + defp: 2, + defdelegate: 2, + defexception: 1, + defguard: 1, + defguardp: 1, + defimpl: 3, + defmacro: 2, + defmacrop: 2, + defmodule: 2, + defoverridable: 1, + defprotocol: 2, + defstruct: 1 + ] + + for {fn_name, arity} <- @defs, + args = Enum.map(0..arity, &"arg_#{&1}"), + invocation = Enum.join(args, ", ") do + test "#{fn_name} is not found" do + assert {:ok, [], _} = index("#{unquote(fn_name)} 
#{unquote(invocation)}") + end + end + + @keywords ~w[and if import in not or raise require try use] + for keyword <- @keywords do + test "#{keyword} is not found" do + assert {:ok, [], _} = index("#{unquote(keyword)}") + end + end + + @operators ~w[-> && ** ++ -- .. "..//" ! <> =~ @ |> | || * + - / != !== < <= == === > >=] + for operator <- @operators do + test "operator #{operator} is not found" do + assert {:ok, [], _} = index("#{unquote(operator)}") + end + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_attribute_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_attribute_test.exs new file mode 100644 index 000000000..153498d32 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_attribute_test.exs @@ -0,0 +1,194 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ModuleAttributeTest do + alias Lexical.RemoteControl.Search.Subject + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, fn entry -> + entry.type == :module_attribute + end) + end + + describe "indexing module attributes" do + test "finds definitions when defining scalars" do + {:ok, [attr], doc} = + ~q[ + defmodule Root do + @attribute 32 + end + ] + |> index() + + assert attr.type == :module_attribute + assert attr.subtype == :definition + assert attr.subject == Subject.module_attribute(Root, :attribute) + + assert decorate(doc, attr.range) =~ "«@attribute 32»" + end + + test "in-progress module attributes are ignored" do + {:ok, [latter_attribute], _doc} = + ~q[ + defmodule Root do + @ + @callback foo() :: :ok + end + ] + |> index() + + assert latter_attribute.subject == "@callback" + end + + test "finds multiple definitions of the same attribute" do + {:ok, [first, second, third], doc} = + ~q[ + defmodule Parent do + @tag 1 + def first, do: 1 + + @tag 2 + def second, do: 1 + + @tag 3 + def third, do: 1 + end + ] + |> index() + + assert first.type == :module_attribute + assert first.subtype == :definition + assert first.subject == Subject.module_attribute(Parent, :tag) + assert decorate(doc, first.range) =~ "«@tag 1»" + + assert second.type == :module_attribute + assert second.subtype == :definition + assert second.subject == Subject.module_attribute(Parent, :tag) + assert decorate(doc, second.range) =~ "«@tag 2»" + + assert third.type == :module_attribute + assert third.subtype == :definition + assert third.subject == Subject.module_attribute(Parent, :tag) + assert decorate(doc, third.range) =~ "«@tag 3»" + end + + test "finds definitions when the definition spans multiple lines" do + {:ok, [attr], doc} = + ~q[ + defmodule Parent do + @number_strings 1..50 + |> Enum.map(& &1 * 2) + |> Enum.map(&Integer.to_string/1) + end + ] + |> index() + + assert attr.type == :module_attribute + assert attr.subtype == :definition + assert attr.subject == Subject.module_attribute(Parent, :number_strings) + + expected = + """ + «@number_strings 1..50 + |> Enum.map(& &1 * 2) + |> Enum.map(&Integer.to_string/1)» + """ + |> String.trim() + + assert decorate(doc, attr.range) =~ expected + end + + test "finds references in other definitions" do + {:ok, [_def1, def2, reference], doc} = + ~q[ + defmodule Root do + @attr 23 + + @attr2 @attr + 1 + end + ] + |> index() + + assert def2.type == :module_attribute + assert def2.subtype == :definition + assert def2.subject == Subject.module_attribute(Root, :attr2) + assert decorate(doc, def2.range) =~ "«@attr2 @attr + 
1»" + + assert reference.type == :module_attribute + assert reference.subtype == :reference + assert reference.subject == Subject.module_attribute(Root, :attr) + assert decorate(doc, reference.range) =~ "@attr2 «@attr» + 1" + end + + test "finds definitions in nested contexts" do + {:ok, [parent_def, child_def], doc} = + ~q[ + defmodule Parent do + @in_parent true + defmodule Child do + @in_child true + end + end + ] + |> index() + + assert parent_def.type == :module_attribute + assert parent_def.subtype == :definition + assert parent_def.subject == Subject.module_attribute(Parent, :in_parent) + assert decorate(doc, parent_def.range) =~ "«@in_parent true»" + + assert child_def.type == :module_attribute + assert child_def.subtype == :definition + assert child_def.subject == Subject.module_attribute(Parent.Child, :in_child) + assert decorate(doc, child_def.range) =~ "«@in_child true»" + end + + test "finds references in function arguments" do + {:ok, [_definition, reference], doc} = + ~q[ + defmodule InArgs do + @age 95 + def is_old?(@age), do: true + end + ] + |> index() + + assert reference.type == :module_attribute + assert reference.subtype == :reference + assert reference.subject == Subject.module_attribute(InArgs, :age) + assert decorate(doc, reference.range) =~ " def is_old?(«@age»)" + end + + test "finds references in map keys" do + {:ok, [_, key], doc} = + ~q[ + defmodule InMapKey do + @foo 3 + def something(%{@foo => 3}) do + end + end + ] + |> index() + + assert key.type == :module_attribute + assert key.subtype == :reference + assert key.subject == Subject.module_attribute(InMapKey, :foo) + assert decorate(doc, key.range) =~ "%{«@foo» => 3}" + end + + test "finds references in map values" do + {:ok, [_, value], doc} = + ~q[ + defmodule InMapValue do + @foo 3 + def something(%{foo: @foo}) do + end + end + ] + |> index() + + assert value.type == :module_attribute + assert value.subtype == :reference + assert value.subject == Subject.module_attribute(InMapValue, :foo) + assert decorate(doc, value.range) =~ "%{foo: «@foo»}" + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/source_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_test.exs similarity index 65% rename from apps/remote_control/test/lexical/remote_control/search/indexer/source_test.exs rename to apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_test.exs index 59ccbad85..b1ea06954 100644 --- a/apps/remote_control/test/lexical/remote_control/search/indexer/source_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/module_test.exs @@ -1,21 +1,8 @@ -defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do - alias Lexical.Document - alias Lexical.RemoteControl.Search.Indexer - alias Lexical.Test.RangeSupport - - import Lexical.Test.CodeSigil - import RangeSupport - - use ExUnit.Case +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ModuleTest do + use Lexical.Test.ExtractorCase def index(source) do - path = "/foo/bar/baz.ex" - doc = Document.new("file:///#{path}", source, 1) - - case Indexer.Source.index("/foo/bar/baz.ex", source) do - {:ok, indexed_items} -> {:ok, indexed_items, doc} - error -> error - end + do_index(source, &(&1.type == :module)) end describe "indexing modules" do @@ -41,7 +28,7 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do |> index() assert entry.type == :module - assert entry.parent == :root + assert entry.block_id == :root 
assert entry.subject == Simple assert decorate(doc, entry.range) =~ "defmodule «Simple» do" end @@ -56,7 +43,7 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do assert entry.subject == Simple.Module.Path assert entry.type == :module - assert entry.parent == :root + assert entry.block_id == :root assert decorate(doc, entry.range) =~ "defmodule «Simple.Module.Path» do" end @@ -83,7 +70,7 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do |> index() assert erlang_module.type == :module - assert erlang_module.parent == module_def.ref + assert erlang_module.block_id == module_def.id assert erlang_module.subject == :timer assert decorate(doc, erlang_module.range) =~ " @something «:timer»" end @@ -98,11 +85,27 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do |> index() assert attribute.type == :module - assert attribute.parent == module_def.ref + assert attribute.block_id == module_def.id assert attribute.subject == Some.Other.Module assert decorate(doc, attribute.range) =~ " @attr «Some.Other.Module»" end + test "can detect __MODULE__ in a function" do + {:ok, [_module_def, module_ref], doc} = + ~q[ + defmodule Root do + def something do + __MODULE__ + end + end + ] + |> index() + + assert module_ref.type == :module + assert module_ref.subject == Root + assert decorate(doc, module_ref.range) =~ " «__MODULE__»" + end + test "can detect a module reference on the left side of a pattern match" do {:ok, [_module_def, module_ref], doc} = ~q[ @@ -136,6 +139,23 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do assert decorate(doc, module_ref.range) =~ " «Thing.Util» = arg" end + test "can detect a module reference in a nested alias" do + {:ok, [_top_level, _foo, _first, second, fourth], doc} = ~q[ + defmodule TopLevel do + alias Foo.{ + First, + Second, + Third.Fourth + } + end] |> index() + + assert second.subject == Foo.Second + assert decorate(doc, second.range) == " «Second»," + + assert fourth.subject == Foo.Third.Fourth + assert decorate(doc, fourth.range) == " «Third.Fourth»" + end + test "can detect a module reference on the right side of a pattern match" do {:ok, [_module, module_ref], doc} = ~q[ @@ -185,6 +205,53 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do assert decorate(doc, module_ref.range) =~ " «Some.Module».function()" end + test "can detect a module reference in a remote captured function" do + {:ok, [_module, module_ref], doc} = + ~q[ + defmodule Capture do + def my_fn do + &Some.Module.function/1 + end + end + ]t + |> index() + + assert module_ref.type == :module + assert module_ref.subject == Some.Module + assert decorate(doc, module_ref.range) =~ " &«Some.Module».function/1" + end + + test "can detect a module reference in a remote captured function with multiple arities" do + {:ok, [_module, module_ref], doc} = + ~q[ + defmodule Capture do + def my_fn do + &Some.Module.function(&1, 1) + end + end + ]t + |> index() + + assert module_ref.type == :module + assert module_ref.subject == Some.Module + assert decorate(doc, module_ref.range) =~ " &«Some.Module».function(&1, 1)" + end + + test "can detect a module reference in an aliased remote captured function" do + {:ok, [_module, _alias, _aliased, module_ref], doc} = ~q[ + defmodule Capture do + alias First.Second, as: Third + def my_fn do + &Third.function/1 + end + end + ]t |> index() + + assert module_ref.type == :module + assert module_ref.subject == First.Second + assert decorate(doc, module_ref.range) =~ " &«Third».function/1" + end + test "can detect a 
module reference in a function call's arguments" do {:ok, [_module, module_ref], doc} = ~q[ @@ -283,9 +350,58 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do assert ref.type == :module assert ref.subject == Ref.To.Something - refute ref.parent == parent.ref + refute ref.block_id == parent.id assert decorate(doc, ref.range) =~ " «Ref.To.Something»" end + + test "can detect a @protocol module reference" do + {:ok, [protocol_def, protocol_ref, for_ref, proto_module_attr], doc} = + ~q[ + defimpl MyProtocol, for: Atom do + def something do + @protocol.Something + end + end + ] + |> index() + + assert protocol_def.type == :module + assert protocol_def.subtype == :definition + assert protocol_def.subject == MyProtocol.Atom + assert decorate(doc, protocol_def.range) == "«defimpl MyProtocol, for: Atom do»" + + assert protocol_ref.type == :module + assert protocol_ref.subtype == :reference + assert protocol_ref.subject == MyProtocol + assert decorate(doc, protocol_ref.range) == "defimpl «MyProtocol», for: Atom do" + + assert for_ref.type == :module + assert for_ref.subtype == :reference + assert for_ref.subject == Atom + assert decorate(doc, for_ref.range) == "defimpl MyProtocol, for: «Atom» do" + + assert proto_module_attr.type == :module + assert proto_module_attr.subtype == :reference + assert proto_module_attr.subject == MyProtocol.Something + assert decorate(doc, proto_module_attr.range) == " «@protocol.Something»" + end + + test "can detect an @for module reference" do + {:ok, [_, _, _, for_module_attr], doc} = + ~q[ + defimpl MyProtocol, for: DataStructure do + def something do + @for.Something + end + end + ] + |> index() + + assert for_module_attr.type == :module + assert for_module_attr.subtype == :reference + assert for_module_attr.subject == DataStructure.Something + assert decorate(doc, for_module_attr.range) == " «@for.Something»" + end end describe "multiple modules in one document" do @@ -300,17 +416,17 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do ] |> index() - assert first.parent == :root + assert first.block_id == :root assert first.type == :module assert first.subtype == :definition assert first.subject == First - assert second.parent == :root + assert second.block_id == :root assert second.type == :module assert second.subtype == :definition assert second.subject == Second - assert second.ref != first.ref + assert second.id != first.id end test "aren't nested" do @@ -360,11 +476,11 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do ] |> index() - assert parent.parent == :root + assert parent.block_id == :root assert parent.type == :module assert parent.subtype == :definition - assert child.parent == parent.ref + assert child.block_id == parent.id assert child.type == :module assert child.subtype == :definition end @@ -382,7 +498,7 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do ] |> index() - assert child_alias.parent == child.ref + assert child_alias.block_id == child.id assert child_alias.type == :module assert child_alias.subtype == :reference assert child_alias.subject == Something.Else.Other @@ -398,13 +514,27 @@ defmodule Lexical.RemoteControl.Search.Indexer.SourceTest do ] |> index() - assert parent.parent == :root + assert parent.block_id == :root assert parent.type == :module assert parent.subtype == :definition - assert child.parent == parent.ref + assert child.block_id == parent.id assert child.type == :module assert child.subtype == :definition end + + test "works with __MODULE__ alias concatenations" do 
+ {:ok, [_, child], _} = + ~q[ + defmodule Parent do + @child_module __MODULE__.Child + end + ] + |> index() + + assert child.type == :module + assert child.subtype == :reference + assert child.subject == Parent.Child + end end end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/protocol_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/protocol_test.exs new file mode 100644 index 000000000..4c0ac283d --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/protocol_test.exs @@ -0,0 +1,146 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.ProtocolTest do + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, &match?({:protocol, _}, &1.type)) + end + + describe "indexing protocol definitions" do + test "works" do + {:ok, [protocol], doc} = + ~q[ + defprotocol Something do + def activate(thing, environment) + end + ] + |> index() + + assert protocol.type == {:protocol, :definition} + assert protocol.subtype == :definition + assert protocol.subject == Something + + expected_block = ~q[ + «defprotocol Something do + def activate(thing, environment) + end» + ]t + + assert decorate(doc, protocol.range) == "defprotocol «Something» do" + assert decorate(doc, protocol.block_range) == expected_block + end + end + + describe "indexing protocol implementations" do + test "works" do + {:ok, [protocol], doc} = + ~q[ + defimpl Something, for: Atom do + def my_impl(atom, _opts) do + to_string(atom) + end + end + ] + |> index() + + assert protocol.type == {:protocol, :implementation} + assert protocol.subtype == :definition + assert protocol.subject == Something + + expected_block = + ~q[ + «defimpl Something, for: Atom do + def my_impl(atom, _opts) do + to_string(atom) + end + end» + ]t + |> String.trim_trailing() + + assert decorate(doc, protocol.range) == "«defimpl Something, for: Atom do»" + assert decorate(doc, protocol.block_range) == expected_block + end + end + + test "__MODULE__ is correct in implementations" do + {:ok, [protocol], doc} = + ~q[ + defimpl Something, for: Atom do + def something(atom) do + __MODULE__ + end + end + ] + |> index() + + assert protocol.type == {:protocol, :implementation} + assert protocol.subtype == :definition + assert protocol.subject == Something + + expected_block = ~q[ + «defimpl Something, for: Atom do + def something(atom) do + __MODULE__ + end + end» + ]t + + assert decorate(doc, protocol.range) == "«defimpl Something, for: Atom do»" + assert decorate(doc, protocol.block_range) == expected_block + end + + test "indexes all parts of a protocol" do + {:ok, extracted, doc} = + ~q[ + defimpl Protocol, for: Target do + def function(arg) do + __MODULE__ + end + end + ] + |> index_everything() + + [ + protocol_impl_def, + module_def, + protocol_ref, + target_ref, + function_def, + proto_module_ref + ] = extracted + + expected_block = ~q[ + «defimpl Protocol, for: Target do + def function(arg) do + __MODULE__ + end + end» + ]t + + assert protocol_impl_def.type == {:protocol, :implementation} + assert protocol_impl_def.subtype == :definition + assert protocol_impl_def.subject == Protocol + assert decorate(doc, protocol_impl_def.range) =~ "«defimpl Protocol, for: Target do»" + assert decorate(doc, protocol_impl_def.block_range) =~ expected_block + + assert module_def.type == :module + assert module_def.subtype == :definition + assert module_def.subject == Protocol.Target + assert decorate(doc, module_def.range) =~ "«defimpl 
Protocol, for: Target do»" + assert decorate(doc, module_def.block_range) =~ expected_block + + assert protocol_ref.type == :module + assert protocol_ref.subtype == :reference + assert protocol_ref.subject == Protocol + assert decorate(doc, protocol_ref.range) =~ "defimpl «Protocol», " + + assert target_ref.type == :module + assert target_ref.subtype == :reference + assert target_ref.subject == Target + assert decorate(doc, target_ref.range) =~ "defimpl Protocol, for: «Target» do" + + assert function_def.type == {:function, :public} + + assert proto_module_ref.type == :module + assert proto_module_ref.subject == Protocol.Target + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_definition_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_definition_test.exs new file mode 100644 index 000000000..10ea7270a --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_definition_test.exs @@ -0,0 +1,91 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.StructDefinitionTest do + alias Lexical.RemoteControl.Search.Subject + use Lexical.Test.ExtractorCase + + def index(source) do + do_index(source, fn entry -> + entry.type == :struct and entry.subtype == :definition + end) + end + + test "it should find a module that defines a struct via a list of atoms" do + {:ok, [struct], doc} = + ~q( + defmodule Root do + defstruct [:name, :value] + end + ) + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + assert struct.subject == Subject.module(Root) + + assert decorate(doc, struct.range) =~ "«defstruct [:name, :value]»" + end + + test "it highlights multiple line definitions" do + {:ok, [struct], doc} = + ~q( + defmodule Root do + defstruct [ + :name, + :value, + :other + ] + end + ) + |> index() + + expected = + """ + «defstruct [ + :name, + :value, + :other + ]» + """ + |> String.trim() + + assert decorate(doc, struct.range) =~ expected + end + + test "it should find a module that defines a struct via a keyword list" do + {:ok, [struct], doc} = + ~q( + defmodule Root do + defstruct [name: nil, cost: 0] + end + ) + |> index() + + assert struct.type == :struct + assert struct.subtype == :definition + assert struct.subject == Subject.module(Root) + + assert decorate(doc, struct.range) =~ "«defstruct [name: nil, cost: 0]»" + end + + test "it finds struct definitions in nested modules" do + {:ok, [child, parent], doc} = + ~q( + defmodule Parent do + defmodule Child do + defstruct [:parent, :height] + end + defstruct [name: nil, cost: 0] + end + ) + |> index() + + assert child.type == :struct + assert child.subtype == :definition + assert child.subject == Subject.module(Parent.Child) + assert decorate(doc, child.range) =~ "«defstruct [:parent, :height]" + + assert parent.type == :struct + assert parent.subtype == :definition + assert parent.subject == Subject.module(Parent) + assert decorate(doc, parent.range) =~ "«defstruct [name: nil, cost: 0]" + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_reference_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_reference_test.exs new file mode 100644 index 000000000..4849ca7ca --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/struct_reference_test.exs @@ -0,0 +1,392 @@ +defmodule Lexical.RemoteControl.Search.Indexer.Extractors.StructReferenceTest do 
+ alias Lexical.RemoteControl.Search.Subject
+ use Lexical.Test.ExtractorCase
+
+ def index(source) do
+ do_index(source, fn entry ->
+ entry.type == :struct and entry.subtype == :reference
+ end)
+ end
+
+ describe "recognizing structs" do
+ test "in a naked reference" do
+ {:ok, [struct], doc} =
+ ~q[%MyStruct{}]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == "«%MyStruct{}»"
+ end
+
+ test "in a naked reference with fields" do
+ {:ok, [struct], doc} =
+ ~q[
+ %MyStruct{name: "stinky", height: 184}
+ ]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S(«%MyStruct{name: "stinky", height: 184}»)
+ end
+
+ test "in a struct on the left side of a match" do
+ {:ok, [struct], doc} =
+ ~q[%MyStruct{} = variable]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == "«%MyStruct{}» = variable"
+ end
+
+ test "in a struct on the right side of a match" do
+ {:ok, [struct], doc} =
+ ~q[variable = %MyStruct{}]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == "variable = «%MyStruct{}»"
+ end
+
+ test "in a struct reference in params" do
+ {:ok, [struct], doc} =
+ ~q[
+ def my_fn(%MyStruct{} = first) do
+ end
+ ]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[def my_fn(«%MyStruct{}» = first) do]
+ end
+
+ test "in nested struct references" do
+ {:ok, [outer, inner], doc} =
+ ~q[
+ %OuterStruct{inner: %InnerStruct{}}
+ ]
+ |> index()
+
+ assert outer.type == :struct
+ assert outer.subtype == :reference
+ assert outer.subject == Subject.module(OuterStruct)
+ assert decorate(doc, outer.range) == ~S[«%OuterStruct{inner: %InnerStruct{}}»]
+
+ assert inner.type == :struct
+ assert inner.subtype == :reference
+ assert inner.subject == Subject.module(InnerStruct)
+ assert decorate(doc, inner.range) == ~S[%OuterStruct{inner: «%InnerStruct{}»}]
+ end
+
+ test "in map keys" do
+ {:ok, [struct], doc} =
+ ~q[%{%MyStruct{} => 3}]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[%{«%MyStruct{}» => 3}]
+ end
+
+ test "in map values" do
+ {:ok, [struct], doc} =
+ ~q[%{cool_struct: %MyStruct{}}]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[%{cool_struct: «%MyStruct{}»}]
+ end
+
+ test "in list elements" do
+ {:ok, [struct], doc} =
+ ~q([1, 2, %MyStruct{}])
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S([1, 2, «%MyStruct{}»])
+ end
+
+ test "in an imported call to struct/1 with an alias" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(MyStruct)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(MyStruct)»]
+ end
+
+ test "in an imported call to struct/1 with __MODULE__" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(__MODULE__)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(__MODULE__)»]
+ end
+
+ test "in an imported call to struct!/1 with __MODULE__" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct!(__MODULE__)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct!(__MODULE__)»]
+ end
+
+ test "in an imported call to struct/2 with an alias" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(MyStruct, foo: 3)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(MyStruct, foo: 3)»]
+ end
+
+ test "in an imported call to struct!/2 with an alias" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct!(MyStruct, foo: 3)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct!(MyStruct, foo: 3)»]
+ end
+
+ test "in an imported call to struct/2 with __MODULE__" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(__MODULE__, foo: 3)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(__MODULE__, foo: 3)»]
+ end
+
+ test "is ignored if struct isn't imported" do
+ assert {:ok, _, _} =
+ ~q{
+ defmodule Parent do
+
+ import Kernel, except: [struct: 1]
+ struct = struct(MyStruct)
+ end
+ }
+ |> index()
+ end
+
+ test "in a fully qualified call to Kernel.struct/1" do
+ {:ok, [struct], doc} = ~q[struct = Kernel.struct(MyStruct)] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[struct = «Kernel.struct(MyStruct)»]
+ end
+
+ test "in a fully qualified call to Kernel.struct/2" do
+ {:ok, [struct], doc} = ~q[struct = Kernel.struct(MyStruct, foo: 3)] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyStruct)
+ assert decorate(doc, struct.range) == ~S[struct = «Kernel.struct(MyStruct, foo: 3)»]
+ end
+
+ test "other functions named struct are not counted" do
+ {:ok, [], _} = ~q[struct = Macro.struct(MyStruct)] |> index()
+ end
+ end
+
+ describe "handling __MODULE__" do
+ test "in a module attribute" do
+ {:ok, [struct], doc} =
+ ~q[
+ defmodule MyModule do
+ @attr %__MODULE__{}
+ end
+ ]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyModule)
+ assert decorate(doc, struct.range) == ~S( @attr «%__MODULE__{}»)
+ end
+
+ test "in a submodule" do
+ {:ok, [struct], doc} =
+ ~q[
+ defmodule MyModule do
+ @attr %__MODULE__.Submodule{}
+ end
+ ]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyModule.Submodule)
+ assert decorate(doc, struct.range) == ~S( @attr «%__MODULE__.Submodule{}»)
+ end
+
+ test "in a function definition" do
+ {:ok, [struct], doc} =
+ ~q[
+ defmodule MyModule do
+ def my_fn(%__MODULE__{}), do: :ok
+ end
+ ]
+ |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(MyModule)
+ assert decorate(doc, struct.range) == ~S[ def my_fn(«%__MODULE__{}»), do: :ok]
+ end
+
+ test "in a call to Kernel.struct/1" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(__MODULE__)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(__MODULE__)»]
+ end
+
+ test "in a call to Kernel.struct!/1" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct!(__MODULE__)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct!(__MODULE__)»]
+ end
+
+ test "in a call to Kernel.struct/2" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct(__MODULE__, foo: 3)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct(__MODULE__, foo: 3)»]
+ end
+
+ test "in a call to Kernel.struct!/2" do
+ {:ok, [struct], doc} = ~q[
+ defmodule Parent do
+ struct = struct!(__MODULE__, foo: 3)
+ end
+ ] |> index()
+
+ assert struct.type == :struct
+ assert struct.subtype == :reference
+ assert struct.subject == Subject.module(Parent)
+ assert decorate(doc, struct.range) == ~S[ struct = «struct!(__MODULE__, foo: 3)»]
+ end
+ end
+
+ describe "when aliases can't be expanded" do
+ test "a fully qualified call to Kernel.struct/1 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = Kernel.struct(unquote(__MODULE__))
+ end
+ ] |> index()
+ end
+
+ test "a fully qualified call to Kernel.struct/2 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = Kernel.struct(unquote(__MODULE__), foo: 3)
+ end
+ ] |> index()
+ end
+
+ test "a call to struct!/2 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = struct!(unquote(__MODULE__), foo: 3)
+ end
+ ] |> index()
+ end
+
+ test "a call to struct!/1 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = struct!(unquote(__MODULE__))
+ end
+ ] |> index()
+ end
+
+ test "a call to struct/1 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = struct(unquote(__MODULE__))
+ end
+ ] |> index()
+ end
+
+ test "a call to struct/2 is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = struct(unquote(__MODULE__), foo: 3)
+ end
+ ] |> index()
+ end
+
+ test "a reference is ignored" do
+ assert {:ok, [], _} = ~q[
+ defmodule Parent do
+ struct = %unquote(__MODULE__){}
+ end
+ ] |> index()
+ end
+ end
+end
diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/variable_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/variable_test.exs
new file mode 100644
index 000000000..c5053d1ff
--- /dev/null
+++ b/apps/remote_control/test/lexical/remote_control/search/indexer/extractors/variable_test.exs
@@ -0,0 +1,1015 @@
+defmodule Lexical.RemoteControl.Search.Indexer.Extractors.VariableTest do
+ alias Lexical.RemoteControl.Search.Indexer.Extractors
+
+ use Lexical.Test.ExtractorCase
+
+ def index_references(source) do
+ do_index(source, fn entry -> entry.type == :variable and entry.subtype == :reference end, [
+ Extractors.Variable
+ ])
+ end
+
+ def index_definitions(source) do
+ do_index(source, fn entry -> entry.type == :variable and entry.subtype == :definition end, [
+ Extractors.Variable
+ ])
+ end
+
+ def index(source) do
+ do_index(source, &(&1.type == :variable), [Extractors.Variable])
+ end
+
+ def assert_definition(entry, variable_name) do
+ assert entry.type == :variable
+ assert entry.subtype == :definition
+ assert entry.subject == variable_name
+ end
+
+ def assert_reference(entry, variable_name) do
+ assert entry.type == :variable
+ assert entry.subtype == :reference
+ assert entry.subject == variable_name
+ end
+
+ for def_type <- [:def, :defp, :defmacro, :defmacrop] do
+ describe "variable definitions in #{def_type} parameters are extracted" do
+ test "in a plain parameter" do
+ {:ok, [param], doc} =
+ ~q[
+ #{unquote(def_type)} my_fun(var) do
+ end
+ ]
+ |> index_definitions()
+
+ assert_definition(param, :var)
+ assert decorate(doc, param.range) =~ "#{unquote(def_type)} my_fun(«var»)"
+ end
+
+ test "in a struct value" do
+ {:ok, [param], doc} =
+ ~q[
+ #{unquote(def_type)} my_fun(%Pattern{foo: var}) do
+ end
+ ]
+ |> index_definitions()
+
+ assert_definition(param, :var)
+ assert decorate(doc, param.range) =~ "#{unquote(def_type)} my_fun(%Pattern{foo: «var»})"
+ end
+
+ test "on both sides of a pattern match" do
+ {:ok, [var_1, var_2], doc} =
+ ~q[
+ #{unquote(def_type)} my_fun(%Pattern{foo: var} = var_2) do
+ end
+ ]
+ |> index_definitions()
+
+ assert_definition(var_1, :var)
+
+ assert decorate(doc, var_1.range) =~
+ "#{unquote(def_type)} my_fun(%Pattern{foo: «var»} = var_2)"
+
+ assert_definition(var_2, :var_2)
+
+ assert decorate(doc, var_2.range) =~
+ "#{unquote(def_type)} my_fun(%Pattern{foo: var} = «var_2»)"
+ end
+
+ test "in a struct module" do
+ {:ok, [var_1], doc} =
+ ~q[
+ #{unquote(def_type)} my_fun(%my_module{}) do
+ end
+ ]
+ |> index_definitions()
+
+ assert_definition(var_1, :my_module)
+ assert decorate(doc, var_1.range) =~ "#{unquote(def_type)} my_fun(%«my_module»{})"
+ end
+
+ test "in a bitstring" do
+ {:ok, [var], doc} =
+ ~q[
+ #{unquote(def_type)} my_fun(<<foo::binary-size(3)>>) do
+ end
+ ]
+ |> index_definitions()
+
+ assert_definition(var, :foo)
+
+ assert decorate(doc, var.range) =~
+ "#{unquote(def_type)} my_fun(<<«foo»::binary-size(3)>>) do"
+ end
+
+ test "in list elements" do
+ {:ok, [var_1, var_2], doc} =
+ ~q{
+ #{unquote(def_type)} my_fun([var_1, var_2]) do
+ end
+ }
+ |> index_definitions()
+
+ assert_definition(var_1, :var_1)
+ assert decorate(doc, var_1.range) =~ "#{unquote(def_type)} my_fun([«var_1», var_2])"
+
+ assert_definition(var_2, :var_2)
+ assert decorate(doc, var_2.range) =~ "#{unquote(def_type)} my_fun([var_1, «var_2»])"
+ end
+
+ test "in the tail of a list" do
+ {:ok, [tail], doc} =
+ ~q{
+ #{unquote(def_type)} my_fun([_ | acc]) do
+ end
+ }
+ |> index_definitions()
+
+ assert_definition(tail, :acc)
+ assert decorate(doc, tail.range) =~ "#{unquote(def_type)} my_fun([_ | «acc»])"
+ end
+
+ test "unless it is an alias" do
+ {:ok, [], _} =
+ ~q[
+ #{unquote(def_type)} my_fun(%MyStruct{}) do
+ end
+ ]
+ |> index_definitions()
+ end
+
+ test "unless it begins with an underscore" do
"unless it begins with an underscore" do + {:ok, [], _} = + ~q[ + #{unquote(def_type)} my_fun(_unused) do + end + ] + |> index_definitions() + + {:ok, [], _} = + ~q[ + #{unquote(def_type)} my_fun(_) do + end + ] + |> index_definitions() + end + end + + describe "variable definitions in #{def_type} that contain references are extracted" do + test "when passed through" do + {:ok, [def, ref], doc} = + ~q[ + #{unquote(def_type)} my_fun(var) do + var + end + ] + |> index() + + assert_definition(def, :var) + assert_reference(ref, :var) + + assert decorate(doc, def.range) =~ "#{unquote(def_type)} my_fun(«var») do" + assert decorate(doc, ref.range) =~ " «var»" + end + + test "when wrapped in a list" do + {:ok, [def, ref], doc} = + ~q{ + #{unquote(def_type)} my_fun([var]) do + [var] + end + } + |> index() + + assert_definition(def, :var) + assert_reference(ref, :var) + + assert decorate(doc, def.range) =~ "#{unquote(def_type)} my_fun([«var»]) do" + assert decorate(doc, ref.range) =~ " [«var»]" + end + + test "when it's a map value" do + {:ok, [def, ref], doc} = + ~q[ + #{unquote(def_type)} my_fun(%{key: var}) do + %{key: var} + end + ] + |> index() + + assert_definition(def, :var) + assert_reference(ref, :var) + + assert decorate(doc, def.range) =~ "#{unquote(def_type)} my_fun(%{key: «var»}) do" + assert decorate(doc, ref.range) =~ " %{key: «var»}" + end + + test "when it's a struct module" do + {:ok, [def, ref], doc} = + ~q[ + #{unquote(def_type)} my_fun(%{key: var}) do + %{key: var} + end + ] + |> index() + + assert_definition(def, :var) + assert_reference(ref, :var) + + assert decorate(doc, def.range) =~ "#{unquote(def_type)} my_fun(%{key: «var»}) do" + assert decorate(doc, ref.range) =~ " %{key: «var»}" + end + + test "when it's a tuple entry " do + {:ok, [def, ref], doc} = + ~q[ + #{unquote(def_type)} my_fun({var}) do + {var} + end + ] + |> index() + + assert_definition(def, :var) + assert_reference(ref, :var) + + assert decorate(doc, def.range) =~ "#{unquote(def_type)} my_fun({«var»}) do" + assert decorate(doc, ref.range) =~ " {«var»}" + end + + test "when it utilizes a pin " do + {:ok, [first_def, second_def, first_pin, other_def, second_ref, other_ref], doc} = + ~q" + #{unquote(def_type)} my_fun({first, second}) do + [^first, other] = second + other + end + " + |> index() + + assert_definition(first_def, :first) + + assert decorate(doc, first_def.range) =~ + "#{unquote(def_type)} my_fun({«first», second}) do" + + assert_definition(second_def, :second) + + assert decorate(doc, second_def.range) =~ + "#{unquote(def_type)} my_fun({first, «second»}) do" + + assert_reference(first_pin, :first) + assert decorate(doc, first_pin.range) =~ " [^«first», other]" + + assert_definition(other_def, :other) + assert decorate(doc, other_def.range) =~ " [^first, «other»]" + + assert_reference(second_ref, :second) + assert decorate(doc, second_ref.range) =~ " [^first, other] = «second»" + + assert_reference(other_ref, :other) + assert decorate(doc, other_ref.range) =~ " «other»" + end + end + end + + describe "variable definitions in anonymous function parameters are extracted" do + test "when definition on the right side of the equals" do + {:ok, [ref], doc} = + ~q[ + fn 1 = a -> a end + ] + |> index_references() + + assert decorate(doc, ref.range) =~ "fn 1 = a -> «a»" + end + + test "in a plain parameter" do + {:ok, [param], doc} = + ~q[ + fn var -> + nil + end + ] + |> index_definitions() + + assert_definition(param, :var) + assert decorate(doc, param.range) =~ "fn «var» ->" + end + + test "in a struct's 
values" do + {:ok, [param], doc} = + ~q[ + fn %Pattern{foo: var} -> + nil + end + ] + |> index_definitions() + + assert_definition(param, :var) + assert decorate(doc, param.range) =~ "fn %Pattern{foo: «var»} ->" + end + + test "when they're pinned" do + {:ok, [param], doc} = + ~q[ + fn ^pinned -> + nil + end + ] + |> index_references() + + assert_reference(param, :pinned) + assert decorate(doc, param.range) =~ "fn ^«pinned» ->" + end + + test "on both sides of a pattern match" do + {:ok, [var_1, var_2], doc} = + ~q[ + fn %Pattern{foo: var} = var_2 -> + nil + end + ] + |> index_definitions() + + assert_definition(var_1, :var) + assert decorate(doc, var_1.range) =~ "fn %Pattern{foo: «var»} = var_2 ->" + + assert_definition(var_2, :var_2) + assert decorate(doc, var_2.range) =~ "fn %Pattern{foo: var} = «var_2» ->" + end + + test "in a struct module" do + {:ok, [var_1], doc} = + ~q[ + fn %my_module{} -> + nil + end + ] + |> index_definitions() + + assert_definition(var_1, :my_module) + assert decorate(doc, var_1.range) =~ "fn %«my_module»{} ->" + end + + test "in list elements" do + {:ok, [var_1, var_2], doc} = + ~q{ + fn [var_1, var_2] -> + nil + end + } + |> index_definitions() + + assert_definition(var_1, :var_1) + assert decorate(doc, var_1.range) =~ "fn [«var_1», var_2] ->" + + assert_definition(var_2, :var_2) + assert decorate(doc, var_2.range) =~ "fn [var_1, «var_2»] ->" + end + + test "in the tail of a list" do + {:ok, [tail], doc} = + ~q{ + fn [_ | acc] -> + nil + end + } + |> index_definitions() + + assert_definition(tail, :acc) + assert decorate(doc, tail.range) =~ "fn [_ | «acc»] ->" + end + + test "unless it is an alias" do + {:ok, [], _} = + ~q[ + fn %MyStruct{} -> + nil + end + ] + |> index_definitions() + end + + test "unless it starts with an underscore" do + {:ok, [], _} = + ~q[ + fn _unused -> + nil + end + ] + |> index_definitions() + + {:ok, [], _} = + ~q[ + fn _ -> + nil + end + ] + |> index_definitions() + end + end + + describe "variable definitions in code are extracted" do + test "from full pattern matches" do + {:ok, [var], doc} = index_definitions(~q[var = 38]) + + assert_definition(var, :var) + assert decorate(doc, var.range) =~ "«var» = 38" + end + + test "from tuples elements" do + {:ok, [first, second], doc} = index_definitions(~q({first, second} = foo)) + + assert_definition(first, :first) + assert decorate(doc, first.range) =~ "{«first», second} =" + + assert_definition(second, :second) + assert decorate(doc, second.range) =~ "{first, «second»} =" + end + + test "from list elements" do + {:ok, [first, second], doc} = index_definitions(~q([first, second] = foo)) + + assert_definition(first, :first) + assert decorate(doc, first.range) =~ "[«first», second] =" + + assert_definition(second, :second) + assert decorate(doc, second.range) =~ "[first, «second»] =" + end + + test "from map values" do + {:ok, [value], doc} = index_definitions(~q(%{key: value} = whatever)) + + assert_definition(value, :value) + assert decorate(doc, value.range) =~ "%{key: «value»} = whatever" + end + + test "from struct values" do + {:ok, [value], doc} = index_definitions(~q(%MyStruct{key: value} = whatever)) + + assert_definition(value, :value) + assert decorate(doc, value.range) =~ "%MyStruct{key: «value»} = whatever" + end + + test "from struct modules" do + {:ok, [module], doc} = index_definitions(~q(%struct_module{} = whatever)) + + assert_definition(module, :struct_module) + assert decorate(doc, module.range) =~ "%«struct_module»{} = whatever" + end + + test "in an else block in a 
with" do + {:ok, [value], doc} = + ~q[ + with true <- true do + :bad + else var -> + :ok + end + ] + |> index_definitions() + + assert_definition(value, :var) + assert decorate(doc, value.range) =~ "else «var» ->" + end + + test "from comprehensions" do + {:ok, [var, thing, field_1, field_2], doc} = + ~q[ + for var <- things, + {:ok, thing} = var, + {:record, field_1, field_2} <- thing do + end + ] + |> index_definitions() + + assert_definition(var, :var) + assert decorate(doc, var.range) =~ "for «var» <- things," + + assert_definition(thing, :thing) + assert decorate(doc, thing.range) =~ "{:ok, «thing»} = var," + + assert_definition(field_1, :field_1) + assert decorate(doc, field_1.range) =~ "{:record, «field_1», field_2} <- thing do" + + assert_definition(field_2, :field_2) + assert decorate(doc, field_2.range) =~ "{:record, field_1, «field_2»} <- thing do" + end + + test "in an else block in a try" do + {:ok, [value], doc} = + ~q[ + try do + :ok + else failure -> + failure + end + ] + |> index_definitions() + + assert_definition(value, :failure) + assert decorate(doc, value.range) =~ "else «failure» ->" + end + + test "in a catch block in a try" do + {:ok, [value], doc} = + ~q[ + try do + :ok + catch thrown -> + thrown + end + ] + |> index_definitions() + + assert_definition(value, :thrown) + assert decorate(doc, value.range) =~ "catch «thrown» ->" + end + + test "in a rescue block in a try" do + {:ok, [value], doc} = + ~q[ + try do + :ok + rescue ex -> + ex + end + ] + |> index_definitions() + + assert_definition(value, :ex) + assert decorate(doc, value.range) =~ "rescue «ex» ->" + end + + test "in a rescue block in a try using in" do + {:ok, [value], doc} = + ~q[ + try do + :ok + rescue ex in RuntimeError -> + ex + end + ] + |> index_definitions() + + assert_definition(value, :ex) + assert decorate(doc, value.range) =~ "rescue «ex» in RuntimeError ->" + end + + test "from complex, nested mappings" do + {:ok, [module, list_elem, tuple_first, tuple_second], doc} = + index_definitions( + ~q(%struct_module{key: [list_elem, {tuple_first, tuple_second}]} = whatever) + ) + + assert_definition(module, :struct_module) + + assert decorate(doc, module.range) =~ + "%«struct_module»{key: [list_elem, {tuple_first, tuple_second}]} = whatever" + + assert_definition(list_elem, :list_elem) + + assert decorate(doc, list_elem.range) =~ + "%struct_module{key: [«list_elem», {tuple_first, tuple_second}]} = whatever" + + assert_definition(tuple_first, :tuple_first) + + assert decorate(doc, tuple_first.range) =~ + "%struct_module{key: [list_elem, {«tuple_first», tuple_second}]} = whatever" + + assert_definition(tuple_second, :tuple_second) + + assert decorate(doc, tuple_second.range) =~ + "%struct_module{key: [list_elem, {tuple_first, «tuple_second»}]} = whatever" + end + + test "from test arguments" do + {:ok, [test_def], doc} = + ~q[ + defmodule TestCase do + use ExUnit.Case + test "my test", %{var: var} do + var + end + end + ] + |> index_definitions() + + assert_definition(test_def, :var) + assert decorate(doc, test_def.range) =~ "%{var: «var»} do" + end + end + + describe "variable references are extracted" do + test "when by themselves" do + assert {:ok, [ref], doc} = index_references(~q[variable]) + + assert_reference(ref, :variable) + assert decorate(doc, ref.range) =~ "«variable»" + end + + test "from pinned variables" do + {:ok, [ref], doc} = index_references("^pinned = 3") + + assert_reference(ref, :pinned) + assert decorate(doc, ref.range) =~ "^«pinned» = 3" + end + + test "from pinned variables 
in a function head" do + {:ok, [ref], doc} = + ~q{ + fn [^pinned] -> + nil + end + } + |> index + + assert_reference(ref, :pinned) + assert decorate(doc, ref.range) =~ "fn [^«pinned»] ->" + end + + test "on the left side of operators" do + assert {:ok, [ref], doc} = index_references(~q[x + 3]) + + assert_reference(ref, :x) + assert decorate(doc, ref.range) =~ "«x» + 3" + end + + test "on the right side of operators" do + assert {:ok, [ref], doc} = index_references(~q[3 + x]) + + assert_reference(ref, :x) + assert decorate(doc, ref.range) =~ "3 + «x»" + end + + test "on the right of pattern matches" do + assert {:ok, [ref], doc} = index_references(~q[x = other_variable]) + + assert_reference(ref, :other_variable) + assert decorate(doc, ref.range) =~ "x = «other_variable»" + end + + test "on the right side of pattern matches with dot notation" do + assert {:ok, [ref], doc} = index_references(~q[x = foo.bar.baz]) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ "x = «foo».bar.baz" + end + + test "on the right side of a pattern match in a function call" do + assert {:ok, [ref], doc} = index_references(~q[_ = foo(bar)]) + + assert_reference(ref, :bar) + assert decorate(doc, ref.range) =~ "_ = foo(«bar»)" + end + + test "on the left of pattern matches via a pin" do + assert {:ok, [ref], doc} = index_references(~q[^pin = 49]) + + assert_reference(ref, :pin) + assert decorate(doc, ref.range) =~ "^«pin» = 49" + end + + test "from function call arguments" do + assert {:ok, [ref], doc} = index_references(~q[pow(x, 3)]) + + assert_reference(ref, :x) + assert decorate(doc, ref.range) =~ "pow(«x», 3)" + end + + test "when using access syntax" do + assert {:ok, [ref], doc} = index_references(~q{3 = foo[:bar]}) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ "3 = «foo»[:bar]" + end + + test "when inside brackets" do + assert {:ok, [ref, access_ref], doc} = index_references(~q{3 = foo[bar]}) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ "3 = «foo»[bar]" + + assert_reference(access_ref, :bar) + assert decorate(doc, access_ref.range) =~ "3 = foo[«bar»]" + end + + test "inside string interpolations" do + quoted = + quote file: "foo.ex", line: 1 do + foo = 3 + "#{foo}" + end + + assert {:ok, [ref], doc} = index_references(quoted) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ ~S["#{«foo»}"] + end + + test "inside string interpolations that have a statement" do + quoted = + quote file: "foo.ex", line: 1 do + foo = 3 + "#{foo + 3}" + end + + assert {:ok, [ref], doc} = index_references(quoted) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ ~S["#{«foo» + 3}"] + end + + test "inside string interpolations that have a literal prefix" do + quoted = + quote file: "foo.ex", line: 1 do + foo = 3 + "prefix #{foo}" + end + + assert {:ok, [ref], doc} = index_references(quoted) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ ~S["prefix #{«foo»}"] + end + + test "inside string interpolations that have a literal suffix" do + quoted = + quote file: "foo.ex", line: 1 do + foo = 3 + "#{foo} suffix" + end + + assert {:ok, [ref], doc} = index_references(quoted) + + assert_reference(ref, :foo) + assert decorate(doc, ref.range) =~ ~S["#{«foo»} suffix"] + end + + test "inside string interpolations that have a literal prefix and suffix" do + quoted = + quote file: "foo.ex", line: 1 do + foo = 3 + "prefix #{foo} suffix" + end + + assert {:ok, [ref], doc} = index_references(quoted) + + assert_reference(ref, 
:foo)
+ assert decorate(doc, ref.range) =~ ~S["prefix #{«foo»} suffix"]
+ end
+
+ test "when inside a rescue block in a try" do
+ {:ok, [ref], doc} =
+ ~q[
+ try do
+ :ok
+ rescue e in Something ->
+ e
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :e)
+ assert decorate(doc, ref.range) =~ " «e»"
+ end
+
+ test "when inside a catch block in a try" do
+ {:ok, [ref], doc} =
+ ~q[
+ try do
+ :ok
+ catch thrown ->
+ thrown
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :thrown)
+ assert decorate(doc, ref.range) =~ " «thrown»"
+ end
+
+ test "when inside an after block in a try" do
+ {:ok, [ref], doc} =
+ ~q[
+ try do
+ :ok
+ after
+ x
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :x)
+ assert decorate(doc, ref.range) =~ " «x»"
+ end
+
+ test "when inside an else block in a with" do
+ {:ok, [ref], doc} =
+ ~q[
+ with :ok <- call() do
+ else other ->
+ other
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :other)
+ assert decorate(doc, ref.range) =~ " «other»"
+ end
+
+ test "when in the tail of a list" do
+ assert {:ok, [ref], doc} = index_references(~q{[3 | acc]})
+
+ assert_reference(ref, :acc)
+ assert decorate(doc, ref.range) =~ "[3 | «acc»]"
+ end
+
+ test "in the body of an anonymous function" do
+ {:ok, [ref], doc} =
+ ~q[
+ fn %Pattern{foo: var} ->
+ var
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :var)
+ assert decorate(doc, ref.range) =~ " «var»"
+ end
+
+ test "when unquote is used in a function definition" do
+ {:ok, [ref], doc} =
+ ~q[
+ def my_fun(unquote(other_var)) do
+ end
+ ]
+ |> index_references()
+
+ assert_reference(ref, :other_var)
+ assert decorate(doc, ref.range) =~ "def my_fun(unquote(«other_var»)) do"
+ end
+
+ test "unless it begins with an underscore" do
+ assert {:ok, [], _} = index_references("_")
+ assert {:ok, [], _} = index_references("_unused")
+ assert {:ok, [], _} = index_references("_unused = 3")
+ assert {:ok, [], _} = index_references("_unused = foo()")
+ end
+ end
+
+ describe "variable definitions and references are extracted" do
+ test "in a multiple match" do
+ {:ok, [foo_def, param_def, bar_def, other_ref], doc} =
+ ~q[
+ foo = param = bar = other
+ ]
+ |> index()
+
+ assert_definition(foo_def, :foo)
+ assert decorate(doc, foo_def.range) =~ "«foo» = param = bar = other"
+
+ assert_definition(param_def, :param)
+ assert decorate(doc, param_def.range) =~ "foo = «param» = bar = other"
+
+ assert_definition(bar_def, :bar)
+ assert decorate(doc, bar_def.range) =~ "foo = param = «bar» = other"
+
+ assert_reference(other_ref, :other)
+ assert decorate(doc, other_ref.range) =~ "foo = param = bar = «other»"
+ end
+
+ test "in an anonymous function" do
+ {:ok, [pin_param, var_param, first_def, pin_pin, var_ref, first_ref], doc} =
+ ~q{
+ fn pin, var ->
+ [first, ^pin] = var
+ first
+ end
+ }
+ |> index()
+
+ assert_definition(pin_param, :pin)
+ assert decorate(doc, pin_param.range) =~ "fn «pin», var ->"
+
+ assert_definition(var_param, :var)
+ assert decorate(doc, var_param.range) =~ "fn pin, «var» ->"
+
+ assert_definition(first_def, :first)
+ assert decorate(doc, first_def.range) =~ " [«first», ^pin] = var"
+
+ assert_reference(pin_pin, :pin)
+ assert decorate(doc, pin_pin.range) =~ " [first, ^«pin»] = var"
+
+ assert_reference(var_ref, :var)
+ assert decorate(doc, var_ref.range) =~ " [first, ^pin] = «var»"
+
+ assert_reference(first_ref, :first)
+ assert decorate(doc, first_ref.range) =~ " «first»"
+ end
+
+ test "in the match arms of a with" do
+ {:ok, [var_def, var_2_def, var_ref], doc} =
+ ~q[
+ with 
{:ok, var} <- something(), + {:ok, var_2} <- something_else(var) do + :bad + end + ] + |> index() + + assert_definition(var_def, :var) + assert decorate(doc, var_def.range) =~ "{:ok, «var»} <- something()," + + assert_definition(var_2_def, :var_2) + assert decorate(doc, var_2_def.range) =~ " {:ok, «var_2»} <- something_else(var) do" + + assert_reference(var_ref, :var) + assert decorate(doc, var_ref.range) =~ " {:ok, var_2} <- something_else(«var») do" + end + + test "in the body of a with" do + {:ok, [_var_def, var_ref], doc} = + ~q[ + with {:ok, var} <- something() do + var + 1 + end + ] + |> index() + + assert_reference(var_ref, :var) + assert decorate(doc, var_ref.range) =~ " «var» + 1" + end + + test "in a comprehension" do + {:ok, extracted, doc} = + ~q[ + for {:ok, var} <- list, + {:record, elem_1, elem_2} <- var do + {:ok, elem_1 + elem_2} + end + ] + |> index() + + assert [var_def, list_ref, elem_1_def, elem_2_def, var_ref, elem_1_ref, elem_2_ref] = + extracted + + assert_definition(var_def, :var) + assert decorate(doc, var_def.range) =~ "for {:ok, «var»} <- list," + + assert_reference(list_ref, :list) + assert decorate(doc, list_ref.range) =~ "for {:ok, var} <- «list»," + + assert_definition(elem_1_def, :elem_1) + assert decorate(doc, elem_1_def.range) =~ "{:record, «elem_1», elem_2} <- var do" + + assert_definition(elem_2_def, :elem_2) + assert decorate(doc, elem_2_def.range) =~ "{:record, elem_1, «elem_2»} <- var do" + + assert_reference(var_ref, :var) + assert decorate(doc, var_ref.range) =~ "{:record, elem_1, elem_2} <- «var» do" + + assert_reference(elem_1_ref, :elem_1) + assert decorate(doc, elem_1_ref.range) =~ " {:ok, «elem_1» + elem_2}" + + assert_reference(elem_2_ref, :elem_2) + assert decorate(doc, elem_2_ref.range) =~ " {:ok, elem_1 + «elem_2»}" + end + + test "in guards in def functions" do + {:ok, [param_def, param_2_def, param_ref], doc} = + ~q[ + def something(param, param_2) when param > 1 do + end + ] + |> index() + + assert_definition(param_def, :param) + assert decorate(doc, param_def.range) =~ "def something(«param», param_2) when param > 1 do" + + assert_definition(param_2_def, :param_2) + + assert decorate(doc, param_2_def.range) =~ + "def something(param, «param_2») when param > 1 do" + + assert_reference(param_ref, :param) + assert decorate(doc, param_ref.range) =~ "def something(param, param_2) when «param» > 1 do" + end + + test "in guards in anonymous functions" do + {:ok, [param_def, param_2_def, param_ref], doc} = + ~q[ + fn param, param_2 when param > 1 -> :ok end + ] + |> index() + + assert_definition(param_def, :param) + assert decorate(doc, param_def.range) =~ "fn «param», param_2 when param > 1 -> :ok end" + + assert_definition(param_2_def, :param_2) + assert decorate(doc, param_2_def.range) =~ "fn param, «param_2» when param > 1 -> :ok end" + + assert_reference(param_ref, :param) + assert decorate(doc, param_ref.range) =~ "fn param, param_2 when «param» > 1 -> :ok end" + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/metadata_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/metadata_test.exs new file mode 100644 index 000000000..d2c1a1f0c --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/metadata_test.exs @@ -0,0 +1,199 @@ +defmodule Lexical.RemoteControl.Search.Indexer.MetadataTest do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias 
Lexical.RemoteControl.Search.Indexer.Metadata
+
+ use ExUnit.Case
+
+ import Lexical.Test.RangeSupport
+ import Lexical.Test.CodeSigil
+
+ describe "blocks in modules" do
+ test "finds a block in an empty module" do
+ code = ~q[
+ defmodule MyModule do
+ end
+ ]t
+
+ assert "defmodule MyModule «do\n»end" == decorate_location(code)
+ end
+
+ test "finds a block in a module with an attribute" do
+ code = ~q[
+ defmodule WithAttribute do
+ @foo 32
+ end
+ ]t
+
+ assert "defmodule WithAttribute «do\n @foo 32\n»end" = decorate_location(code)
+ end
+
+ test "finds a block in a module with functions" do
+ code = ~q[
+ defmodule WithFunctions do
+ def fun do
+ end
+ end
+ ]t
+
+ expected = ~q[
+ defmodule WithFunctions «do
+ def fun do
+ end
+ »end
+ ]t
+ assert expected == decorate_location(code)
+ end
+ end
+
+ describe "blocks in functions" do
+ test "are found in a public function with do/end and no body" do
+ code = ~q[
+ def my_fn do
+ end
+ ]
+ assert "def my_fn «do\nend»" = decorate_location(code)
+ end
+
+ test "are found in a private function with do/end and no body" do
+ code = ~q[
+ defp my_fn do
+ end
+ ]
+ assert "defp my_fn «do\nend»" = decorate_location(code)
+ end
+
+ test "are found in a public function with do/end and a body" do
+ code = ~q[
+ def my_fn do
+ x = 4
+ x * 6
+ end
+ ]t
+
+ expected = ~q[
+ def my_fn «do
+ x = 4
+ x * 6
+ »end
+ ]t
+
+ assert expected == decorate_location(code)
+ end
+
+ test "are found in a private function with do/end and a body" do
+ code = ~q[
+ defp my_fn do
+ x = 4
+ x * 6
+ end
+ ]t
+
+ expected = ~q[
+ defp my_fn «do
+ x = 4
+ x * 6
+ »end
+ ]t
+
+ assert expected == decorate_location(code)
+ end
+
+ test "are found in single line terse public function" do
+ code = ~q[
+ def my_fn(arg), do: arg * 10
+ ]t
+
+ assert "def my_fn(arg), «do: arg * 10»" = decorate_location(code)
+ end
+
+ test "are found in single line terse private function" do
+ code = ~q[
+ defp my_fn(arg), do: arg * 10
+ ]t
+
+ assert "defp my_fn(arg), «do: arg * 10»" = decorate_location(code)
+ end
+
+ test "are found in single line terse public macro" do
+ code = ~q[
+ defmacro my_fn(arg), do: arg * 10
+ ]t
+
+ assert "defmacro my_fn(arg), «do: arg * 10»" = decorate_location(code)
+ end
+
+ test "are found in single line terse private macro" do
+ code = ~q[
+ defmacrop my_fn(arg), do: arg * 10
+ ]t
+
+ assert "defmacrop my_fn(arg), «do: arg * 10»" = decorate_location(code)
+ end
+
+ test "are found in multiple line terse public function" do
+ code = ~q[
+ def my_fun(arg),
+ do: arg + 1
+ ]
+
+ assert " «do: arg + 1»" == decorate_location(code)
+ end
+
+ test "are found in multiple line terse private function" do
+ code = ~q[
+ defp my_fun(arg),
+ do: arg + 1
+ ]
+
+ assert " «do: arg + 1»" == decorate_location(code)
+ end
+
+ test "are found in anonymous functions with no body" do
+ code = "fn -> :ok end"
+ assert "«fn -> :ok end»" == decorate_location(code)
+ end
+
+ test "are found in single line anonymous functions" do
+ code = "fn arg -> arg + 1 end"
+ assert "«fn arg -> arg + 1 end»" == decorate_location(code)
+ end
+
+ test "are found in multiple line anonymous functions" do
+ code = ~q[
+ fn a, b ->
+ y = do_something_with(a)
+ z = do_something_with(b)
+ y + z
+ end]
+
+ expected = ~q[
+ «fn a, b ->
+ y = do_something_with(a)
+ z = do_something_with(b)
+ y + z
+ end»
+ ]t
+
+ assert expected == decorate_location(code)
+ end
+ end
+
+ defp decorate_location(code) do
+ document = Document.new("file:///file.ex", code, 1)
+ {:ok, ast, _} = Ast.from(document)
+
+ case 
Metadata.location(ast) do + {:block, _position, {start_line, start_char}, {end_line, end_char}} -> + range = + Range.new( + Position.new(document, start_line, start_char), + Position.new(document, end_line, end_char) + ) + + decorate(code, range) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer/structure_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer/structure_test.exs new file mode 100644 index 000000000..0fdc84cb2 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/indexer/structure_test.exs @@ -0,0 +1,54 @@ +defmodule Lexical.RemoteControl.Search.Indexer.StructureTest do + use Lexical.Test.ExtractorCase + + def index(source) do + case do_index(source, fn entry -> entry.type != :metadata end) do + {:ok, results, _doc} -> {:ok, results} + error -> error + end + end + + describe "blocks are correctly popped " do + test "when multiple blocks end at once" do + {:ok, results} = + ~q[ + defmodule Parent do + def function_1 do + case something() do + :ok -> :yep + _ -> :nope + end + end + + defp function_2 do + end + end + ] + |> index() + + [module, public_function, private_function] = + Enum.filter(results, fn entry -> + entry.subtype == :definition + end) + + assert public_function.block_id == module.id + assert private_function.block_id == module.id + end + + test "when an expression occurs after a block" do + {:ok, [first_call, _, last_call]} = + ~q[ + first_call() + case something() do + :ok -> :yep + _ -> :nope + end + call() + ] + |> index() + + assert first_call.block_id == :root + assert last_call.block_id == :root + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/indexer_test.exs b/apps/remote_control/test/lexical/remote_control/search/indexer_test.exs index bba1e5a33..0d15ff334 100644 --- a/apps/remote_control/test/lexical/remote_control/search/indexer_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/indexer_test.exs @@ -1,19 +1,37 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do alias Lexical.Project alias Lexical.RemoteControl.Search.Indexer + alias Lexical.RemoteControl.Search.Indexer.Entry use ExUnit.Case use Patch import Lexical.Test.Fixtures + defmodule FakeBackend do + def set_entries(entries) when is_list(entries) do + :persistent_term.put({__MODULE__, :entries}, entries) + end + + def reduce(accumulator, reducer_fun) do + {__MODULE__, :entries} + |> :persistent_term.get([]) + |> Enum.reduce(accumulator, fn + %{id: id} = entry, acc when is_integer(id) -> reducer_fun.(entry, acc) + _, acc -> acc + end) + end + end + setup do project = project() + start_supervised(Lexical.RemoteControl.Dispatch) {:ok, project: project} end describe "create_index/1" do test "returns a list of entries", %{project: project} do - assert {:ok, entries} = Indexer.create_index(project) + assert {:ok, entry_stream} = Indexer.create_index(project) + entries = Enum.to_list(entry_stream) project_root = Project.root_path(project) assert length(entries) > 0 @@ -44,7 +62,9 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do end def with_an_existing_index(%{project: project}) do - {:ok, entries} = Indexer.create_index(project) + {:ok, entry_stream} = Indexer.create_index(project) + entries = Enum.to_list(entry_stream) + FakeBackend.set_entries(entries) {:ok, entries: entries} end @@ -55,11 +75,9 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do refute Enum.any?(entries, fn entry -> Path.basename(entry.path) == @ephemeral_file_name end) end - 
test "the ephemeral file is listed in the updated index", %{ - project: project, - entries: entries - } do - {:ok, [updated_entry], []} = Indexer.update_index(project, entries) + test "the ephemeral file is listed in the updated index", %{project: project} do + {:ok, entry_stream, []} = Indexer.update_index(project, FakeBackend) + assert [_structure, updated_entry] = Enum.to_list(entry_stream) assert Path.basename(updated_entry.path) == @ephemeral_file_name assert updated_entry.subject == Ephemeral end @@ -72,13 +90,10 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do assert Enum.any?(entries, fn entry -> Path.basename(entry.path) == @ephemeral_file_name end) end - test "returns the file paths of deleted files", %{ - project: project, - entries: entries, - file_path: file_path - } do + test "returns the file paths of deleted files", %{project: project, file_path: file_path} do File.rm(file_path) - assert {:ok, [], [^file_path]} = Indexer.update_index(project, entries) + assert {:ok, entry_stream, [^file_path]} = Indexer.update_index(project, FakeBackend) + assert [] = Enum.to_list(entry_stream) end test "updates files that have changed since the last index", %{ @@ -86,9 +101,11 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do entries: entries, file_path: file_path } do - path_to_mtime = Map.new(entries, & &1.updated_at) + entries = Enum.reject(entries, &is_nil(&1.id)) + path_to_mtime = Map.new(entries, fn entry -> {entry.path, Entry.updated_at(entry)} end) + [entry | _] = entries - {{year, month, day}, hms} = entry.updated_at + {{year, month, day}, hms} = Entry.updated_at(entry) old_mtime = {{year - 1, month, day}, hms} patch(Indexer, :stat, fn path -> @@ -112,7 +129,8 @@ defmodule Lexical.RemoteControl.Search.IndexerTest do File.write!(file_path, new_contents) - assert {:ok, [entry], []} = Indexer.update_index(project, entries) + assert {:ok, entry_stream, []} = Indexer.update_index(project, FakeBackend) + assert [_structure, entry] = Enum.to_list(entry_stream) assert entry.path == file_path assert entry.subject == Brand.Spanking.New end diff --git a/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/schema_test.exs b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/schema_test.exs index 8bea54f73..e6ae8feac 100644 --- a/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/schema_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/schema_test.exs @@ -1,8 +1,10 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do alias Lexical.Project alias Lexical.RemoteControl.Search.Store.Backends.Ets.Schema + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Wal import Lexical.Test.Fixtures + import Wal, only: :macros use ExUnit.Case @@ -20,6 +22,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do defmodule First do use Schema, version: 1 + def to_rows(_), do: [] end defmodule IncrementValue do @@ -29,6 +32,8 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do entries = Enum.map(entries, fn {k, v} -> {k, v + 1} end) {:ok, entries} end + + def to_rows(_), do: [] end defmodule IncrementKey do @@ -37,18 +42,22 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do def migrate(entries) do {:ok, Enum.map(entries, fn {k, v} -> {k + 1, v} end)} end + + def to_rows(_), do: [] end test "it ensures the uniqueness of versions in the schema order", %{project: project} do defmodule SameVersion do use 
Schema, version: 1 + + def to_rows(_), do: [] end assert_raise ArgumentError, fn -> Schema.load(project, [First, SameVersion]) end end test "it loads an empty index", %{project: project} do - assert {:ok, _, :empty} = Schema.load(project, [First]) + assert {:ok, _table_name, _wal, :empty} = Schema.load(project, [First]) end test "it loads existing entries", %{project: project} do @@ -58,7 +67,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do ] write_entries(project, First, entries) - assert {:ok, table_name, :stale} = Schema.load(project(), [First]) + assert {:ok, _wal, table_name, :stale} = Schema.load(project(), [First]) assert table_contents(table_name) == entries end @@ -66,18 +75,18 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do entries = [{1, 1}, {2, 2}, {3, 3}] write_entries(project, First, entries) - assert {:ok, table_name, :stale} = Schema.load(project, [First, IncrementValue]) + assert {:ok, _wal, table_name, :stale} = Schema.load(project, [First, IncrementValue]) assert table_contents(table_name) == [{1, 2}, {2, 3}, {3, 4}] end - test "removes old index files after migration", %{project: project} do + test "removes old wal after migration", %{project: project} do write_entries(project, First, []) - assert File.exists?(Schema.index_file_path(project, First)) + assert Wal.exists?(project, First.version()) - assert {:ok, _table_name, :empty} = Schema.load(project, [First, IncrementValue]) + assert {:ok, _table_name, _wal, :empty} = Schema.load(project, [First, IncrementValue]) - refute File.exists?(Schema.index_file_path(project, First)) + refute Wal.exists?(project, First.version()) end test "migrations that already exist on disk will be reapplied", %{project: project} do @@ -85,7 +94,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do write_entries(project, First, entries) write_entries(project, IncrementValue, entries) - assert {:ok, table_name, :stale} = Schema.load(project, [First, IncrementValue]) + assert {:ok, _wal, table_name, :stale} = Schema.load(project, [First, IncrementValue]) new_contents = table_contents(table_name) @@ -94,8 +103,8 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do assert {3, 4} in new_contents assert length(new_contents) == 3 - refute File.exists?(Schema.index_file_path(project, First)) - assert File.exists?(Schema.index_file_path(project, IncrementValue)) + refute Wal.exists?(project, First.version()) + assert Wal.exists?(project, IncrementValue.version()) end test "migrations will be reapplied", %{project: project} do @@ -103,7 +112,9 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do write_entries(project, First, entries) write_entries(project, IncrementValue, entries) - assert {:ok, table_name, :stale} = Schema.load(project, [First, IncrementValue, IncrementKey]) + assert {:ok, wal, table_name, :stale} = + Schema.load(project, [First, IncrementValue, IncrementKey]) + new_contents = table_contents(table_name) assert {2, 2} in new_contents @@ -111,8 +122,9 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do assert {4, 4} in new_contents assert length(new_contents) == 3 - refute File.exists?(Schema.index_file_path(project, First)) - refute File.exists?(Schema.index_file_path(project, IncrementValue)) + assert Wal.exists?(wal) + refute Wal.exists?(project, First.version()) + refute Wal.exists?(project, IncrementValue.version()) end test "migrations can delete all entries", %{project: project} do @@ 
-122,13 +134,14 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do def migrate(_) do {:ok, []} end + + def to_rows(_), do: [] end entries = [{1, 1}, {2, 2}, {3, 3}] write_entries(project, First, entries) - assert {:ok, table_name, :empty} = Schema.load(project, [First, Blank]) - + assert {:ok, _wal, table_name, :empty} = Schema.load(project, [First, Blank]) assert table_contents(table_name) == [] end @@ -139,6 +152,8 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do def migrate(_) do {:error, :migration_failed} end + + def to_rows(_), do: [] end entries = [{1, 1}] @@ -146,45 +161,21 @@ defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.SchemaTest do assert {:error, :migration_failed} = Schema.load(project, [First, FailedMigration]) end - test "loading from a table with a different name that shares the filename", %{project: project} do - defmodule StrangeName do - def table_name do - :strange - end - - def index_file_name do - First.index_file_name() - end - end - - entries = [{1, 1}, {2, 2}] - write_entries(project, StrangeName, entries) - {:ok, table_name, :stale} = Schema.load(project, [First]) - assert table_name == First.table_name() - assert table_contents(table_name) == entries - refute table_exists?(StrangeName.table_name()) - end - - defp table_exists?(table_name) do - :ets.whereis(table_name) != :undefined - end - def destroy_index_path(%Project{} = project) do - File.rm_rf(Schema.index_root(project)) + project |> Wal.root_path() |> File.rm_rf() end def write_entries(project, schema_module, entries) do - File.mkdir_p(Schema.index_root(project)) table_name = schema_module.table_name() + :ets.new(table_name, schema_module.table_options()) + {:ok, wal} = Wal.load(project, schema_module.version(), table_name) - path_charlist = - project - |> Schema.index_file_path(schema_module) - |> String.to_charlist() + with_wal wal do + :ets.insert(table_name, entries) + end - :ets.new(table_name, [:named_table, :set]) - :ets.insert(table_name, entries) - :ok = :ets.tab2file(table_name, path_charlist) + Wal.checkpoint(wal) + :ok = Wal.close(wal) :ets.delete(table_name) end diff --git a/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/wal_test.exs b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/wal_test.exs new file mode 100644 index 000000000..e5ea558f0 --- /dev/null +++ b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets/wal_test.exs @@ -0,0 +1,273 @@ +defmodule Lexical.RemoteControl.Search.Store.Backends.Ets.WalTest do + alias Lexical.RemoteControl.Search.Store.Backends.Ets.Wal + + import Lexical.Test.Fixtures + + use ExUnit.Case + use Patch + + import Wal, only: :macros + + @table_name :wal_test + @schema_version 1 + + setup do + project = project() + new_table() + + on_exit(fn -> + Wal.destroy(project, @schema_version) + end) + + {:ok, project: project} + end + + describe "with_wal/1" do + test "returns the wal state and the ets operation", %{project: project} do + {:ok, wal_state} = Wal.load(project, @schema_version, @table_name) + + {:ok, new_state, result} = + with_wal wal_state do + :ets.insert(@table_name, {:first, 1}) + :worked + end + + assert result == :worked + assert %Wal{} = new_state + end + end + + describe "non-write operations are ignored" do + setup [:with_a_loaded_wal] + + test "ignores lookups", %{wal: wal_state} do + {:ok, new_wal, _} = + with_wal wal_state do + :ets.lookup(@table_name, :first) + end + + assert {:ok, 0} = Wal.size(new_wal) + 
end
+ end
+
+ describe "operations" do
+ setup [:with_a_loaded_wal]
+
+ test "the wal captures delete operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, {1, 1}}, {:second, {2, 2}}])
+ :ets.delete(@table_name, :second)
+ end
+
+ entries = dump_and_close_table()
+
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures delete_all_objects operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}])
+ :ets.delete_all_objects(@table_name)
+ end
+
+ dump_and_close_table()
+ assert {:ok, _new_wal, []} = load_from_project(project)
+ end
+
+ test "the wal captures delete_object operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}])
+ :ets.delete_object(@table_name, {:first, 1})
+ end
+
+ entries = dump_and_close_table()
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures insert operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, {3, 6}}])
+ end
+
+ entries = dump_and_close_table()
+
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures insert_new operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert_new(@table_name, [{:first, 1}, {:second, 2}])
+ end
+
+ entries = dump_and_close_table()
+
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures match_delete operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}, {:third, 1}])
+ :ets.match_delete(@table_name, {:_, 1})
+ end
+
+ entries = dump_and_close_table()
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures select_delete operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 1}, {:third, 3}])
+ :ets.select_delete(@table_name, [{{:_, 1}, [], [true]}])
+ end
+
+ entries = dump_and_close_table()
+
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures select_replace operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}, {:third, 1}])
+ :ets.select_replace(@table_name, [{{:third, :_}, [], [{{:third, 3}}]}])
+ end
+
+ entries = dump_and_close_table()
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures update_counter operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}, {:third, 3}])
+ :ets.update_counter(@table_name, :first, {2, 1})
+ end
+
+ entries = dump_and_close_table()
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+
+ test "the wal captures update_element operations", %{wal: wal_state, project: project} do
+ with_wal wal_state do
+ :ets.insert(@table_name, [{:first, 1}, {:second, 2}, {:third, 3}])
+ :ets.update_element(@table_name, :first, {2, :oops})
+ end
+
+ entries = dump_and_close_table()
+ assert {:ok, _new_wal, ^entries} = load_from_project(project)
+ end
+ end
+
+ describe "checkpoints" do
+ setup [:with_a_loaded_wal]
+
+ test "fails if the ets table doesn't exist" do
+ {:ok, wal_state} = Wal.load(project(), 
@schema_version, :does_not_exist) + assert {:error, :no_table} = Wal.checkpoint(wal_state) + end + + test "gracefully handles an invalid checkpoint", %{wal: wal_state, project: project} do + :ok = Patch.expose(Wal, find_latest_checkpoint: 1) + + with_wal wal_state do + :ets.insert(@table_name, [{:first, 1}]) + end + + assert {:ok, new_wal} = Wal.checkpoint(wal_state) + {:ok, checkpoint_path} = private(Wal.find_latest_checkpoint(new_wal)) + Wal.close(new_wal) + # write junk over it + File.write!(checkpoint_path, "this is not data") + + {:ok, new_wal} = Wal.load(project, @schema_version, @table_name) + + assert Wal.size(new_wal) == {:ok, 0} + assert new_wal.checkpoint_version == 0 + end + + test "can load a checkpoint", %{wal: wal_state, project: project} do + with_wal wal_state do + :ets.insert(@table_name, [{:first, 1}, {:second, 2}]) + end + + assert wal_state.checkpoint_version == 0 + + # prior, we had no checkpoint and one item in the update + # log. Checkpointing clears out the updates log and + # creates a checkpoint file, which can be restored + assert {:ok, 1} = Wal.size(wal_state) + assert {:ok, new_wal} = Wal.checkpoint(wal_state) + + checkpoint_version = new_wal.checkpoint_version + + assert checkpoint_version > 0 + assert {:ok, 0} = Wal.size(new_wal) + + items = dump_and_close_table() + {:ok, loaded_wal, ^items} = load_from_project(project) + assert loaded_wal.checkpoint_version == checkpoint_version + end + + test "can handle lots of data", %{wal: wal_state, project: project} do + stream = + 1..500_000 + |> Stream.cycle() + |> Stream.map(fn count -> {{:item, count}, count} end) + + for item <- Enum.take(stream, 20_000) do + with_wal wal_state do + :ets.insert(@table_name, item) + end + end + + {:ok, new_state} = Wal.checkpoint(wal_state) + :ok = Wal.close(new_state) + data = dump_and_close_table() + + assert {:ok, _wal_state, entries} = load_from_project(project) + + assert Enum.sort(entries) == Enum.sort(data) + end + + test "checkpoints after a certain number of operations", %{project: project} do + {:ok, wal_state} = Wal.load(project, @schema_version, @table_name, max_wal_operations: 5) + + with_wal wal_state do + :ets.insert(@table_name, {:first, 1}) + :ets.insert(@table_name, {:first, 2}) + :ets.insert(@table_name, {:first, 3}) + :ets.insert(@table_name, {:first, 4}) + end + + assert Wal.size(wal_state) == {:ok, 4} + + with_wal wal_state do + :ets.insert(@table_name, {:first, 5}) + end + + assert Wal.size(wal_state) == {:ok, 0} + end + end + + defp with_a_loaded_wal(%{project: project}) do + {:ok, wal_state} = Wal.load(project, @schema_version, @table_name) + {:ok, wal: wal_state} + end + + defp dump_and_close_table do + items = :ets.tab2list(@table_name) + :ets.delete(@table_name) + items + end + + defp load_from_project(project) do + new_table() + {:ok, new_wal} = Wal.load(project, @schema_version, @table_name) + entries = :ets.tab2list(@table_name) + {:ok, new_wal, entries} + end + + defp new_table do + if :ets.info(@table_name) == :undefined do + :ets.new(@table_name, [:named_table, :ordered_set]) + end + end +end diff --git a/apps/remote_control/test/lexical/remote_control/search/store/backends/ets_test.exs b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets_test.exs index 294fefa89..dfb2d98b6 100644 --- a/apps/remote_control/test/lexical/remote_control/search/store/backends/ets_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/store/backends/ets_test.exs @@ -1,12 +1,13 @@ defmodule 
Lexical.RemoteControl.Search.Store.Backend.EtsTest do alias Lexical.Project + alias Lexical.RemoteControl.Dispatch alias Lexical.RemoteControl.Search.Store alias Lexical.RemoteControl.Search.Store.Backends alias Lexical.Test.Entry alias Lexical.Test.EventualAssertions alias Lexical.Test.Fixtures - use ExUnit.Case + use ExUnit.Case, async: false import EventualAssertions import Entry.Builder @@ -22,11 +23,24 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do # start with a clean slate. Lexical.RemoteControl.set_project(project) + delete_indexes(project, backend) + on_exit(fn -> + delete_indexes(project, backend) + end) + {:ok, backend: backend, project: project} end + def all_entries(backend) do + backend.reduce([], fn entry, acc -> [entry | acc] end) + end + + def delete_indexes(project, Backends.Ets) do + Backends.Ets.destroy_all(project) + end + def delete_indexes(project, backend) do backend.destroy(project) end @@ -40,17 +54,17 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do end defp start_supervised_store(%Project{} = project, create_fn, update_fn, backend) do + start_supervised!(Dispatch) + start_supervised!(Backends.Ets) start_supervised!({Store, [project, create_fn, update_fn, backend]}) - assert_eventually(ready?(project)) + assert_eventually alive?(), 1500 + Store.enable() + assert_eventually ready?(project), 1500 end def with_a_started_store(%{project: project, backend: backend}) do start_supervised_store(project, &default_create/1, &default_update/2, backend) - on_exit(fn -> - delete_indexes(project, backend) - end) - :ok end @@ -71,9 +85,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do {:ok, [], []} end - start_supervised!({Store, [project, create_fn, update_fn, backend]}) - - assert_eventually(ready?(project)) + start_supervised_store(project, create_fn, update_fn, backend) assert_receive :create refute_receive :update @@ -102,11 +114,11 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do test "starts empty if there are no disk files", %{project: project, backend: backend} do start_supervised_store(project, &default_create/1, &default_update/2, backend) - assert [] = Store.all() + assert [] = all_entries(backend) entry = definition() Store.replace([entry]) - assert Store.all() == [entry] + assert all_entries(backend) == [entry] end test "incorporates any indexed files in an empty index", %{project: project, backend: backend} do @@ -123,7 +135,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do restart_store(project) - paths = Enum.map(Store.all(), & &1.path) + paths = Enum.map(all_entries(backend), & &1.path) assert "/foo/bar/baz.ex" in paths assert "/foo/bar/quux.ex" in paths @@ -133,9 +145,9 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do create = fn _ -> {:error, :broken} end start_supervised_store(project, create, &default_update/2, backend) - assert_eventually(ready?(project)) + assert_eventually ready?(project) - assert Store.all() == [] + assert all_entries(backend) == [] end test "incorporates any indexed files in a stale index", %{project: project, backend: backend} do @@ -143,15 +155,15 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do _ -> {:ok, [ - reference(ref: 1, path: "/foo/bar/baz.ex"), - reference(ref: 2, path: "/foo/bar/quux.ex") + reference(id: 1, path: "/foo/bar/baz.ex"), + reference(id: 2, path: "/foo/bar/quux.ex") ]} end update = fn _, _ -> entries = [ - reference(ref: 3, path: "/foo/bar/baz.ex"), - reference(ref: 4, path: 
"/foo/bar/other.ex") + reference(id: 3, path: "/foo/bar/baz.ex"), + reference(id: 4, path: "/foo/bar/other.ex") ] {:ok, entries, []} @@ -161,7 +173,7 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do restart_store(project) - entries = Enum.map(Store.all(), &{&1.ref, &1.path}) + entries = Enum.map(all_entries(backend), &{&1.id, &1.path}) assert {2, "/foo/bar/quux.ex"} in entries assert {3, "/foo/bar/baz.ex"} in entries assert {4, "/foo/bar/other.ex"} in entries @@ -174,15 +186,20 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do start_supervised_store(project, create, update, backend) restart_store(project) - assert [] = Store.all() + assert [] = all_entries(backend) end test "the updater allows you to delete paths", %{project: project, backend: backend} do + kept_structure = %{root: %{3 => %{4 => %{}}}} + create = fn _ -> entries = [ + structure(path: "/path/to/keep.ex", structure: kept_structure), definition(path: "/path/to/keep.ex"), + structure(path: "/path/to/delete.ex"), definition(path: "/path/to/delete.ex"), definition(path: "/path/to/delete.ex"), + structure(path: "/another/path/to/delete.ex"), definition(path: "/another/path/to/delete.ex") ] @@ -197,64 +214,59 @@ defmodule Lexical.RemoteControl.Search.Store.Backend.EtsTest do restart_store(project) - assert [entry] = Store.all() + assert [entry] = all_entries(backend) assert entry.path == "/path/to/keep.ex" + + assert :error = Backends.Ets.structure_for_path("/path/to/delete.ex") + assert :error = Backends.Ets.structure_for_path("/another/path/to/delete.ex") + assert {:ok, ^kept_structure} = Backends.Ets.structure_for_path("/path/to/keep.ex") end end describe "replace/1" do setup [:with_a_started_store] - test "replace survives a restart", %{project: project} do + test "replace survives a restart", %{project: project, backend: backend} do entries = [definition(subject: My.Module)] assert :ok = Store.replace(entries) - Backends.Ets.force_sync(project) - Store.stop() - - refute_eventually(ready?(project)) - assert_eventually(ready?(project)) + restart_store(project) - assert entries == Store.all() + assert_eventually entries == all_entries(backend) end end describe "updating entries in a file" do setup [:with_a_started_store] - test "updates survive a restart", %{project: project} do + test "updates survive a restart", %{project: project, backend: backend} do path = "/path/to/something.ex" - Store.replace([definition(ref: 1, subject: My.Module, path: path)]) + Store.replace([definition(id: 1, subject: My.Module, path: path)]) Store.update(path, [ - reference(ref: 2, subject: Present, path: path) + reference(id: 2, subject: Present, path: path) ]) - Backends.Ets.force_sync(project) - Store.stop() - - refute_eventually(ready?(project)) - assert_eventually(ready?(project)) + restart_store(project) - assert [found] = Store.all() - assert found.ref == 2 + assert_eventually [%{id: 2}] = all_entries(backend) end end def restart_store(%Project{} = project) do - Backends.Ets.force_sync(project) - - Store - |> Process.whereis() - |> Process.monitor() + ref = + Store + |> Process.whereis() + |> Process.monitor() Store.stop() - refute_eventually(ready?(project)) receive do - {:DOWN, _, _, _, _} -> - assert_eventually(ready?(project)) + {:DOWN, ^ref, _, _, _} -> + assert_eventually Store |> Process.whereis() |> is_pid() + Store.enable() + assert_eventually ready?(project), 1500 after 1000 -> raise "Could not stop store" diff --git a/apps/remote_control/test/lexical/remote_control/search/store_test.exs 
b/apps/remote_control/test/lexical/remote_control/search/store_test.exs index 164d2d512..5122efce9 100644 --- a/apps/remote_control/test/lexical/remote_control/search/store_test.exs +++ b/apps/remote_control/test/lexical/remote_control/search/store_test.exs @@ -1,15 +1,19 @@ defmodule Lexical.RemoteControl.Search.StoreTest do + alias Lexical.RemoteControl.Dispatch + alias Lexical.RemoteControl.Search.Indexer + alias Lexical.RemoteControl.Search.Indexer.Entry alias Lexical.RemoteControl.Search.Store alias Lexical.RemoteControl.Search.Store.Backends.Ets alias Lexical.Test.Entry alias Lexical.Test.EventualAssertions alias Lexical.Test.Fixtures - use ExUnit.Case + use ExUnit.Case, async: false import Entry.Builder import EventualAssertions import Fixtures + import Lexical.Test.CodeSigil @backends [Ets] @@ -31,6 +35,12 @@ defmodule Lexical.RemoteControl.Search.StoreTest do {:ok, project: project} end + def all_entries(backend) do + [] + |> backend.reduce(fn entry, acc -> [entry | acc] end) + |> Enum.reverse() + end + for backend <- @backends, backend_name = backend |> Module.split() |> List.last() do describe "#{backend_name} :: replace/1" do @@ -42,7 +52,7 @@ defmodule Lexical.RemoteControl.Search.StoreTest do entries = [definition(subject: OtherModule)] Store.replace(entries) - assert entries == Store.all() + assert entries == all_entries(unquote(backend)) end end @@ -53,37 +63,14 @@ defmodule Lexical.RemoteControl.Search.StoreTest do test "matching can exclude on type" do Store.replace([ - definition(ref: 1), - reference(ref: 3) + definition(id: 1), + reference(id: 3) ]) assert {:ok, [ref]} = Store.exact(subtype: :reference) assert ref.subtype == :reference end - test "matching can exclude on elixir version" do - Store.replace([ - reference(subject: Enum, elixir_version: "1.0.0"), - reference(subject: Enum) - ]) - - assert {:ok, [ref]} = Store.exact("Enum", subtype: :reference) - assert ref.subject == Enum - refute ref.elixir_version == "1.0.0" - end - - test "matching can exclude on erlang version" do - Store.replace([ - reference(subject: Enum, erlang_version: "1.0.0"), - reference(subject: Enum) - ]) - - assert {:ok, [ref]} = Store.exact("Enum", subtype: :reference) - - assert ref.subject == Enum - refute ref.erlang_version == "1.0.0" - end - test "matching with queries can exclude on type" do Store.replace([ reference(subject: Foo.Bar.Baz), @@ -100,48 +87,45 @@ defmodule Lexical.RemoteControl.Search.StoreTest do test "matching exact tokens should work" do Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz), - definition(ref: 2, subject: Foo.Bar.Bak) + definition(id: 1, subject: Foo.Bar.Baz), + definition(id: 2, subject: Foo.Bar.Bak) ]) assert {:ok, [entry]} = Store.exact("Foo.Bar.Baz", type: :module, subtype: :definition) assert entry.subject == Foo.Bar.Baz - assert entry.ref == 1 + assert entry.id == 1 end - test "matching fuzzy tokens works" do + test "matching prefix tokens should work" do Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz), - definition(ref: 2, subject: Foo.Bar.Bak), - definition(ref: 3, subject: Bad.Times.Now) + definition(id: 1, subject: Foo.Bar), + definition(id: 2, subject: Foo.Baa.Baa), + definition(id: 3, subject: Foo.Bar.Baz) ]) - assert {:ok, [entry_1, entry_2]} = - Store.fuzzy("Foo.Bar.B", type: :module, subtype: :definition) - - assert entry_1.subject in [Foo.Bar.Baz, Foo.Bar.Bak] - assert entry_2.subject in [Foo.Bar.Baz, Foo.Bar.Bak] - end + assert {:ok, [entry1, entry3]} = + Store.prefix("Foo.Bar", type: :module, subtype: :definition) - test 
"matching only returns entries specific to our elixir version" do - Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz, elixir_version: "1.1"), - definition(ref: 2, subject: Foo.Bar.Baz) - ]) + assert entry1.subject == Foo.Bar + assert entry3.subject == Foo.Bar.Baz - assert {:ok, [entry]} = Store.fuzzy("Foo.Bar.", type: :module, subtype: :definition) - assert entry.ref == 2 + assert entry1.id == 1 + assert entry3.id == 3 end - test "matching only returns entries specific to our erlang version" do + test "matching fuzzy tokens works" do Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz, erlang_version: "14.3.2.8"), - definition(ref: 2, subject: Foo.Bar.Baz) + definition(id: 1, subject: Foo.Bar.Baz), + definition(id: 2, subject: Foo.Bar.Bak), + definition(id: 3, subject: Bad.Times.Now) ]) - assert {:ok, [entry]} = Store.fuzzy("Foo.Bar.", type: :module, subtype: :definition) - assert entry.ref == 2 + assert {:ok, [entry_1, entry_2]} = + Store.fuzzy("Foo.Bar.B", type: :module, subtype: :definition) + + assert entry_1.subject in [Foo.Bar.Baz, Foo.Bar.Bak] + assert entry_2.subject in [Foo.Bar.Baz, Foo.Bar.Bak] end end @@ -154,61 +138,211 @@ defmodule Lexical.RemoteControl.Search.StoreTest do path = "/path/to/file.ex" Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz, path: path), - definition(ref: 2, subject: Foo.Baz.Quux, path: path) + definition(id: 1, subject: Foo.Bar.Baz, path: path), + definition(id: 2, subject: Foo.Baz.Quux, path: path) ]) updated = [ - definition(ref: 3, subject: Other.Thing.Entirely, path: path) + definition(id: 3, subject: Other.Thing.Entirely, path: path) ] Store.update(path, updated) - assert [remaining] = Store.all() - refute remaining.ref in [1, 2] + assert_eventually [remaining] = all_entries(unquote(backend)) + refute remaining.id in [1, 2] end test "old entries with another path are kept" do updated_path = "/path/to/file.ex" Store.replace([ - definition(ref: 1, subject: Foo.Bar.Baz, path: updated_path), - definition(ref: 2, subject: Foo.Bar.Baz.Quus, path: updated_path), - definition(ref: 3, subject: Foo.Bar.Baz, path: "/path/to/another.ex") + definition(id: 1, subject: Foo.Bar.Baz, path: updated_path), + definition(id: 2, subject: Foo.Bar.Baz.Quus, path: updated_path), + definition(id: 3, subject: Foo.Bar.Baz, path: "/path/to/another.ex") ]) updated = [ - definition(ref: 4, subject: Other.Thing.Entirely, path: updated_path) + definition(id: 4, subject: Other.Thing.Entirely, path: updated_path) ] Store.update(updated_path, updated) - assert [first, second] = Store.all() + assert_eventually [first, second] = all_entries(unquote(backend)) - assert first.ref in [3, 4] - assert second.ref in [3, 4] + assert first.id in [3, 4] + assert second.id in [3, 4] end test "updated entries are not searchable" do path = "/path/to/ex.ex" Store.replace([ - reference(ref: 1, subject: Should.Be.Replaced, path: path) + definition(id: 1, subject: Should.Be.Replaced, path: path) ]) Store.update(path, [ - reference(ref: 2, subject: Present, path: path) + definition(id: 2, subject: Present, path: path) ]) - assert {:ok, [found]} = Store.fuzzy("Pres", type: :module, subtype: :reference) - assert found.ref == 2 + assert_eventually {:ok, [found]} = + Store.fuzzy("Pres", type: :module, subtype: :definition) + + assert found.id == 2 assert found.subject == Present end end + + describe "#{backend_name} :: structure queries " do + setup %{project: project} do + with_a_started_store(project, unquote(backend)) + end + + test "finding siblings" do + entries = + ~q[ + 
defmodule Parent do
+            def function do
+              First.Module
+              Second.Module
+              Third.Module
+            end
+          end
+        ]
+        |> entries()
+
+      subject_entry = Enum.find(entries, &(&1.subject == Third.Module))
+      assert {:ok, [first_ref, second_ref, ^subject_entry]} = Store.siblings(subject_entry)
+      assert first_ref.subject == First.Module
+      assert second_ref.subject == Second.Module
+    end
+
+    test "finding siblings of a function" do
+      entries =
+        ~q[
+          defmodule Parent do
+            def fun do
+              :ok
+            end
+
+            def fun2(arg) do
+              arg + 1
+            end
+
+            def fun3(arg, arg2) do
+              arg + arg2
+            end
+          end
+        ]
+        |> entries()
+
+      subject_entry = Enum.find(entries, &(&1.subject == "Parent.fun3/2"))
+
+      assert {:ok, siblings} = Store.siblings(subject_entry)
+      siblings = Enum.filter(siblings, &(&1.subtype == :definition))
+
+      assert [first_fun, second_fun, ^subject_entry] = siblings
+      assert first_fun.subject == "Parent.fun/0"
+      assert second_fun.subject == "Parent.fun2/1"
+    end
+
+    test "finding siblings of a non-existent entry" do
+      assert :error = Store.siblings(%Indexer.Entry{})
+    end
+
+    test "finding a parent in a function" do
+      entries =
+        ~q[
+          defmodule Parent do
+            def function do
+              Module.Ref
+            end
+          end
+        ]
+        |> entries()
+
+      subject_entry = Enum.find(entries, &(&1.subject == Module.Ref))
+      {:ok, parent} = Store.parent(subject_entry)
+
+      assert parent.subject == "Parent.function/0"
+      assert parent.type == {:function, :public}
+      assert parent.subtype == :definition
+
+      assert {:ok, parent} = Store.parent(parent)
+      assert parent.subject == Parent
+
+      assert :error = Store.parent(parent)
+    end
+
+    test "finding a parent in a comprehension" do
+      entries =
+        ~q[
+          defmodule Parent do
+            def fun do
+              for n <- 1..10 do
+                Module.Ref
+              end
+            end
+          end
+        ]
+        |> entries()
+
+      subject_entry = Enum.find(entries, &(&1.subject == Module.Ref))
+      assert {:ok, parent} = Store.parent(subject_entry)
+      assert parent.subject == "Parent.fun/0"
+    end
+
+    test "finding parents in a file with multiple nested modules" do
+      entries =
+        ~q[
+          defmodule Parent do
+            defmodule Child do
+              def fun do
+              end
+            end
+          end
+
+          defmodule Parent2 do
+            defmodule Child2 do
+              def fun2 do
+                Module.Ref
+              end
+            end
+          end
+        ]
+        |> entries()
+
+      subject_entry = Enum.find(entries, &(&1.subject == Module.Ref))
+
+      assert {:ok, parent} = Store.parent(subject_entry)
+
+      assert parent.subject == "Parent2.Child2.fun2/0"
+      assert {:ok, parent} = Store.parent(parent)
+      assert parent.subject == Parent2.Child2
+
+      assert {:ok, parent} = Store.parent(parent)
+      assert parent.subject == Parent2
+    end
+
+    test "finding a non-existent entry" do
+      assert Store.parent(%Indexer.Entry{}) == :error
+    end
+  end
 end

-  defp after_each_test(_, _) do
-    :ok
+  defp entries(source) do
+    document = Lexical.Document.new("file:///file.ex", source, 1)
+
+    {:ok, entries} =
+      document
+      |> Lexical.Ast.analyze()
+      |> Indexer.Quoted.index_with_cleanup()
+
+    Store.replace(entries)
+    entries
+  end
+
+  defp after_each_test(backend, project) do
+    destroy_backend(backend, project)
   end

   defp destroy_backends(project) do
@@ -216,7 +350,11 @@ defmodule Lexical.RemoteControl.Search.StoreTest do
   end

   defp destroy_backend(Ets, project) do
-    Ets.destroy(project)
+    Ets.destroy_all(project)
+  end
+
+  defp destroy_backend(_, _) do
+    :ok
   end

   defp default_create(_project) do
@@ -228,8 +366,16 @@ defmodule Lexical.RemoteControl.Search.StoreTest do
   end

   defp with_a_started_store(project, backend) do
+    destroy_backend(backend, project)
+
+    start_supervised!(Dispatch)
+    start_supervised!(backend)
     start_supervised!({Store, [project,
&default_create/1, &default_update/2, backend]}) + assert_eventually alive?() + + Store.enable() + assert_eventually ready?(), 1500 on_exit(fn -> diff --git a/apps/remote_control/test/support/lexical/test/code_mod_case.ex b/apps/remote_control/test/support/lexical/test/code_mod_case.ex index baf67091b..c3f6e897a 100644 --- a/apps/remote_control/test/support/lexical/test/code_mod_case.ex +++ b/apps/remote_control/test/support/lexical/test/code_mod_case.ex @@ -28,7 +28,10 @@ defmodule Lexical.Test.CodeMod.Case do alias Lexical.Ast if Keyword.get(options, :convert_to_ast, unquote(convert_to_ast?)) do - Ast.from(code) + case Ast.from(code) do + {:ok, ast, _comments} -> {:ok, ast} + other -> other + end else {:ok, nil} end diff --git a/apps/remote_control/test/support/lexical/test/entry/entry_builder.ex b/apps/remote_control/test/support/lexical/test/entry/entry_builder.ex index 73a119ad2..69eed8387 100644 --- a/apps/remote_control/test/support/lexical/test/entry/entry_builder.ex +++ b/apps/remote_control/test/support/lexical/test/entry/entry_builder.ex @@ -1,21 +1,19 @@ defmodule Lexical.Test.Entry.Builder do alias Lexical.Document.Range + alias Lexical.Identifier alias Lexical.RemoteControl.Search.Indexer.Entry - alias Lexical.VM.Versions import Lexical.Test.PositionSupport def entry(fields \\ []) do - versions = Versions.current() - defaults = [ - subject: Module, - ref: make_ref(), + block_id: Identifier.next_global!(), + id: Identifier.next_global!(), path: "/foo/bar/baz.ex", range: range(1, 1, 1, 5), - elixir_version: versions.elixir, - erlang_version: versions.erlang, - type: :module + subject: Module, + type: :module, + application: :remote_control ] fields = Keyword.merge(defaults, fields) @@ -35,6 +33,12 @@ defmodule Lexical.Test.Entry.Builder do |> entry() end + def structure(fields \\ []) do + path = Keyword.get(fields, :path, "/path/to/file.ex") + structure = Keyword.get(fields, :structure, %{root: %{}}) + Entry.block_structure(path, structure) + end + defp range(start_line, start_column, end_line, end_column) do Range.new(position(start_line, start_column), position(end_line, end_column)) end diff --git a/apps/remote_control/test/support/lexical/test/extractor_case.ex b/apps/remote_control/test/support/lexical/test/extractor_case.ex new file mode 100644 index 000000000..30bc93956 --- /dev/null +++ b/apps/remote_control/test/support/lexical/test/extractor_case.ex @@ -0,0 +1,57 @@ +defmodule Lexical.Test.ExtractorCase do + alias Lexical.Document + alias Lexical.RemoteControl.Search.Indexer + + use ExUnit.CaseTemplate + import Lexical.Test.CodeSigil + + using do + quote do + import Lexical.Test.CodeSigil + import Lexical.Test.RangeSupport + import unquote(__MODULE__) + end + end + + def index_everything(source) do + do_index(source, fn entry -> entry.type != :metadata end) + end + + def do_index(source, filter, extractors \\ nil) + + def do_index(source, filter, extractors) when is_binary(source) do + path = "/foo/bar/baz.ex" + doc = Document.new("file:///#{path}", source, 1) + + case Indexer.Source.index(path, source, extractors) do + {:ok, indexed_items} -> + indexed_items = Enum.filter(indexed_items, filter) + {:ok, indexed_items, doc} + + error -> + error + end + end + + def do_index(quoted_source, filter, extractors) do + source_string = Macro.to_string(quoted_source) + do_index(source_string, filter, extractors) + end + + def in_a_module(code, module_name \\ "Parent") do + ~q[ + defmodule #{module_name} do + #{code} + end + ] + end + + def in_a_module_function(code) do + ~q[ + 
def something do + #{code} + end + ] + |> in_a_module() + end +end diff --git a/apps/remote_control/test/support/lexical/test/mfa_support.ex b/apps/remote_control/test/support/lexical/test/mfa_support.ex new file mode 100644 index 000000000..88bded248 --- /dev/null +++ b/apps/remote_control/test/support/lexical/test/mfa_support.ex @@ -0,0 +1,9 @@ +defmodule Lexical.Test.MfaSupport do + defmacro mfa(ast) do + {m, f, a} = Macro.decompose_call(ast) + + quote do + {:mfa, unquote(m), unquote(f), unquote(a)} + end + end +end diff --git a/apps/remote_control/test/test_helper.exs b/apps/remote_control/test/test_helper.exs index 727d2b2b3..c078a7721 100644 --- a/apps/remote_control/test/test_helper.exs +++ b/apps/remote_control/test/test_helper.exs @@ -1,8 +1,19 @@ +Application.ensure_all_started(:snowflake) {"", 0} = System.cmd("epmd", ~w(-daemon)) random_number = :rand.uniform(500) with :nonode@nohost <- Node.self() do - {:ok, _pid} = :net_kernel.start([:"testing-#{random_number}@127.0.0.1"]) + {:ok, _pid} = + :net_kernel.start(:"testing-#{random_number}@127.0.0.1", %{name_domain: :longnames}) end +Lexical.RemoteControl.Module.Loader.start_link(nil) +ExUnit.configure(assert_receive_timeout: 1000) + ExUnit.start(exclude: [:skip]) + +if Version.match?(System.version(), ">= 1.15.0") do + Logger.configure(level: :none) +else + Logger.remove_backend(:console) +end diff --git a/apps/server/README.md b/apps/server/README.md index 72064a091..4984638f5 100644 --- a/apps/server/README.md +++ b/apps/server/README.md @@ -1,3 +1,3 @@ # Lexical.Server -The Lexical Language server implemnetation +The Lexical Language server implementation diff --git a/apps/server/lib/lexical/convertibles/lexical.plugin.diagnostic.result.ex b/apps/server/lib/lexical/convertibles/lexical.plugin.diagnostic.result.ex index c3a7ffafa..e6251213e 100644 --- a/apps/server/lib/lexical/convertibles/lexical.plugin.diagnostic.result.ex +++ b/apps/server/lib/lexical/convertibles/lexical.plugin.diagnostic.result.ex @@ -41,21 +41,25 @@ defimpl Lexical.Convertible, for: Lexical.Plugin.V1.Diagnostic.Result do Conversions.to_lsp(range) end - defp lsp_range(%Diagnostic.Result{} = diagnostic) do - with {:ok, document} <- Document.Store.open_temporary(diagnostic.uri) do + defp lsp_range(%Diagnostic.Result{uri: uri} = diagnostic) when is_binary(uri) do + with {:ok, document} <- Document.Store.open_temporary(uri) do position_to_range(document, diagnostic.position) end end + defp lsp_range(%Diagnostic.Result{}) do + {:error, :no_uri} + end + defp position_to_range(%Document{} = document, {start_line, start_column, end_line, end_column}) do - with {:ok, start_pos} <- position_to_range(document, {start_line, start_column}), - {:ok, end_pos} <- position_to_range(document, {end_line, end_column}) do - {:ok, Types.Range.new(start: start_pos, end: end_pos)} - end + start_pos = Position.new(document, start_line, max(start_column, 1)) + end_pos = Position.new(document, end_line, max(end_column, 1)) + + range = Range.new(start_pos, end_pos) + Conversions.to_lsp(range) end defp position_to_range(%Document{} = document, {line_number, column}) do - line_number = Math.clamp(line_number, 1, Document.size(document)) column = max(column, 1) document @@ -75,6 +79,10 @@ defimpl Lexical.Convertible, for: Lexical.Plugin.V1.Diagnostic.Result do end end + defp position_to_range(document, nil) do + position_to_range(document, 1) + end + defp to_lexical_range(%Document{} = document, line_number, column) do line_number = Math.clamp(line_number, 1, Document.size(document) + 1) 
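
The `Lexical.Server` changes below retire the old no-op `response_complete/2`
call in favor of `server_request/2`, which registers a two-arity callback that
fires when the client answers a server-to-client request. A minimal sketch of
a caller (here `request` stands in for any already-built `Requests` struct,
and the two `handle_*` helpers are hypothetical):

    # The callback receives the original request plus either
    # {:ok, result} or {:error, reason} once the client responds.
    :ok =
      Lexical.Server.server_request(request, fn _request, response ->
        case response do
          {:ok, result} -> handle_success(result)
          {:error, reason} -> handle_failure(reason)
        end
      end)
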
diff --git a/apps/server/lib/lexical/server.ex b/apps/server/lib/lexical/server.ex index ffbf6fb8e..1d36d60e4 100644 --- a/apps/server/lib/lexical/server.ex +++ b/apps/server/lib/lexical/server.ex @@ -1,9 +1,10 @@ defmodule Lexical.Server do + alias Lexical.Proto.Convert alias Lexical.Protocol.Notifications alias Lexical.Protocol.Requests - alias Lexical.Protocol.Responses - alias Lexical.Server.Provider + alias Lexical.Server.Provider.Handlers alias Lexical.Server.State + alias Lexical.Server.TaskQueue require Logger @@ -23,9 +24,17 @@ defmodule Lexical.Server do @dialyzer {:nowarn_function, apply_to_state: 2} - @spec response_complete(Requests.request(), Responses.response()) :: :ok - def response_complete(request, response) do - GenServer.call(__MODULE__, {:response_complete, request, response}) + @spec server_request( + Requests.request(), + (Requests.request(), {:ok, any()} | {:error, term()} -> term()) + ) :: :ok + def server_request(request, on_response) when is_function(on_response, 2) do + GenServer.call(__MODULE__, {:server_request, request, on_response}) + end + + @spec server_request(Requests.request()) :: :ok + def server_request(request) do + server_request(request, fn _, _ -> :ok end) end def start_link(_) do @@ -40,8 +49,9 @@ defmodule Lexical.Server do {:ok, State.new()} end - def handle_call({:response_complete, _request, _response}, _from, %State{} = state) do - {:reply, :ok, state} + def handle_call({:server_request, request, on_response}, _from, %State{} = state) do + new_state = State.add_request(state, request, on_response) + {:reply, :ok, new_state} end def handle_cast({:protocol_message, message}, %State{} = state) do @@ -93,12 +103,12 @@ defmodule Lexical.Server do end def handle_message(%Requests.Cancel{} = cancel_request, %State{} = state) do - Provider.Queue.cancel(cancel_request) + TaskQueue.cancel(cancel_request) {:ok, state} end def handle_message(%Notifications.Cancel{} = cancel_notification, %State{} = state) do - Provider.Queue.cancel(cancel_notification) + TaskQueue.cancel(cancel_notification) {:ok, state} end @@ -119,12 +129,27 @@ defmodule Lexical.Server do {:ok, state} end - def handle_message(request, %State{} = state) do - Provider.Queue.add(request, state.configuration) + def handle_message(%_{} = request, %State{} = state) do + with {:ok, handler} <- fetch_handler(request), + {:ok, req} <- Convert.to_native(request) do + TaskQueue.add(request.id, {handler, :handle, [req, state.configuration]}) + else + {:error, {:unhandled, _}} -> + Logger.info("Unhandled request: #{request.method}") + + _ -> + :ok + end {:ok, state} end + def handle_message(%{} = response, %State{} = state) do + new_state = State.finish_request(state, response) + + {:ok, new_state} + end + defp apply_to_state(%State{} = state, %{} = request_or_notification) do case State.apply(state, request_or_notification) do {:ok, new_state} -> {:ok, new_state} @@ -132,4 +157,42 @@ defmodule Lexical.Server do error -> {error, state} end end + + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity + defp fetch_handler(%_{} = request) do + case request do + %Requests.FindReferences{} -> + {:ok, Handlers.FindReferences} + + %Requests.Formatting{} -> + {:ok, Handlers.Formatting} + + %Requests.CodeAction{} -> + {:ok, Handlers.CodeAction} + + %Requests.CodeLens{} -> + {:ok, Handlers.CodeLens} + + %Requests.Completion{} -> + {:ok, Handlers.Completion} + + %Requests.GoToDefinition{} -> + {:ok, Handlers.GoToDefinition} + + %Requests.Hover{} -> + {:ok, Handlers.Hover} + + 
%Requests.ExecuteCommand{} -> + {:ok, Handlers.Commands} + + %Requests.DocumentSymbols{} -> + {:ok, Handlers.DocumentSymbols} + + %Requests.WorkspaceSymbol{} -> + {:ok, Handlers.WorkspaceSymbol} + + %request_module{} -> + {:error, {:unhandled, request_module}} + end + end end diff --git a/apps/server/lib/lexical/server/application.ex b/apps/server/lib/lexical/server/application.ex index 53b81107b..3e7964cc0 100644 --- a/apps/server/lib/lexical/server/application.ex +++ b/apps/server/lib/lexical/server/application.ex @@ -3,7 +3,9 @@ defmodule Lexical.Server.Application do # for more information on OTP Applications @moduledoc false - alias Lexical.Server.Provider + alias Lexical.Document + alias Lexical.Server + alias Lexical.Server.TaskQueue alias Lexical.Server.Transport use Application @@ -11,15 +13,20 @@ defmodule Lexical.Server.Application do @impl true def start(_type, _args) do children = [ - Lexical.Document.Store, - Lexical.Server, - {DynamicSupervisor, Lexical.Server.Project.Supervisor.options()}, - Provider.Queue.Supervisor.child_spec(), - Provider.Queue.child_spec(), - {Transport.StdIO, [:standard_io, &Lexical.Server.protocol_message/1]} + document_store_child_spec(), + Server, + {DynamicSupervisor, Server.Project.Supervisor.options()}, + {Task.Supervisor, name: TaskQueue.task_supervisor_name()}, + TaskQueue, + {Transport.StdIO, [:standard_io, &Server.protocol_message/1]} ] - opts = [strategy: :one_for_one, name: Lexical.Server.Supervisor] + opts = [strategy: :one_for_one, name: Server.Supervisor] Supervisor.start_link(children, opts) end + + @doc false + def document_store_child_spec do + {Document.Store, derive: [analysis: &Lexical.Ast.analyze/1]} + end end diff --git a/apps/server/lib/lexical/server/boot.ex b/apps/server/lib/lexical/server/boot.ex index 9fcd2c79f..2a5df77cc 100644 --- a/apps/server/lib/lexical/server/boot.ex +++ b/apps/server/lib/lexical/server/boot.ex @@ -4,6 +4,7 @@ defmodule Lexical.Server.Boot do Packaging will ensure that config.exs and runtime.exs will be visible to the `:code` module """ + alias Future.Code alias Lexical.VM.Versions require Logger @@ -16,16 +17,31 @@ defmodule Lexical.Server.Boot do def start do {:ok, _} = Application.ensure_all_started(:mix) + Application.stop(:logger) load_config() Application.ensure_all_started(:logger) Enum.each(@dep_apps, &load_app_modules/1) - verify_packaging() - verify_versioning() + + case detect_errors() do + [] -> + :ok + + errors -> + errors + |> Enum.join("\n\n") + |> halt() + end + Application.ensure_all_started(:server) end + @doc false + def detect_errors do + versioning_errors() + end + defp load_config do config = read_config("config.exs") runtime = read_config("runtime.exs") @@ -62,94 +78,72 @@ defmodule Lexical.Server.Boot do defp load_app_modules(app_name) do Application.ensure_loaded(app_name) - - with {:ok, modules} <- :application.get_key(app_name, :modules) do - Enum.each(modules, &Code.ensure_loaded!/1) - end - end - - defp verify_packaging do - unless Versions.compatible?() do - {:ok, compiled_versions} = Versions.compiled() - - compiled_versions = Versions.to_versions(compiled_versions) - current_versions = Versions.current() |> Versions.to_versions() - - compiled_erlang = compiled_versions.erlang - current_erlang = current_versions.erlang - - message = """ - Lexical failed its version check. This is a FATAL Error! - Lexical is running on Erlang #{current_erlang} and the compiled files were built on - Erlang #{compiled_erlang}. 
- - If you wish to run Lexical under Erlang version #{current_erlang}, you must rebuild lexical - under an Erlang version that is <= #{current_erlang.major}. - - Detected Lexical running on erlang #{current_erlang.major} and needs >= #{compiled_erlang.major} - """ - - halt(message) - - Process.sleep(500) - System.halt() - end + modules = Application.spec(app_name, :modules) + Code.ensure_all_loaded!(modules) end @allowed_elixir %{ "1.13.0" => ">= 1.13.0", "1.14.0" => ">= 1.14.0", - "1.15.0" => ">= 1.15.3" + "1.15.0" => ">= 1.15.3", + "1.16.0" => ">= 1.16.0", + "1.17.0-rc" => ">= 1.17.0-rc", + "1.17.0" => ">= 1.17.0" } @allowed_erlang %{ "24" => ">= 24.3.4", - "25" => "> 25.0.0", - "26" => ">= 26.0.2" + "25" => ">= 25.0.0", + "26" => ">= 26.0.2", + "27" => ">= 27.0.0" } - defp verify_versioning do + defp versioning_errors do versions = Versions.to_versions(Versions.current()) elixir_base = to_string(%Version{versions.elixir | patch: 0}) erlang_base = to_string(versions.erlang.major) - detected_elixir_range = Map.get(@allowed_elixir, elixir_base) - detected_erlang_range = Map.get(@allowed_erlang, erlang_base) - - elixir_ok? = Version.match?(versions.elixir, detected_elixir_range) - erlang_ok? = Version.match?(versions.erlang, detected_erlang_range) + detected_elixir_range = Map.get(@allowed_elixir, elixir_base, false) + detected_erlang_range = Map.get(@allowed_erlang, erlang_base, false) - cond do - not elixir_ok? -> - message = """ - The version of elixir lexical found (#{versions.elixir}) is not compatible with lexical, - and lexical can't start. + elixir_ok? = detected_elixir_range && Version.match?(versions.elixir, detected_elixir_range) + erlang_ok? = detected_erlang_range && Version.match?(versions.erlang, detected_erlang_range) - Please change your version of elixir to #{detected_elixir_range} + errors = [ + unless elixir_ok? do """ + FATAL: Lexical is not compatible with Elixir #{versions.elixir} - halt(message) + Lexical is compatible with the following versions of Elixir: - not erlang_ok? -> - message = """ - The version of erlang lexical found (#{versions.erlang}) is not compatible with lexical, - and lexical can't start. + #{format_allowed_versions(@allowed_elixir)} + """ + end, + unless erlang_ok? 
do + """ + FATAL: Lexical is not compatible with Erlang/OTP #{versions.erlang} + + Lexical is compatible with the following versions of Erlang/OTP: - Please change your version of erlang to one of the following: #{detected_erlang_range} + #{format_allowed_versions(@allowed_erlang)} """ + end + ] - halt(message) + Enum.filter(errors, &Function.identity/1) + end - true -> - :ok - end + defp format_allowed_versions(%{} = versions) do + versions + |> Map.values() + |> Enum.sort() + |> Enum.map_join("\n", fn range -> " #{range}" end) end defp halt(message) do Mix.Shell.IO.error(message) Logger.emergency(message) - # Wait for the logs to flush - Process.sleep(500) + Logger.flush() System.halt() end end diff --git a/apps/server/lib/lexical/server/code_intelligence/completion.ex b/apps/server/lib/lexical/server/code_intelligence/completion.ex index 5a4f995e0..371eb61e4 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion.ex @@ -1,21 +1,19 @@ defmodule Lexical.Server.CodeIntelligence.Completion do alias Future.Code, as: Code + alias Lexical.Ast.Analysis alias Lexical.Ast.Env - alias Lexical.Completion.Translatable - alias Lexical.Document alias Lexical.Document.Position alias Lexical.Project alias Lexical.Protocol.Types.Completion alias Lexical.Protocol.Types.InsertTextFormat alias Lexical.RemoteControl alias Lexical.RemoteControl.Completion.Candidate - alias Lexical.RemoteControl.Modules.Predicate alias Lexical.Server.CodeIntelligence.Completion.Builder + alias Lexical.Server.CodeIntelligence.Completion.Translatable alias Lexical.Server.Configuration alias Lexical.Server.Project.Intelligence alias Mix.Tasks.Namespace - use Predicate.Syntax require InsertTextFormat require Logger @@ -27,18 +25,18 @@ defmodule Lexical.Server.CodeIntelligence.Completion do [".", "@", "&", "%", "^", ":", "!", "-", "~"] end - @spec complete(Project.t(), Document.t(), Position.t(), Completion.Context.t()) :: + @spec complete(Project.t(), Analysis.t(), Position.t(), Completion.Context.t()) :: Completion.List.t() def complete( %Project{} = project, - %Document{} = document, + %Analysis{} = analysis, %Position{} = position, %Completion.Context{} = context ) do - case Env.new(project, document, position) do + case Env.new(project, analysis, position) do {:ok, env} -> completions = completions(project, env, context) - Logger.info("Emitting completions: #{inspect(completions)}") + log_candidates(completions) maybe_to_completion_list(completions) {:error, _} = error -> @@ -47,6 +45,19 @@ defmodule Lexical.Server.CodeIntelligence.Completion do end end + defp log_candidates(candidates) do + log_iolist = + Enum.reduce(candidates, ["Emitting Completions: ["], fn %Completion.Item{} = completion, + acc -> + name = Map.get(completion, :name) || Map.get(completion, :label) + kind = completion |> Map.get(:kind, :unknown) |> to_string() + + [acc, [kind, ":", name], " "] + end) + + Logger.info([log_iolist, "]"]) + end + defp completions(%Project{} = project, %Env{} = env, %Completion.Context{} = context) do prefix_tokens = Env.prefix_tokens(env, 1) @@ -58,25 +69,31 @@ defmodule Lexical.Server.CodeIntelligence.Completion do do_end_snippet = "do\n $0\nend" env - |> Builder.snippet(do_end_snippet, label: "do/end block") + |> Builder.snippet( + do_end_snippet, + label: "do/end block", + filter_text: "do" + ) |> List.wrap() Env.in_context?(env, :struct_field_key) -> project - |> RemoteControl.Api.complete_struct_fields(env.document, env.position) + |> 
RemoteControl.Api.complete_struct_fields(env.analysis, env.position) |> Enum.map(&Translatable.translate(&1, Builder, env)) true -> - {stripped, position} = Builder.strip_struct_operator_for_elixir_sense(env) - project - |> RemoteControl.Api.complete(stripped, position) + |> RemoteControl.Api.complete(env) |> to_completion_items(project, env, context) end end defp should_emit_completions?(%Env{} = env) do - always_emit_completions?() or has_meaningful_completions?(env) + if inside_comment?(env) or inside_string?(env) do + false + else + always_emit_completions?() or has_meaningful_completions?(env) + end end defp always_emit_completions? do @@ -96,7 +113,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion do {:local_or_var, name} -> local_length = length(name) - surround_begin = max(1, env.position.character - local_length - 1) + surround_begin = max(1, env.position.character - local_length) local_length > 1 or has_surround_context?(env.prefix, 1, surround_begin) @@ -105,21 +122,37 @@ defmodule Lexical.Server.CodeIntelligence.Completion do end end + defp inside_comment?(env) do + Env.in_context?(env, :comment) + end + + defp inside_string?(env) do + Env.in_context?(env, :string) + end + defp has_surround_context?(fragment, line, column) when is_binary(fragment) and line >= 1 and column >= 1 do Code.Fragment.surround_context(fragment, {line, column}) != :none end - # We emit a do/end snippet if the prefix token is the do operator and + # We emit a do/end snippet if the prefix token is the do operator or 'd', and # there is a space before the token preceding it on the same line. This # handles situations like `@do|` where a do/end snippet would be invalid. + defguardp valid_do_prefix(kind, value) + when (kind === :identifier and value === ~c"d") or + (kind === :operator and value === :do) + + defguardp space_before_preceding_token(do_col, preceding_col) + when do_col - preceding_col > 1 + defp should_emit_do_end_snippet?(%Env{} = env) do prefix_tokens = Env.prefix_tokens(env, 2) valid_prefix? 
= match?( - [{:operator, :do, {line, do_col}}, {_, _, {line, preceding_col}}] - when do_col - preceding_col > 1, + [{kind, value, {line, do_col}}, {_, _, {line, preceding_col}}] + when space_before_preceding_token(do_col, preceding_col) and + valid_do_prefix(kind, value), prefix_tokens ) @@ -132,7 +165,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion do %Env{} = env, %Completion.Context{} = context ) do - Logger.info("Local completions are #{inspect(local_completions)}") + debug_local_completions(local_completions) for result <- local_completions, displayable?(project, result), @@ -143,6 +176,30 @@ defmodule Lexical.Server.CodeIntelligence.Completion do end end + defp debug_local_completions(completions) do + completions_by_type = + Enum.group_by(completions, fn %candidate_module{} -> + candidate_module + |> Atom.to_string() + |> String.split(".") + |> List.last() + |> String.downcase() + end) + + log_iodata = + Enum.reduce(completions_by_type, ["Local completions are: ["], fn {type, completions}, + acc -> + names = + Enum.map_join(completions, ", ", fn candidate -> + Map.get(candidate, :name) || Map.get(candidate, :detail) + end) + + [acc, [type, ": (", names], ") "] + end) + + Logger.info([log_iodata, "]"]) + end + defp to_completion_item(candidate, env) do candidate |> Translatable.translate(Builder, env) @@ -152,8 +209,8 @@ defmodule Lexical.Server.CodeIntelligence.Completion do defp displayable?(%Project{} = project, result) do suggested_module = case result do - %_{full_name: full_name} -> full_name - %_{origin: origin} -> origin + %_{full_name: full_name} when is_binary(full_name) -> full_name + %_{origin: origin} when is_binary(origin) -> origin _ -> "" end @@ -176,22 +233,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion do end end - # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity defp applies_to_env?(%Env{} = env, %struct_module{} = result) do - struct_reference? = Env.in_context?(env, :struct_reference) - cond do - struct_reference? and struct_module == Candidate.Struct -> - true - - struct_reference? and struct_module == Candidate.Module -> - Intelligence.defines_struct?(env.project, result.full_name, to: :child) - - struct_reference? and match?(%Candidate.Macro{name: "__MODULE__"}, result) -> - true - - struct_reference? 
-> - false + Env.in_context?(env, :struct_reference) -> + struct_reference_completion?(result, env) Env.in_context?(env, :bitstring) -> struct_module in [Candidate.BitstringOption, Candidate.Variable] @@ -205,26 +250,95 @@ defmodule Lexical.Server.CodeIntelligence.Completion do ] Env.in_context?(env, :use) -> - case result do - %{full_name: full_name} -> - with_prefix = - RemoteControl.Api.modules_with_prefix( - env.project, - full_name, - predicate(¯o_exported?(&1, :__using__, 1)) - ) + # only allow modules that define __using__ in a use statement + usable?(env, result) - not Enum.empty?(with_prefix) + Env.in_context?(env, :impl) -> + # only allow behaviour modules after @impl + behaviour?(env, result) - _ -> - false - end + Env.in_context?(env, :spec) or Env.in_context?(env, :type) -> + typespec_or_type_candidate?(result, env) true -> - true + struct_module != Candidate.Typespec + end + end + + defp usable?(%Env{} = env, completion) do + # returns true if the given completion is or is a parent of + # a module that defines __using__ + case completion do + %{full_name: full_name} -> + with_prefix = + RemoteControl.Api.modules_with_prefix( + env.project, + full_name, + {Kernel, :macro_exported?, [:__using__, 1]} + ) + + not Enum.empty?(with_prefix) + + _ -> + false end end + defp behaviour?(%Env{} = env, completion) do + # returns true if the given completion is or is a parent of + # a module that is a behaviour + + case completion do + %{full_name: full_name} -> + with_prefix = + RemoteControl.Api.modules_with_prefix( + env.project, + full_name, + {Kernel, :function_exported?, [:behaviour_info, 1]} + ) + + not Enum.empty?(with_prefix) + + _ -> + false + end + end + + defp struct_reference_completion?(%Candidate.Struct{}, _) do + true + end + + defp struct_reference_completion?(%Candidate.Module{} = module, %Env{} = env) do + Intelligence.defines_struct?(env.project, module.full_name, to: :great_grandchild) + end + + defp struct_reference_completion?(%Candidate.Macro{name: "__MODULE__"}, _) do + true + end + + defp struct_reference_completion?(_, _) do + false + end + + defp typespec_or_type_candidate?(%struct_module{}, _) + when struct_module in [Candidate.Module, Candidate.Typespec, Candidate.ModuleAttribute] do + true + end + + defp typespec_or_type_candidate?(%Candidate.Function{} = function, %Env{} = env) do + case RemoteControl.Api.expand_alias(env.project, [:__MODULE__], env.analysis, env.position) do + {:ok, expanded} -> + expanded == function.origin + + _error -> + false + end + end + + defp typespec_or_type_candidate?(_, _) do + false + end + defp applies_to_context?(%Project{} = project, result, %Completion.Context{ trigger_kind: :trigger_character, trigger_character: "%" diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/builder.ex b/apps/server/lib/lexical/server/code_intelligence/completion/builder.ex index b012d9143..e3e042c31 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/builder.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/builder.ex @@ -1,6 +1,6 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Builder do @moduledoc """ - Default completion builder. + Completion builder. For broader compatibility and control, this builder always creates text edits, as opposed to simple text insertions. This allows the replacement @@ -10,31 +10,36 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Builder do replacement range will be determined by the preceding token. 
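+
+  For example, the do/end snippet completion elsewhere in this app is
+  built like so (a sketch; `env` is the `Lexical.Ast.Env` at the cursor):
+
+      Builder.snippet(env, "do\n  $0\nend",
+        label: "do/end block",
+        filter_text: "do"
+      )
+
+  The resulting `Completion.Item` carries a text edit whose range replaces
+  the token being typed rather than inserting text after it.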
""" + alias Future.Code, as: Code alias Lexical.Ast.Env - alias Lexical.Completion.Builder alias Lexical.Document alias Lexical.Document.Edit alias Lexical.Document.Position alias Lexical.Document.Range alias Lexical.Protocol.Types.Completion + alias Lexical.Protocol.Types.Markup.Content + alias Lexical.Server.CodeIntelligence.Completion.SortScope - import Document.Line + @doc "Fields found in `t:Lexical.Protocol.Types.Completion.Item.t()`" + @type item_opts :: keyword() - @behaviour Builder + @type t :: module() - @impl Builder + @type line_range :: {start_character :: pos_integer, end_character :: pos_integer} + + @spec snippet(Env.t(), String.t(), item_opts) :: Completion.Item.t() def snippet(%Env{} = env, text, options \\ []) do range = prefix_range(env) text_edit_snippet(env, text, range, options) end - @impl Builder + @spec plain_text(Env.t(), String.t(), item_opts) :: Completion.Item.t() def plain_text(%Env{} = env, text, options \\ []) do range = prefix_range(env) text_edit(env, text, range, options) end - @impl Builder + @spec text_edit(Env.t(), String.t(), line_range, item_opts) :: Completion.Item.t() def text_edit(%Env{} = env, text, {start_char, end_char}, options \\ []) do line_number = env.position.line @@ -49,10 +54,11 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Builder do options |> Keyword.put(:text_edit, edits) |> Completion.Item.new() - |> boost(0) + |> markdown_docs() + |> set_sort_scope(SortScope.default()) end - @impl Builder + @spec text_edit_snippet(Env.t(), String.t(), line_range, item_opts) :: Completion.Item.t() def text_edit_snippet(%Env{} = env, text, {start_char, end_char}, options \\ []) do snippet = String.trim_trailing(text, "\n") line_number = env.position.line @@ -69,34 +75,29 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Builder do |> Keyword.put(:text_edit, edits) |> Keyword.put(:insert_text_format, :snippet) |> Completion.Item.new() - |> boost(0) + |> markdown_docs() + |> set_sort_scope(SortScope.default()) end - @impl Builder + @spec fallback(any, any) :: any def fallback(nil, fallback), do: fallback def fallback("", fallback), do: fallback def fallback(detail, _), do: detail - @impl Builder - def boost(item, local_boost \\ 1, global_boost \\ 0) + @spec set_sort_scope(Completion.Item.t(), sort_scope :: String.t()) :: Completion.Item.t() + def set_sort_scope(item, default \\ SortScope.default()) - def boost(%Completion.Item{} = item, local_boost, global_boost) - when local_boost in 0..9 and global_boost in 0..9 do - global_boost = Integer.to_string(9 - global_boost) - local_boost = Integer.to_string(9 - local_boost) + def set_sort_scope(%Completion.Item{} = item, sort_scope) + when is_binary(sort_scope) do + stripped_sort_text = + item.sort_text + |> fallback(item.label) + |> strip_sort_text() - sort_text = "0#{global_boost}#{local_boost}_#{item.label}" + sort_text = "0#{sort_scope}_#{stripped_sort_text}" %Completion.Item{item | sort_text: sort_text} end - # HACK: This fixes ElixirSense struct completions for certain cases. - # We should try removing when we update or remove ElixirSense. 
- @spec strip_struct_operator_for_elixir_sense(Env.t()) :: - {Document.t() | String.t(), Position.t()} - def strip_struct_operator_for_elixir_sense(%Env{} = env) do - do_strip_struct_operator(env) - end - # private defp prefix_range(%Env{} = env) do @@ -105,78 +106,83 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Builder do {start_char, end_char} end + # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity defp prefix_length(%Env{} = env) do - case Env.prefix_tokens(env, 1) do - [{:operator, :"::", _}] -> + case Code.Fragment.cursor_context(env.prefix) do + {:alias, alias_charlist} -> + alias_charlist + |> :string.split(~c".", :all) + |> List.last() + |> length() + + {:alias, {:local_or_var, _}, alias_charlist} -> + length(alias_charlist) + + {:alias, {:module_attribute, _}, alias_charlist} -> + length(alias_charlist) + + {:dot, _inside_dot, charlist} -> + length(charlist) + + {:dot_arity, _inside_dot, charlist} -> + length(charlist) + + {:dot_call, _inside_dot, _charlist} -> 0 - [{:operator, :., _}] -> + :expr -> + String.length(env.prefix) + + {:local_or_var, local} -> + length(local) + + {:local_arity, local} -> + length(local) + + {:local_call, call} -> + length(call) + + {:module_attribute, attr} -> + length(attr) + + {:operator, operator} -> + length(operator) + + {:operator_arity, _} -> 0 - [{:operator, :in, _}] -> - # they're typing integer and got "in" out, which the lexer thinks - # is Kernel.in/2 - 2 + {:operator_call, _} -> + 0 - [{:atom, token, _}] -> - length(token) + 1 + {:sigil, sigil} -> + # The sigil charlist doesn't include the leading `~` + length(sigil) + 1 - [{_, token, _}] when is_binary(token) -> - String.length(token) + {:struct, struct} -> + length(struct) - [{_, token, _}] when is_list(token) -> - length(token) + :none -> + 0 - [{_, token, _}] when is_atom(token) -> - token |> Atom.to_string() |> String.length() + {:unquoted_atom, atom} -> + # add one to include the leading colon, which isn't included + # in the atom charlist + length(atom) + 1 end end - defp do_strip_struct_operator(env) do - with true <- Env.in_context?(env, :struct_reference), - {:ok, completion_length} <- fetch_struct_completion_length(env) do - column = env.position.character - percent_position = column - (completion_length + 1) - - new_line_start = String.slice(env.line, 0, percent_position - 1) - new_line_end = String.slice(env.line, percent_position..-1) - new_line = [new_line_start, new_line_end] - new_position = Position.new(env.document, env.position.line, env.position.character - 1) - line_to_replace = env.position.line - - new_document = - env.document.lines - |> Enum.with_index(1) - |> Enum.reduce([], fn - {line(ending: ending), ^line_to_replace}, acc -> - [acc, new_line, ending] - - {line(text: line_text, ending: ending), _}, acc -> - [acc, line_text, ending] - end) - |> IO.iodata_to_binary() - - {new_document, new_position} - else - _ -> - {env.document, env.position} - end + @sort_prefix_re ~r/^[0-9_]+/ + defp strip_sort_text(sort_text) do + String.replace(sort_text, @sort_prefix_re, "") end - defp fetch_struct_completion_length(env) do - case Code.Fragment.cursor_context(env.prefix) do - {:struct, {:dot, {:alias, struct_name}, []}} -> - # add one because of the trailing period - {:ok, length(struct_name) + 1} - - {:struct, {:local_or_var, local_name}} -> - {:ok, length(local_name)} + defp markdown_docs(%Completion.Item{} = item) do + case item.documentation do + doc when is_binary(doc) -> + %{item | documentation: %Content{kind: :markdown, value: doc}} 
-      {:struct, struct_name} ->
-        {:ok, length(struct_name)}
-
-      {:local_or_var, local_name} ->
-        {:ok, length(local_name)}
+      _ ->
+        item
     end
   end
 end
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/sort_scope.ex b/apps/server/lib/lexical/server/code_intelligence/completion/sort_scope.ex
new file mode 100644
index 000000000..44f11da5a
--- /dev/null
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/sort_scope.ex
@@ -0,0 +1,77 @@
+defmodule Lexical.Server.CodeIntelligence.Completion.SortScope do
+  @moduledoc """
+  Enumerated categories for sorting completion items.
+
+  The following options are available for all categories, except variables,
+  which cannot be deprecated.
+    * `deprecated?` - Indicates the completion is for a deprecated declaration.
+      Defaults to `false`.
+
+    * `local_priority` - An integer from 0-9, highest-to-lowest, for
+      prioritizing/sorting results within a given scope. Defaults to `1`.
+  """
+
+  @doc """
+  Intended for module completions, such as `Lexical.` -> `Lexical.Completion`.
+  """
+  def module(local_priority \\ 1) do
+    "0" <> "0" <> local_priority(local_priority)
+  end
+
+  @doc """
+  Intended for variables, which are always local in scope.
+  """
+  def variable(local_priority \\ 1) do
+    "1" <> "0" <> local_priority(local_priority)
+  end
+
+  @doc """
+  Intended for declarations (functions and macros) defined in the immediate
+  module, or inherited from invoking `use`.
+  """
+  def local(deprecated? \\ false, local_priority \\ 1) do
+    "2" <> extra_order_fields(deprecated?, local_priority)
+  end
+
+  @doc """
+  Intended for declarations defined in modules other than the immediate scope,
+  whether from one's project, its dependencies, or the standard library.
+  """
+  def remote(deprecated? \\ false, local_priority \\ 1) do
+    "3" <> extra_order_fields(deprecated?, local_priority)
+  end
+
+  @doc """
+  Intended for declarations available without aliasing, namely those in
+  `Kernel` and `Kernel.SpecialForms`.
+  """
+  def global(deprecated? \\ false, local_priority \\ 1) do
+    "4" <> extra_order_fields(deprecated?, local_priority)
+  end
+
+  @doc """
+  Aspirationally for declarations that could be auto-aliased into the user's
+  immediate module (not yet a feature of Lexical).
+  """
+  def auto(deprecated? \\ false, local_priority \\ 1) do
+    "5" <> extra_order_fields(deprecated?, local_priority)
+  end
+
+  @doc """
+  The sorting scope applied to completions that have no other sorting scope
+  applied.
+  """
+  def default(deprecated? \\ false, local_priority \\ 1) do
+    "9" <> extra_order_fields(deprecated?, local_priority)
+  end
+
+  defp extra_order_fields(deprecated?, local_priority) do
+    deprecated(deprecated?)
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translatable.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translatable.ex
new file mode 100644
index 000000000..3a53e8561
--- /dev/null
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translatable.ex
@@ -0,0 +1,19 @@
+defprotocol Lexical.Server.CodeIntelligence.Completion.Translatable do
+  alias Lexical.Ast.Env
+  alias Lexical.Protocol.Types.Completion
+  alias Lexical.Server.CodeIntelligence.Completion.Builder
+
+  @type t :: any()
+
+  @type translated :: [Completion.Item.t()] | Completion.Item.t() | :skip
+
+  @fallback_to_any true
+  @spec translate(t, Builder.t(), Env.t()) :: translated
+  def translate(item, builder, env)
+end
+
+defimpl Lexical.Server.CodeIntelligence.Completion.Translatable, for: Any do
+  def translate(_any, _builder, _environment) do
+    :skip
+  end
+end
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/bitstring_option.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/bitstring_option.ex
index d24aae529..f23c5e128 100644
--- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/bitstring_option.ex
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/bitstring_option.ex
@@ -1,7 +1,8 @@
 defmodule Lexical.Server.CodeIntelligence.Completion.Translations.BitstringOption do
   alias Lexical.Ast.Env
-  alias Lexical.Completion.Translatable
   alias Lexical.RemoteControl.Completion.Candidate
+  alias Lexical.Server.CodeIntelligence.Completion.SortScope
+  alias Lexical.Server.CodeIntelligence.Completion.Translatable
   alias Lexical.Server.CodeIntelligence.Completion.Translations

   require Logger
@@ -19,6 +20,6 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.BitstringOptio
         kind: :unit,
         label: option.name
       )
-    |> builder.boost(5)
+    |> builder.set_sort_scope(SortScope.global())
   end
 end
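Because the protocol falls back to `Any` (returning `:skip`), candidate types opt in one `defimpl` at a time. A hypothetical implementation for an imaginary `MyCandidate` struct, following the same shape as the translations below:

```elixir
defimpl Lexical.Server.CodeIntelligence.Completion.Translatable, for: MyCandidate do
  alias Lexical.Server.CodeIntelligence.Completion.SortScope

  # the builder arrives as a module argument so tests can substitute a stub
  def translate(%MyCandidate{} = candidate, builder, env) do
    env
    |> builder.plain_text(candidate.name,
      label: candidate.name,
      kind: :value
    )
    |> builder.set_sort_scope(SortScope.default())
  end
end
```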
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/callable.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/callable.ex
index ad9ac352e..5d4699aff 100644
--- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/callable.ex
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/callable.ex
@@ -2,14 +2,19 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Callable do
   alias Lexical.Ast.Env
   alias Lexical.RemoteControl.Completion.Candidate
   alias Lexical.Server.CodeIntelligence.Completion.Builder
+  alias Lexical.Server.CodeIntelligence.Completion.SortScope

-  @callables [Candidate.Function, Candidate.Macro, Candidate.Callback]
+  @callables [Candidate.Function, Candidate.Macro, Candidate.Callback, Candidate.Typespec]

   @syntax_macros ~w(= == === =~ .. ..// ! != !== &&)

-  def completion(%_callable_module{name: name}, _env)
+  def completion(%_callable_module{name: name} = callable, %Env{} = env)
       when name in @syntax_macros do
-    :skip
+    if String.ends_with?(env.prefix, "Kernel.") do
+      do_completion(callable, env)
+    else
+      :skip
+    end
   end

   def completion(%callable_module{arity: 0} = callable, %Env{} = env)
@@ -24,6 +29,8 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Callable do
     do_completion(callable, env)
   end

+  # for a callable to be local, it must be defined in the current scope,
+  # or be a callback.
   defp do_completion(callable, %Env{} = env) do
     add_args? = not String.contains?(env.suffix, "(")
@@ -39,13 +46,33 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Callable do
       [:deprecated]
     end

+    kind =
+      case callable do
+        %Candidate.Typespec{} ->
+          :type_parameter
+
+        _ ->
+          :function
+      end
+
+    detail =
+      if callback?(callable) do
+        "(callback)"
+      else
+        "(#{callable.type})"
+      end
+
     env
     |> Builder.snippet(insert_text,
-      kind: :function,
       label: label(callable, env),
+      kind: kind,
+      detail: detail,
+      sort_text: sort_text(callable),
+      filter_text: "#{callable.name}",
+      documentation: build_docs(callable),
       tags: tags
     )
-    |> maybe_boost(callable)
+    |> maybe_boost(callable, env)
   end

   def capture_completions(%callable_module{} = callable, %Env{} = env)
@@ -55,20 +82,26 @@
     complete_capture =
       env
       |> Builder.plain_text(name_and_arity,
-        detail: "(Capture)",
+        label: name_and_arity,
         kind: :function,
-        label: name_and_arity
+        detail: "(Capture)",
+        sort_text: sort_text(callable),
+        filter_text: "#{callable.name}",
+        documentation: build_docs(callable)
       )
-      |> maybe_boost(callable, 4)
+      |> maybe_boost(callable, env)

     call_capture =
       env
       |> Builder.snippet(callable_snippet(callable, env),
-        detail: "(Capture with arguments)",
+        label: label(callable, env),
         kind: :function,
-        label: label(callable, env)
+        detail: "(Capture with arguments)",
+        sort_text: sort_text(callable),
+        filter_text: "#{callable.name}",
+        documentation: build_docs(callable)
       )
-      |> maybe_boost(callable, 4)
+      |> maybe_boost(callable, env)

     [complete_capture, call_capture]
   end
@@ -94,25 +127,96 @@
       "${#{index}:#{name}}"
     end)

-    "#{callable.name}(#{argument_templates})"
+    if callable.parens? do
+      "#{callable.name}(#{argument_templates})"
+    else
+      "#{callable.name} #{argument_templates}"
+    end
   end

   @default_functions ["module_info", "behaviour_info"]

-  defp maybe_boost(item, %_{name: name}, default_boost \\ 5) do
-    if String.starts_with?(name, "__") or name in @default_functions do
-      item
-    else
-      Builder.boost(item, default_boost)
+  defp maybe_boost(item, callable, %Env{} = env) do
+    position_module = env.position_module
+
+    %_{
+      name: name,
+      origin: origin,
+      metadata: metadata
+    } = callable
+
+    # elixir_sense suggests child_spec as a callback, though it's not formally one.
+    deprecated? = Map.has_key?(metadata, :deprecated)
+    dunder? = String.starts_with?(name, "__")
+    callback? = callback?(callable)
+
+    local_priority =
+      cond do
+        dunder? -> 9
+        callback? -> 8
+        true -> 1
+      end
+
+    cond do
+      origin === "Kernel" or origin === "Kernel.SpecialForms" or name in @default_functions ->
+        local_priority = if dunder?, do: 9, else: 1
+        Builder.set_sort_scope(item, SortScope.global(deprecated?, local_priority))
+
+      origin === position_module ->
+        Builder.set_sort_scope(item, SortScope.local(deprecated?, local_priority))
+
+      true ->
+        Builder.set_sort_scope(item, SortScope.remote(deprecated?, local_priority))
     end
   end

   defp label(%_{} = callable, env) do
     arg_detail = callable |> argument_names(env) |> Enum.join(", ")
-    "#{callable.name}(#{arg_detail})"
+
+    if callable.parens? do
+      "#{callable.name}(#{arg_detail})"
+    else
+      "#{callable.name} #{arg_detail}"
+    end
   end

   defp name_and_arity(%_{name: name, arity: arity}) do
     "#{name}/#{arity}"
   end
+
+  defp sort_text(%_callable{name: name, arity: arity}) do
+    normalized_arity =
+      arity
+      |> Integer.to_string()
+      |> String.pad_leading(3, "0")
+
+    # we used to use : as a separator between the name and
+    # arity, but this caused bang functions to sort
+    # before non-bang variants, which is incorrect.
+    # Using a space sorts correctly, as it's the only printable
+    # ASCII character that sorts lower than bang.
+
+    "#{name} #{normalized_arity}"
+  end
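+
+  # Illustrative: sort_text/1 turns `map/2` into "map 002" and `map!/2` into
+  # "map! 002". Because " " (0x20) sorts below "!" (0x21), the non-bang
+  # variant now precedes the bang variant; the old ":" separator (0x3A,
+  # above "!") got that backwards.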
+
+  defp build_docs(%{summary: summary, spec: spec})
+       when is_binary(summary) and is_binary(spec) do
+    "#{summary}\n```elixir\n#{spec}\n```"
+  end
+
+  defp build_docs(%{summary: summary}) when is_binary(summary) do
+    summary
+  end
+
+  defp build_docs(%{spec: spec}) when is_binary(spec) do
+    "```elixir\n#{spec}\n```"
+  end
+
+  defp build_docs(_) do
+    ""
+  end
+
+  defp callback?(%_{name: name, metadata: metadata} = _callable) do
+    Map.has_key?(metadata, :implementing) || name === "child_spec"
+  end
 end
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/callback.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/callback.ex
index 90fef3d97..e7a62d632 100644
--- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/callback.ex
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/callback.ex
@@ -1,12 +1,103 @@
 defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Callback do
   alias Lexical.Ast.Env
-  alias Lexical.Completion.Translatable
-  alias Lexical.RemoteControl.Completion.Candidate
-  alias Lexical.Server.CodeIntelligence.Completion.Translations
+  alias Lexical.Document
+  alias Lexical.RemoteControl.Completion.Candidate.Callback
+  alias Lexical.Server.CodeIntelligence.Completion.Builder
+  alias Lexical.Server.CodeIntelligence.Completion.SortScope
+  alias Lexical.Server.CodeIntelligence.Completion.Translatable

-  defimpl Translatable, for: Candidate.Callback do
+  defimpl Translatable, for: Callback do
     def translate(callback, _builder, %Env{} = env) do
-      Translations.Callable.completion(callback, env)
+      %Callback{
+        name: name,
+        argument_names: arg_names,
+        summary: summary
+      } = callback
+
+      %Env{line: line} = env
+
+      env
+      |> Builder.text_edit_snippet(
+        insert_text(name, arg_names, env),
+        line_range(line),
+        label: label(name, arg_names),
+        kind: :interface,
+        detail: detail(callback),
+        sort_text: sort_text(callback),
+        filter_text: "def #{name}",
+        documentation: summary
+      )
+      |> Builder.set_sort_scope(SortScope.local())
+    end
+
+    defp insert_text(name, arg_names, env)
+         when is_binary(name) and is_list(arg_names) do
+      impl_line(name, env) <>
+        "def #{name}(#{arg_text(arg_names)}) do" <>
+        "\n $0\nend"
+    end
+
+    # add tab stops and join with ", "
+    defp arg_text(args) do
+      args
+      |> Enum.with_index(fn arg, i ->
+        "${#{i + 1}:#{arg}}"
+      end)
+      |> Enum.join(", ")
+    end
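+
+    # Illustrative: arg_text(["message", "state"]) yields
+    # "${1:message}, ${2:state}", so a `handle_info/2` completion expands to
+    # roughly "@impl true\ndef handle_info(${1:message}, ${2:state}) do\n $0\nend",
+    # with $0 marking where the cursor lands last.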
+
+    # elixir_sense suggests child_spec/1 as a callback as it's a common idiom,
+    # but not an actual callback of behaviours like GenServer.
+    defp impl_line("child_spec", _env), do: ""
+
+    # It's generally safe to add `@impl true` to callbacks, as Elixir warns
+    # about conflicting behaviours, and those are virtually non-existent anyway.
+    defp impl_line(_, %Env{} = env) do
+      with {:ok, line_before} <- Document.fetch_text_at(env.document, env.position.line - 1),
+           true <- line_before =~ "@impl" do
+        ""
+      else
+        _ -> "@impl true\n"
+      end
+    end
+
+    defp line_range(line) when is_binary(line) do
+      start_char =
+        case String.split(line, "def", parts: 2) do
+          [i, _] -> String.length(i) + 1
+          [_] -> 0
+        end
+
+      end_char = String.length(line) + 1
+
+      {start_char, end_char}
+    end
+
+    defp label(name, arg_names)
+         when is_binary(name) and is_list(arg_names) do
+      "#{name}(#{Enum.join(arg_names, ", ")})"
+    end
+
+    defp detail(%Callback{name: "child_spec"}) do
+      "supervision specification"
+    end
+
+    defp detail(%Callback{origin: origin, metadata: %{optional: false}}) do
+      "#{origin} callback (required)"
+    end
+
+    defp detail(%Callback{origin: origin}) do
+      "#{origin} callback"
+    end
+
+    # cribbed from the Callable translation for now.
+    defp sort_text(%Callback{name: name, arity: arity}) do
+      normalized_arity =
+        arity
+        |> Integer.to_string()
+        |> String.pad_leading(3, "0")
+
+      "#{name}:#{normalized_arity}"
     end
   end
 end
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/function.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/function.ex
index 528f48c36..851894ea1 100644
--- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/function.ex
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/function.ex
@@ -1,7 +1,7 @@
 defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Function do
   alias Lexical.Ast.Env
-  alias Lexical.Completion.Translatable
   alias Lexical.RemoteControl.Completion.Candidate
+  alias Lexical.Server.CodeIntelligence.Completion.Translatable
   alias Lexical.Server.CodeIntelligence.Completion.Translations

   defimpl Translatable, for: Candidate.Function do
diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/macro.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/macro.ex
index 8eb7139ec..bc24ce976 100644
--- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/macro.ex
+++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/macro.ex
@@ -1,13 +1,17 @@
 defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do
+  alias Lexical.Ast
   alias Lexical.Ast.Env
-  alias Lexical.Completion.Translatable
   alias Lexical.Document
+  alias Lexical.Document.Position
   alias Lexical.RemoteControl.Completion.Candidate
+  alias Lexical.Server.CodeIntelligence.Completion.SortScope
+  alias Lexical.Server.CodeIntelligence.Completion.Translatable
   alias Lexical.Server.CodeIntelligence.Completion.Translations
   alias Lexical.Server.CodeIntelligence.Completion.Translations.Callable
   alias Lexical.Server.CodeIntelligence.Completion.Translations.Struct

   @snippet_macros ~w(def defp defmacro defmacrop defimpl defmodule defprotocol defguard defguardp defexception test use)
+
+  @unhelpful_macros ~w(:: alias!
in and or destructure) defimpl Translatable, for: Candidate.Macro do def translate(macro, builder, %Env{} = env) do @@ -21,39 +25,11 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do end def translate(%Candidate.Macro{name: "def", arity: 2} = macro, builder, env) do - label = "#{macro.name} (define a function)" - - snippet = """ - def ${1:name}($2) do - $0 - end - """ - - env - |> builder.snippet(snippet, - detail: macro.spec, - kind: :class, - label: label - ) - |> builder.boost(9) + function_snippet("def", "define a function", macro, builder, env) end def translate(%Candidate.Macro{name: "defp", arity: 2} = macro, builder, env) do - label = "#{macro.name} (define a private function)" - - snippet = """ - defp ${1:name}($2) do - $0 - end - """ - - env - |> builder.snippet(snippet, - detail: macro.spec, - kind: :class, - label: label - ) - |> builder.boost(8) + function_snippet("defp", "define a private function", macro, builder, env) end def translate(%Candidate.Macro{name: "defmodule"} = macro, builder, env) do @@ -70,9 +46,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost(7) + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defmacro", arity: 2} = macro, builder, env) do @@ -88,9 +65,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost(6) + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defmacrop", arity: 2} = macro, builder, env) do @@ -106,9 +84,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost(5) + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defprotocol"} = macro, builder, env) do @@ -124,9 +103,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defimpl", arity: 3} = macro, builder, env) do @@ -142,9 +122,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defoverridable"} = macro, builder, env) do @@ -156,9 +137,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defdelegate", arity: 2} = macro, builder, env) do @@ -170,9 +152,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> 
builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defguard", arity: 1} = macro, builder, env) do @@ -184,9 +167,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defguardp", arity: 1} = macro, builder, env) do @@ -198,9 +182,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defexception", arity: 1} = macro, builder, env) do @@ -212,9 +197,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "defstruct", arity: 1} = macro, builder, env) do @@ -226,9 +212,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "alias", arity: 2} = macro, builder, env) do @@ -240,9 +227,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "use", arity: 1}, builder, env) do @@ -252,9 +240,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do env |> builder.snippet(snippet, kind: :class, - label: label + label: label, + filter_text: "use" ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "require" <> _, arity: 2} = macro, builder, env) do @@ -266,9 +255,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "quote" <> _, arity: 2} = macro, builder, env) do @@ -284,9 +274,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "receive" <> _, arity: 1} = macro, builder, env) do @@ -302,9 +293,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "try" <> _, arity: 1} = macro, builder, env) do @@ -320,9 +312,10 @@ defmodule 
Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "with" <> _, arity: 1} = macro, builder, env) do @@ -338,9 +331,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "case", arity: 2} = macro, builder, env) do @@ -356,9 +350,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "if", arity: 2} = macro, builder, env) do @@ -374,9 +369,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "import", arity: 2} = macro, builder, env) do @@ -388,9 +384,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "unless", arity: 2} = macro, builder, env) do @@ -406,9 +403,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "cond"} = macro, builder, env) do @@ -425,9 +423,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "for"} = macro, builder, env) do @@ -443,9 +442,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: macro.spec, kind: :class, - label: label + label: label, + filter_text: macro.name ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end @stub_label ~S(test "message" ) @@ -461,9 +461,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(stub_snippet, detail: "A stub test", kind: :class, - label: stub_label + label: stub_label, + filter_text: "test" ) - |> builder.boost(1) + |> builder.set_sort_scope(SortScope.remote(false, 2)) end def translate(%Candidate.Macro{name: "test", arity: 2}, builder, env) do @@ -479,9 +480,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(plain_snippet, detail: "A test", kind: :class, - label: plain_label + label: plain_label, + filter_text: "test" ) - |> builder.boost(2) + |> 
builder.set_sort_scope(SortScope.remote(false, 0)) end def translate(%Candidate.Macro{name: "test", arity: 3}, builder, env) do @@ -497,9 +499,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(context_snippet, detail: "A test that receives context", kind: :class, - label: context_label + label: context_label, + filter_text: "test" ) - |> builder.boost(3) + |> builder.set_sort_scope(SortScope.remote(false, 1)) end def translate(%Candidate.Macro{name: "describe"}, builder, env) do @@ -513,9 +516,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.snippet(snippet, detail: "A describe block", kind: :class, - label: ~S(describe "message") + label: ~S(describe "message"), + filter_text: "describe" ) - |> builder.boost(1) + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: "__MODULE__"} = macro, builder, env) do @@ -526,9 +530,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.plain_text("__MODULE__", detail: macro.spec, kind: :constant, - label: "__MODULE__" + label: "__MODULE__", + filter_text: "__MODULE__" ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end end @@ -538,9 +543,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do |> builder.plain_text(dunder_form, detail: macro.spec, kind: :constant, - label: dunder_form + label: dunder_form, + filter_text: dunder_form ) - |> builder.boost() + |> builder.set_sort_scope(SortScope.global()) end def translate(%Candidate.Macro{name: dunder_form}, _builder, _env) @@ -548,6 +554,11 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do :skip end + def translate(%Candidate.Macro{name: name}, _builder, _env) + when name in @unhelpful_macros do + :skip + end + def translate(%Candidate.Macro{name: name} = macro, _builder, env) when name not in @snippet_macros do Callable.completion(macro, env) @@ -557,7 +568,72 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do :skip end - def suggest_module_name(%Document{} = document) do + defp function_snippet(kind, label, %Candidate.Macro{} = macro, builder, env) do + label = "#{macro.name} (#{label})" + + snippet = + with {:ok, %Position{} = position} <- Env.prev_significant_position(env), + {:ok, name, args} <- extract_spec_name_and_args(env, position) do + args_snippet = + case suggest_arg_names(args) do + [] -> + "" + + names -> + placeholders = + names + |> Enum.with_index(1) + |> Enum.map_join(", ", fn {name, i} -> "${#{i}:#{name}}" end) + + "(" <> placeholders <> ")" + end + + """ + #{kind} #{name}#{args_snippet} do + $0 + end + """ + else + _ -> + """ + #{kind} ${1:name}($2) do + $0 + end + """ + end + + env + |> builder.snippet(snippet, + detail: macro.spec, + kind: :class, + label: label, + filter_text: macro.name + ) + |> builder.set_sort_scope(SortScope.global()) + end + + defp extract_spec_name_and_args(%Env{} = env, %Position{} = position) do + with {:ok, [maybe_spec | _]} <- Ast.path_at(env.analysis, position), + {:@, _, [{:spec, _, [typespec]}]} <- maybe_spec, + {:"::", _, [{name, _, args}, _return]} <- typespec do + if is_list(args) do + {:ok, name, args} + else + {:ok, name, []} + end + else + _ -> :error + end + end + + defp suggest_arg_names(args) do + Enum.with_index(args, fn + {:"::", _, [{name, _, nil}, _]}, _i when is_atom(name) -> name + _, i -> "arg_#{i + 1}" + end) + end + + defp suggest_module_name(%Document{} = document) do 
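+    # Illustrative: "lib/my_app/servers/worker.ex" should suggest
+    # "MyApp.Servers.Worker", and camelize_file_name/1 (below) camelizes each
+    # dotted segment, so "my_protocol.string.ex" yields "MyProtocol.String".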
result = document.path |> Path.split() @@ -575,10 +651,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do false element, {type, elements} -> - camelized = - element - |> Path.rootname() - |> Macro.camelize() + camelized = camelize_file_name(element) {type, [camelized | elements]} end) @@ -592,8 +665,14 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Macro do false -> document.path |> Path.basename() - |> Path.rootname() - |> Macro.camelize() + |> camelize_file_name() end end + + defp camelize_file_name(file_name_with_extension) do + file_name_with_extension + |> Path.rootname() + |> String.split(".") + |> Enum.map_join(".", &Macro.camelize/1) + end end diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/map_field.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/map_field.ex index 57e141467..9b6a3529b 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/map_field.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/map_field.ex @@ -1,7 +1,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MapField do alias Lexical.Ast.Env - alias Lexical.Completion.Translatable alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.Translatable defimpl Translatable, for: Candidate.MapField do def translate(%Candidate.MapField{} = map_field, builder, %Env{} = env) do diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_attribute.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_attribute.ex index 240b41b7b..5d66783dd 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_attribute.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_attribute.ex @@ -1,7 +1,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleAttribute do + alias Lexical.Ast alias Lexical.Ast.Env - alias Lexical.Completion.Translatable + alias Lexical.Document.Position alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.SortScope + alias Lexical.Server.CodeIntelligence.Completion.Translatable alias Lexical.Server.CodeIntelligence.Completion.Translations defimpl Translatable, for: Candidate.ModuleAttribute do @@ -70,6 +73,19 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleAttribut end end + def translate(%Candidate.ModuleAttribute{name: "@spec"}, builder, env) do + case fetch_range(env) do + {:ok, range} -> + [ + maybe_specialized_spec_snippet(builder, env, range), + basic_spec_snippet(builder, env, range) + ] + + :error -> + :skip + end + end + def translate(%Candidate.ModuleAttribute{} = attribute, builder, env) do case fetch_range(env) do {:ok, range} -> @@ -106,4 +122,57 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleAttribut {:cont, acc} end) end + + defp maybe_specialized_spec_snippet(builder, %Env{} = env, range) do + with {:ok, %Position{} = position} <- Env.next_significant_position(env), + {:ok, [{maybe_def, _, [call, _]} | _]} when maybe_def in [:def, :defp] <- + Ast.path_at(env.analysis, position), + {function_name, _, args} <- call do + specialized_spec_snippet(builder, env, range, function_name, args) + else + _ -> nil + end + end + + defp specialized_spec_snippet(builder, env, range, function_name, args) do + name = 
to_string(function_name) + + args_snippet = + case args do + nil -> + "" + + list -> + Enum.map_join(1..length(list), ", ", &"${#{&1}:term()}") + end + + snippet = ~s""" + @spec #{name}(#{args_snippet}) :: ${0:term()} + """ + + env + |> builder.text_edit_snippet(snippet, range, + detail: "Typespec", + kind: :property, + label: "@spec #{name}" + ) + |> builder.set_sort_scope(SortScope.global(false, 0)) + end + + defp basic_spec_snippet(builder, env, range) do + snippet = ~S""" + @spec ${1:function}(${2:term()}) :: ${3:term()} + def ${1:function}(${4:args}) do + $0 + end + """ + + env + |> builder.text_edit_snippet(snippet, range, + detail: "Typespec", + kind: :property, + label: "@spec" + ) + |> builder.set_sort_scope(SortScope.global(false, 1)) + end end diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_or_behaviour.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_or_behaviour.ex index fd7b3526b..57ee01408 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_or_behaviour.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/module_or_behaviour.ex @@ -1,7 +1,8 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleOrBehaviour do alias Lexical.Ast.Env - alias Lexical.Completion.Translatable alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.SortScope + alias Lexical.Server.CodeIntelligence.Completion.Translatable alias Lexical.Server.CodeIntelligence.Completion.Translations alias Lexical.Server.Project.Intelligence @@ -109,7 +110,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleOrBehavi env |> builder.plain_text(module_name, label: module_name, kind: :module, detail: detail) - |> builder.boost(0, 2) + |> builder.set_sort_scope(SortScope.module()) end defp local_module_name(parent_module, child_module, aliased_module) do @@ -124,7 +125,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleOrBehavi local_name = child_module - |> String.slice(local_module_length..-1) + |> String.slice(local_module_length..-1//1) |> strip_leading_period() if String.starts_with?(local_name, aliased_module) do diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/struct_field.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/struct_field.ex index 9b1c7eccb..82f045049 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/struct_field.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/struct_field.ex @@ -1,8 +1,9 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructField do alias Future.Code, as: Code alias Lexical.Ast.Env - alias Lexical.Completion.Translatable alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.SortScope + alias Lexical.Server.CodeIntelligence.Completion.Translatable alias Lexical.Server.CodeIntelligence.Completion.Translations defimpl Translatable, for: Candidate.StructField do @@ -21,18 +22,21 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructField do builder_opts = [ kind: :field, - label: "#{name}: #{value}" + label: "#{name}: #{value}", + filter_text: "#{name}:" ] insert_text = "#{name}: ${1:#{value}}" range = edit_range(env) - builder.text_edit_snippet(env, insert_text, range, builder_opts) + env + |> 
builder.text_edit_snippet(insert_text, range, builder_opts) + |> builder.set_sort_scope(SortScope.variable()) end def translate(%Candidate.StructField{} = struct_field, builder, %Env{} = env) do builder.plain_text(env, struct_field.name, - detail: struct_field.name, + detail: struct_field.type_spec, label: struct_field.name, kind: :field ) diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/typespec.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/typespec.ex new file mode 100644 index 000000000..0ed98d5a9 --- /dev/null +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/typespec.ex @@ -0,0 +1,12 @@ +defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Typespec do + alias Lexical.Ast.Env + alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.Translatable + alias Lexical.Server.CodeIntelligence.Completion.Translations.Callable + + defimpl Translatable, for: Candidate.Typespec do + def translate(typespec, _builder, %Env{} = env) do + Callable.completion(typespec, env) + end + end +end diff --git a/apps/server/lib/lexical/server/code_intelligence/completion/translations/variable.ex b/apps/server/lib/lexical/server/code_intelligence/completion/translations/variable.ex index bea86f6ee..811bb14fd 100644 --- a/apps/server/lib/lexical/server/code_intelligence/completion/translations/variable.ex +++ b/apps/server/lib/lexical/server/code_intelligence/completion/translations/variable.ex @@ -1,15 +1,18 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.Variable do alias Lexical.Ast.Env - alias Lexical.Completion.Translatable alias Lexical.RemoteControl.Completion.Candidate + alias Lexical.Server.CodeIntelligence.Completion.SortScope + alias Lexical.Server.CodeIntelligence.Completion.Translatable defimpl Translatable, for: Candidate.Variable do def translate(variable, builder, %Env{} = env) do - builder.plain_text(env, variable.name, + env + |> builder.plain_text(variable.name, detail: variable.name, kind: :variable, label: variable.name ) + |> builder.set_sort_scope(SortScope.variable()) end end end diff --git a/apps/server/lib/lexical/server/configuration.ex b/apps/server/lib/lexical/server/configuration.ex index 6b898ff90..cb0dc1d60 100644 --- a/apps/server/lib/lexical/server/configuration.ex +++ b/apps/server/lib/lexical/server/configuration.ex @@ -40,9 +40,9 @@ defmodule Lexical.Server.Configuration do |> tap(&set/1) end - @spec new() :: t - def new do - %__MODULE__{support: Support.new()} + @spec new(keyword()) :: t + def new(attrs \\ []) do + struct!(__MODULE__, [support: Support.new()] ++ attrs) end defp set(%__MODULE__{} = config) do diff --git a/apps/server/lib/lexical/server/iex/helpers.ex b/apps/server/lib/lexical/server/iex/helpers.ex index 02e6f3030..f1821be9e 100644 --- a/apps/server/lib/lexical/server/iex/helpers.ex +++ b/apps/server/lib/lexical/server/iex/helpers.ex @@ -1,9 +1,11 @@ defmodule Lexical.Server.IEx.Helpers do + alias Lexical.Ast alias Lexical.Document alias Lexical.Document.Position alias Lexical.Project alias Lexical.Protocol.Types.Completion alias Lexical.RemoteControl + alias Lexical.RemoteControl.Search alias Lexical.Server.CodeIntelligence defmacro __using__(_) do @@ -11,7 +13,11 @@ defmodule Lexical.Server.IEx.Helpers do alias Lexical.Document alias Lexical.Document.Position alias Lexical.RemoteControl + alias Lexical.RemoteControl.Search import unquote(__MODULE__) + + 
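+      # start these eagerly so the helpers also work from a bare IEx session,
+      # where the server's supervision tree isn't running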
RemoteControl.Module.Loader.start_link(nil) + RemoteControl.Dispatch.start_link([]) end end @@ -48,6 +54,27 @@ defmodule Lexical.Server.IEx.Helpers do |> Document.new(text, 0) end + def search_store(project) do + project = ensure_project(project) + RemoteControl.set_project(project) + + Search.Store.start_link( + project, + &Search.Indexer.create_index/1, + &Search.Indexer.update_index/2, + Search.Store.Backends.Ets + ) + end + + def search_entries(project) do + {:ok, entries} = + project + |> ensure_project() + |> Search.Indexer.create_index() + + entries + end + def pos(doc, line, character) do Position.new(doc, line, character) end @@ -75,14 +102,15 @@ defmodule Lexical.Server.IEx.Helpers do def complete(project, source, context) when is_binary(source) do case completion_position(source) do {:found, line, character} -> - complete(project, doc(source), line, character, context) + analysis = source |> doc() |> Ast.analyze() + complete(project, analysis, line, character, context) other -> other end end - def complete(project, %Document{} = source, line, character, context) do + def complete(project, %Ast.Analysis{} = analysis, line, character, context) do context = if is_nil(context) do Completion.Context.new(trigger_kind: :trigger_character) @@ -90,11 +118,11 @@ defmodule Lexical.Server.IEx.Helpers do context end - position = pos(source, line, character) + position = pos(analysis.document, line, character) project |> ensure_project() - |> CodeIntelligence.Completion.complete(source, position, context) + |> CodeIntelligence.Completion.complete(analysis, position, context) end def connect do diff --git a/apps/server/lib/lexical/server/json_rpc_backend.ex b/apps/server/lib/lexical/server/json_rpc_backend.ex deleted file mode 100644 index 7fd6e90d2..000000000 --- a/apps/server/lib/lexical/server/json_rpc_backend.ex +++ /dev/null @@ -1,175 +0,0 @@ -defmodule Lexical.Server.JsonRpc.Backend do - @moduledoc ~S""" - A logger backend that logs messages by sending them via LSP ‘window/logMessage’. - - ## Options - - * `:level` - the level to be logged by this backend. - Note that messages are filtered by the general - `:level` configuration for the `:logger` application first. - - * `:format` - the format message used to print logs. - Defaults to: `"$message"`. - It may also be a `{module, function}` tuple that is invoked - with the log level, the message, the current timestamp and - the metadata and must return `t:IO.chardata/0`. See - `Logger.Formatter`. - - * `:metadata` - the metadata to be printed by `$metadata`. - Defaults to an empty list (no metadata). - Setting `:metadata` to `:all` prints all metadata. See - the "Metadata" section for more information. 
- - """ - - alias Lexical.Server.Transport - @behaviour :gen_event - - defstruct format: nil, - level: nil, - metadata: nil - - @impl true - def init(__MODULE__) do - config = Application.get_env(:logger, __MODULE__) - - {:ok, init(config, %__MODULE__{})} - end - - def init({__MODULE__, opts}) when is_list(opts) do - config = - :logger - |> Application.get_env(__MODULE__) - |> merge_config(opts) - - {:ok, init(config, %__MODULE__{})} - end - - @impl true - def handle_call({:configure, options}, state) do - {:ok, :ok, configure(options, state)} - end - - def handle_call({:set_group_leader, pid}, state) do - Process.group_leader(self(), pid) - {:ok, :ok, state} - end - - @impl true - def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do - %{level: log_level} = state - - {:erl_level, level} = List.keyfind(md, :erl_level, 0, {:erl_level, level}) - - if meets_level?(level, log_level) do - {:ok, log_event(level, msg, ts, md, state)} - else - {:ok, state} - end - end - - def handle_event(:flush, state) do - {:ok, state} - end - - def handle_event(_, state) do - {:ok, state} - end - - @impl true - def handle_info(_, state) do - {:ok, state} - end - - @impl true - def code_change(_old_vsn, state, _extra) do - {:ok, state} - end - - @impl true - def terminate(_reason, _state) do - :ok - end - - ## Helpers - - defp meets_level?(_lvl, nil), do: true - - defp meets_level?(lvl, min) do - Logger.compare_levels(lvl, min) != :lt - end - - defp configure(options, state) do - config = merge_config(Application.get_env(:logger, __MODULE__), options) - Application.put_env(:logger, __MODULE__, config) - init(config, state) - end - - defp init(config, state) do - level = Keyword.get(config, :level) - format = Logger.Formatter.compile(Keyword.get(config, :format)) - - metadata = - config - |> Keyword.get(:metadata, []) - |> configure_metadata() - - %{ - state - | format: format, - metadata: metadata, - level: level - } - end - - defp configure_metadata(:all), do: :all - defp configure_metadata(metadata), do: Enum.reverse(metadata) - - defp merge_config(env, options) do - Keyword.merge(env, options, fn - _, _v1, v2 -> v2 - end) - end - - defp log_event(level, msg, ts, md, state) do - output = - level - |> format_event(msg, ts, md, state) - |> IO.chardata_to_string() - - level - |> elixir_log_level_to_lsp() - |> Transport.log(output) - - state - end - - defp elixir_log_level_to_lsp(:debug), do: :log - defp elixir_log_level_to_lsp(:info), do: :info - defp elixir_log_level_to_lsp(:notice), do: :info - defp elixir_log_level_to_lsp(:warning), do: :warning - defp elixir_log_level_to_lsp(:warn), do: :warning - defp elixir_log_level_to_lsp(:error), do: :error - defp elixir_log_level_to_lsp(:critical), do: :error - defp elixir_log_level_to_lsp(:alert), do: :error - defp elixir_log_level_to_lsp(:emergency), do: :error - - defp format_event(level, msg, ts, md, state) do - %{format: format, metadata: keys} = state - - Logger.Formatter.format(format, level, msg, ts, take_metadata(md, keys)) - end - - defp take_metadata(metadata, :all) do - metadata - end - - defp take_metadata(metadata, keys) do - Enum.reduce(keys, [], fn key, acc -> - case Keyword.fetch(metadata, key) do - {:ok, val} -> [{key, val} | acc] - :error -> acc - end - end) - end -end diff --git a/apps/server/lib/lexical/server/project/intelligence.ex b/apps/server/lib/lexical/server/project/intelligence.ex index af36919b8..968f1ce54 100644 --- a/apps/server/lib/lexical/server/project/intelligence.ex +++ 
b/apps/server/lib/lexical/server/project/intelligence.ex @@ -167,7 +167,12 @@ defmodule Lexical.Server.Project.Intelligence do @impl GenServer def init([%Project{} = project]) do - Api.register_listener(project, self(), [module_updated(), struct_discovered()]) + Api.register_listener(project, self(), [ + project_index_ready(), + module_updated(), + struct_discovered() + ]) + state = State.new(project) {:ok, state} end @@ -204,6 +209,20 @@ defmodule Lexical.Server.Project.Intelligence do {:noreply, state} end + require Logger + + @impl GenServer + def handle_info(project_index_ready(), %State{} = state) do + {:ok, struct_definitions} = Api.struct_definitions(state.project) + + state = + Enum.reduce(struct_definitions, State.new(state.project), fn module, state -> + State.add_struct_module(state, module) + end) + + {:noreply, state} + end + # Private def name(%Project{} = project) do diff --git a/apps/server/lib/lexical/server/project/node.ex b/apps/server/lib/lexical/server/project/node.ex index 77dff642c..82c6a3fac 100644 --- a/apps/server/lib/lexical/server/project/node.ex +++ b/apps/server/lib/lexical/server/project/node.ex @@ -98,7 +98,9 @@ defmodule Lexical.Server.Project.Node do end defp delete_build_artifacts(%Project{} = project) do - case File.rm_rf(Project.build_path(project)) do + build_path = RemoteControl.Build.path(project) + + case File.rm_rf(build_path) do {:ok, _deleted} -> :ok error -> error end diff --git a/apps/server/lib/lexical/server/project/progress.ex b/apps/server/lib/lexical/server/project/progress.ex index 6a8e85ab6..ca05916d3 100644 --- a/apps/server/lib/lexical/server/project/progress.ex +++ b/apps/server/lib/lexical/server/project/progress.ex @@ -30,6 +30,12 @@ defmodule Lexical.Server.Project.Progress do {:noreply, new_state} end + def handle_info(percent_progress(stage: stage) = message, %State{} = state) do + new_state = apply(State, stage, [state, message]) + + {:noreply, new_state} + end + def name(%Project{} = project) do :"#{Project.name(project)}::progress" end diff --git a/apps/server/lib/lexical/server/project/progress/percentage.ex b/apps/server/lib/lexical/server/project/progress/percentage.ex new file mode 100644 index 000000000..aa6b5eded --- /dev/null +++ b/apps/server/lib/lexical/server/project/progress/percentage.ex @@ -0,0 +1,67 @@ +defmodule Lexical.Server.Project.Progress.Percentage do + @moduledoc """ + The backing data structure for percentage based progress reports + """ + alias Lexical.Math + alias Lexical.Protocol.Notifications + alias Lexical.Protocol.Types.WorkDone + + @enforce_keys [:token, :kind, :max] + defstruct [:token, :kind, :title, :message, :max, current: 0] + + def begin(title, max) do + token = System.unique_integer([:positive]) + %__MODULE__{token: token, kind: :begin, title: title, max: max} + end + + def report(percentage, delta, message \\ "") + + def report(%__MODULE__{} = percentage, delta, message) when is_integer(delta) and delta >= 0 do + new_current = percentage.current + delta + + %__MODULE__{percentage | kind: :report, message: message, current: new_current} + end + + def report(%__MODULE__{} = percentage, delta, _message) when is_integer(delta) do + percentage + end + + def report(_, _, _) do + nil + end + + def complete(%__MODULE__{} = percentage, message) do + %__MODULE__{percentage | kind: :end, current: percentage.max, message: message} + end + + def to_protocol(%__MODULE__{kind: :begin} = value) do + Notifications.Progress.new( + token: value.token, + value: WorkDone.Progress.Begin.new(kind: "begin", 
title: value.title, percentage: 0) + ) + end + + def to_protocol(%__MODULE__{kind: :report} = value) do + percent_complete = + (value.current / value.max * 100) + |> round() + |> Math.clamp(0, 100) + + Notifications.Progress.new( + token: value.token, + value: + WorkDone.Progress.Report.new( + kind: "report", + message: value.message, + percentage: percent_complete + ) + ) + end + + def to_protocol(%__MODULE__{kind: :end} = value) do + Notifications.Progress.new( + token: value.token, + value: WorkDone.Progress.End.new(kind: "end", message: value.message) + ) + end +end diff --git a/apps/server/lib/lexical/server/project/progress/state.ex b/apps/server/lib/lexical/server/project/progress/state.ex index 4736000d2..2e5415780 100644 --- a/apps/server/lib/lexical/server/project/progress/state.ex +++ b/apps/server/lib/lexical/server/project/progress/state.ex @@ -3,6 +3,7 @@ defmodule Lexical.Server.Project.Progress.State do alias Lexical.Protocol.Id alias Lexical.Protocol.Requests alias Lexical.Server.Configuration + alias Lexical.Server.Project.Progress.Percentage alias Lexical.Server.Project.Progress.Value alias Lexical.Server.Transport @@ -21,7 +22,16 @@ defmodule Lexical.Server.Project.Progress.State do write_work_done(progress.token) write(progress) - %{state | progress_by_label: progress_by_label} + %__MODULE__{state | progress_by_label: progress_by_label} + end + + def begin(%__MODULE__{} = state, percent_progress(label: label, max: max)) do + progress = Percentage.begin(label, max) + progress_by_label = Map.put(state.progress_by_label, label, progress) + write_work_done(progress.token) + write(progress) + + %__MODULE__{state | progress_by_label: progress_by_label} end def report(%__MODULE__{} = state, project_progress(label: label, message: message)) do @@ -32,15 +42,51 @@ defmodule Lexical.Server.Project.Progress.State do end) write(progress) - %{state | progress_by_label: progress_by_label} + %__MODULE__{state | progress_by_label: progress_by_label} + end + + def report( + %__MODULE__{} = state, + percent_progress(label: label, message: message, delta: delta) + ) do + {progress, progress_by_label} = + Map.get_and_update(state.progress_by_label, label, fn old_percentage -> + new_percentage = Percentage.report(old_percentage, delta, message) + {new_percentage, new_percentage} + end) + + write(progress) + %__MODULE__{state | progress_by_label: progress_by_label} end def complete(%__MODULE__{} = state, project_progress(label: label, message: message)) do {progress, progress_by_label} = Map.get_and_update(state.progress_by_label, label, fn _ -> :pop end) - progress |> Value.complete(message) |> write() - %{state | progress_by_label: progress_by_label} + case progress do + %Value{} = progress -> + progress |> Value.complete(message) |> write + + _ -> + :ok + end + + %__MODULE__{state | progress_by_label: progress_by_label} + end + + def complete(%__MODULE__{} = state, percent_progress(label: label, message: message)) do + {progress, progress_by_label} = + Map.get_and_update(state.progress_by_label, label, fn _ -> :pop end) + + case progress do + %Percentage{} = progress -> + progress |> Percentage.complete(message) |> write() + + nil -> + :ok + end + + %__MODULE__{state | progress_by_label: progress_by_label} end defp write_work_done(token) do @@ -50,9 +96,9 @@ defmodule Lexical.Server.Project.Progress.State do end end - defp write(%{token: token} = progress) when not is_nil(token) do + defp write(%progress_module{token: token} = progress) when not is_nil(token) do if 
Configuration.client_supports?(:work_done_progress) do - progress |> Value.to_protocol() |> Transport.write() + progress |> progress_module.to_protocol() |> Transport.write() end end diff --git a/apps/server/lib/lexical/server/project/progress/support.ex b/apps/server/lib/lexical/server/project/progress/support.ex index 35136fe3d..cf339b0b4 100644 --- a/apps/server/lib/lexical/server/project/progress/support.ex +++ b/apps/server/lib/lexical/server/project/progress/support.ex @@ -1,4 +1,5 @@ defmodule Lexical.Server.Project.Progress.Support do + alias Lexical.Project alias Lexical.Server.Project.Progress import Lexical.RemoteControl.Api.Messages @@ -19,4 +20,23 @@ defmodule Lexical.Server.Project.Progress.Support do send(dest, project_progress(label: label, stage: :complete)) end end + + def with_percentage_progress(%Project{} = project, label, max, func) + when is_function(func, 1) do + dest = Progress.name(project) + + report_progress = fn delta, message -> + message = + percent_progress(label: label, max: max, message: message, delta: delta, stage: :report) + + send(dest, message) + end + + try do + send(dest, percent_progress(label: label, max: max, stage: :begin)) + func.(report_progress) + after + send(dest, percent_progress(label: label, stage: :complete)) + end + end end diff --git a/apps/server/lib/lexical/server/project/search_listener.ex b/apps/server/lib/lexical/server/project/search_listener.ex new file mode 100644 index 000000000..2af99b770 --- /dev/null +++ b/apps/server/lib/lexical/server/project/search_listener.ex @@ -0,0 +1,55 @@ +defmodule Lexical.Server.Project.SearchListener do + alias Lexical.Formats + alias Lexical.Project + alias Lexical.Protocol.Id + alias Lexical.Protocol.Requests + alias Lexical.RemoteControl.Api + alias Lexical.Server + alias Lexical.Server.Window + + import Api.Messages + + use GenServer + require Logger + + def start_link(%Project{} = project) do + GenServer.start_link(__MODULE__, [project], name: name(project)) + end + + defp name(%Project{} = project) do + :"#{Project.name(project)}::search_listener" + end + + @impl GenServer + def init([%Project{} = project]) do + Api.register_listener(project, self(), [ + project_reindex_requested(), + project_reindexed() + ]) + + {:ok, project} + end + + @impl GenServer + def handle_info(project_reindex_requested(), %Project{} = project) do + Logger.info("project reindex requested") + send_code_lens_refresh() + + {:noreply, project} + end + + def handle_info(project_reindexed(elapsed_ms: elapsed), %Project{} = project) do + message = "Reindexed #{Project.name(project)} in #{Formats.time(elapsed, unit: :millisecond)}" + Logger.info(message) + send_code_lens_refresh() + + Window.show_info_message(message) + + {:noreply, project} + end + + defp send_code_lens_refresh do + request = Requests.CodeLensRefresh.new(id: Id.next()) + Server.server_request(request) + end +end diff --git a/apps/server/lib/lexical/server/project/supervisor.ex b/apps/server/lib/lexical/server/project/supervisor.ex index 1693c6281..58adf99bc 100644 --- a/apps/server/lib/lexical/server/project/supervisor.ex +++ b/apps/server/lib/lexical/server/project/supervisor.ex @@ -5,6 +5,7 @@ defmodule Lexical.Server.Project.Supervisor do alias Lexical.Server.Project.Intelligence alias Lexical.Server.Project.Node alias Lexical.Server.Project.Progress + alias Lexical.Server.Project.SearchListener use Supervisor @@ -26,7 +27,8 @@ defmodule Lexical.Server.Project.Supervisor do {ProjectNodeSupervisor, project}, {Node, project}, {Diagnostics, project}, - 
{Intelligence, project} + {Intelligence, project}, + {SearchListener, project} ] Supervisor.init(children, strategy: :one_for_one) diff --git a/apps/server/lib/lexical/server/provider/code_action/replace_with_underscore.ex b/apps/server/lib/lexical/server/provider/code_action/replace_with_underscore.ex deleted file mode 100644 index b10f328cc..000000000 --- a/apps/server/lib/lexical/server/provider/code_action/replace_with_underscore.ex +++ /dev/null @@ -1,79 +0,0 @@ -defmodule Lexical.Server.Provider.CodeAction.ReplaceWithUnderscore do - @moduledoc """ - A code action that prefixes unused variables with an underscore - """ - alias Lexical.Document - alias Lexical.Document.Changes - alias Lexical.Project - alias Lexical.Protocol.Requests.CodeAction - alias Lexical.Protocol.Types.CodeAction, as: CodeActionResult - alias Lexical.Protocol.Types.Diagnostic - alias Lexical.Protocol.Types.Workspace - alias Lexical.RemoteControl - alias Lexical.Server.Provider.Env - - @spec apply(CodeAction.t(), Env.t()) :: [CodeActionResult.t()] - def apply(%CodeAction{} = code_action, %Env{} = env) do - document = code_action.document - diagnostics = get_in(code_action, [:context, :diagnostics]) || [] - - Enum.flat_map(diagnostics, fn %Diagnostic{} = diagnostic -> - with {:ok, variable_name, line_number} <- extract_variable_and_line(diagnostic), - {:ok, reply} <- build_code_action(env.project, document, line_number, variable_name) do - [reply] - else - _ -> - [] - end - end) - end - - defp build_code_action( - %Project{} = project, - %Document{} = document, - line_number, - variable_name - ) do - case RemoteControl.Api.replace_with_underscore( - project, - document, - line_number, - variable_name - ) do - {:ok, %Changes{} = document_edits} -> - reply = - CodeActionResult.new( - title: "Rename to _#{variable_name}", - kind: :quick_fix, - edit: Workspace.Edit.new(changes: %{document.uri => document_edits}) - ) - - {:ok, reply} - - _ -> - :error - end - end - - defp extract_variable_and_line(%Diagnostic{} = diagnostic) do - with {:ok, variable_name} <- extract_variable_name(diagnostic.message), - {:ok, line} <- extract_line(diagnostic) do - {:ok, variable_name, line} - end - end - - @variable_re ~r/variable "([^"]+)" is unused/ - defp extract_variable_name(message) do - case Regex.scan(@variable_re, message) do - [[_, variable_name]] -> - {:ok, String.to_atom(variable_name)} - - _ -> - :error - end - end - - defp extract_line(%Diagnostic{} = diagnostic) do - {:ok, diagnostic.range.start.line} - end -end diff --git a/apps/server/lib/lexical/server/provider/env.ex b/apps/server/lib/lexical/server/provider/env.ex deleted file mode 100644 index f728c6ba7..000000000 --- a/apps/server/lib/lexical/server/provider/env.ex +++ /dev/null @@ -1,28 +0,0 @@ -defmodule Lexical.Server.Provider.Env do - @moduledoc """ - An environment passed to provider handlers. - This represents the current state of the project, and should include additional - information that provider handles might need to complete their tasks. 
- """ - - alias Lexical.Project - alias Lexical.Server.Configuration - - defstruct [:project] - - @type t :: %__MODULE__{ - project: Project.t() - } - - def new do - %__MODULE__{} - end - - def from_configuration(%Configuration{} = config) do - %__MODULE__{project: config.project} - end - - def project_name(%__MODULE__{} = env) do - Project.name(env.project) - end -end diff --git a/apps/server/lib/lexical/server/provider/handlers.ex b/apps/server/lib/lexical/server/provider/handlers.ex deleted file mode 100644 index fa39c6da0..000000000 --- a/apps/server/lib/lexical/server/provider/handlers.ex +++ /dev/null @@ -1,29 +0,0 @@ -defmodule Lexical.Server.Provider.Handlers do - alias Lexical.Protocol.Requests - alias Lexical.Server.Provider.Handlers - - def for_request(%_{} = request) do - case request do - %Requests.FindReferences{} -> - {:ok, Handlers.FindReferences} - - %Requests.Formatting{} -> - {:ok, Handlers.Formatting} - - %Requests.CodeAction{} -> - {:ok, Handlers.CodeAction} - - %Requests.Completion{} -> - {:ok, Handlers.Completion} - - %Requests.GoToDefinition{} -> - {:ok, Handlers.GoToDefinition} - - %Requests.Hover{} -> - {:ok, Handlers.Hover} - - %request_module{} -> - {:error, {:unhandled, request_module}} - end - end -end diff --git a/apps/server/lib/lexical/server/provider/handlers/code_action.ex b/apps/server/lib/lexical/server/provider/handlers/code_action.ex index 63955b14f..44fc17843 100644 --- a/apps/server/lib/lexical/server/provider/handlers/code_action.ex +++ b/apps/server/lib/lexical/server/provider/handlers/code_action.ex @@ -1,18 +1,41 @@ defmodule Lexical.Server.Provider.Handlers.CodeAction do alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses - alias Lexical.Server.Provider.CodeAction.ReplaceWithUnderscore - alias Lexical.Server.Provider.CodeAction.ReplaceWithUnderscore - alias Lexical.Server.Provider.Env + alias Lexical.Protocol.Types + alias Lexical.Protocol.Types.Workspace + alias Lexical.RemoteControl + alias Lexical.RemoteControl.CodeAction + alias Lexical.Server.Configuration require Logger - @code_actions [ReplaceWithUnderscore] + def handle(%Requests.CodeAction{} = request, %Configuration{} = config) do + diagnostics = Enum.map(request.context.diagnostics, &to_code_action_diagnostic/1) - def handle(%Requests.CodeAction{} = request, %Env{} = env) do - code_actions = Enum.flat_map(@code_actions, & &1.apply(request, env)) - reply = Responses.CodeAction.new(request.id, code_actions) + code_actions = + RemoteControl.Api.code_actions( + config.project, + request.document, + request.range, + diagnostics, + request.context.only || :all + ) + + results = Enum.map(code_actions, &to_result/1) + reply = Responses.CodeAction.new(request.id, results) {:reply, reply} end + + defp to_code_action_diagnostic(%Types.Diagnostic{} = diagnostic) do + CodeAction.Diagnostic.new(diagnostic.range, diagnostic.message, diagnostic.source) + end + + defp to_result(%CodeAction{} = action) do + Types.CodeAction.new( + title: action.title, + kind: action.kind, + edit: Workspace.Edit.new(changes: %{action.uri => action.changes}) + ) + end end diff --git a/apps/server/lib/lexical/server/provider/handlers/code_lens.ex b/apps/server/lib/lexical/server/provider/handlers/code_lens.ex new file mode 100644 index 000000000..620048652 --- /dev/null +++ b/apps/server/lib/lexical/server/provider/handlers/code_lens.ex @@ -0,0 +1,58 @@ +defmodule Lexical.Server.Provider.Handlers.CodeLens do + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Document.Range + alias 
Lexical.Project + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Protocol.Types.CodeLens + alias Lexical.RemoteControl + alias Lexical.Server.Configuration + alias Lexical.Server.Provider.Handlers + + import Document.Line + require Logger + + def handle(%Requests.CodeLens{} = request, %Configuration{} = config) do + lenses = + case reindex_lens(config.project, request.document) do + nil -> [] + lens -> List.wrap(lens) + end + + response = Responses.CodeLens.new(request.id, lenses) + {:reply, response} + end + + defp reindex_lens(%Project{} = project, %Document{} = document) do + if show_reindex_lens?(project, document) do + range = def_project_range(document) + command = Handlers.Commands.reindex_command(project) + + CodeLens.new(command: command, range: range) + end + end + + @project_regex ~r/def\s+project\s/ + defp def_project_range(%Document{} = document) do + # returns the line in mix.exs where `def project` occurs + Enum.reduce_while(document.lines, nil, fn + line(text: line_text, line_number: line_number), _ -> + if String.match?(line_text, @project_regex) do + start_pos = Position.new(document, line_number, 1) + end_pos = Position.new(document, line_number, String.length(line_text)) + range = Range.new(start_pos, end_pos) + {:halt, range} + else + {:cont, nil} + end + end) + end + + defp show_reindex_lens?(%Project{} = project, %Document{} = document) do + document_path = Path.expand(document.path) + + document_path == Project.mix_exs_path(project) and + not RemoteControl.Api.index_running?(project) + end +end diff --git a/apps/server/lib/lexical/server/provider/handlers/commands.ex b/apps/server/lib/lexical/server/provider/handlers/commands.ex new file mode 100644 index 000000000..42d64fbc4 --- /dev/null +++ b/apps/server/lib/lexical/server/provider/handlers/commands.ex @@ -0,0 +1,60 @@ +defmodule Lexical.Server.Provider.Handlers.Commands do + alias Lexical.Project + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Responses + alias Lexical.Protocol.Types + alias Lexical.Protocol.Types.ErrorCodes + alias Lexical.RemoteControl + alias Lexical.Server.Configuration + alias Lexical.Server.Window + + require ErrorCodes + require Logger + + @reindex_name "Reindex" + + def names do + [@reindex_name] + end + + def reindex_command(%Project{} = project) do + project_name = Project.name(project) + + Types.Command.new( + title: "Rebuild #{project_name}'s code search index", + command: @reindex_name + ) + end + + def handle(%Requests.ExecuteCommand{} = request, %Configuration{} = config) do + response = + case request.command do + @reindex_name -> + Logger.info("Reindex #{Project.name(config.project)}") + reindex(config.project, request.id) + + invalid -> + message = "#{invalid} is not a valid command" + internal_error(request.id, message) + end + + {:reply, response} + end + + defp reindex(%Project{} = project, request_id) do + case RemoteControl.Api.reindex(project) do + :ok -> + Responses.ExecuteCommand.new(request_id, "ok") + + error -> + Window.show_error_message("Indexing #{Project.name(project)} failed") + Logger.error("Indexing command failed due to #{inspect(error)}") + + internal_error(request_id, "Could not reindex: #{error}") + end + end + + defp internal_error(request_id, message) do + Responses.ExecuteCommand.error(request_id, :internal_error, message) + end +end diff --git a/apps/server/lib/lexical/server/provider/handlers/completion.ex b/apps/server/lib/lexical/server/provider/handlers/completion.ex index 00806a363..e08115bf2 100644 
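# -- Annotation on Handlers.Commands above (a sketch, not part of the diff): the new "Reindex" command round-trips roughly as follows. The wire payload shape is an assumption based on the LSP `workspace/executeCommand` method; the module, field, and function names come from the handler as written:
#
#     client -> {"method": "workspace/executeCommand", "params": {"command": "Reindex"}}
#     Handlers.Commands.handle(%Requests.ExecuteCommand{} = request, config)
#     #=> calls RemoteControl.Api.reindex(config.project)
#     #=> replies Responses.ExecuteCommand.new(request.id, "ok") on success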
--- a/apps/server/lib/lexical/server/provider/handlers/completion.ex +++ b/apps/server/lib/lexical/server/provider/handlers/completion.ex @@ -1,17 +1,20 @@ defmodule Lexical.Server.Provider.Handlers.Completion do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Document.Position alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.Protocol.Types.Completion alias Lexical.Server.CodeIntelligence - alias Lexical.Server.Provider.Env + alias Lexical.Server.Configuration require Logger - def handle(%Requests.Completion{} = request, %Env{} = env) do + def handle(%Requests.Completion{} = request, %Configuration{} = config) do completions = CodeIntelligence.Completion.complete( - env.project, - request.document, + config.project, + document_analysis(request.document, request.position), request.position, request.context || Completion.Context.new(trigger_kind: :invoked) ) @@ -19,4 +22,16 @@ defmodule Lexical.Server.Provider.Handlers.Completion do response = Responses.Completion.new(request.id, completions) {:reply, response} end + + defp document_analysis(%Document{} = document, %Position{} = position) do + case Document.Store.fetch(document.uri, :analysis) do + {:ok, %Document{}, %Ast.Analysis{} = analysis} -> + Ast.reanalyze_to(analysis, position) + + _ -> + document + |> Ast.analyze() + |> Ast.reanalyze_to(position) + end + end end diff --git a/apps/server/lib/lexical/server/provider/handlers/document_symbols.ex b/apps/server/lib/lexical/server/provider/handlers/document_symbols.ex new file mode 100644 index 000000000..5805ff26d --- /dev/null +++ b/apps/server/lib/lexical/server/provider/handlers/document_symbols.ex @@ -0,0 +1,58 @@ +defmodule Lexical.Server.Provider.Handlers.DocumentSymbols do + alias Lexical.Document + alias Lexical.Protocol.Requests.DocumentSymbols + alias Lexical.Protocol.Responses + alias Lexical.Protocol.Types.Document.Symbol + alias Lexical.Protocol.Types.Symbol.Kind, as: SymbolKind + alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.CodeIntelligence.Symbols + alias Lexical.Server.Configuration + + require SymbolKind + + def handle(%DocumentSymbols{} = request, %Configuration{} = config) do + symbols = + config.project + |> Api.document_symbols(request.document) + |> Enum.map(&to_response(&1, request.document)) + + response = Responses.DocumentSymbols.new(request.id, symbols) + + {:reply, response} + end + + def to_response(%Symbols.Document{} = root, %Document{} = document) do + children = + case root.children do + list when is_list(list) -> + Enum.map(list, &to_response(&1, document)) + + _ -> + nil + end + + Symbol.new( + children: children, + detail: root.detail, + kind: to_kind(root.type), + name: root.name, + range: root.range, + selection_range: root.detail_range + ) + end + + defp to_kind(:struct), do: :struct + defp to_kind(:module), do: :module + defp to_kind(:variable), do: :variable + defp to_kind({:function, _}), do: :function + defp to_kind({:protocol, _}), do: :module + defp to_kind(:module_attribute), do: :constant + defp to_kind(:ex_unit_test), do: :method + defp to_kind(:ex_unit_describe), do: :method + defp to_kind(:ex_unit_setup), do: :method + defp to_kind(:ex_unit_setup_all), do: :method + defp to_kind(:type), do: :type_parameter + defp to_kind(:spec), do: :interface + defp to_kind(:file), do: :file + defp to_kind(_), do: :string +end diff --git a/apps/server/lib/lexical/server/provider/handlers/find_references.ex b/apps/server/lib/lexical/server/provider/handlers/find_references.ex index 
f7319817a..a9a788dfb 100644 --- a/apps/server/lib/lexical/server/provider/handlers/find_references.ex +++ b/apps/server/lib/lexical/server/provider/handlers/find_references.ex @@ -1,57 +1,26 @@ -# defmodule Lexical.Server.Provider.Handlers.FindReferences do -# alias Lexical.Build -# alias Lexical.Document -# alias Lexical.Protocol.Requests.FindReferences -# alias Lexical.Protocol.Responses -# alias Lexical.Protocol.Types.Location -# alias Lexical.Ranged -# alias Lexical.Tracer - -# require Logger - -# def handle(%FindReferences{} = request, _) do -# document = request.document -# pos = request.position -# trace = Tracer.get_trace() -# # elixir_ls uses 1 based columns, so add 1 here. -# character = pos.character + 1 - -# Build.with_lock(fn -> -# references = -# document -# |> Document.to_string() -# |> ElixirSense.references(pos.line, character, trace) -# |> Enum.reduce([], fn reference, acc -> -# case build_reference(reference, document) do -# {:ok, location} -> -# [location | acc] - -# _ -> -# acc -# end -# end) -# |> Enum.reverse() - -# response = Responses.FindReferences.new(request.id, references) -# Logger.info("found #{length(references)} refs") -# {:reply, response} -# end) -# end - -# defp build_reference(%{range: _, uri: _} = elixir_sense_reference, current_document) do -# with {:ok, document} <- get_document(elixir_sense_reference, current_document), -# {:ok, elixir_range} <- Ranged.Native.from_lsp(elixir_sense_reference, document), -# {:ok, ls_range} <- Ranged.Lsp.from_native(elixir_range, document) do -# uri = Document.Path.ensure_uri(document.uri) -# {:ok, Location.new(uri: uri, range: ls_range)} -# end -# end - -# defp get_document(%{uri: nil}, current_document) do -# {:ok, current_document} -# end - -# defp get_document(%{uri: uri}, _) do -# Document.Store.open_temporary(uri) -# end -# end +defmodule Lexical.Server.Provider.Handlers.FindReferences do + alias Lexical.Ast + alias Lexical.Document + alias Lexical.Protocol.Requests.FindReferences + alias Lexical.Protocol.Responses + alias Lexical.RemoteControl.Api + alias Lexical.Server.Configuration + + require Logger + + def handle(%FindReferences{} = request, %Configuration{} = config) do + include_declaration? = !!request.context.include_declaration + + locations = + case Document.Store.fetch(request.document.uri, :analysis) do + {:ok, _document, %Ast.Analysis{} = analysis} -> + Api.references(config.project, analysis, request.position, include_declaration?) 
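# -- Annotation (not part of the diff): `Document.Store.fetch(uri, :analysis)` yields a three-element `{:ok, document, analysis}` tuple, so the happy path above hands the cached `Ast.Analysis` straight to `Api.references/4`; when nothing is cached, `locations` falls through to nil below and the response is still sent rather than an error.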
+ + _ -> + nil + end + + response = Responses.FindReferences.new(request.id, locations) + {:reply, response} + end +end diff --git a/apps/server/lib/lexical/server/provider/handlers/formatting.ex b/apps/server/lib/lexical/server/provider/handlers/formatting.ex index 84546e69a..b250f74e3 100644 --- a/apps/server/lib/lexical/server/provider/handlers/formatting.ex +++ b/apps/server/lib/lexical/server/provider/handlers/formatting.ex @@ -3,14 +3,14 @@ defmodule Lexical.Server.Provider.Handlers.Formatting do alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.RemoteControl - alias Lexical.Server.Provider.Env + alias Lexical.Server.Configuration require Logger - def handle(%Requests.Formatting{} = request, %Env{} = env) do + def handle(%Requests.Formatting{} = request, %Configuration{} = config) do document = request.document - case RemoteControl.Api.format(env.project, document) do + case RemoteControl.Api.format(config.project, document) do {:ok, %Changes{} = document_edits} -> response = Responses.Formatting.new(request.id, document_edits) Logger.info("Response #{inspect(response)}") diff --git a/apps/server/lib/lexical/server/provider/handlers/go_to_definition.ex b/apps/server/lib/lexical/server/provider/handlers/go_to_definition.ex index dfe9e0d4e..74b8459f3 100644 --- a/apps/server/lib/lexical/server/provider/handlers/go_to_definition.ex +++ b/apps/server/lib/lexical/server/provider/handlers/go_to_definition.ex @@ -2,17 +2,18 @@ defmodule Lexical.Server.Provider.Handlers.GoToDefinition do alias Lexical.Protocol.Requests.GoToDefinition alias Lexical.Protocol.Responses alias Lexical.RemoteControl + alias Lexical.Server.Configuration require Logger - def handle(%GoToDefinition{} = request, env) do - case RemoteControl.Api.definition(env.project, request.document, request.position) do + def handle(%GoToDefinition{} = request, %Configuration{} = config) do + case RemoteControl.Api.definition(config.project, request.document, request.position) do {:ok, native_location} -> {:reply, Responses.GoToDefinition.new(request.id, native_location)} {:error, reason} -> Logger.error("GoToDefinition failed: #{inspect(reason)}") - {:error, Responses.GoToDefinition.error(request.id, :request_failed, inspect(reason))} + {:reply, Responses.GoToDefinition.new(request.id, nil)} end end end diff --git a/apps/server/lib/lexical/server/provider/handlers/hover.ex b/apps/server/lib/lexical/server/provider/handlers/hover.ex index efe05b640..731261ef8 100644 --- a/apps/server/lib/lexical/server/provider/handlers/hover.ex +++ b/apps/server/lib/lexical/server/provider/handlers/hover.ex @@ -1,20 +1,25 @@ defmodule Lexical.Server.Provider.Handlers.Hover do alias Lexical.Ast + alias Lexical.Ast.Analysis + alias Lexical.Document + alias Lexical.Document.Position + alias Lexical.Project alias Lexical.Protocol.Requests alias Lexical.Protocol.Responses alias Lexical.Protocol.Types.Hover alias Lexical.RemoteControl alias Lexical.RemoteControl.CodeIntelligence.Docs - alias Lexical.RemoteControl.CodeIntelligence.Entity - alias Lexical.Server.Provider.Env + alias Lexical.Server.Configuration alias Lexical.Server.Provider.Markdown require Logger - def handle(%Requests.Hover{} = request, %Env{} = env) do + def handle(%Requests.Hover{} = request, %Configuration{} = config) do maybe_hover = - with {:ok, entity, range} <- Entity.resolve(request.document, request.position), - {:ok, markdown} <- hover_content(entity, env) do + with {:ok, _document, %Ast.Analysis{} = analysis} <- + 
Document.Store.fetch(request.document.uri, :analysis), + {:ok, entity, range} <- resolve_entity(config.project, analysis, request.position), + {:ok, markdown} <- hover_content(entity, config.project) do content = Markdown.to_content(markdown) %Hover{contents: content, range: range} else @@ -26,8 +31,12 @@ defmodule Lexical.Server.Provider.Handlers.Hover do {:reply, Responses.Hover.new(request.id, maybe_hover)} end - defp hover_content({kind, module}, env) when kind in [:module, :struct] do - case RemoteControl.Api.docs(env.project, module, exclude_hidden: false) do + defp resolve_entity(%Project{} = project, %Analysis{} = analysis, %Position{} = position) do + RemoteControl.Api.resolve_entity(project, analysis, position) + end + + defp hover_content({kind, module}, %Project{} = project) when kind in [:module, :struct] do + case RemoteControl.Api.docs(project, module, exclude_hidden: false) do {:ok, %Docs{} = module_docs} -> header = module_header(kind, module_docs) types = module_header_types(kind, module_docs) @@ -49,8 +58,8 @@ defmodule Lexical.Server.Provider.Handlers.Hover do end end - defp hover_content({:call, module, fun, arity}, env) do - with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(env.project, module), + defp hover_content({:call, module, fun, arity}, %Project{} = project) do + with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(project, module), {:ok, entries} <- Map.fetch(module_docs.functions_and_macros, fun) do sections = entries @@ -62,8 +71,8 @@ defmodule Lexical.Server.Provider.Handlers.Hover do end end - defp hover_content({:type, module, type, arity}, env) do - with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(env.project, module), + defp hover_content({:type, module, type, arity}, %Project{} = project) do + with {:ok, %Docs{} = module_docs} <- RemoteControl.Api.docs(project, module), {:ok, entries} <- Map.fetch(module_docs.types, type) do case Enum.find(entries, &(&1.arity == arity)) do %Docs.Entry{} = entry -> @@ -75,6 +84,10 @@ defmodule Lexical.Server.Provider.Handlers.Hover do end end + defp hover_content(type, _) do + {:error, {:unsupported, type}} + end + defp module_header(:module, %Docs{module: module}) do Ast.Module.name(module) end @@ -108,25 +121,22 @@ defmodule Lexical.Server.Provider.Handlers.Hover do defp entry_content(%Docs.Entry{kind: fn_or_macro} = entry) when fn_or_macro in [:function, :macro] do - with {:ok, call_header} <- call_header(entry) do - specs = Enum.map_join(entry.defs, "\n", &("@spec " <> &1)) + call_header = call_header(entry) + specs = Enum.map_join(entry.defs, "\n", &("@spec " <> &1)) - header = - [call_header, specs] - |> Markdown.join_sections() - |> String.trim() - |> Markdown.code_block() + header = + [call_header, specs] + |> Markdown.join_sections() + |> String.trim() + |> Markdown.code_block() - Markdown.join_sections([header, entry_doc_content(entry.doc)]) - end + Markdown.join_sections([header, entry_doc_content(entry.doc)]) end defp entry_content(%Docs.Entry{kind: :type} = entry) do - module_name = Ast.Module.name(entry.module) - header = Markdown.code_block(""" - #{module_name}.#{entry.name}/#{entry.arity} + #{call_header(entry)} #{type_defs(entry)}\ """) @@ -134,21 +144,52 @@ defmodule Lexical.Server.Provider.Handlers.Hover do Markdown.join_sections([header, entry_doc_content(entry.doc)]) end + @one_line_header_cutoff 50 + + defp call_header(%Docs.Entry{kind: :type} = entry) do + module_name = Ast.Module.name(entry.module) + + one_line_header = "#{module_name}.#{entry.name}/#{entry.arity}" + 
+ two_line_header = + "#{last_module_name(module_name)}.#{entry.name}/#{entry.arity}\n#{module_name}" + + if String.length(one_line_header) >= @one_line_header_cutoff do + two_line_header + else + one_line_header + end + end + defp call_header(%Docs.Entry{kind: maybe_macro} = entry) do - with [signature | _] <- entry.signature do - module_name = Ast.Module.name(entry.module) + [signature | _] = entry.signature + module_name = Ast.Module.name(entry.module) - macro_prefix = - if maybe_macro == :macro do - "(macro) " - else - "" - end + macro_prefix = + if maybe_macro == :macro do + "(macro) " + else + "" + end + + one_line_header = "#{macro_prefix}#{module_name}.#{signature}" - {:ok, "#{macro_prefix}#{module_name}.#{signature}"} + two_line_header = + "#{macro_prefix}#{last_module_name(module_name)}.#{signature}\n#{module_name}" + + if String.length(one_line_header) >= @one_line_header_cutoff do + two_line_header + else + one_line_header end end + defp last_module_name(module_name) do + module_name + |> String.split(".") + |> List.last() + end + defp type_defs(%Docs.Entry{metadata: %{opaque: true}} = entry) do Enum.map_join(entry.defs, "\n", fn def -> def diff --git a/apps/server/lib/lexical/server/provider/handlers/workspace_symbol.ex b/apps/server/lib/lexical/server/provider/handlers/workspace_symbol.ex new file mode 100644 index 000000000..3579eb95f --- /dev/null +++ b/apps/server/lib/lexical/server/provider/handlers/workspace_symbol.ex @@ -0,0 +1,57 @@ +defmodule Lexical.Server.Provider.Handlers.WorkspaceSymbol do + alias Lexical.Protocol.Requests.WorkspaceSymbol + alias Lexical.Protocol.Responses + alias Lexical.Protocol.Types.Location + alias Lexical.Protocol.Types.Symbol.Kind, as: SymbolKind + alias Lexical.Protocol.Types.Workspace.Symbol + alias Lexical.RemoteControl.Api + alias Lexical.RemoteControl.CodeIntelligence.Symbols + alias Lexical.Server.Configuration + + require SymbolKind + + require Logger + + def handle(%WorkspaceSymbol{} = request, %Configuration{} = config) do + symbols = + if String.length(request.query) > 1 do + config.project + |> Api.workspace_symbols(request.query) + |> tap(fn symbols -> Logger.info("syms #{inspect(Enum.take(symbols, 5))}") end) + |> Enum.map(&to_response/1) + else + [] + end + + response = Responses.WorkspaceSymbol.new(request.id, symbols) + {:reply, response} + end + + def to_response(%Symbols.Workspace{} = root) do + Symbol.new( + kind: to_kind(root.type), + location: to_location(root.link), + name: root.name, + container_name: root.container_name + ) + end + + defp to_location(%Symbols.Workspace.Link{} = link) do + Location.new(uri: link.uri, range: link.detail_range) + end + + defp to_kind(:struct), do: :struct + defp to_kind(:module), do: :module + defp to_kind({:protocol, _}), do: :module + defp to_kind({:lx_protocol, _}), do: :module + defp to_kind(:variable), do: :variable + defp to_kind({:function, _}), do: :function + defp to_kind(:module_attribute), do: :constant + defp to_kind(:ex_unit_test), do: :method + defp to_kind(:ex_unit_describe), do: :method + defp to_kind(:ex_unit_setup), do: :method + defp to_kind(:ex_unit_setup_all), do: :method + defp to_kind(:type), do: :type_parameter + defp to_kind(:spec), do: :interface + defp to_kind(:file), do: :file +end diff --git a/apps/server/lib/lexical/server/provider/queue.ex b/apps/server/lib/lexical/server/provider/queue.ex deleted file mode 100644 index ae3c9b641..000000000 --- a/apps/server/lib/lexical/server/provider/queue.ex +++ /dev/null @@ -1,244 +0,0 @@ -defmodule 
Lexical.Server.Provider.Queue do - defmodule State do - alias Lexical.Proto.Convert - alias Lexical.Proto.LspTypes.ResponseError - alias Lexical.Protocol.Requests - alias Lexical.Server.Provider.Env - alias Lexical.Server.Provider.Handlers - alias Lexical.Server.Provider.Queue - alias Lexical.Server.Transport - require Logger - - defstruct tasks_by_id: %{}, pids_to_ids: %{} - - @type t :: %__MODULE__{} - - def new do - %__MODULE__{} - end - - @spec add(t, Requests.request(), Env.t()) :: {:ok, t} | :error - def add(%__MODULE__{} = state, request, env) do - with {:ok, handler_module} <- Handlers.for_request(request), - {:ok, req} <- Convert.to_native(request) do - task = %Task{} = as_task(request, fn -> handler_module.handle(req, env) end) - request_id = to_string(request.id) - - new_state = %__MODULE__{ - state - | tasks_by_id: Map.put(state.tasks_by_id, request_id, task), - pids_to_ids: Map.put(state.pids_to_ids, task.pid, request_id) - } - - {:ok, new_state} - else - {:error, {:unhandled, _}} -> - Logger.info("unhandled request #{request.method}") - :error - - _ -> - :error - end - end - - @spec cancel(t, pos_integer()) :: t - def cancel(%__MODULE__{} = state, request_id) do - with {:ok, %Task{} = task} <- Map.fetch(state.tasks_by_id, request_id), - :ok <- Queue.Supervisor.cancel(task) do - error = ResponseError.new(message: "Request cancelled", code: :request_cancelled) - reply = %{id: request_id, error: error} - Transport.write(reply) - - %State{ - state - | tasks_by_id: Map.delete(state.tasks_by_id, request_id), - pids_to_ids: Map.delete(state.pids_to_ids, task.pid) - } - else - _ -> - state - end - end - - def size(%__MODULE__{} = state) do - map_size(state.tasks_by_id) - end - - def task_finished(%__MODULE__{} = state, pid, reason) do - case Map.pop(state.pids_to_ids, pid) do - {nil, _} -> - state - - {request_id, new_pids_to_ids} -> - maybe_log_task(reason, request_id) - - %__MODULE__{ - state - | pids_to_ids: new_pids_to_ids, - tasks_by_id: Map.delete(state.tasks_by_id, request_id) - } - end - end - - def running?(%__MODULE__{} = state, request_id) do - Map.has_key?(state.tasks_by_id, request_id) - end - - defp maybe_log_task(:normal, _), - do: :ok - - defp maybe_log_task(reason, %{id: request_id} = _request), - do: maybe_log_task(reason, request_id) - - defp maybe_log_task(reason, request_id), - do: Logger.warning("Request id #{request_id} failed with reason #{inspect(reason)}") - - defp as_task(%{id: _} = request, func) do - handler = fn -> - try do - case func.() do - :noreply -> - {:request_complete, request} - - {:reply, reply} -> - write_reply(reply) - - {:request_complete, request} - - {:reply_and_alert, reply} -> - write_reply(reply) - - Lexical.Server.response_complete(request, reply) - {:request_complete, request} - end - rescue - e -> - exception_string = Exception.format(:error, e, __STACKTRACE__) - Logger.error(exception_string) - - response_error = - ResponseError.new( - message: exception_string, - code: :internal_error - ) - - error = %{ - id: request.id, - error: response_error - } - - Transport.write(error) - - {:request_complete, request} - end - end - - Queue.Supervisor.run_in_task(handler) - end - - defp write_reply(response) do - with {:ok, lsp_response} <- Convert.to_lsp(response) do - Transport.write(lsp_response) - end - end - end - - alias Lexical.Protocol.Requests - alias Lexical.Server.Configuration - alias Lexical.Server.Provider.Env - - use GenServer - - # public interface - @spec add(Requests.request(), Configuration.t() | Env.t()) :: :ok - def 
add(request, %Configuration{} = config) do - env = Env.from_configuration(config) - add(request, env) - end - - def add(request, %Env{} = env) do - GenServer.call(__MODULE__, {:add, request, env}) - end - - @spec size() :: non_neg_integer() - def size do - GenServer.call(__MODULE__, :size) - end - - def cancel(%{lsp: %{id: id}}) do - cancel(id) - end - - def cancel(%{id: request_id}) do - cancel(request_id) - end - - def cancel(request_id) when is_integer(request_id) do - request_id - |> Integer.to_string() - |> cancel() - end - - def cancel(request_id) when is_binary(request_id) do - GenServer.call(__MODULE__, {:cancel, request_id}) - end - - def running?(%{id: request_id}) do - running?(request_id) - end - - def running?(request_id) when is_binary(request_id) do - GenServer.call(__MODULE__, {:running?, request_id}) - end - - # genserver callbacks - - def child_spec do - __MODULE__ - end - - def start_link(_) do - GenServer.start_link(__MODULE__, [], name: __MODULE__) - end - - def init(_) do - {:ok, State.new()} - end - - def handle_call({:add, request, env}, _from, %State{} = state) do - {reply, new_state} = - case State.add(state, request, env) do - {:ok, new_state} -> {:ok, new_state} - error -> {error, state} - end - - {:reply, reply, new_state} - end - - def handle_call({:cancel, request_id}, _from, %State{} = state) do - new_state = State.cancel(state, request_id) - {:reply, :ok, new_state} - end - - def handle_call({:running?, request_id}, _from, %State{} = state) do - {:reply, State.running?(state, request_id), state} - end - - def handle_call(:size, _from, %State{} = state) do - {:reply, State.size(state), state} - end - - def handle_info({:DOWN, _ref, :process, pid, reason}, state) do - new_state = State.task_finished(state, pid, reason) - - {:noreply, new_state} - end - - def handle_info({ref, {:request_complete, _response}}, %State{} = state) - when is_reference(ref) do - # This head handles the replies from the tasks, which we don't really care about. 
- {:noreply, state} - end - - # private -end diff --git a/apps/server/lib/lexical/server/provider/supervisor.ex b/apps/server/lib/lexical/server/provider/supervisor.ex deleted file mode 100644 index b19dd4fd3..000000000 --- a/apps/server/lib/lexical/server/provider/supervisor.ex +++ /dev/null @@ -1,17 +0,0 @@ -defmodule Lexical.Server.Provider.Queue.Supervisor do - def name do - __MODULE__ - end - - def child_spec do - {Task.Supervisor, name: name()} - end - - def run_in_task(provider_fn) do - Task.Supervisor.async_nolink(name(), provider_fn) - end - - def cancel(%Task{} = task) do - Task.Supervisor.terminate_child(name(), task.pid) - end -end diff --git a/apps/server/lib/lexical/server/state.ex b/apps/server/lib/lexical/server/state.ex index deb7d9b33..a9b8a0d97 100644 --- a/apps/server/lib/lexical/server/state.ex +++ b/apps/server/lib/lexical/server/state.ex @@ -15,8 +15,10 @@ defmodule Lexical.Server.State do alias Lexical.Protocol.Responses alias Lexical.Protocol.Types alias Lexical.Protocol.Types.CodeAction + alias Lexical.Protocol.Types.CodeLens alias Lexical.Protocol.Types.Completion alias Lexical.Protocol.Types.DidChangeWatchedFiles + alias Lexical.Protocol.Types.ExecuteCommand alias Lexical.Protocol.Types.FileEvent alias Lexical.Protocol.Types.FileSystemWatcher alias Lexical.Protocol.Types.Registration @@ -26,16 +28,22 @@ defmodule Lexical.Server.State do alias Lexical.Server.CodeIntelligence alias Lexical.Server.Configuration alias Lexical.Server.Project + alias Lexical.Server.Provider.Handlers alias Lexical.Server.Transport require CodeAction.Kind require Logger import Api.Messages - defstruct configuration: nil, initialized?: false, shutdown_received?: false + + defstruct configuration: nil, + initialized?: false, + shutdown_received?: false, + in_flight_requests: %{} @supported_code_actions [ - :quick_fix + :quick_fix, + :source_organize_imports ] def new do @@ -69,6 +77,39 @@ defmodule Lexical.Server.State do {:error, :already_initialized} end + def in_flight?(%__MODULE__{} = state, request_id) do + Map.has_key?(state.in_flight_requests, request_id) + end + + def add_request(%__MODULE__{} = state, request, callback) do + Transport.write(request) + + in_flight_requests = Map.put(state.in_flight_requests, request.id, {request, callback}) + + %__MODULE__{state | in_flight_requests: in_flight_requests} + end + + def finish_request(%__MODULE__{} = state, response) do + %{"id" => response_id} = response + + case Map.pop(state.in_flight_requests, response_id) do + {{%request_module{} = request, callback}, in_flight_requests} -> + case request_module.parse_response(response) do + {:ok, response} -> + callback.(request, {:ok, response.result}) + + error -> + Logger.info("failed to parse response for #{request_module}, #{inspect(error)}") + callback.(request, error) + end + + %__MODULE__{state | in_flight_requests: in_flight_requests} + + _ -> + state + end + end + def default_configuration(%__MODULE__{configuration: config}) do Configuration.default(config) end @@ -129,17 +170,21 @@ defmodule Lexical.Server.State do end end - def apply(%__MODULE__{} = state, %DidOpen{lsp: event}) do - %TextDocument.Item{text: text, uri: uri, version: version} = - text_document = event.text_document + def apply(%__MODULE__{} = state, %DidOpen{} = did_open) do + %TextDocument.Item{ + text: text, + uri: uri, + version: version, + language_id: language_id + } = did_open.lsp.text_document - case Document.Store.open(uri, text, version) do + case Document.Store.open(uri, text, version, language_id) do :ok -> 
Logger.info("opened #{uri}") {:ok, state} error -> - Logger.error("Could not open #{text_document.uri} #{inspect(error)}") + Logger.error("Could not open #{uri} #{inspect(error)}") error end end @@ -230,17 +275,26 @@ defmodule Lexical.Server.State do code_action_options = CodeAction.Options.new(code_action_kinds: @supported_code_actions, resolve_provider: false) + code_lens_options = CodeLens.Options.new(resolve_provider: false) + + command_options = ExecuteCommand.Registration.Options.new(commands: Handlers.Commands.names()) + completion_options = Completion.Options.new(trigger_characters: CodeIntelligence.Completion.trigger_characters()) server_capabilities = Types.ServerCapabilities.new( code_action_provider: code_action_options, + code_lens_provider: code_lens_options, completion_provider: completion_options, definition_provider: true, document_formatting_provider: true, + document_symbol_provider: true, + execute_command_provider: command_options, hover_provider: true, - text_document_sync: sync_options + references_provider: true, + text_document_sync: sync_options, + workspace_symbol_provider: true ) result = diff --git a/apps/server/lib/lexical/server/task_queue.ex b/apps/server/lib/lexical/server/task_queue.ex new file mode 100644 index 000000000..c31f2d3ff --- /dev/null +++ b/apps/server/lib/lexical/server/task_queue.ex @@ -0,0 +1,218 @@ +defmodule Lexical.Server.TaskQueue do + defmodule State do + alias Lexical.Proto.Convert + alias Lexical.Proto.LspTypes.ResponseError + alias Lexical.Server.Transport + import Lexical.Logging + require Logger + + defstruct ids_to_tasks: %{}, pids_to_ids: %{} + + @type t :: %__MODULE__{} + + def new do + %__MODULE__{} + end + + def task_supervisor_name do + __MODULE__.TaskSupervisor + end + + @spec add(t, request_id :: term(), mfa :: {module(), atom(), [term()]}) :: t + def add(%__MODULE__{} = state, request_id, {_, _, _} = mfa) do + task = %Task{} = as_task(request_id, mfa) + + %__MODULE__{ + state + | ids_to_tasks: Map.put(state.ids_to_tasks, request_id, task), + pids_to_ids: Map.put(state.pids_to_ids, task.pid, request_id) + } + end + + @spec cancel(t, request_id :: term()) :: t + def cancel(%__MODULE__{} = state, request_id) do + with {:ok, %Task{} = task} <- Map.fetch(state.ids_to_tasks, request_id), + :ok <- cancel_task(task) do + write_error(request_id, "Request cancelled", :request_cancelled) + + %__MODULE__{ + state + | ids_to_tasks: Map.delete(state.ids_to_tasks, request_id), + pids_to_ids: Map.delete(state.pids_to_ids, task.pid) + } + else + _ -> + state + end + end + + def size(%__MODULE__{} = state) do + map_size(state.ids_to_tasks) + end + + def task_finished(%__MODULE__{} = state, pid, reason) do + case Map.pop(state.pids_to_ids, pid) do + {nil, _} -> + state + + {request_id, new_pids_to_ids} -> + log_task_run_time(state.ids_to_tasks[request_id], :success) + maybe_log_task(reason, request_id) + + %__MODULE__{ + state + | pids_to_ids: new_pids_to_ids, + ids_to_tasks: Map.delete(state.ids_to_tasks, request_id) + } + end + end + + defp maybe_log_task(:normal, _), + do: :ok + + defp maybe_log_task(reason, request_id), + do: Logger.warning("Request id #{request_id} failed with reason #{inspect(reason)}") + + defp as_task(request_id, {m, f, a}) do + handler = fn -> + try do + case apply(m, f, a) do + :noreply -> + {:request_complete, request_id} + + {:reply, reply} -> + write_reply(reply) + + {:request_complete, request_id} + end + rescue + e -> + exception_string = Exception.format(:error, e, __STACKTRACE__) + 
Logger.error(exception_string) + write_error(request_id, exception_string) + + {:request_complete, request_id} + end + end + + run_task(handler, {m, f, a}) + end + + defp write_reply(response) do + case timed_log("convert", fn -> Convert.to_lsp(response) end) do + {:ok, lsp_response} -> + Transport.write(lsp_response) + + error -> + error_message = """ + Failed to convert #{response.__struct__}: + + #{inspect(error, pretty: true)}\ + """ + + Logger.critical(""" + #{error_message} + + #{inspect(response, pretty: true)}\ + """) + + write_error(response.id, error_message) + end + end + + defp write_error(id, message, code \\ :internal_error) do + error = ResponseError.new(code: code, message: message) + + Transport.write(%{id: id, error: error}) + end + + defp run_task(fun, mfa) when is_function(fun) do + task_supervisor_name() + |> Task.Supervisor.async_nolink(fun) + |> Map.merge(%{started_at: System.system_time(:microsecond), mfa: mfa}) + end + + defp cancel_task(%Task{} = task) do + log_task_run_time(task, :canceled) + Process.exit(task.pid, :canceled) + :ok + end + + defp log_task_run_time(%Task{} = task, result) do + case task do + %{started_at: ts, mfa: {m, f, a}} -> + elapsed = System.system_time(:microsecond) - ts + + Logger.warning( + "Task #{m}.#{f}/#{length(a)} ran for #{Lexical.Formats.time(elapsed)}. Result #{inspect(result)}" + ) + + _ -> + :ok + end + end + end + + use GenServer + + def task_supervisor_name do + State.task_supervisor_name() + end + + @spec add(request_id :: term(), mfa :: {module(), atom(), [term()]}) :: :ok + def add(request_id, {_, _, _} = mfa) do + GenServer.call(__MODULE__, {:add, request_id, mfa}) + end + + @spec size() :: non_neg_integer() + def size do + GenServer.call(__MODULE__, :size) + end + + def cancel(%{lsp: %{id: id}}) do + cancel(id) + end + + def cancel(%{id: request_id}) do + cancel(request_id) + end + + def cancel(request_id) do + GenServer.call(__MODULE__, {:cancel, request_id}) + end + + # genserver callbacks + + def start_link(_) do + GenServer.start_link(__MODULE__, [], name: __MODULE__) + end + + def init(_) do + {:ok, State.new()} + end + + def handle_call({:add, request_id, mfa}, _from, %State{} = state) do + new_state = State.add(state, request_id, mfa) + {:reply, :ok, new_state} + end + + def handle_call({:cancel, request_id}, _from, %State{} = state) do + new_state = State.cancel(state, request_id) + {:reply, :ok, new_state} + end + + def handle_call(:size, _from, %State{} = state) do + {:reply, State.size(state), state} + end + + def handle_info({:DOWN, _ref, :process, pid, reason}, state) do + new_state = State.task_finished(state, pid, reason) + {:noreply, new_state} + end + + def handle_info({ref, {:request_complete, _request_id}}, %State{} = state) + when is_reference(ref) do + # This head handles the replies from the tasks, which we don't really care about. 
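# -- Usage sketch for the queue above (illustrative: `Handlers.Completion` and the argument list are stand-ins, not part of this diff):
#
#     :ok = Lexical.Server.TaskQueue.add(request.id, {Handlers.Completion, :handle, [request, config]})
#     # ...later, e.g. when the client sends $/cancelRequest:
#     :ok = Lexical.Server.TaskQueue.cancel(request.id)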
+ {:noreply, state} + end +end diff --git a/apps/server/lib/lexical/server/transport.ex b/apps/server/lib/lexical/server/transport.ex index a9647b41f..a9067283f 100644 --- a/apps/server/lib/lexical/server/transport.ex +++ b/apps/server/lib/lexical/server/transport.ex @@ -2,20 +2,11 @@ defmodule Lexical.Server.Transport do @moduledoc """ A behaviour for a LSP transport """ - - @type level :: :error | :warning | :info | :log - - @callback log(level(), Jason.Encoder.t()) :: Jason.Encoder.t() @callback write(Jason.Encoder.t()) :: Jason.Encoder.t() alias Lexical.Server.Transport.StdIO @implementation Application.compile_env(:server, :transport, StdIO) - defdelegate log(level, message), to: @implementation defdelegate write(message), to: @implementation - - def error(message) do - @implementation.log(:error, message) - end end diff --git a/apps/server/lib/lexical/server/transport/std_io.ex b/apps/server/lib/lexical/server/transport/std_io.ex index 2d08ef69e..97bf0a76d 100644 --- a/apps/server/lib/lexical/server/transport/std_io.ex +++ b/apps/server/lib/lexical/server/transport/std_io.ex @@ -1,8 +1,8 @@ defmodule Lexical.Server.Transport.StdIO do - alias Lexical.Protocol.Notifications.LogMessage - alias Lexical.Protocol.JsonRpc + require Logger + @behaviour Lexical.Server.Transport def start_link(device, callback) do @@ -15,7 +15,7 @@ defmodule Lexical.Server.Transport.StdIO do end def init({callback, device}) do - :io.setopts([:binary, encoding: :latin1]) + :io.setopts(binary: true, encoding: :latin1) loop([], device, callback) end @@ -37,7 +37,7 @@ defmodule Lexical.Server.Transport.StdIO do def write(io_device, payload) when is_binary(payload) do message = case io_device do - device when device in [:stdio, :standard_io] -> + device when device in [:stdio, :standard_io] or is_pid(device) -> {:ok, json_rpc} = JsonRpc.encode(payload) json_rpc @@ -54,77 +54,74 @@ defmodule Lexical.Server.Transport.StdIO do def write(_, []) do end - def log(level, message, opts \\ []) - - def log(level, message, opts) when level in [:error, :warning, :info, :log] do - formatted_message = format_message(message, opts) - log_message = apply(LogMessage, level, [formatted_message]) - write(:standard_io, log_message) - message - end - - def log(_level, message, opts) do - log_message = format_message(message, opts) - - write(:standard_error, log_message) - message - end - # private - defp format_message(message, opts) do - case Keyword.get(opts, :label) do - nil -> inspect(message) <> "\n" - label -> "#{label}: '#{inspect(message, limit: :infinity)}\n" - end - end - defp loop(buffer, device, callback) do - case IO.read(device, :line) do + case IO.binread(device, :line) do "\n" -> headers = parse_headers(buffer) - with {:ok, content_length} <- - header_value(headers, "content-length", &String.to_integer/1), + with {:ok, content_length} <- content_length(headers), {:ok, data} <- read_body(device, content_length), {:ok, message} <- JsonRpc.decode(data) do callback.(message) + else + {:error, :empty_response} -> + :noop + + {:error, reason} -> + Logger.critical("read protocol message: #{inspect(reason)}") end loop([], device, callback) :eof -> - System.halt() + Logger.critical("stdio received :eof, server will stop.") + maybe_stop() line -> loop([line | buffer], device, callback) end end - defp parse_headers(headers) do - Enum.map(headers, &parse_header/1) + defp content_length(headers) do + with {:ok, len_str} <- find_header(headers, "content-length") do + parse_length(len_str) + end + end + + defp find_header(headers, 
name) do + case List.keyfind(headers, name, 0) do + {_, len_str} -> {:ok, len_str} + nil -> {:error, {:header_not_found, name}} + end end - defp header_value(headers, header_name, converter) do - case List.keyfind(headers, header_name, 0) do - nil -> :error - {_, value} -> {:ok, converter.(value)} + defp parse_length(len_str) when is_binary(len_str) do + case Integer.parse(len_str) do + {int, ""} -> {:ok, int} + :error -> {:error, {:cant_parse_length, len_str}} end end - defp read_body(device, amount) do - case IO.read(device, amount) do - data when is_binary(data) or is_list(data) -> - # Ensure that incoming data is latin1 to prevent double-encoding to utf8 later - # See https://github.com/lexical-lsp/lexical/issues/287 for context. - data = :unicode.characters_to_binary(data, :utf8, :latin1) + defp read_body(device, byte_count) do + case IO.binread(device, byte_count) do + data when is_binary(data) -> {:ok, data} - other -> - other + :eof -> + Logger.critical("stdio received :eof, server will stop.") + maybe_stop() + + {:error, reason} -> + {:error, reason} end end + defp parse_headers(headers) do + Enum.map(headers, &parse_header/1) + end + defp parse_header(line) do [name, value] = String.split(line, ":") @@ -135,4 +132,14 @@ {header_name, String.trim(value)} end + + if Mix.env() == :test do + defp maybe_stop do + :ok + end + else + defp maybe_stop do + System.stop() + end + end end diff --git a/apps/server/lib/lexical/server/window.ex b/apps/server/lib/lexical/server/window.ex new file mode 100644 index 000000000..37a06a8f6 --- /dev/null +++ b/apps/server/lib/lexical/server/window.ex @@ -0,0 +1,89 @@ +defmodule Lexical.Server.Window do + alias Lexical.Protocol.Id + alias Lexical.Protocol.Notifications.LogMessage + alias Lexical.Protocol.Notifications.ShowMessage + alias Lexical.Protocol.Requests + alias Lexical.Protocol.Types + alias Lexical.Server.Transport + + @type level :: :error | :warning | :info | :log + @type message_result :: {:error, term()} | {:ok, nil} | {:ok, Types.Message.ActionItem.t()} + @type on_response_callback :: (message_result() -> any()) + @type message :: String.t() + @type action :: String.t() + + @levels [:error, :warning, :info, :log] + + @spec log(level, message()) :: :ok + def log(level, message) when level in @levels and is_binary(message) do + log_message = apply(LogMessage, level, [message]) + Transport.write(log_message) + :ok + end + + for level <- [:error, :warning, :info] do + def unquote(level)(message) do + log(unquote(level), message) + end + end + + @spec show(level(), message()) :: :ok + def show(level, message) when level in @levels and is_binary(message) do + show_message = apply(ShowMessage, level, [message]) + Transport.write(show_message) + :ok + end + + @spec show_message(level(), message()) :: :ok + def show_message(level, message) do + request = Requests.ShowMessageRequest.new(id: Id.next(), message: message, type: level) + Lexical.Server.server_request(request) + end + + for level <- @levels, + fn_name = :"show_#{level}_message" do + def unquote(fn_name)(message) do + show_message(unquote(level), message) + end + end + + for level <- @levels, + fn_name = :"show_#{level}_message" do + @doc """ + Shows a message at the #{level} level.
Delegates to `show_message/4` """ + def unquote(fn_name)(message, actions, on_response) when is_function(on_response, 1) do + show_message(unquote(level), message, actions, on_response) + end + end + + @doc """ + Shows a message request and handles the response + + Displays a message to the user in the UI and waits for a response. + The result type handed to the callback function is a + `Lexical.Protocol.Types.Message.ActionItem` or nil if there was no response + from the user. + + The strings passed in as the `actions` argument are displayed to the user, and when + they select one, the `Types.Message.ActionItem` is passed to the callback function. + """ + @spec show_message(level(), message(), [action()], on_response_callback) :: :ok + def show_message(level, message, actions, on_response) + when is_function(on_response, 1) do + action_items = + Enum.map(actions, fn action_string -> + Types.Message.ActionItem.new(title: action_string) + end) + + request = + Requests.ShowMessageRequest.new( + id: Id.next(), + message: message, + actions: action_items, + type: level + ) + + Lexical.Server.server_request(request, fn _request, response -> on_response.(response) end) + end +end diff --git a/apps/server/lib/mix/tasks/package.ex b/apps/server/lib/mix/tasks/package.ex index 1e8b2dfee..c9737e428 100644 --- a/apps/server/lib/mix/tasks/package.ex +++ b/apps/server/lib/mix/tasks/package.ex @@ -164,7 +164,7 @@ defmodule Mix.Tasks.Package do {:ok, _} = zip_path |> String.to_charlist() - |> :zip.create(file_list, uncompress: ['.beam']) + |> :zip.create(file_list, uncompress: [~c".beam"]) :ok end @@ -258,6 +258,8 @@ defmodule Mix.Tasks.Package do config_dest = Path.join(package_root, "config") File.mkdir_p!(config_dest) File.cp_r!(config_source, config_dest) + + Namespace.Transform.Configs.apply_to_all(config_dest) end @priv_apps [:remote_control] @@ -285,23 +287,12 @@ defmodule Mix.Tasks.Package do defp zip(package_root) do package_name = Path.basename(package_root) - zip_output = - File.cwd!() - |> Path.join("#{package_name}.zip") - |> String.to_charlist() + zip_output = Path.join(File.cwd!(), "#{package_name}.zip") package_root |> Path.dirname() |> File.cd!(fn -> - glob = Path.join(package_name, "**") - - files = - for path <- Path.wildcard(glob, match_dot: true), - File.regular?(path) do - String.to_charlist(path) - end - - :zip.create(zip_output, files, uncompress: ['ez', 'beam']) + System.cmd("zip", ["-r", zip_output, package_name]) end) end diff --git a/apps/server/mix.exs b/apps/server/mix.exs index d962e225f..18c648249 100644 --- a/apps/server/mix.exs +++ b/apps/server/mix.exs @@ -4,7 +4,7 @@ defmodule Lexical.Server.MixProject do def project do [ app: :server, - version: "0.3.0", + version: "0.5.0", build_path: "../../_build", config_path: "../../config/config.exs", deps_path: "../../deps", @@ -50,7 +50,7 @@ defmodule Lexical.Server.MixProject do {:path_glob, "~> 0.2"}, {:protocol, in_umbrella: true}, {:remote_control, in_umbrella: true, runtime: false}, - {:sourceror, "~> 0.14.0"} + {:sourceror, "~> 1.4"} ] end end diff --git a/apps/server/test/lexical/convertibles/lexical.plugin.diagnostic.result_test.exs b/apps/server/test/lexical/convertibles/lexical.plugin.diagnostic.result_test.exs index 4c93bd027..4d235aec7 100644 --- a/apps/server/test/lexical/convertibles/lexical.plugin.diagnostic.result_test.exs +++ b/apps/server/test/lexical/convertibles/lexical.plugin.diagnostic.result_test.exs @@ -43,6 +43,20 @@ defmodule Lexical.Convertibles.Lexical.Plugin.V1.Diagnostic.ResultTest do assert
converted.range == range(:lsp, position(:lsp, 0, 0), position(:lsp, 1, 0)) end + test "it should translate a diagnostic with a four-element tuple position", %{uri: uri} do + assert {:ok, %Types.Diagnostic{} = converted} = + to_lsp(plugin_diagnostic(uri, {2, 5, 2, 8}), uri) + + assert converted.message == "Broken!" + assert converted.range == range(:lsp, position(:lsp, 1, 4), position(:lsp, 1, 7)) + + assert {:ok, %Types.Diagnostic{} = converted} = + to_lsp(plugin_diagnostic(uri, {1, 0, 3, 0}), uri) + + assert converted.message == "Broken!" + assert converted.range == range(:lsp, position(:lsp, 0, 0), position(:lsp, 2, 0)) + end + test "it should translate a diagnostic line that is out of bounds (elixir can do this)", %{ uri: uri } do @@ -59,7 +73,7 @@ defmodule Lexical.Convertibles.Lexical.Plugin.V1.Diagnostic.ResultTest do test "it can translate a diagnostic that starts after an emoji", %{uri: uri} do assert {:ok, %Types.Diagnostic{} = converted} = to_lsp(plugin_diagnostic(uri, {6, 10}), uri) - assert converted.range == range(:lsp, position(:lsp, 5, 7), position(:lsp, 6, 0)) + assert converted.range == range(:lsp, position(:lsp, 5, 10), position(:lsp, 6, 0)) end test "it converts lexical positions", %{uri: uri, document: document} do diff --git a/apps/server/test/lexical/server/boot_test.exs b/apps/server/test/lexical/server/boot_test.exs new file mode 100644 index 000000000..ed9b7854e --- /dev/null +++ b/apps/server/test/lexical/server/boot_test.exs @@ -0,0 +1,58 @@ +defmodule Lexical.Server.BootTest do + alias Lexical.Server.Boot + alias Lexical.VM.Versions + + use ExUnit.Case + use Patch + + describe "detect_errors/0" do + test "returns empty list when all checks succeed" do + patch_runtime_versions("1.14.5", "25.0") + patch_compiled_versions("1.14.5", "25.0") + + assert [] = Boot.detect_errors() + end + + test "includes error when runtime elixir is incompatible" do + patch_runtime_versions("1.12.0", "24.3.4") + patch_compiled_versions("1.13.4", "24.3.4") + + assert [error] = Boot.detect_errors() + assert error =~ "FATAL: Lexical is not compatible with Elixir 1.12.0" + end + + test "includes error when runtime erlang is incompatible" do + patch_runtime_versions("1.13.4", "23.0") + patch_compiled_versions("1.13.4", "23.0") + + assert [error] = Boot.detect_errors() + assert error =~ "FATAL: Lexical is not compatible with Erlang/OTP 23.0.0" + end + + test "includes multiple errors when runtime elixir and erlang are incompatible" do + patch_runtime_versions("1.15.2", "26.0.0") + patch_compiled_versions("1.15.6", "26.1") + + assert [elixir_error, erlang_error] = Boot.detect_errors() + assert elixir_error =~ "FATAL: Lexical is not compatible with Elixir 1.15.2" + assert erlang_error =~ "FATAL: Lexical is not compatible with Erlang/OTP 26.0.0" + end + end + + defp patch_runtime_versions(elixir, erlang) do + patch(Versions, :elixir_version, elixir) + patch(Versions, :erlang_version, erlang) + end + + defp patch_compiled_versions(elixir, erlang) do + patch(Versions, :code_find_file, fn file -> {:ok, file} end) + + patch(Versions, :read_file, fn file -> + if String.ends_with?(file, ".elixir") do + {:ok, elixir} + else + {:ok, erlang} + end + end) + end +end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs index 913a395e2..5857ab81b 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs +++
b/apps/server/test/lexical/server/code_intelligence/completion/builder_test.exs @@ -1,18 +1,20 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do + alias Lexical.Ast alias Lexical.Ast.Env alias Lexical.Protocol.Types.Completion.Item, as: CompletionItem + alias Lexical.Server.CodeIntelligence.Completion.SortScope use ExUnit.Case, async: true import Lexical.Server.CodeIntelligence.Completion.Builder - import Lexical.Test.CodeSigil import Lexical.Test.CursorSupport import Lexical.Test.Fixtures def new_env(text) do project = project() {position, document} = pop_cursor(text, as: :document) - {:ok, env} = Env.new(project, document, position) + analysis = Ast.analyze(document) + {:ok, env} = Env.new(project, analysis, position) env end @@ -20,150 +22,63 @@ defmodule Lexical.Server.CodeIntelligence.Completion.BuilderTest do opts |> Keyword.merge(label: label) |> CompletionItem.new() - |> boost(0) + |> set_sort_scope(SortScope.default()) end defp sort_items(items) do Enum.sort_by(items, &{&1.sort_text, &1.label}) end - describe "boosting" do - test "default boost sorts things first" do - alpha_first = item("a") - alpha_last = "z" |> item() |> boost() + setup do + start_supervised!(Lexical.Server.Application.document_store_child_spec()) + :ok + end - assert [^alpha_last, ^alpha_first] = sort_items([alpha_first, alpha_last]) + describe "sort scopes" do + test "scope order follows module -> variable -> local -> remote -> global -> auto -> default" do + i = set_sort_scope(item("g"), SortScope.module()) + ii = set_sort_scope(item("f"), SortScope.variable()) + iii = set_sort_scope(item("e"), SortScope.local()) + iv = set_sort_scope(item("d"), SortScope.remote()) + v = set_sort_scope(item("c"), SortScope.global()) + vi = set_sort_scope(item("b"), SortScope.auto()) + vii = set_sort_scope(item("a"), SortScope.default()) + + assert [^i, ^ii, ^iii, ^iv, ^v, ^vi, ^vii] = sort_items([vii, vi, v, iv, iii, ii, i]) end - test "local boost allows you to specify the order" do - alpha_first = "a" |> item() |> boost(1) - alpha_second = "b" |> item() |> boost(2) - alpha_third = "c" |> item() |> boost(3) + test "low priority sorts items lower in their scope" do + alpha_first = set_sort_scope(item("a"), SortScope.remote(false, 2)) + alpha_second = set_sort_scope(item("b"), SortScope.remote()) + alpha_third = set_sort_scope(item("c"), SortScope.remote()) - assert [^alpha_third, ^alpha_second, ^alpha_first] = + assert [^alpha_second, ^alpha_third, ^alpha_first] = sort_items([alpha_first, alpha_second, alpha_third]) end - test "global boost overrides local boost" do - local_max = "a" |> item() |> boost(9) - global_min = "z" |> item() |> boost(0, 1) - - assert [^global_min, ^local_max] = sort_items([local_max, global_min]) - end - - test "items can have a global and local boost" do - group_b_min = "a" |> item() |> boost(1) - group_b_max = "b" |> item() |> boost(2) - group_a_min = "c" |> item |> boost(1, 1) - group_a_max = "c" |> item() |> boost(2, 1) - global_max = "d" |> item() |> boost(0, 2) - - items = [group_b_min, group_b_max, group_a_min, group_a_max, global_max] + test "deprecated items are gathered at the bottom of their scope" do + i_deprecated = set_sort_scope(item("a"), SortScope.remote(true)) + i = set_sort_scope(item("a"), SortScope.remote()) + ii = set_sort_scope(item("b"), SortScope.remote()) + iii_low = set_sort_scope(item("c"), SortScope.remote(false, 2)) - assert [^global_max, ^group_a_max, ^group_a_min, ^group_b_max, ^group_b_min] = - sort_items(items) + assert [^i, ^ii, ^iii_low, 
^i_deprecated] = sort_items([i_deprecated, i, ii, iii_low]) end end - describe "strip_struct_operator_for_elixir_sense/1" do - test "with a reference followed by __" do - {doc, _position} = - "%__" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "__" - end - - test "with a reference followed by a module name" do - {doc, _position} = - "%Module" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "Module" + describe "snippet edge cases" do + # The following would crash due to missing case clauses + # in `prefix_length/1` + test "handles aliases inside locals" do + "before __MODULE__.Submodule|" + |> new_env() + |> snippet("", label: "") end - test "with a reference followed by a module and a dot" do - {doc, _position} = - "%Module." - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "Module." - end - - test "with a reference followed by a nested module" do - {doc, _position} = - "%Module.Sub" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "Module.Sub" - end - - test "with a reference followed by an alias" do - code = ~q[ - alias Something.Else - %El| - ]t - - {doc, _position} = - code - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "alias Something.Else\nEl" - end - - test "on a line with two references, replacing the first" do - {doc, _position} = - "%First{} = %Se" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "%First{} = Se" - end - - test "on a line with two references, replacing the second" do - {doc, _position} = - "%Fir| = %Second{}" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "Fir = %Second{}" - end - - test "with a plain module" do - env = new_env("Module") - {doc, _position} = strip_struct_operator_for_elixir_sense(env) - - assert doc == env.document - end - - test "with a plain module strip_struct_reference a dot" do - env = new_env("Module.") - {doc, _position} = strip_struct_operator_for_elixir_sense(env) - - assert doc == env.document - end - - test "leaves leading spaces in place" do - {doc, _position} = - " %Some" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == " Some" - end - - test "works in a function definition" do - {doc, _position} = - "def my_function(%Lo|)" - |> new_env() - |> strip_struct_operator_for_elixir_sense() - - assert doc == "def my_function(Lo)" + test "handles locals inside a module attribute" do + "@hello.Submodule" + |> new_env() + |> snippet("", label: "") end end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/callback_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/callback_test.exs index e6a74c208..8025b6c30 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/callback_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/callback_test.exs @@ -13,9 +13,10 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.CallbackTest d {:ok, completion} = project |> complete(source) - |> fetch_completion(kind: :function) + |> fetch_completion(kind: :interface) - assert apply_completion(completion) =~ "def handle_info(${1:msg}, ${2:state})" + assert apply_completion(completion) =~ + "@impl true\ndef handle_info(${1:msg}, ${2:state}) do" end test "do not add parens if they're already present", %{project: project} do @@ -29,9 +30,35 @@ defmodule 
Lexical.Server.CodeIntelligence.Completion.Translations.CallbackTest d {:ok, completion} = project |> complete(source) - |> fetch_completion(kind: :function) + |> fetch_completion(kind: :interface) - assert apply_completion(completion) =~ "def handle_info(msg, state)" + assert apply_completion(completion) =~ + "@impl true\ndef handle_info(${1:msg}, ${2:state}) do" + end + + test "does not add second @impl if one is already present", %{project: project} do + source = ~q[ + defmodule MyServer do + use GenServer + @impl true + def handle_inf| + end + ] + + {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :interface) + + assert apply_completion(completion) == """ + defmodule MyServer do + use GenServer + @impl true + def handle_info(${1:msg}, ${2:state}) do + $0 + end + end + """ end end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/function_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/function_test.exs index 9bdde766a..d5ed719e3 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/function_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/function_test.exs @@ -12,6 +12,16 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.FunctionTest d assert [:deprecated] = completion.tags end + test "bang functions are sorted after non-bang functions", %{project: project} do + {:ok, [normal, bang]} = + project + |> complete("Map.fetc|") + |> fetch_completion("fetch") + + assert normal.label == "fetch(map, key)" + assert bang.label == "fetch!(map, key)" + end + test "suggest arity 0 functions if not in a pipeline", %{project: project} do {:ok, completion} = project @@ -64,6 +74,38 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.FunctionTest d assert apply_completion(completion) == "Enum.dedup_by(${1:enumerable}, ${2:fun})" end + + test "completes funs in locals_without_parens with a specific arity", + %{project: project} do + source = ~q[ + Project.Functions.fun_1| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :function) + + assert completion.label == "fun_1_without_parens arg" + assert apply_completion(completion) =~ "Project.Functions.fun_1_without_parens ${1:arg}" + end + + test "completes imported funs in locals_without_parens with a specific arity", + %{project: project} do + source = ~q[ + import Project.Functions + + fun_1| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :function) + + assert completion.label == "fun_1_without_parens arg" + assert apply_completion(completion) =~ "fun_1_without_parens ${1:arg}" + end end describe "function captures" do @@ -145,8 +187,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.FunctionTest d |> Enum.filter(fn completion -> sort_text = completion.sort_text # arity 1 and is is_map - not String.contains?(sort_text, ",") and - not String.contains?(sort_text, "/2") and + String.ends_with?(sort_text, "001") and String.contains?(sort_text, "is_map") end) @@ -242,35 +283,18 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.FunctionTest d end describe "ordering" do - test "dunder functions aren't boosted", %{project: project} do - assert {:ok, completion} = - project - |> complete("Enum.|") - |> fetch_completion("__info__") - - refute boosted?(completion) - end - - test "dunder and default functions have lower 
completion priority", %{project: project} do - completions = complete(project, "GenServer.|") - - defaults = ["module_info(", "behaviour_info("] - - low_priority_completion? = fn fun -> - String.starts_with?(fun.label, "__") or - Enum.any?(defaults, &String.contains?(fun.sort_text, &1)) - end - - {low_priority_completions, normal_completions} = - Enum.split_with(completions, low_priority_completion?) + test "functions with lower arity have higher completion priority", %{project: project} do + [arity_2, arity_3] = + project + |> complete("Enum.|") + |> fetch_completion("count_until") + |> then(fn {:ok, list} -> list end) + |> Enum.sort_by(& &1.sort_text) - for completion <- low_priority_completions do - refute boosted?(completion) - end + assert apply_completion(arity_2) == "Enum.count_until(${1:enumerable}, ${2:limit})" - for completion <- normal_completions do - assert boosted?(completion) - end + assert apply_completion(arity_3) == + "Enum.count_until(${1:enumerable}, ${2:fun}, ${3:limit})" end end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/interpolation_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/interpolation_test.exs new file mode 100644 index 000000000..1fa303a19 --- /dev/null +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/interpolation_test.exs @@ -0,0 +1,66 @@ +defmodule Lexical.Server.CodeIntelligence.Completion.Translations.InterpolationTest do + use Lexical.Test.Server.CompletionCase + + test "variables are completed inside strings", %{project: project} do + source = + ~S[ + variable = 3 + "#{var|}" + ] + |> String.trim() + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :variable) + + expected = + ~S[ + variable = 3 + "#{variable}" + ] + |> String.trim() + + assert apply_completion(completion) == expected + end + + test "erlang modules are completed inside strings", %{project: project} do + source = ~S[ + "#{:erlan|}" + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(label: ":erlang") + + assert String.trim(apply_completion(completion)) == ~S["#{:erlang}"] + end + + test "elixir modules are completed inside strings", %{project: project} do + source = ~S[ + "#{Kern|}" + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(label: "Kernel") + + assert String.trim(apply_completion(completion)) == ~S["#{Kernel}"] + end + + test "structs are completed inside strings", %{project: project} do + source = ~S[ + "#{inspect(%Project.Structs.Us|)}" + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :struct) + + assert String.trim(apply_completion(completion)) == + ~S["#{inspect(%Project.Structs.User{$1})}"] + end +end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/macro_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/macro_test.exs index 0a6fceaa1..57b991292 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/macro_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/macro_test.exs @@ -46,6 +46,82 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do assert apply_completion(completion) == "def ${1:name}($2) do\n $0\nend" end + test "def preceeded by a @spec with args", %{project: project} do + source = ~q[ + @spec my_function(term(), term()) 
:: term() + def| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("def ") + + assert apply_completion(completion) == ~q[ + @spec my_function(term(), term()) :: term() + def my_function(${1:arg_1}, ${2:arg_2}) do + $0 + end + ] + end + + test "def preceeded by a @spec with named args", %{project: project} do + source = ~q[ + @spec my_function(x :: term(), y :: term(), term()) :: term() + def| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("def ") + + assert apply_completion(completion) == ~q[ + @spec my_function(x :: term(), y :: term(), term()) :: term() + def my_function(${1:x}, ${2:y}, ${3:arg_3}) do + $0 + end + ] + end + + test "def preceeded by a @spec without args", %{project: project} do + source = ~q[ + @spec my_function :: term() + def| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("def ") + + assert apply_completion(completion) == ~q[ + @spec my_function :: term() + def my_function do + $0 + end + ] + end + + test "defp preceeded by a @spec with args", %{project: project} do + source = ~q[ + @spec my_function(term(), term()) :: term() + def| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("defp ") + + assert apply_completion(completion) == ~q[ + @spec my_function(term(), term()) :: term() + defp my_function(${1:arg_1}, ${2:arg_2}) do + $0 + end + ] + end + test "defp only has a single completion", %{project: project} do assert {:ok, completion} = project @@ -189,6 +265,40 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do """ end + test "defmodule for filenames with multiple periods", %{project: project} do + assert {:ok, completion} = + project + |> complete("defmodule|", path: "/lib/path/to/my.module.test.ex") + |> fetch_completion("defmodule ") + + assert completion.detail + assert completion.label == "defmodule (define a module)" + assert completion.insert_text_format == :snippet + + assert apply_completion(completion) == """ + defmodule ${1:Path.To.My.Module.Test} do + $0 + end\ + """ + end + + test "defmodule for filenames with multiple periods in another path", %{project: project} do + assert {:ok, completion} = + project + |> complete("defmodule|", path: "/this/is/another/path/to/my.module.test.ex") + |> fetch_completion("defmodule ") + + assert completion.detail + assert completion.label == "defmodule (define a module)" + assert completion.insert_text_format == :snippet + + assert apply_completion(completion) == """ + defmodule ${1:My.Module.Test} do + $0 + end\ + """ + end + test "defprotocol only has a single completion", %{project: project} do assert {:ok, completion} = project @@ -592,6 +702,37 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do test "__block__ is hidden", %{project: project} do assert [] = complete(project, "__block|") end + + test ":: is hidden", %{project: project} do + assert [] = complete(project, "::|") + end + + test "alias! 
is hidden", %{project: project} do + assert [] = complete(project, "alias!|") + end + + test "in is hidden", %{project: project} do + {:ok, completions} = + project + |> complete("in|") + |> fetch_completion("in") + + completion_labels = completions |> Enum.map(fn completion -> completion.label end) + + assert "in(left, right)" not in completion_labels + end + + test "and is hidden", %{project: project} do + assert [] = complete(project, "and|") + end + + test "or is hidden", %{project: project} do + assert [] = complete(project, "or|") + end + + test "destructure is hidden", %{project: project} do + assert [] = complete(project, "destructure|") + end end describe "normal macro completion" do @@ -599,7 +740,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do source = ~q[ import Project.Macros - macro_a| + macro_ad| ] assert {:ok, completion} = @@ -618,7 +759,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do source = ~q[ require Project.Macros - Project.Macros.macro_a| + Project.Macros.macro_ad| ] assert {:ok, completion} = @@ -638,7 +779,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do alias Project.Macros require Macros - Macros.macro_a| + Macros.macro_ad| ] assert {:ok, completion} = @@ -652,16 +793,77 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do assert apply_completion(completion) =~ "Macros.macro_add(${1:a}, ${2:b})" end - end - describe "sort_text" do - test "dunder macros aren't boosted", %{project: project} do + test "completes imported macros in locals_without_parens with a specific arity", + %{project: project} do + source = ~q[ + import Project.Macros + + macro_1_without| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :function) + + assert completion.label == "macro_1_without_parens arg" + assert apply_completion(completion) =~ "macro_1_without_parens ${1:arg}" + end + + test "completes required macros in locals_without_parens with a specific arity", + %{project: project} do + source = ~q[ + require Project.Macros + + Project.Macros.macro_1_without| + ] + assert {:ok, completion} = project - |> complete("Project.__dunder_macro__|") - |> fetch_completion("__dunder_macro__") + |> complete(source) + |> fetch_completion(kind: :function) - refute boosted?(completion) + assert completion.label == "macro_1_without_parens arg" + assert apply_completion(completion) =~ "Project.Macros.macro_1_without_parens ${1:arg}" + end + + test "completes imported macros in locals_without_parens with any arity", %{project: project} do + source = ~q[ + import Project.Macros + + macro_2_without| + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion(kind: :function) + + assert completion.label == "macro_2_without_parens arg1, arg2, arg3, arg4" + + assert apply_completion(completion) =~ + "macro_2_without_parens ${1:arg1}, ${2:arg2}, ${3:arg3}, ${4:arg4}" + end + + test "completes ExUnit macros without parens", %{project: project} do + source = ~q[ + defmodule ExampleTest do + use ExUnit.Case + + test "example do" + asser| + end + end + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("assert assertion") + + assert completion.label == "assert assertion" + assert apply_completion(completion) =~ "assert ${1:assertion}" end end @@ -704,14 +906,23 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MacroTest do 
inside_exunit_context("describe \"${1:message}\" do\n $0\nend") end - test "syntax macros", %{project: project} do - assert [] = complete(project, "a =|") - assert [] = complete(project, "a ==|") - assert [] = complete(project, "a ..|") - assert [] = complete(project, "a !|") - assert [] = complete(project, "a !=|") - assert [] = complete(project, "a !==|") - assert [] = complete(project, "a &&|") + describe "syntax macros" do + test "completions are skipped for syntax macros", %{project: project} do + assert [] = complete(project, "a =|") + assert [] = complete(project, "a ==|") + assert [] = complete(project, "a ..|") + assert [] = complete(project, "a !|") + assert [] = complete(project, "a !=|") + assert [] = complete(project, "a !==|") + assert [] = complete(project, "a &&|") + end + + test "completions are shown for syntax macros when `Kernel.|` is prefixed.", %{ + project: project + } do + completions = complete(project, ":some_expression && Kernel.|") + assert length(completions) > 0 + end end defp inside_exunit_context(text) do diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs index 39d0b0b18..3a422bb6c 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/map_field_test.exs @@ -1,9 +1,6 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.MapFieldTest do - # alias Lexical.Server.CodeIntelligence.Completion.Translations.MapField use Lexical.Test.Server.CompletionCase - use ExUnit.Case, async: true - test "a map's fields are completed", %{project: project} do source = ~q[ user = %{first_name: "John", last_name: "Doe"} diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/module_attribute_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/module_attribute_test.exs index 6fa6e7b87..3ae1a9b21 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/module_attribute_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/module_attribute_test.exs @@ -147,4 +147,137 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleAttribut ] end end + + describe "@spec completion" do + test "with no function following", %{project: project} do + source = ~q[ + defmodule MyModule do + @spe| + end + ] + + assert {:ok, completion} = + project + |> complete(source) + |> fetch_completion("@spec") + + assert apply_completion(completion) == ~q[ + defmodule MyModule do + @spec ${1:function}(${2:term()}) :: ${3:term()} + def ${1:function}(${4:args}) do + $0 + end + end + ] + end + + test "with a function with args after it", %{project: project} do + source = ~q[ + defmodule MyModule do + @spe| + def my_function(arg1, arg2, arg3) do + :ok + end + end + ] + + assert {:ok, [spec_my_function, spec]} = + project + |> complete(source) + |> fetch_completion(kind: :property) + + assert spec_my_function.label == "@spec my_function" + + assert apply_completion(spec_my_function) == ~q[ + defmodule MyModule do + @spec my_function(${1:term()}, ${2:term()}, ${3:term()}) :: ${0:term()} + def my_function(arg1, arg2, arg3) do + :ok + end + end + ] + + assert spec.label == "@spec" + + assert apply_completion(spec) == ~q[ + defmodule MyModule do + @spec ${1:function}(${2:term()}) :: 
${3:term()} + def ${1:function}(${4:args}) do + $0 + end + def my_function(arg1, arg2, arg3) do + :ok + end + end + ] + end + + test "with a function without args after it", %{project: project} do + source = ~q[ + defmodule MyModule do + @spe| + def my_function do + :ok + end + end + ] + + assert {:ok, [spec_my_function, spec]} = + project + |> complete(source) + |> fetch_completion(kind: :property) + + assert spec_my_function.label == "@spec my_function" + + assert apply_completion(spec_my_function) == ~q[ + defmodule MyModule do + @spec my_function() :: ${0:term()} + def my_function do + :ok + end + end + ] + + assert spec.label == "@spec" + + assert apply_completion(spec) == ~q[ + defmodule MyModule do + @spec ${1:function}(${2:term()}) :: ${3:term()} + def ${1:function}(${4:args}) do + $0 + end + def my_function do + :ok + end + end + ] + end + + test "with a private function after it", %{project: project} do + source = ~q[ + defmodule MyModule do + @spe| + defp my_function(arg1, arg2, arg3) do + :ok + end + end + ] + + assert {:ok, [spec_my_function, _spec]} = + project + |> complete(source) + |> fetch_completion(kind: :property) + + assert spec_my_function.label == "@spec my_function" + + assert apply_completion(spec_my_function) == ~q[ + defmodule MyModule do + @spec my_function(${1:term()}, ${2:term()}, ${3:term()}) :: ${0:term()} + defp my_function(arg1, arg2, arg3) do + :ok + end + end + ] + end + end end diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/module_or_behaviour_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/module_or_behaviour_test.exs index 46c1e2993..6ff91ad1b 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/module_or_behaviour_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/module_or_behaviour_test.exs @@ -56,6 +56,22 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.ModuleOrBehavi assert completion.label == "Enumerable" assert completion.detail =~ "Enumerable protocol" end + + test "regression: modules should emit a completion after a dot in the context of a module", + %{project: project} do + code = ~q[ + defmodule MyTest do + alias Project.| + end + ] + + assert {:ok, completion} = + project + |> complete(code, trigger_character: ".") + |> fetch_completion("Foo") + + assert completion.kind == :module + end end describe "erlang module completions" do diff --git a/apps/server/test/lexical/server/code_intelligence/completion/translations/struct_field_test.exs b/apps/server/test/lexical/server/code_intelligence/completion/translations/struct_field_test.exs index 7588824ff..d9c2c9b5c 100644 --- a/apps/server/test/lexical/server/code_intelligence/completion/translations/struct_field_test.exs +++ b/apps/server/test/lexical/server/code_intelligence/completion/translations/struct_field_test.exs @@ -12,7 +12,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructFieldTes |> complete(source) |> fetch_completion(kind: :field) - assert completion.detail == "first_name" + assert completion.detail == "String.t()" assert completion.label == "first_name" end @@ -28,7 +28,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructFieldTes |> complete(source) |> fetch_completion(kind: :field) - assert completion.detail == "first_name" + assert completion.detail == "String.t()" assert completion.label == "first_name" end @@ -44,7 +44,7 @@ defmodule 
Lexical.Server.CodeIntelligence.Completion.Translations.StructFieldTes
       |> complete(source)
       |> fetch_completion(kind: :field)

-    assert completion.detail == "first_name"
+    assert completion.detail == "String.t()"
     assert completion.label == "first_name"
     assert apply_completion(completion) =~ "struct.first_name"
   end
@@ -61,7 +61,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructFieldTes
       |> complete(source)
       |> fetch_completion(kind: :field)

-    assert completion.detail == "first_name"
+    assert completion.detail == "String.t()"
     assert completion.label == "first_name"
     assert apply_completion(completion) =~ "struct.first_name"
   end
@@ -81,7 +81,7 @@ defmodule Lexical.Server.CodeIntelligence.Completion.Translations.StructFieldTes
       |> complete(source)
       |> fetch_completion(kind: :field)

-    assert completion.detail == "first_name"
+    assert completion.detail == "String.t()"
     assert completion.label == "first_name"
     assert apply_completion(completion) =~ "user.first_name"
   end
diff --git a/apps/server/test/lexical/server/code_intelligence/completion_test.exs b/apps/server/test/lexical/server/code_intelligence/completion_test.exs
index 80ad1ce5f..3209f232b 100644
--- a/apps/server/test/lexical/server/code_intelligence/completion_test.exs
+++ b/apps/server/test/lexical/server/code_intelligence/completion_test.exs
@@ -1,6 +1,8 @@
 defmodule Lexical.Server.CodeIntelligence.CompletionTest do
   alias Lexical.Protocol.Types.Completion
+  alias Lexical.Protocol.Types.Completion.Item, as: CompletionItem
   alias Lexical.RemoteControl.Completion.Candidate
+  alias Lexical.Server.CodeIntelligence.Completion.SortScope
   use Lexical.Test.Server.CompletionCase
   use Patch
@@ -62,32 +64,103 @@ defmodule Lexical.Server.CodeIntelligence.CompletionTest do
     assert %Completion.List{is_incomplete: true, items: []} =
              complete(project, " ", as_list: false)
   end
+
+  test "returns no completions in a comment at the beginning of a line", %{project: project} do
+    assert [] == complete(project, "# IO.in|")
+  end
+
+  test "returns no completions in a comment at the end of a line", %{project: project} do
+    assert [] == complete(project, "IO.inspe # IO.in|")
+  end
+
+  test "returns no completions in double quoted strings", %{project: project} do
+    assert [] = complete(project, ~S/"IO.in|"/)
+  end
+
+  test "returns no completions inside heredocs", %{project: project} do
+    assert [] = complete(project, ~S/
+    """
+    This is my heredoc
+    It does not IO.in|
+    """
+    /)
+  end
+
+  test "returns no completions inside ~s", %{project: project} do
+    assert [] = complete(project, ~S/~s[ IO.in|]/)
+  end
+
+  test "returns no completions inside ~S", %{project: project} do
+    assert [] = complete(project, ~S/ ~S[ IO.in|] /)
+  end
+
+  test "only modules that are behaviours are completed in an @impl", %{project: project} do
+    assert [behaviour] = complete(project, "@impl U|")
+    assert behaviour.label == "Unary"
+    assert behaviour.kind == :module
+  end
 end

 describe "do/end" do
-  test "returns do/end when the last token is do", %{project: project} do
+  test "returns do/end when the last token is 'do'", %{project: project} do
     assert [completion] = complete(project, "for a <- something do|")
     assert completion.label == "do/end block"
   end
+
+  test "returns do/end when the last token is 'd'", %{project: project} do
+    assert [completion] = complete(project, "for a <- something d|")
+    assert completion.label == "do/end block"
+  end
 end

-  describe "sorting" do
-    test "dunder functions aren't boosted", %{project: project} do
-      assert {:ok, completion} =
-
project - |> complete("Enum.|") - |> fetch_completion("__info__") + describe "sorting dunder function/macro completions" do + test "dunder functions are sorted last in their sort scope", %{project: project} do + {:ok, completion} = + project + |> complete("Enum.|") + |> fetch_completion("__info__") + + %CompletionItem{ + sort_text: sort_text + } = completion + + assert sort_text =~ SortScope.remote(false, 9) + end + + test "dunder macros are sorted last in their scope", %{project: project} do + {:ok, completion} = + project + |> complete("Project.__dunder_macro__|") + |> fetch_completion("__dunder_macro__") + + %CompletionItem{ + sort_text: sort_text + } = completion - refute boosted?(completion) + assert sort_text =~ SortScope.remote(false, 9) end - test "dunder macros aren't boosted", %{project: project} do - assert {:ok, completion} = - project - |> complete("Project.__dunder_macro__|") - |> fetch_completion("__dunder_macro__") + test "typespecs with no origin are completed", %{project: project} do + candidate = %Candidate.Typespec{ + argument_names: [], + metadata: %{builtin: true}, + arity: 0, + name: "any", + origin: nil + } + + patch(Lexical.RemoteControl.Api, :complete, [candidate]) - refute boosted?(completion) + [completion] = complete(project, " @type a|") + assert completion.label == "any()" + end + + test "typespecs with no full_name are completed", %{project: project} do + candidate = %Candidate.Struct{full_name: nil, metadata: %{}, name: "Struct"} + patch(Lexical.RemoteControl.Api, :complete, [candidate]) + + [completion] = complete(project, " %Stru|") + assert completion.label == "Struct" end end @@ -102,11 +175,24 @@ defmodule Lexical.Server.CodeIntelligence.CompletionTest do name: "#{name}-callback", origin: full_name, argument_names: [], - metadata: %{} + metadata: %{}, + arity: 0 }, %Candidate.Exception{name: "#{name}-exception", full_name: full_name}, - %Candidate.Function{name: "my_func", origin: full_name, argument_names: [], metadata: %{}}, - %Candidate.Macro{name: "my_macro", origin: full_name, argument_names: [], metadata: %{}}, + %Candidate.Function{ + name: "my_func", + origin: full_name, + argument_names: [], + metadata: %{}, + arity: 0 + }, + %Candidate.Macro{ + name: "my_macro", + origin: full_name, + argument_names: [], + metadata: %{}, + arity: 0 + }, %Candidate.MixTask{name: "#{name}-mix-task", full_name: full_name}, %Candidate.Module{name: "#{name}-module", full_name: full_name}, %Candidate.Module{name: "#{name}-submodule", full_name: "#{full_name}.Bar"}, @@ -114,7 +200,13 @@ defmodule Lexical.Server.CodeIntelligence.CompletionTest do %Candidate.Protocol{name: "#{name}-protocol", full_name: full_name}, %Candidate.Struct{name: "#{name}-struct", full_name: full_name}, %Candidate.StructField{name: "#{name}-struct-field", origin: full_name}, - %Candidate.Typespec{name: "#{name}-typespec"}, + %Candidate.Typespec{ + name: "#{name}-typespec", + origin: full_name, + argument_names: ["value"], + arity: 1, + metadata: %{} + }, %Candidate.Variable{name: "#{name}-variable"} ] @@ -138,6 +230,21 @@ defmodule Lexical.Server.CodeIntelligence.CompletionTest do assert {:ok, _} = fetch_completion(completions, label: "Foo-struct") end + test "only modules, typespecs and module attributes are returned in types", %{ + project: project + } do + completions = + for completion <- complete(project, "@spec F"), into: MapSet.new() do + completion.label + end + + assert "Foo-module" in completions + assert "Foo-module-attribute" in completions + assert "Foo-submodule" in completions 
+ assert "Foo-typespec(value)" in completions + assert Enum.count(completions) == 4 + end + test "modules are sorted before functions", %{project: project} do code = ~q[ def in_function do diff --git a/apps/server/test/lexical/server/project/progress_test.exs b/apps/server/test/lexical/server/project/progress_test.exs index c7e814e1e..7a08559e5 100644 --- a/apps/server/test/lexical/server/project/progress_test.exs +++ b/apps/server/test/lexical/server/project/progress_test.exs @@ -21,10 +21,26 @@ defmodule Lexical.Server.Project.ProgressTest do pid = start_supervised!({Project.Progress, project}) DispatchFake.start() RemoteControl.Dispatch.register_listener(pid, project_progress()) + RemoteControl.Dispatch.register_listener(pid, percent_progress()) {:ok, project: project} end + def percent_begin(project, label, max) do + message = percent_progress(stage: :begin, label: label, max: max) + RemoteControl.Api.broadcast(project, message) + end + + defp percent_report(project, label, delta, message \\ nil) do + message = percent_progress(stage: :report, label: label, message: message, delta: delta) + RemoteControl.Api.broadcast(project, message) + end + + defp percent_complete(project, label, message) do + message = percent_progress(stage: :complete, label: label, message: message) + RemoteControl.Api.broadcast(project, message) + end + def progress(stage, label, message \\ "") do project_progress(label: label, message: message, stage: stage) end @@ -39,6 +55,11 @@ defmodule Lexical.Server.Project.ProgressTest do :ok end + def with_work_done_progress_support(_) do + patch(Configuration, :client_supports?, fn :work_done_progress -> true end) + :ok + end + describe "report the progress message" do setup [:with_patched_transport] @@ -70,4 +91,68 @@ defmodule Lexical.Server.Project.ProgressTest do refute_receive {:transport, %Requests.CreateWorkDoneProgress{lsp: %{}}} end end + + describe "reporting a percentage progress" do + setup [:with_patched_transport, :with_work_done_progress_support] + + test "it should be able to increment the percentage", %{project: project} do + percent_begin(project, "indexing", 400) + + assert_receive {:transport, %Requests.CreateWorkDoneProgress{lsp: %{token: token}}} + assert_receive {:transport, %Notifications.Progress{} = progress} + + assert progress.lsp.value.kind == "begin" + assert progress.lsp.value.title == "indexing" + assert progress.lsp.value.percentage == 0 + + percent_report(project, "indexing", 100) + + assert_receive {:transport, %Notifications.Progress{lsp: %{token: ^token, value: value}}} + assert value.kind == "report" + assert value.percentage == 25 + assert value.message == nil + + percent_report(project, "indexing", 260, "Almost done") + + assert_receive {:transport, %Notifications.Progress{lsp: %{token: ^token, value: value}}} + assert value.percentage == 90 + assert value.message == "Almost done" + + percent_complete(project, "indexing", "Indexing Complete") + + assert_receive {:transport, %Notifications.Progress{lsp: %{token: ^token, value: value}}} + assert value.kind == "end" + assert value.message == "Indexing Complete" + end + + test "it caps the percentage at 100", %{project: project} do + percent_begin(project, "indexing", 100) + percent_report(project, "indexing", 1000) + assert_receive {:transport, %Notifications.Progress{lsp: %{value: %{kind: "begin"}}}} + assert_receive {:transport, %Notifications.Progress{lsp: %{value: value}}} + assert value.kind == "report" + assert value.percentage == 100 + end + + test "it only allows the 
percentage to grow", %{project: project} do + percent_begin(project, "indexing", 100) + assert_receive {:transport, %Notifications.Progress{lsp: %{value: %{kind: "begin"}}}} + + percent_report(project, "indexing", 10) + + assert_receive {:transport, %Notifications.Progress{lsp: %{value: value}}} + assert value.kind == "report" + assert value.percentage == 10 + + percent_report(project, "indexing", -10) + assert_receive {:transport, %Notifications.Progress{lsp: %{value: value}}} + assert value.kind == "report" + assert value.percentage == 10 + + percent_report(project, "indexing", 5) + assert_receive {:transport, %Notifications.Progress{lsp: %{value: value}}} + assert value.kind == "report" + assert value.percentage == 15 + end + end end diff --git a/apps/server/test/lexical/server/provider/handlers/code_lens_test.exs b/apps/server/test/lexical/server/provider/handlers/code_lens_test.exs new file mode 100644 index 000000000..c6d1e0e3d --- /dev/null +++ b/apps/server/test/lexical/server/provider/handlers/code_lens_test.exs @@ -0,0 +1,98 @@ +defmodule Lexical.Server.Provider.Handlers.CodeLensTest do + alias Lexical.Document + alias Lexical.Project + alias Lexical.Proto.Convert + alias Lexical.Protocol.Requests.CodeLens + alias Lexical.Protocol.Types + alias Lexical.RemoteControl + alias Lexical.Server + alias Lexical.Server.Provider.Handlers + + import Lexical.Test.Protocol.Fixtures.LspProtocol + import Lexical.RemoteControl.Api.Messages + import Lexical.Test.Fixtures + import Lexical.Test.RangeSupport + + use ExUnit.Case, async: false + use Patch + + setup_all do + start_supervised(Document.Store) + project = project(:umbrella) + + start_supervised!({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) + + RemoteControl.Api.register_listener(project, self(), [project_compiled()]) + RemoteControl.Api.schedule_compile(project, true) + + assert_receive project_compiled(), 5000 + + {:ok, project: project} + end + + defp with_indexing_enabled(_) do + patch(Lexical.RemoteControl.Api, :index_running?, false) + :ok + end + + defp with_mix_exs(%{project: project}) do + path = Project.mix_exs_path(project) + %{uri: Document.Path.ensure_uri(path)} + end + + def build_request(path) do + uri = Document.Path.ensure_uri(path) + + params = [ + text_document: [uri: uri] + ] + + with {:ok, _} <- Document.Store.open_temporary(uri), + {:ok, req} <- build(CodeLens, params) do + Convert.to_native(req) + end + end + + def handle(request, project) do + config = Server.Configuration.new(project: project) + Handlers.CodeLens.handle(request, config) + end + + describe "code lens for mix.exs" do + setup [:with_mix_exs, :with_indexing_enabled] + + test "emits a code lens at the project definition", %{project: project, uri: referenced_uri} do + mix_exs_path = Document.Path.ensure_path(referenced_uri) + mix_exs = File.read!(mix_exs_path) + + {:ok, request} = build_request(mix_exs_path) + {:reply, %{result: lenses}} = handle(request, project) + + assert [%Types.CodeLens{} = code_lens] = lenses + + assert extract(mix_exs, code_lens.range) =~ "def project" + assert code_lens.command == Handlers.Commands.reindex_command(project) + end + + test "does not emit a code lens for a project file", %{project: project} do + {:ok, request} = + project + |> Project.project_path() + |> Path.join("apps/first/lib/umbrella/first.ex") + |> build_request() + + assert {:reply, %{result: []}} = handle(request, project) + end + + test "does not emite a code lens for an umbrella app's 
mix.exs", %{project: project} do + {:ok, request} = + project + |> Project.project_path() + |> Path.join("apps/first/mix.exs") + |> build_request() + + assert {:reply, %{result: []}} = handle(request, project) + end + end +end diff --git a/apps/server/test/lexical/server/provider/handlers/find_references_test.exs b/apps/server/test/lexical/server/provider/handlers/find_references_test.exs new file mode 100644 index 000000000..284db8041 --- /dev/null +++ b/apps/server/test/lexical/server/provider/handlers/find_references_test.exs @@ -0,0 +1,84 @@ +defmodule Lexical.Server.Provider.Handlers.FindReferencesTest do + alias Lexical.Ast.Analysis + alias Lexical.Document + alias Lexical.Document.Location + alias Lexical.Proto.Convert + alias Lexical.Protocol.Requests.FindReferences + alias Lexical.Protocol.Responses + alias Lexical.RemoteControl + alias Lexical.Server + alias Lexical.Server.Provider.Handlers + + import Lexical.Test.Protocol.Fixtures.LspProtocol + import Lexical.Test.Fixtures + + use ExUnit.Case, async: false + use Patch + + setup_all do + start_supervised(Server.Application.document_store_child_spec()) + :ok + end + + setup do + project = project(:navigations) + path = file_path(project, Path.join("lib", "my_definition.ex")) + uri = Document.Path.ensure_uri(path) + {:ok, project: project, uri: uri} + end + + def build_request(path, line, char) do + uri = Document.Path.ensure_uri(path) + + params = [ + text_document: [uri: uri], + position: [line: line, character: char] + ] + + with {:ok, _} <- Document.Store.open_temporary(uri), + {:ok, req} <- build(FindReferences, params) do + Convert.to_native(req) + end + end + + def handle(request, project) do + config = Server.Configuration.new(project: project) + Handlers.FindReferences.handle(request, config) + end + + describe "find references" do + test "returns locations that the entity returns", %{project: project, uri: uri} do + patch(RemoteControl.Api, :references, fn ^project, + %Analysis{document: document}, + _position, + _ -> + locations = [ + Location.new( + Document.Range.new( + Document.Position.new(document, 1, 5), + Document.Position.new(document, 1, 10) + ), + Document.Path.to_uri("/path/to/file.ex") + ) + ] + + locations + end) + + {:ok, request} = build_request(uri, 5, 6) + + assert {:reply, %Responses.FindReferences{} = response} = handle(request, project) + assert [%Location{} = location] = response.result + assert location.uri =~ "file.ex" + end + + test "returns nothing if the entity can't resolve it", %{project: project, uri: uri} do + patch(RemoteControl.Api, :references, nil) + + {:ok, request} = build_request(uri, 1, 5) + + assert {:reply, %Responses.FindReferences{} = response} = handle(request, project) + assert response.result == nil + end + end +end diff --git a/apps/server/test/lexical/server/provider/handlers/go_to_definition_test.exs b/apps/server/test/lexical/server/provider/handlers/go_to_definition_test.exs index f9d5ec113..37cc97b77 100644 --- a/apps/server/test/lexical/server/provider/handlers/go_to_definition_test.exs +++ b/apps/server/test/lexical/server/provider/handlers/go_to_definition_test.exs @@ -5,7 +5,6 @@ defmodule Lexical.Server.Provider.Handlers.GoToDefinitionTest do alias Lexical.Protocol.Requests.GoToDefinition alias Lexical.RemoteControl alias Lexical.Server - alias Lexical.Server.Provider.Env alias Lexical.Server.Provider.Handlers import Lexical.Test.Protocol.Fixtures.LspProtocol @@ -15,17 +14,20 @@ defmodule Lexical.Server.Provider.Handlers.GoToDefinitionTest do use ExUnit.Case, async: 
false setup_all do - start_supervised(Document.Store) project = project(:navigations) - {:ok, _} = start_supervised({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!(Server.Application.document_store_child_spec()) + start_supervised!({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) - {:ok, _} = start_supervised({Server.Project.Supervisor, project}) + RemoteControl.Api.register_listener(project, self(), [ + project_compiled(), + project_index_ready() + ]) - RemoteControl.Api.register_listener(project, self(), [project_compiled()]) RemoteControl.Api.schedule_compile(project, true) - assert_receive project_compiled(), 5000 + assert_receive project_index_ready(), 5000 {:ok, project: project} end @@ -50,23 +52,41 @@ defmodule Lexical.Server.Provider.Handlers.GoToDefinitionTest do end def handle(request, project) do - Handlers.GoToDefinition.handle(request, %Env{project: project}) + config = Server.Configuration.new(project: project) + Handlers.GoToDefinition.handle(request, config) end describe "go to definition" do setup [:with_referenced_file] - test "find the function defintion", %{project: project, uri: referenced_uri} do + test "finds user-defined functions", %{project: project, uri: referenced_uri} do uses_file_path = file_path(project, Path.join("lib", "uses.ex")) {:ok, request} = build_request(uses_file_path, 4, 17) {:reply, %{result: %Location{} = location}} = handle(request, project) + assert Location.uri(location) == referenced_uri + end - assert location.range.start.line == 15 - assert location.range.start.character == 7 - assert location.range.end.line == 15 - assert location.range.end.character == 12 + test "finds user-defined modules", %{project: project, uri: referenced_uri} do + uses_file_path = file_path(project, Path.join("lib", "uses.ex")) + {:ok, request} = build_request(uses_file_path, 4, 4) + + {:reply, %{result: %Location{} = location}} = handle(request, project) assert Location.uri(location) == referenced_uri end + + test "does not find built-in functions", %{project: project} do + uses_file_path = file_path(project, Path.join("lib", "uses.ex")) + {:ok, request} = build_request(uses_file_path, 8, 7) + + {:reply, %{result: nil}} = handle(request, project) + end + + test "does not find built-in modules", %{project: project} do + uses_file_path = file_path(project, Path.join("lib", "uses.ex")) + {:ok, request} = build_request(uses_file_path, 8, 4) + + {:reply, %{result: nil}} = handle(request, project) + end end end diff --git a/apps/server/test/lexical/server/provider/handlers/hover_test.exs b/apps/server/test/lexical/server/provider/handlers/hover_test.exs index 82a2cc712..661ec549f 100644 --- a/apps/server/test/lexical/server/provider/handlers/hover_test.exs +++ b/apps/server/test/lexical/server/provider/handlers/hover_test.exs @@ -7,7 +7,6 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do alias Lexical.RemoteControl alias Lexical.RemoteControl.Api.Messages alias Lexical.Server - alias Lexical.Server.Provider.Env alias Lexical.Server.Provider.Handlers alias Lexical.Test.Fixtures alias Lexical.Test.Protocol.Fixtures.LspProtocol @@ -23,9 +22,9 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do setup_all do project = Fixtures.project() - {:ok, _} = start_supervised(Document.Store) - {:ok, _} = start_supervised({DynamicSupervisor, Server.Project.Supervisor.options()}) - {:ok, _} = start_supervised({Server.Project.Supervisor, project}) + 
start_supervised!(Server.Application.document_store_child_spec()) + start_supervised!({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) :ok = RemoteControl.Api.register_listener(project, self(), [Messages.project_compiled()]) assert_receive Messages.project_compiled(), 5000 @@ -536,6 +535,33 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do assert "«MacroHover.my_macro»(:foo)" = hovered |> strip_cursor() |> decorate(result.range) end) end + + test "splits to two lines if the signature is too long", %{project: project} do + code = ~q[ + defmodule VeryVeryVeryLongModuleName.CallHover do + def very_very_very_long_fun(_with, _many, _args) do + end + end + ] + + hovered = ~q[ + alias VeryVeryVeryLongModuleName.CallHover + CallHover.|very_very_very_long_fun(1, 2, 3) + ] + + expected = """ + ```elixir + CallHover.very_very_very_long_fun(with, many, args) + VeryVeryVeryLongModuleName.CallHover + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + end) + end end describe "type hover" do @@ -666,13 +692,40 @@ defmodule Lexical.Server.Provider.Handlers.HoverTest do assert {:reply, %{result: nil}} = hover(project, hovered) end) end + + test "splits to two lines if the signature is too long", %{project: project} do + code = ~q[ + defmodule VeryVeryVeryLongModuleName.TypeHover do + @opaque very_very_very_long_type(var) :: {integer(), var} + end + ] + + hovered = + "@type foo :: VeryVeryVeryLongModuleName.TypeHover.|very_very_very_long_type(:foo)" + + expected = """ + ```elixir + TypeHover.very_very_very_long_type/1 + VeryVeryVeryLongModuleName.TypeHover + + @opaque very_very_very_long_type(var) + ``` + """ + + with_compiled_in(project, code, fn -> + assert {:reply, %{result: %Types.Hover{} = result}} = hover(project, hovered) + assert result.contents.kind == :markdown + assert result.contents.value == expected + end) + end end defp hover(project, hovered) do with {position, hovered} <- pop_cursor(hovered), {:ok, document} <- document_with_content(project, hovered), {:ok, request} <- hover_request(document.uri, position) do - Handlers.Hover.handle(request, %Env{project: project}) + config = Server.Configuration.new(project: project) + Handlers.Hover.handle(request, config) end end diff --git a/apps/server/test/lexical/server/provider/queue_test.exs b/apps/server/test/lexical/server/provider/queue_test.exs deleted file mode 100644 index a26f41ac1..000000000 --- a/apps/server/test/lexical/server/provider/queue_test.exs +++ /dev/null @@ -1,173 +0,0 @@ -defmodule Lexical.Server.Provider.QueueTest do - alias Lexical.Protocol.Notifications - alias Lexical.Protocol.Requests - alias Lexical.Server.Provider.Env - alias Lexical.Server.Provider.Handlers - alias Lexical.Server.Provider.Queue - alias Lexical.Server.Transport - - use ExUnit.Case - use Patch - use Lexical.Test.EventualAssertions - - setup do - {:ok, _} = start_supervised(Queue.Supervisor.child_spec()) - {:ok, _} = start_supervised(Queue) - unit_test = self() - patch(Transport, :write, &send(unit_test, &1)) - - :ok - end - - def request(id, func) do - patch(Handlers.Completion, :handle, fn request, env -> func.(request, env) end) - patch(Handlers, :for_request, fn _ -> {:ok, Handlers.Completion} end) - patch(Requests.Completion, :to_elixir, fn req -> {:ok, req} end) - Requests.Completion.new(id: id, text_document: 
nil, position: nil, context: nil) - end - - describe "size/0" do - test "an empty queue has size 0" do - assert 0 == Queue.size() - end - - test "adding a request makes the queue grow" do - request = request(1, fn _, _ -> Process.sleep(500) end) - assert :ok = Queue.add(request, Env.new()) - assert 1 == Queue.size() - end - end - - describe "cancel/1" do - test "canceling a request stops it" do - request = request("1", fn _, _ -> Process.sleep(500) end) - assert :ok = Queue.add(request, Env.new()) - - :ok = Queue.cancel("1") - - assert_receive %{id: "1", error: error} - - assert Queue.size() == 0 - assert error.code == :request_cancelled - assert error.message == "Request cancelled" - end - - test "integers are stringified" do - request = request("1", fn _, _ -> Process.sleep(500) end) - assert :ok = Queue.add(request, Env.new()) - - :ok = Queue.cancel(1) - - assert_receive %{id: "1", error: _} - end - - test "passing in a request for cancellation" do - request = request("1", fn _, _ -> Process.sleep(500) end) - :ok = Queue.add(request, Env.new()) - - :ok = Queue.cancel(request) - - assert_receive %{id: "1", error: error} - assert Queue.size() == 0 - assert error.code == :request_cancelled - assert error.message == "Request cancelled" - end - - test "canceling a non-existing request is a no-op" do - assert :ok = Queue.cancel("5") - refute_receive %{id: _} - end - - test "Adding a cancel notification cancels the request" do - request = request("1", fn _, _ -> Process.sleep(500) end) - :ok = Queue.add(request, Env.new()) - - {:ok, notif} = - Notifications.Cancel.parse(%{ - "method" => "$/cancelRequest", - "jsonrpc" => "2.0", - "params" => %{ - "id" => "1" - } - }) - - :ok = Queue.cancel(notif) - assert_receive %{id: "1", error: error} - assert Queue.size() == 0 - assert error.code == :request_cancelled - assert error.message == "Request cancelled" - end - - test "Adding a cancel request cancels the request" do - request = request("1", fn _, _ -> Process.sleep(500) end) - :ok = Queue.add(request, Env.new()) - - {:ok, req} = - Requests.Cancel.parse(%{ - "method" => "$/cancelRequest", - "jsonrpc" => "2.0", - "id" => "50", - "params" => %{ - "id" => "1" - } - }) - - :ok = Queue.cancel(req) - assert_receive %{id: "1", error: error} - assert Queue.size() == 0 - assert error.code == :request_cancelled - assert error.message == "Request cancelled" - end - - test "canceling a request that has finished is a no-op" do - me = self() - request = request("1", fn _, _ -> send(me, :finished) end) - - assert :ok = Queue.add(request, Env.new()) - assert_receive :finished - - :ok = Queue.cancel("1") - assert Queue.size() == 0 - end - end - - describe "task return values" do - test "tasks can reply" do - request = request("1", fn _, _ -> {:reply, "great"} end) - :ok = Queue.add(request, Env.new()) - - assert_receive "great" - end - - test "replies are optional" do - request = request("1", fn _, _ -> :noreply end) - :ok = Queue.add(request, Env.new()) - - assert_eventually Queue.size() == 0 - refute_receive _ - end - - test "the server can be notified about the request" do - unit_test = self() - request = request("1", fn _, _ -> {:reply_and_alert, :response} end) - - patch(Lexical.Server, :response_complete, fn request, reply -> - send(unit_test, {:request_complete, request, reply}) - end) - - :ok = Queue.add(request, Env.new()) - - assert_receive :response - assert_receive {:request_complete, ^request, :response} - end - - test "exceptions are handled" do - request = request("1", fn _, _ -> raise "Boom!" 
end) - assert :ok = Queue.add(request, Env.new()) - - assert_receive %{id: "1", error: error} - assert error.code == :internal_error - assert error.message =~ "Boom!" - end - end -end diff --git a/apps/server/test/lexical/server/task_queue_test.exs b/apps/server/test/lexical/server/task_queue_test.exs new file mode 100644 index 000000000..d034a119c --- /dev/null +++ b/apps/server/test/lexical/server/task_queue_test.exs @@ -0,0 +1,164 @@ +defmodule Lexical.Server.TaskQueueTest do + alias Lexical.Protocol.Notifications + alias Lexical.Protocol.Requests + alias Lexical.Server.Configuration + alias Lexical.Server.Provider.Handlers + alias Lexical.Server.TaskQueue + alias Lexical.Server.Transport + alias Lexical.Test.Fixtures + + use ExUnit.Case + use Patch + use Lexical.Test.EventualAssertions + + setup_all do + {:ok, config: Configuration.new(project: Fixtures.project())} + end + + setup do + {:ok, _} = start_supervised({Task.Supervisor, name: TaskQueue.task_supervisor_name()}) + {:ok, _} = start_supervised(TaskQueue) + unit_test = self() + patch(Transport, :write, &send(unit_test, &1)) + + :ok + end + + def request(config, func) do + id = System.unique_integer([:positive]) + + patch(Lexical.Server, :handler_for, fn _ -> {:ok, Handlers.Completion} end) + + patch(Handlers.Completion, :handle, fn request, %Configuration{} = ^config -> + func.(request, config) + end) + + patch(Requests.Completion, :to_elixir, fn req -> {:ok, req} end) + + request = Requests.Completion.new(id: id, text_document: nil, position: nil, context: nil) + + {id, {Handlers.Completion, :handle, [request, config]}} + end + + describe "size/0" do + test "an empty queue has size 0" do + assert 0 == TaskQueue.size() + end + + test "adding a request makes the queue grow", %{config: config} do + {id, mfa} = request(config, fn _, _ -> Process.sleep(500) end) + assert :ok = TaskQueue.add(id, mfa) + assert 1 == TaskQueue.size() + end + end + + describe "cancel/1" do + test "canceling a request stops it", %{config: config} do + {id, mfa} = request(config, fn _, _ -> Process.sleep(500) end) + + assert :ok = TaskQueue.add(id, mfa) + assert :ok = TaskQueue.cancel(id) + + assert_receive %{id: ^id, error: error} + + assert TaskQueue.size() == 0 + assert error.code == :request_cancelled + assert error.message == "Request cancelled" + end + + test "passing in a request for cancellation", %{config: config} do + {id, mfa} = request(config, fn _, _ -> Process.sleep(500) end) + + assert :ok = TaskQueue.add(id, mfa) + assert :ok = TaskQueue.cancel(id) + + assert_receive %{id: ^id, error: error} + assert TaskQueue.size() == 0 + assert error.code == :request_cancelled + assert error.message == "Request cancelled" + end + + test "canceling a non-existing request is a no-op" do + assert :ok = TaskQueue.cancel("5") + refute_receive %{id: _} + end + + test "Adding a cancel notification cancels the request", %{config: config} do + {id, mfa} = request(config, fn _, _ -> Process.sleep(500) end) + assert :ok = TaskQueue.add(id, mfa) + + {:ok, notif} = + Notifications.Cancel.parse(%{ + "method" => "$/cancelRequest", + "jsonrpc" => "2.0", + "params" => %{ + "id" => id + } + }) + + assert :ok = TaskQueue.cancel(notif) + assert_receive %{id: ^id, error: error} + assert TaskQueue.size() == 0 + assert error.code == :request_cancelled + assert error.message == "Request cancelled" + end + + test "Adding a cancel request cancels the request", %{config: config} do + {id, mfa} = request(config, fn _, _ -> Process.sleep(500) end) + assert :ok = TaskQueue.add(id, 
mfa)
+
+      {:ok, req} =
+        Requests.Cancel.parse(%{
+          "method" => "$/cancelRequest",
+          "jsonrpc" => "2.0",
+          "id" => "50",
+          "params" => %{
+            "id" => id
+          }
+        })
+
+      assert :ok = TaskQueue.cancel(req)
+      assert_receive %{id: ^id, error: error}
+      assert TaskQueue.size() == 0
+      assert error.code == :request_cancelled
+      assert error.message == "Request cancelled"
+    end
+
+    test "canceling a request that has finished is a no-op", %{config: config} do
+      me = self()
+      {id, mfa} = request(config, fn _, _ -> send(me, :finished) end)
+
+      assert :ok = TaskQueue.add(id, mfa)
+      assert_receive :finished
+
+      assert :ok = TaskQueue.cancel(id)
+      assert TaskQueue.size() == 0
+    end
+  end
+
+  describe "task return values" do
+    test "tasks can reply", %{config: config} do
+      {id, mfa} = request(config, fn _, _ -> {:reply, "great"} end)
+      assert :ok = TaskQueue.add(id, mfa)
+
+      assert_receive "great"
+    end
+
+    test "replies are optional", %{config: config} do
+      {id, mfa} = request(config, fn _, _ -> :noreply end)
+      assert :ok = TaskQueue.add(id, mfa)
+
+      assert_eventually TaskQueue.size() == 0
+      refute_receive _
+    end
+
+    test "exceptions are handled", %{config: config} do
+      {id, mfa} = request(config, fn _, _ -> raise "Boom!" end)
+      assert :ok = TaskQueue.add(id, mfa)
+
+      assert_receive %{id: ^id, error: error}
+      assert error.code == :internal_error
+      assert error.message =~ "Boom!"
+    end
+  end
+end
diff --git a/apps/server/test/lexical/server/transport/std_io_test.exs b/apps/server/test/lexical/server/transport/std_io_test.exs
new file mode 100644
index 000000000..85b35c96b
--- /dev/null
+++ b/apps/server/test/lexical/server/transport/std_io_test.exs
@@ -0,0 +1,51 @@
+defmodule Lexical.Server.Transport.StdIoTest do
+  alias Lexical.Protocol.JsonRpc
+  alias Lexical.Server.Transport.StdIO
+
+  use ExUnit.Case
+
+  defp request(requests) do
+    {:ok, requests} =
+      requests
+      |> List.wrap()
+      |> Enum.map_join(fn req ->
+        {:ok, req} =
+          req
+          |> Map.put("jsonrpc", "2.0")
+          |> Jason.encode!()
+          |> JsonRpc.encode()
+
+        req
+      end)
+      |> StringIO.open(encoding: :latin1)
+
+    test = self()
+    StdIO.start_link(requests, &send(test, {:request, &1}))
+  end
+
+  defp receive_request do
+    assert_receive {:request, request}
+    request
+  end
+
+  test "works with unicode characters" do
+    # This tests a bug that occurred when we were using `IO.read`.
+    # Because `IO.read` reads characters, a prior request with unicode in the
+    # body has more bytes than characters, so reading content-length characters
+    # reads past the end of the body.
+    # This would cause the prior request to consume some of the next request if they happen
+    # quickly enough. If the prior request consumes the subsequent request's headers, then
+    # the read for the next request will read the JSON body as headers, and will fail the
+    # pattern match in the call to `parse_header`. This would cause the dreaded
+    # "no match on right hand side value [...JSON content]".
+    # The fix is to switch to binread, which takes bytes as an argument.
+    # This series of requests is specially crafted to cause the original failure. Removing
+    # a single « from the string will break the setup.
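+    #
+    # As an illustration: "«" is one character but two bytes in UTF-8, so a
+    # body containing N of them is N bytes longer than its character count;
+    # a character-based read of content-length characters therefore consumes
+    # about N bytes belonging to whatever message follows.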
+ request([ + %{method: "textDocument/doesSomething", body: "««««««««««««««««««««««"}, + %{method: "$/cancelRequest", id: 2}, + %{method: "$/cancelRequest", id: 3} + ]) + + _ = receive_request() + end +end diff --git a/apps/server/test/support/lexical/test/completion_case.ex b/apps/server/test/support/lexical/test/completion_case.ex index 431c76cca..9c735b74f 100644 --- a/apps/server/test/support/lexical/test/completion_case.ex +++ b/apps/server/test/support/lexical/test/completion_case.ex @@ -1,4 +1,5 @@ defmodule Lexical.Test.Server.CompletionCase do + alias Lexical.Ast alias Lexical.Document alias Lexical.Project alias Lexical.Protocol.Types.Completion.Context, as: CompletionContext @@ -17,12 +18,17 @@ defmodule Lexical.Test.Server.CompletionCase do setup_all do project = project() - {:ok, _} = start_supervised({DynamicSupervisor, Server.Project.Supervisor.options()}) - {:ok, _} = start_supervised({Server.Project.Supervisor, project}) + start_supervised!({DynamicSupervisor, Server.Project.Supervisor.options()}) + start_supervised!({Server.Project.Supervisor, project}) + + RemoteControl.Api.register_listener(project, self(), [ + project_compiled(), + project_index_ready() + ]) - RemoteControl.Api.register_listener(project, self(), [project_compiled()]) RemoteControl.Api.schedule_compile(project, true) assert_receive project_compiled(), 5000 + assert_receive project_index_ready(), 5000 {:ok, project: project} end @@ -70,7 +76,8 @@ defmodule Lexical.Test.Server.CompletionCase do CompletionContext.new(trigger_kind: :invoked) end - result = Completion.complete(project, document, position, context) + analysis = Ast.analyze(document) + result = Completion.complete(project, analysis, position, context) if return_as_list? do completion_items(result) @@ -107,22 +114,6 @@ defmodule Lexical.Test.Server.CompletionCase do end end - def boosted?(%CompletionItem{} = item, expected_amount \\ :any) do - case String.split(item.sort_text, "_") do - [boost | _rest] -> - actual_boost = String.to_integer(boost) - - if expected_amount == :any do - actual_boost < 99 - else - actual_boost == 99 - expected_amount - end - - _ -> - false - end - end - defp completion_items(%CompletionList{items: items}), do: items defp completion_items(items) when is_list(items), do: items end diff --git a/apps/server/test/support/transport/no_op.ex b/apps/server/test/support/transport/no_op.ex index c953879db..ffd9e88c7 100644 --- a/apps/server/test/support/transport/no_op.ex +++ b/apps/server/test/support/transport/no_op.ex @@ -2,5 +2,4 @@ defmodule Lexical.Test.Transport.NoOp do @behaviour Lexical.Server.Transport def write(_message), do: :ok - def log(_level, _message), do: :ok end diff --git a/apps/server/test/test_helper.exs b/apps/server/test/test_helper.exs index 4adf5c3e5..8e6e63c4f 100644 --- a/apps/server/test/test_helper.exs +++ b/apps/server/test/test_helper.exs @@ -1,2 +1,9 @@ +Application.ensure_all_started(:snowflake) ExUnit.configure(timeout: :infinity) ExUnit.start() + +if Version.match?(System.version(), ">= 1.15.0") do + Logger.configure(level: :none) +else + Logger.remove_backend(:console) +end diff --git a/bin/activate_version_manager.sh b/bin/activate_version_manager.sh index 6e49905a2..754ae450b 100755 --- a/bin/activate_version_manager.sh +++ b/bin/activate_version_manager.sh @@ -2,7 +2,7 @@ # The purpose of these functions is to detect and activate the correct # installed version manager in the current shell session. Currently, we -# try to detect asdf and rtx. 
+# try to detect asdf, rtx, and mise (new name for rtx). # # The general approach involves the following steps: # @@ -12,24 +12,25 @@ # Elixir, we're all set. # 3. Try to find and activate an rtx installation. If it provides # Elixir, we're all set. +# 4. Try to find and activate a mise installation. If it provides +# Elixir, we're all set. # activate_version_manager() { - _detect_version_manager || - (_try_activating_asdf && _detect_asdf) || - (_try_activating_rtx && _detect_rtx) - return $? -} - -_detect_version_manager() { - if ! (_detect_asdf || _detect_rtx); then - echo >&2 "No version manager detected" - return 1 + if (_detect_asdf || _detect_rtx || _detect_mise); then + return 0 fi + + echo >&2 "No activated version manager detected. Searching for version manager..." + + { _try_activating_asdf && _detect_asdf; } || + { _try_activating_rtx && _detect_rtx; } || + { _try_activating_mise && _detect_mise; } + return $? } _detect_asdf() { - if command -v asdf >/dev/null && asdf which elixir >/dev/null 2>&1; then + if command -v asdf >/dev/null && asdf which elixir >/dev/null 2>&1 && _ensure_which_elixir asdf; then echo >&2 "Detected Elixir through asdf: $(asdf which elixir)" return 0 else @@ -38,7 +39,7 @@ _detect_asdf() { } _detect_rtx() { - if command -v rtx >/dev/null && rtx which elixir >/dev/null 2>&1; then + if command -v rtx >/dev/null && rtx which elixir >/dev/null 2>&1 && _ensure_which_elixir rtx; then echo >&2 "Detected Elixir through rtx: $(rtx which elixir)" return 0 else @@ -46,6 +47,20 @@ _detect_rtx() { fi } +_detect_mise() { + if command -v mise >/dev/null && mise which elixir >/dev/null 2>&1 && _ensure_which_elixir mise; then + echo >&2 "Detected Elixir through mise: $(mise which elixir)" + return 0 + else + return 1 + fi +} + +_ensure_which_elixir() { + [[ $(which elixir) == *"$1"* ]] + return $? +} + _try_activating_asdf() { local asdf_dir="${ASDF_DIR:-"$HOME/.asdf"}" local asdf_vm="$asdf_dir/asdf.sh" @@ -63,15 +78,23 @@ _try_activating_asdf() { _try_activating_rtx() { if which rtx >/dev/null; then echo >&2 "Found rtx. Activating..." - eval "$($(which rtx) activate bash)" + eval "$(rtx activate bash)" + eval "$(rtx env)" + return $? + else + return 1 + fi +} + +_try_activating_mise() { + if which mise >/dev/null; then + echo >&2 "Found mise. Activating..." + eval "$(mise activate bash)" + eval "$(mise env)" return $? else return 1 fi } -# Run activate_version_manager if we're being run directly. If we're being -# sourced, let the caller run the function. -if [[ "${BASH_SOURCE[0]}" == "$0" ]]; then - activate_version_manager -fi +activate_version_manager diff --git a/bin/start_lexical.sh b/bin/start_lexical.sh index fa7306170..4b29a39ba 100755 --- a/bin/start_lexical.sh +++ b/bin/start_lexical.sh @@ -1,10 +1,11 @@ #!/usr/bin/env bash -set -eo pipefail +set -o pipefail script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" -if ! "$script_dir"/activate_version_manager.sh; then - echo "Could not activate a version manager." +# shellcheck disable=SC1091 +if ! . "$script_dir"/activate_version_manager.sh; then + echo >&2 "Could not activate a version manager. Trying system installation." 
fi case $1 in diff --git a/config/config.exs b/config/config.exs index 68457def8..51f54f36b 100644 --- a/config/config.exs +++ b/config/config.exs @@ -1,3 +1,8 @@ import Config +config :snowflake, + machine_id: 1, + # First second of 2024 + epoch: 1_704_070_800_000 + import_config("#{Mix.env()}.exs") diff --git a/config/runtime.exs b/config/runtime.exs index 59eecf9cd..44f01b946 100644 --- a/config/runtime.exs +++ b/config/runtime.exs @@ -19,15 +19,6 @@ cond do path: log_file_name, level: :debug - Code.ensure_loaded?(JsonRpcBackend) -> - config :logger, - backends: [JsonRpcBackend] - - config :logger, JsonRpcBackend, - level: :error, - format: "$message", - metadata: [] - true -> :ok end diff --git a/config/test.exs b/config/test.exs index c8a675356..8aa85ee5a 100644 --- a/config/test.exs +++ b/config/test.exs @@ -6,14 +6,8 @@ config :logger, level: :none config :remote_control, edit_window_millis: 10, - modules_cache_expiry: {50, :millisecond} + modules_cache_expiry: {50, :millisecond}, + search_store_quiescent_period_ms: 10 config :server, transport: NoOp config :stream_data, initial_size: 50 - -if Version.match?(System.version(), ">= 1.15.0") do - Logger.configure(level: :none) -else - Logger.remove_backend(:console) - Logger.remove_backend(JsonRpc.Backend) -end diff --git a/flake.lock b/flake.lock index 949bc9dde..aa24ddf9b 100644 --- a/flake.lock +++ b/flake.lock @@ -1,40 +1,54 @@ { "nodes": { - "flake-utils": { + "flake-parts": { "inputs": { - "systems": "systems" + "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1692799911, - "narHash": "sha256-3eihraek4qL744EvQXsK1Ha6C3CR7nnT8X2qWap4RNk=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "f9e7cf818399d17d347f847525c5a5a8032e4e44", + "lastModified": 1719994518, + "narHash": "sha256-pQMhCCHyQGRzdfAkdJ4cIWiw+JNuWsTX7f0ZYSyz0VY=", + "owner": "hercules-ci", + "repo": "flake-parts", + "rev": "9227223f6d922fee3c7b190b2cc238a99527bbb7", "type": "github" }, "original": { - "owner": "numtide", - "repo": "flake-utils", + "owner": "hercules-ci", + "repo": "flake-parts", "type": "github" } }, "nixpkgs": { "locked": { - "lastModified": 1692557222, - "narHash": "sha256-TCOtZaioLf/jTEgfa+nyg0Nwq5Uc610Z+OFV75yUgGw=", - "path": "/nix/store/7x0565m245dv1ny5j1v04a7azl2kll3a-source", - "rev": "0b07d4957ee1bd7fd3bdfd12db5f361bd70175a6", - "type": "path" + "lastModified": 1719931832, + "narHash": "sha256-0LD+KePCKKEb4CcPsTBOwf019wDtZJanjoKm1S8q3Do=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "0aeab749216e4c073cece5d34bc01b79e717c3e0", + "type": "github" }, "original": { "id": "nixpkgs", "type": "indirect" } }, + "nixpkgs-lib": { + "locked": { + "lastModified": 1719876945, + "narHash": "sha256-Fm2rDDs86sHy0/1jxTOKB1118Q0O3Uc7EC0iXvXKpbI=", + "type": "tarball", + "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + }, + "original": { + "type": "tarball", + "url": "https://github.com/NixOS/nixpkgs/archive/5daf0514482af3f97abaefc78a6606365c9108e2.tar.gz" + } + }, "root": { "inputs": { - "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs" + "flake-parts": "flake-parts", + "nixpkgs": "nixpkgs", + "systems": "systems" } }, "systems": { diff --git a/flake.nix b/flake.nix index 5b798c7ea..fcc6165ed 100644 --- a/flake.nix +++ b/flake.nix @@ -2,28 +2,55 @@ description = "Reimagined language server for Elixir"; inputs.nixpkgs.url = "flake:nixpkgs"; - inputs.flake-utils.url = "github:numtide/flake-utils"; - - outputs = - { self - , nixpkgs - , flake-utils - }: - 
flake-utils.lib.eachDefaultSystem ( - system: - let - pkgs = import nixpkgs { inherit system; }; - erl = pkgs.beam.packages.erlang; - lexical = erl.callPackage ./nix/lexical.nix {}; - in - { + inputs.flake-parts.url = "github:hercules-ci/flake-parts"; + inputs.systems.url = "github:nix-systems/default"; + + outputs = { + self, + nixpkgs, + systems, + ... + } @ inputs: + inputs.flake-parts.lib.mkFlake {inherit inputs;} { + flake = { + lib = { + mkLexical = {erlang}: erlang.callPackage ./nix/lexical.nix {}; + }; + }; + + systems = import systems; + + perSystem = { + self', + pkgs, + ... + }: let + erlang = pkgs.beam.packages.erlang; + lexical = self.lib.mkLexical {inherit erlang;}; + in { + formatter = pkgs.alejandra; + + apps.update-hash = let + script = pkgs.writeShellApplication { + name = "update-hash"; + + runtimeInputs = [ pkgs.nixFlakes pkgs.gawk ]; + + text = '' + nix --extra-experimental-features 'nix-command flakes' \ + build --no-link "${self}#__fodHashGen" 2>&1 | gawk '/got:/ { print $2 }' || true + ''; + }; + in { + type = "app"; + program = "${script}/bin/update-hash"; + }; + packages = { inherit lexical; - default = lexical; - # Private package used to automatically generate hash for Mix deps - __fodHashGen = lexical.mixFodDeps.overrideAttrs(final: curr: { + __fodHashGen = lexical.mixFodDeps.overrideAttrs (final: curr: { outputHash = pkgs.lib.fakeSha256; }); }; @@ -31,13 +58,13 @@ devShells.default = pkgs.mkShell { packages = [ - erl.elixir + erlang.elixir ] ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [ pkgs.darwin.apple_sdk.frameworks.CoreFoundation pkgs.darwin.apple_sdk.frameworks.CoreServices ]; }; - } - ); + }; + }; } diff --git a/integration/Dockerfile b/integration/Dockerfile index 0b764613a..26471410d 100644 --- a/integration/Dockerfile +++ b/integration/Dockerfile @@ -3,10 +3,13 @@ # Build: docker build -t lx -f integration/Dockerfile . # Run: docker run -it lx -ARG ELIXIR_VERSION=1.15.6 -ARG ERLANG_VERSION=26.1.1 +ARG SYS_ELIXIR_VERSION=1.15.7 +ARG SYS_ERLANG_VERSION=26.2.1 -FROM hexpm/elixir:${ELIXIR_VERSION}-erlang-${ERLANG_VERSION}-ubuntu-jammy-20230126 +FROM hexpm/elixir:${SYS_ELIXIR_VERSION}-erlang-${SYS_ERLANG_VERSION}-ubuntu-jammy-20231128 + +ENV ELIXIR_VERSION=1.15.7-otp-26 +ENV ERLANG_VERSION=26.2.1 RUN apt-get update RUN apt-get install -y \ @@ -21,19 +24,23 @@ RUN apt-get install -y \ WORKDIR /lexical -COPY integration/boot/set_up_rtx.sh integration/boot/set_up_rtx.sh -RUN integration/boot/set_up_rtx.sh +COPY integration/boot/set_up_mise.sh integration/boot/set_up_mise.sh +RUN integration/boot/set_up_mise.sh COPY integration/boot/set_up_asdf.sh integration/boot/set_up_asdf.sh RUN integration/boot/set_up_asdf.sh COPY apps apps -COPY bin bin COPY config config COPY projects projects COPY mix* . 
-COPY integration/boot/set_up_lexical.sh integration/boot/set_up_lexical.sh -RUN integration/boot/set_up_lexical.sh +RUN mix local.hex --force +RUN mix deps.get +RUN mix compile + +COPY bin bin + +RUN mix package CMD bash diff --git a/integration/boot/set_up_asdf.sh b/integration/boot/set_up_asdf.sh index ca51e2ca6..1a897bd34 100755 --- a/integration/boot/set_up_asdf.sh +++ b/integration/boot/set_up_asdf.sh @@ -13,9 +13,9 @@ asdf update export KERL_CONFIGURE_OPTIONS="--disable-debug --without-javac --without-termcap --without-wx" asdf plugin add erlang https://github.com/asdf-vm/asdf-erlang.git -asdf install erlang latest -asdf global erlang latest +asdf install erlang "$ERLANG_VERSION" +asdf global erlang "$ERLANG_VERSION" asdf plugin add elixir https://github.com/asdf-vm/asdf-elixir.git -asdf install elixir latest -asdf global elixir latest +asdf install elixir "$ELIXIR_VERSION" +asdf global elixir "$ELIXIR_VERSION" diff --git a/integration/boot/set_up_lexical.sh b/integration/boot/set_up_lexical.sh deleted file mode 100755 index 1ad02f74b..000000000 --- a/integration/boot/set_up_lexical.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash -set -eo pipefail - -cd /lexical - -mix local.hex --force -mix deps.get -mix package diff --git a/integration/boot/set_up_rtx.sh b/integration/boot/set_up_mise.sh similarity index 50% rename from integration/boot/set_up_rtx.sh rename to integration/boot/set_up_mise.sh index 61855d292..17d23e0e3 100755 --- a/integration/boot/set_up_rtx.sh +++ b/integration/boot/set_up_mise.sh @@ -1,10 +1,10 @@ #!/usr/bin/env bash set -eo pipefail -rtx_dir=/version_managers/rtx_vm -mkdir -p $rtx_dir && cd $rtx_dir +mise_dir=/version_managers/mise_vm +mkdir -p $mise_dir && cd $mise_dir -# Download the rtx binary for the correct architecture +# Download the mise binary for the correct architecture arch=$(uname -m) architecture="" @@ -21,14 +21,13 @@ case $arch in ;; esac -curl "https://rtx.pub/rtx-latest-linux-$architecture" >"$(pwd)/rtx" -chmod +x ./rtx +curl "https://mise.jdx.dev/mise-latest-linux-$architecture" >"$(pwd)/mise" +chmod +x ./mise -eval "$(./rtx activate bash)" +eval "$(./mise activate bash)" export KERL_CONFIGURE_OPTIONS="--disable-debug --without-javac --without-termcap --without-wx" -./rtx plugins install erlang -./rtx use --global erlang@latest +./mise use --global "erlang@$ERLANG_VERSION" -./rtx plugins install elixir -./rtx use --global elixir@latest +./mise plugins install -y elixir +./mise use --global "elixir@$ELIXIR_VERSION" diff --git a/integration/test.sh b/integration/test.sh index 2ef291bdd..7a786593e 100755 --- a/integration/test.sh +++ b/integration/test.sh @@ -1,4 +1,8 @@ #!/usr/bin/env bash + +# Disable warning for interpolations in single quotes: +# shellcheck disable=2016 + set -eo pipefail script_dir="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)" @@ -52,10 +56,9 @@ run_test() { fi } -# Tests: - -test_using_system_installation() { +test_system_installation() { local expect=( + "No activated version manager detected" "Could not activate a version manager" ) @@ -63,39 +66,94 @@ test_using_system_installation() { return $? } -test_find_asdf_directory() { +test_asdf_already_activated() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'ASDF_DIR=/version_managers/asdf_vm . /version_managers/asdf_vm/asdf.sh' + ) + local expect=( + "Detected Elixir through asdf" + ) + + run_test "${setup[*]}" "${expect[@]}" + return $? 
+} + +test_asdf_dir_found() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'export ASDF_DIR=/version_managers/asdf_vm' + ) local expect=( - "No version manager detected" - "Found asdf" + "No activated version manager detected" + "Found asdf. Activating" "Detected Elixir through asdf" ) - local setup="export ASDF_DIR=/version_managers/asdf_vm" - run_test "$setup" "${expect[@]}" + run_test "${setup[*]}" "${expect[@]}" return $? } -test_activated_asdf() { +test_asdf_used_when_activated_mise_missing_elixir() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'eval "$(/version_managers/mise_vm/mise activate bash)" && ' + 'mise uninstall "elixir@$ELIXIR_VERSION" && ' + 'export ASDF_DIR=/version_managers/asdf_vm' + ) local expect=( + "No activated version manager detected" + "Found asdf. Activating" "Detected Elixir through asdf" ) - local setup="ASDF_DIR=/version_managers/asdf_vm . /version_managers/asdf_vm/asdf.sh" - run_test "$setup" "${expect[@]}" + run_test "${setup[*]}" "${expect[@]}" return $? } -test_activated_rtx() { +test_mise_already_activated() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'eval "$(/version_managers/mise_vm/mise activate bash)"' + ) local expect=( - "Detected Elixir through rtx" + "Detected Elixir through mise" ) - # shellcheck disable=2016 - local setup='eval "$(/version_managers/rtx_vm/rtx activate bash)"' - run_test "$setup" "${expect[@]}" + run_test "${setup[*]}" "${expect[@]}" return $? } -# Run all tests +test_mise_binary_found_and_activated() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'export PATH="/version_managers/mise_vm:$PATH"' + ) + local expect=( + "No activated version manager detected" + "Found mise" + "Detected Elixir through mise" + ) + + run_test "${setup[*]}" "${expect[@]}" + return $? +} + +test_mise_used_when_activated_asdf_missing_elixir() { + local setup=( + 'mv "$(which elixir)" "$(which elixir).hidden" && ' + 'ASDF_DIR=/version_managers/asdf_vm . /version_managers/asdf_vm/asdf.sh && ' + 'asdf uninstall elixir "$ELIXIR_VERSION" && ' + 'export PATH="/version_managers/mise_vm:$PATH"' + ) + local expect=( + "No activated version manager detected" + "Found mise. Activating" + "Detected Elixir through mise" + ) + + run_test "${setup[*]}" "${expect[@]}" + return $? +} run_tests_and_exit diff --git a/integration/test_utils.sh b/integration/test_utils.sh index e69cb441d..6f38c4dc6 100755 --- a/integration/test_utils.sh +++ b/integration/test_utils.sh @@ -27,19 +27,9 @@ assert_contains() { if [ ${#not_found[@]} -ne 0 ]; then log_error "Assertion failed!" 
- - log "\n ${cyan}Expected:${reset}\n\n" - - for expected in "${not_found[@]}"; do - prefix_lines " " "$expected" - done - - log "\n ${cyan}To be in:${reset}\n\n" - - prefix_lines " " "$output" - + log_section "Expected" "${not_found[@]}" + log_section "To be in" "$output" log "\n\n" - return 1 fi } @@ -86,6 +76,17 @@ log_info() { log "${faint}$1${reset}\n" } +log_section() { + local title="$1" + local content=("${@:2}") + + log "\n ${cyan}${title}:${reset}\n\n" + + for item in "${content[@]}"; do + prefix_lines " " "$item" + done +} + prefix_lines() { local prefix_with=$1 echo "$2" | sed "s/^/$prefix_with/" | cat diff --git a/mix.exs b/mix.exs index 5320212dc..252f2f66c 100644 --- a/mix.exs +++ b/mix.exs @@ -5,7 +5,7 @@ defmodule Lexical.LanguageServer.MixProject do def project do [ apps_path: "apps", - version: "0.3.0", + version: "0.7.0", start_permanent: Mix.env() == :prod, deps: deps(), aliases: aliases(), @@ -33,6 +33,7 @@ defmodule Lexical.LanguageServer.MixProject do README.md pages/installation.md pages/architecture.md + pages/glossary.md ), filter_modules: fn mod_name, _ -> case Module.split(mod_name) do @@ -58,7 +59,17 @@ defmodule Lexical.LanguageServer.MixProject do [ compile: "compile --docs --debug-info", docs: "docs --html", - test: "test --no-start" + test: "test --no-start", + "nix.hash": &nix_hash/1 ] end + + defp nix_hash(_args) do + docker = System.get_env("DOCKER_CMD", "docker") + + Mix.shell().cmd( + "#{docker} run --rm -v '#{File.cwd!()}:/data' nixos/nix nix --extra-experimental-features 'nix-command flakes' run ./data#update-hash", + stderr_to_stdout: false + ) + end end diff --git a/mix.lock b/mix.lock index 752502471..2ce31fbaf 100644 --- a/mix.lock +++ b/mix.lock @@ -1,16 +1,16 @@ %{ "benchee": {:hex, :benchee, "1.1.0", "f3a43817209a92a1fade36ef36b86e1052627fd8934a8b937ac9ab3a76c43062", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}], "hexpm", "7da57d545003165a012b587077f6ba90b89210fd88074ce3c60ce239eb5e6d93"}, - "bunt": {:hex, :bunt, "0.2.1", "e2d4792f7bc0ced7583ab54922808919518d0e57ee162901a16a1b6664ef3b14", [:mix], [], "hexpm", "a330bfb4245239787b15005e66ae6845c9cd524a288f0d141c148b02603777a5"}, + "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, "castore": {:hex, :castore, "1.0.3", "7130ba6d24c8424014194676d608cb989f62ef8039efd50ff4b3f33286d06db8", [:mix], [], "hexpm", "680ab01ef5d15b161ed6a95449fac5c6b8f60055677a8e79acf01b27baa4390b"}, - "credo": {:hex, :credo, "1.7.0", "6119bee47272e85995598ee04f2ebbed3e947678dee048d10b5feca139435f75", [:mix], [{:bunt, "~> 0.2.1", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "6839fcf63d1f0d1c0f450abc8564a57c43d644077ab96f2934563e68b8a769d7"}, + "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"}, "deep_merge": {:hex, :deep_merge, "1.0.0", 
"b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"}, + "dialyxir": {:hex, :dialyxir, "1.4.4", "fb3ce8741edeaea59c9ae84d5cec75da00fa89fe401c72d6e047d11a61f65f70", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "cd6111e8017ccd563e65621a4d9a4a1c5cd333df30cebc7face8029cacb4eff6"}, "earmark_parser": {:hex, :earmark_parser, "1.4.32", "fa739a0ecfa34493de19426681b23f6814573faee95dfd4b4aafe15a7b5b32c6", [:mix], [], "hexpm", "b8b0dd77d60373e77a3d7e8afa598f325e49e8663a51bcc2b88ef41838cca755"}, - "elixir_sense": {:git, "https://github.com/elixir-lsp/elixir_sense.git", "64c4afe4f59cf22fd44302373fa88a606df941c8", []}, - "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + "elixir_sense": {:git, "https://github.com/elixir-lsp/elixir_sense.git", "67f6974dedb33846a060031d5afd5430a3f583f0", []}, + "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, - "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, - "jason": {:hex, :jason, "1.4.0", "e855647bc964a44e2f67df589ccf49105ae039d4179db7f6271dfd3843dc27e6", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "79a3791085b2a0f743ca04cec0f7be26443738779d09302e01318f97bdb82121"}, + "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"}, + "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, "logger_file_backend": {:hex, :logger_file_backend, "0.0.13", "df07b14970e9ac1f57362985d76e6f24e3e1ab05c248055b7d223976881977c2", [:mix], [], "hexpm", "71a453a7e6e899ae4549fb147b1c6621f4233f8f48f58ca10a64ec67b6c50018"}, "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", 
"cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, @@ -26,7 +26,8 @@ "phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"}, "plug": {:hex, :plug, "1.14.2", "cff7d4ec45b4ae176a227acd94a7ab536d9b37b942c8e8fa6dfc0fff98ff4d80", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "842fc50187e13cf4ac3b253d47d9474ed6c296a8732752835ce4a86acdf68d13"}, "plug_crypto": {:hex, :plug_crypto, "1.2.5", "918772575e48e81e455818229bf719d4ab4181fcbf7f85b68a35620f78d89ced", [:mix], [], "hexpm", "26549a1d6345e2172eb1c233866756ae44a9609bd33ee6f99147ab3fd87fd842"}, - "sourceror": {:hex, :sourceror, "0.14.0", "b6b8552d0240400d66b6f107c1bab7ac1726e998efc797f178b7b517e928e314", [:mix], [], "hexpm", "809c71270ad48092d40bbe251a133e49ae229433ce103f762a2373b7a10a8d8b"}, + "snowflake": {:hex, :snowflake, "1.0.4", "8433b4e04fbed19272c55e1b7de0f7a1ee1230b3ae31a813b616fd6ef279e87a", [:mix], [], "hexpm", "badb07ebb089a5cff737738297513db3962760b10fe2b158ae3bebf0b4d5be13"}, + "sourceror": {:hex, :sourceror, "1.4.0", "be87319b1579191e25464005d465713079b3fd7124a3938a1e6cf4def39735a9", [:mix], [], "hexpm", "16751ca55e3895f2228938b703ad399b0b27acfe288eff6c0e629ed3e6ec0358"}, "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, "stream_data": {:hex, :stream_data, "0.6.0", "e87a9a79d7ec23d10ff83eb025141ef4915eeb09d4491f79e52f2562b73e5f47", [:mix], [], "hexpm", "b92b5031b650ca480ced047578f1d57ea6dd563f5b57464ad274718c9c29501c"}, "telemetry": {:hex, :telemetry, "1.2.1", "68fdfe8d8f05a8428483a97d7aab2f268aaff24b49e0f599faa091f1d4e7f61c", [:rebar3], [], "hexpm", "dad9ce9d8effc621708f99eac538ef1cbe05d6a874dd741de2e689c47feafed5"}, diff --git a/mix_dialyzer.exs b/mix_dialyzer.exs index 55312d419..f8372f228 100644 --- a/mix_dialyzer.exs +++ b/mix_dialyzer.exs @@ -1,6 +1,6 @@ defmodule Mix.Dialyzer do def dependency do - {:dialyxir, "~> 1.3", only: [:dev, :test], runtime: false, optional: true} + {:dialyxir, "~> 1.4", only: [:dev, :test], runtime: false, optional: true} end def config(name \\ :dialyzer) do diff --git a/nix/hash b/nix/hash index 5fc334d7d..db16cea63 100644 --- a/nix/hash +++ b/nix/hash @@ -1 +1 @@ -sha256-V9iBKPSiGZy4Pp96VsNz8iyzvAYT8faqQ/csnYQMgjA= +sha256-c9n+lbUwwf4NPIbcUr2sTldHvPDVJrS/4LOhIDWT0/U= diff --git a/nix/lexical.nix b/nix/lexical.nix index 6a9b87da3..4a3191125 100644 --- a/nix/lexical.nix +++ b/nix/lexical.nix @@ -1,7 +1,10 @@ -{ mixRelease -, fetchMixDeps -, erlang -}: mixRelease rec { +{ + mixRelease, + fetchMixDeps, + elixir, + writeScript, +}: +mixRelease rec { pname = "lexical"; version = "development"; @@ -24,11 +27,17 @@ runHook postInstall ''; - preFixup = '' - for script in $out/releases/*/elixir; do - substituteInPlace "$script" 
--replace 'ERL_EXEC="erl"' 'ERL_EXEC="${erlang}/bin/erl"' - done + preFixup = let + activate_version_manager = writeScript "activate_version_manager.sh" '' + true + ''; + in '' + substituteInPlace "$out/bin/start_lexical.sh" --replace 'elixir_command=' 'elixir_command="${elixir}/bin/"' + rm "$out/bin/activate_version_manager.sh" + ln -s ${activate_version_manager} "$out/bin/activate_version_manager.sh" - makeWrapper $out/bin/start_lexical.sh $out/bin/lexical --set RELEASE_COOKIE lexical + mv "$out/bin" "$out/binsh" + + makeWrapper "$out/binsh/start_lexical.sh" "$out/bin/lexical" --set RELEASE_COOKIE lexical ''; } diff --git a/pages/architecture.md b/pages/architecture.md index 5684eac18..c338597a2 100644 --- a/pages/architecture.md +++ b/pages/architecture.md @@ -1,25 +1,33 @@ # Architecture ## Project Structure + Lexical is designed to keep your application isolated from lexical's code. Because of this, lexical is structured as an umbrella app, with the following sub-apps: - * `core`: Contains all code common to the other applications. + * `common`: Contains all code common to the other applications. + * `proto`: Used by `protocol` to generate the Elixir representation of LSP data structures. + * `protocol`: Code related to speaking the language server protocol. * `remote_control`: The application that's injected into a project's code, which - gives lexical an API to do things in the context of your app. + gives lexical an API to do things in the context of your app. * `server`: The language server itself. Lexical is an umbrella app so we can control how many dependencies the remote control app has. By separating lexical into sub-applications, each is built as a separate archive, and we can pick and choose which of these applications (and their dependencies) are injected into the project's VM, thus reducing how much contamination the project sees. If lexical were a standard application, adding dependencies to lexical would cause those dependencies to appear in the project's VM, which might cause build issues, version conflicts in mix, or other inconsistencies. - + Since the `remote_control` app only depends on `common`, `path_glob` and `elixir_sense`, only those applications pollute the project's VM. Keeping `remote_control`'s dependencies to a minimum is a design goal of this architecture. ## Language Server -The language server (the `server` app) is the entry point to Lexical. When started by the `start_lexical.sh` command, it sets up a [transport](`Lexical.Server.Transport`) that [reads JsonRPC from standard input and writes responses to standard output](`Lexical.Server.Transport.StdIO`). -When a message is received, it is parsed into either a [LSP Request](`Lexical.Protocol.Requests`) or a [LSP Notification](`Lexical.Protocol.Notifications`) and and then it's handed to the [language server](`Lexical.Server`) to process. +The language server (the `server` app) is the entry point to Lexical. When started by the `start_lexical.sh` command, it sets up a [transport](https://github.com/lexical-lsp/lexical/blob/main/apps/server/lib/lexical/server/transport.ex) that [reads JsonRPC from standard input and writes responses to standard output](https://github.com/lexical-lsp/lexical/blob/main/apps/server/lib/lexical/server/transport/std_io.ex).
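+
+To make the transport's job concrete, here is a minimal, hypothetical sketch of the framing LSP uses over standard IO (a `Content-Length` header, a blank line, then a JSON-RPC body); it is illustrative only and is not Lexical's actual encoder:
+
+```elixir
+# Frame a JSON-RPC payload the way an LSP transport writes it to stdout.
+# Content-Length counts bytes, so the body is measured with byte_size/1.
+defmodule FramingSketch do
+  def frame(payload) when is_map(payload) do
+    body = Jason.encode!(payload)
+    "Content-Length: #{byte_size(body)}\r\n\r\n" <> body
+  end
+end
+```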
+ +When a message is received, it is parsed into either an [LSP Request](https://github.com/lexical-lsp/lexical/blob/main/apps/protocol/lib/lexical/protocol/requests.ex) or an [LSP Notification](https://github.com/lexical-lsp/lexical/blob/main/apps/protocol/lib/lexical/protocol/notifications.ex) and then it's handed to the [language server](https://github.com/lexical-lsp/lexical/blob/main/apps/server/lib/lexical/server.ex) to process. + +The only messages the [lexical server process](https://github.com/lexical-lsp/lexical/blob/main/apps/server/lib/lexical/server.ex) handles directly are those related to the lifecycle of the language server itself: -The only messages the [lexical server process](`Lexical.Server`) handles directly are those related to the lifecycle of the language server itself. All other messages are delegated to a _Provider Handler_. This delegation is accomplished by the server process adding the request to the [provider queue](`Lexical.Server.Provider.Queue`). The provider queue asks the `Lexical.Server.Provider.Handlers.for_request/1` function which handler is configured to handle the request, creates a task for the handler and starts it. +- Synchronizing document states. +- Processing LSP configuration changes. +- Performing initialization and shutdown. -A _Provider Handler_ is just a module that defines a function of arity 2 that takes the request to handle and a `Lexical.Server.Provider.Env`. These functions can reply to the request, ignore it, or do some other action. +All other messages are delegated to a _Provider Handler_. This delegation is accomplished by the server process adding the request to the [provider queue](https://github.com/lexical-lsp/lexical/blob/main/apps/server/lib/lexical/server/provider/queue.ex). The provider queue asks the `Lexical.Server.Provider.Handlers.for_request/1` function which handler is configured to handle the request, creates a task for the handler, and starts it. +A _Provider Handler_ is just a module that defines a function of arity 2 that takes the request to handle and a `%Lexical.Server.Configuration{}`. These functions can reply to the request, ignore it, or do some other action. diff --git a/pages/glossary.md b/pages/glossary.md new file mode 100644 index 000000000..52893e21f --- /dev/null +++ b/pages/glossary.md @@ -0,0 +1,86 @@ +# Glossary +This project uses a considerable amount of jargon, some adopted from the Language Server Protocol and some specific to Lexical. + +This glossary attempts to define jargon used in this codebase. +Though it is not exhaustive, we hope it helps contributors more easily navigate and understand existing code and its goals, and that it provides some guidance for naming new things. + +**You can help!** If you run across a new term while working on Lexical and you think it should be defined here, please [open an issue](https://github.com/lexical-lsp/lexical/issues) suggesting it! + +## Language Server Protocol (LSP) + +This section covers features, names, and abstractions used by Lexical that have a correspondence to the Language Server Protocol. For a definitive reference, see the [LSP Specification](https://microsoft.github.io/language-server-protocol/specifications/specification-current). + +### Messages, Requests, Responses, and Notifications + +LSP defines a general hierarchy of the types of messages language servers and clients may exchange, and the expected behaviours associated with them.
+ +There are three top-level types of messages: [Requests](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage), [Responses](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#responseMessage), and [Notifications](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#notificationMessage): + +- [Requests](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage) are sent from client to server and vice versa, and must always be answered with a [Response](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#responseMessage). + +- [Notifications](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#notificationMessage) are likewise bi-directional and work like events. They expressly do not receive responses per LSP's specification. + +From these three top-level types, LSP defines more specific, concrete, actionable messages such as: +- [Completion Requests](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_completion) +- [Goto Definition Requests](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_definition) +- [WillSaveTextDocument Notifications](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_willSave) + +... and many more. These can serve as a good reference for the specific features you're working on. + +Lexical maps these in the modules [`Lexical.Protocol.Requests`](https://github.com/lexical-lsp/lexical/blob/main/apps/protocol/lib/lexical/protocol/requests.ex), [`Lexical.Protocol.Responses`](https://github.com/lexical-lsp/lexical/blob/main/apps/protocol/lib/lexical/protocol/responses.ex), and [`Lexical.Protocol.Notifications`](https://github.com/lexical-lsp/lexical/blob/main/apps/protocol/lib/lexical/protocol/notifications.ex). + +Finally, it's worth noting all messages are JSON, specifically [JSON-RPC version 2.0](https://www.jsonrpc.org/specification). + +### Document(s) + +A single file, identified by a URI, that contains textual content. Formally referred to as [Text Documents](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocuments) in LSP and modeled as [`Lexical.Document`](https://github.com/lexical-lsp/lexical/blob/main/projects/lexical_shared/lib/lexical/document.ex) structs in Lexical. + +### Diagnostics + +Represents a diagnostic, such as a compiler error or warning. Diagnostic objects are only valid in the scope of a resource. + +### Completions and Code Intelligence + +Auto-completion suggestions that appear in an editor's IntelliSense. For example, a user that's typed `IO.in|` may be suggested `IO.inspect(|)` as one of a few possible completions. + +### Code Actions + +A code action represents a change that can be performed in code. In VSCode they typically appear as "quick fixes" next to an error or warning, but they aren't exclusive to that. In fact, VSCode frequently requests available code actions while users are browsing and editing code. + +LSP defines a protocol for language servers to tell clients what actions they're capable of performing, and for clients to request those actions be taken. See, for example, LSP's [CodeActionClientCapabilities interface](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#codeActionClientCapabilities).
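+
+To make these message shapes concrete, here is a hypothetical code action exchange written as Elixir maps (the values are invented; on the wire both messages are JSON-encoded):
+
+```elixir
+# Client -> server: which code actions apply to this range of this document?
+request = %{
+  "jsonrpc" => "2.0",
+  "id" => 1,
+  "method" => "textDocument/codeAction",
+  "params" => %{
+    "textDocument" => %{"uri" => "file:///project/lib/example.ex"},
+    "range" => %{
+      "start" => %{"line" => 4, "character" => 0},
+      "end" => %{"line" => 4, "character" => 10}
+    },
+    "context" => %{"diagnostics" => []}
+  }
+}
+
+# Server -> client: a response must carry the same id as the request.
+response = %{
+  "jsonrpc" => "2.0",
+  "id" => 1,
+  "result" => [%{"title" => "Organize aliases", "kind" => "source.organizeImports"}]
+}
+```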
+ +## Concepts exclusive to Lexical + +This section briefly summarizes abstractions introduced by Lexical. Detailed information can be found in the respective moduledocs. + +### The Project struct + +An Elixir struct that represents the current state of an Elixir project. See `Lexical.Project`. + +### The Convertible protocol + +Some LSP data structures cannot be trivially converted to Elixir terms. + +The `Lexical.Convertible` protocol helps centralize the necessary conversion logic where this is the case. + +### The Transport Behaviour + +A behaviour responsible for reading, writing, serializing, and deserializing messages between the LSP client and the Lexical language server. + +The behaviour is defined in `Lexical.Server.Transport`, with the implementation for stdio in `Lexical.Server.Transport.StdIO`. + +### The Translatable protocol and Translation modules + +The `Lexical.Completion.Translatable` protocol specifies how Elixir language constructs (such as behaviour callbacks) are converted into LSP constructs (such as [completion items](https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionItem)). + +See `Lexical.Server.CodeIntelligence.Completion.Translations` for various implementations. + +### Code Mods + +A variety of modules that change existing code in some way. They take a document, modify it, and return diffs (see the sketch below). + +Examples of code mods include: + * Formatting code in a file (`> Format Document`/`shift`+`alt`+`f` in VSCode). + * Prefixing unused variables with an `_`. + +Code mods are defined in the `remote_control` sub-app and are executed in the project's virtual machine.
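+
+A minimal, hypothetical sketch of the "document in, edits out" contract described above; the module and function names are invented and are not Lexical's actual code mod API:
+
+```elixir
+# Hypothetical code mod: prefix one variable name with an underscore by
+# rewriting the document's source text. A real code mod would operate on
+# parsed source and return precise diffs rather than whole-text replacement.
+defmodule CodeModSketch do
+  def underscore_variable(document_text, variable_name)
+      when is_binary(document_text) and is_binary(variable_name) do
+    String.replace(document_text, variable_name, "_" <> variable_name)
+  end
+end
+```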
diff --git a/pages/installation.md b/pages/installation.md index cad49fb3c..3fae43657 100644 --- a/pages/installation.md +++ b/pages/installation.md @@ -14,12 +14,15 @@ Lexical supports the following versions of Elixir and Erlang: | 24 | `>= 24.3.4.12` | Might run on older versions; this was the lowest that would compile on arm | | 25 | `>= 25.0` | | | 26 | `>= 26.0.2` | | +| 27 | `>= 27.0` | Will use dramatically more memory due to a bug in Erlang's ETS table compression | | Elixir | Version Range | Notes | | -------- | -------------- | -------- | | 1.13 | `>= 1.13.4` | | | 1.14 | `all` | | | 1.15 | `>= 1.15.3` | `1.15.0` - `1.15.2` had compiler bugs that prevented lexical from working | +| 1.16 | `>= 1.16.0` | | +| 1.17 | `>= 1.17.0` | | Lexical can run projects in any version of Elixir and Erlang that it supports, but it's important to understand that Lexical needs to be @@ -34,9 +37,9 @@ Lexical would need to be compiled with Erlang `24.3.2` and Elixir `1.13.3`. Lexical's prepackaged builds use Erlang `24.3.4.12` and Elixir `1.13.4` ## Prerequisites +First, install git LFS by [following these instructions](https://docs.github.com/en/repositories/working-with-files/managing-large-files/installing-git-large-file-storage). -The first step in getting Lexical running locally is to clone the git -repository. Do this with +Then, clone the git repository. Do this with: ```sh git clone git@github.com:lexical-lsp/lexical.git @@ -72,8 +75,11 @@ source code is `/my/home/projects/lexical`. 2. [Vanilla Emacs with eglot](#vanilla-emacs-with-eglot) 3. [Visual Studio Code](#visual-studio-code) 4. [neovim](#neovim) -5. [Vim + Vim-LSP](#vim--vim-lsp) -6. [Helix](#helix) +5. [LunarVim](#lunarvim) +6. [Vim + ALE](#vim--ale) +7. [Vim + Vim-LSP](#vim--vim-lsp) +8. [Helix](#helix) +9. [Sublime Text](#sublime-text) ### Vanilla Emacs with lsp-mode The emacs instructions assume you're using `use-package`, which you @@ -170,42 +176,67 @@ of whether you are using mason or others, you can use this configuration below as a reference: ```lua - local lspconfig = require("lspconfig") - local configs = require("lspconfig.configs") - - local lexical_config = { - filetypes = { "elixir", "eelixir", }, - cmd = { "/my/home/projects/_build/dev/package/lexical/bin/start_lexical.sh" }, - settings = {}, - } - - if not configs.lexical then - configs.lexical = { - default_config = { - filetypes = lexical_config.filetypes, - cmd = lexical_config.cmd, - root_dir = function(fname) - return lspconfig.util.root_pattern("mix.exs", ".git")(fname) or vim.loop.os_homedir() - end, - -- optional settings - settings = lexical_config.settings, - }, - } - end - - lspconfig.lexical.setup({}) +require('lspconfig').lexical.setup { + cmd = { "my/home/projects/_build/dev/package/lexical/bin/start_lexical.sh" }, + root_dir = function(fname) + return util.root_pattern("mix.exs", ".git")(fname) or vim.loop.cwd() + end, + filetypes = { "elixir", "eelixir", "heex" }, + -- optional settings + settings = {} +} ``` If the configuration above doesn't work for you, please try this minimal [neovim configuration](https://github.com/scottming/nvim-mini-for-lexical); it can help rule out other plugins as the cause. +### LunarVim + +[LunarVim](https://www.lunarvim.org) is a neovim configuration package with a lot of goodies built-in, while remaining very configurable. + +First, add this to your configuration: + +```lua +-- Add `elixirls` to `skipped_servers` list +vim.list_extend(lvim.lsp.automatic_configuration.skipped_servers, { "elixirls" }) + +-- Remove `lexical` from `skipped_servers` list +lvim.lsp.automatic_configuration.skipped_servers = vim.tbl_filter(function(server) + return server ~= "lexical" +end, lvim.lsp.automatic_configuration.skipped_servers) +``` + +This is necessary because LunarVim defaults to `elixirls`, so we must ignore it first. Otherwise you'll have both `lexical` and `elixirls` running when you open Elixir files. + +Remove `elixirls` from the `lvim.lsp.installer.setup.ensure_installed = { ... }` list so it does not get automatically reinstalled. + +Optionally run `:LspUninstall elixirls` from within neovim if you don't want to keep `elixirls` around. + +Then use the same configuration as the one in the [neovim](#neovim) section. + +### Vim + ALE + +[ALE](https://github.com/dense-analysis/ale) includes built-in LSP support for Lexical. +To enable it, you'll need to tell ALE where your Lexical release is located (including +the `bin` directory) and add `lexical` to the list of enabled Elixir linters. + +A good way to do this is to add the following to a `~/.vim/after/ftplugin/elixir.vim` +file: + +```viml +let b:ale_linters = ['lexical', 'mix'] +let b:ale_elixir_lexical_release = '/my/home/projects/_build/dev/package/lexical/bin' +``` + +That will automatically enable the `lexical` and `mix` linters for all buffers with +the `elixir` file type. + ### Vim + Vim-LSP An example of configuring Lexical as the Elixir language server for [Vim-LSP](https://github.com/prabirshrestha/vim-lsp). Uses the newer vim9script syntax but can be converted to Vim 8, etc. (`:h vim9script`).
-``` -vim9script +```vim9script # Loading vim-lsp with minpac: call minpac#add("prabirshrestha/vim-lsp") @@ -230,7 +261,7 @@ endif If you use [Vim-LSP-Settings](https://github.com/mattn/vim-lsp-settings) for installing and configuring language servers, you can use the following flag to disable prompts to install elixir-ls: -``` +```viml g:lsp_settings_filetype_elixir = ["lexical"] ``` @@ -257,3 +288,37 @@ language-servers = ["lexical"] name = "heex" language-servers = ["lexical"] ``` + +### Sublime Text + +#### Background + +Lexical can be used with Sublime Text via the [LSP-Sublime](https://lsp.sublimetext.io/) package, which integrates Language Servers with Sublime Text. If you don't have the LSP-Sublime package installed already, [install it with Package Control](https://packagecontrol.io/packages/LSP). + +There is currently no [language server package](https://lsp.sublimetext.io/language_servers/) specifically for Lexical that works with LSP-Sublime, so we'll need to create a [custom client configuration](https://lsp.sublimetext.io/client_configuration/). + +#### Installation + +First, ensure that you have Lexical [installed from source](https://github.com/lexical-lsp/lexical/blob/main/pages/installation.md#prerequisites) correctly, then print the full path of the directory holding the Lexical executables: + +`cd {directory_you_cloned_lexical_to}/_build/dev/package/lexical/bin/ && pwd` + +Then, install LSP-Sublime with Package Control if you haven't already. + +Next, open up the LSP settings in Sublime. You can do this by invoking the command palette (`ctrl/cmd + shift + p`) and selecting `Preferences: LSP Settings`. + +You'll need to add a key called `"clients"` to the top-level `LSP.sublime-settings` JSON dictionary, as follows: + +```json +"clients": { + "elixir-lexical": { + "enabled": true, + "command": ["{output_from_pwd_cmd_above}/start_lexical.sh", ""], + "selector": "source.elixir" + } +} +``` +_note: you can name `elixir-lexical` whatever you like; it's just for your own identification_ + +Upon saving the configuration, LSP-Sublime should enable the new `elixir-lexical` LSP server. Go into an Elixir file and you should now see `elixir-lexical` in the lower left of the status bar. If not, invoke the command palette and select `LSP: Enable Language Server Globally/In Project` and it should run.
diff --git a/projects/lexical_credo/mix.exs b/projects/lexical_credo/mix.exs index b99ca626d..a1e1979ab 100644 --- a/projects/lexical_credo/mix.exs +++ b/projects/lexical_credo/mix.exs @@ -2,7 +2,7 @@ defmodule LexicalCredo.MixProject do use Mix.Project @repo_url "https://github.com/lexical-lsp/lexical/" - @version "0.1.1" + @version "0.5.0" def project do [ @@ -28,7 +28,7 @@ defmodule LexicalCredo.MixProject do defp deps do [ env_dep( - hex: {:lexical_plugin, "> 0.0.0"}, + hex: {:lexical_plugin, "~> 0.5"}, else: {:lexical_plugin, path: "../lexical_plugin"} ), {:credo, "> 0.0.0", optional: true}, diff --git a/projects/lexical_credo/mix.lock b/projects/lexical_credo/mix.lock index 0e4695fb0..7b4d6f55e 100644 --- a/projects/lexical_credo/mix.lock +++ b/projects/lexical_credo/mix.lock @@ -5,7 +5,8 @@ "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "jason": {:hex, :jason, "1.4.0", "e855647bc964a44e2f67df589ccf49105ae039d4179db7f6271dfd3843dc27e6", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "79a3791085b2a0f743ca04cec0f7be26443738779d09302e01318f97bdb82121"}, - "lexical_shared": {:hex, :lexical_shared, "0.0.1", "64e5c509cf1788f594a283416e24f5802b2594dfa38ae3004aad98e0ab428338", [:mix], [], "hexpm", "3d9ba37a32155c22694568a68e4769fca9e6ca95727a3297751a41e34db1b950"}, + "lexical_plugin": {:hex, :lexical_plugin, "0.5.0", "c21cb9cd244eb35e9e64758cfda3176ba38879697383a238f24bb4aae60d7c94", [:mix], [{:lexical_shared, "~> 0.5", [hex: :lexical_shared, repo: "hexpm", optional: false]}], "hexpm", "274abcd8b0316e01e6d70435f6ca1c74cf688c5a878a4164b09705cc74583f3d"}, + "lexical_shared": {:hex, :lexical_shared, "0.5.0", "7a55c7080fcc933689ee4a5ea5f20406129645499ffb6a378d2a8adda28e5cdf", [:mix], [], "hexpm", "c100e582e2d3b4d7dbe9658f9ea4ef0e17a3d64d01b37e3797e366ed641612e2"}, "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"}, diff --git a/projects/lexical_plugin/.dialyzer_ignore.exs b/projects/lexical_plugin/.dialyzer_ignore.exs new file mode 100644 index 000000000..e69de29bb diff --git 
a/projects/lexical_plugin/mix.exs b/projects/lexical_plugin/mix.exs index 2f2d2d154..6b2baaebb 100644 --- a/projects/lexical_plugin/mix.exs +++ b/projects/lexical_plugin/mix.exs @@ -1,7 +1,7 @@ defmodule Lexical.Plugin.MixProject do use Mix.Project - @version "0.0.2" + @version "0.5.0" def project do [ app: :lexical_plugin, @@ -20,7 +20,7 @@ defmodule Lexical.Plugin.MixProject do defp deps do [ env_dep( - hex: {:lexical_shared, "> 0.0.0"}, + hex: {:lexical_shared, "~> 0.5"}, else: {:lexical_shared, path: "../lexical_shared"} ), env_dep( diff --git a/projects/lexical_plugin/mix.lock b/projects/lexical_plugin/mix.lock index e4db6a61b..7faa8c5b8 100644 --- a/projects/lexical_plugin/mix.lock +++ b/projects/lexical_plugin/mix.lock @@ -1,8 +1,9 @@ %{ - "dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"}, + "dialyxir": {:hex, :dialyxir, "1.4.4", "fb3ce8741edeaea59c9ae84d5cec75da00fa89fe401c72d6e047d11a61f65f70", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "cd6111e8017ccd563e65621a4d9a4a1c5cd333df30cebc7face8029cacb4eff6"}, "earmark_parser": {:hex, :earmark_parser, "1.4.32", "fa739a0ecfa34493de19426681b23f6814573faee95dfd4b4aafe15a7b5b32c6", [:mix], [], "hexpm", "b8b0dd77d60373e77a3d7e8afa598f325e49e8663a51bcc2b88ef41838cca755"}, - "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, + "lexical_shared": {:hex, :lexical_shared, "0.5.0", "7a55c7080fcc933689ee4a5ea5f20406129645499ffb6a378d2a8adda28e5cdf", [:mix], [], "hexpm", "c100e582e2d3b4d7dbe9658f9ea4ef0e17a3d64d01b37e3797e366ed641612e2"}, "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, "makeup_erlang": {:hex, :makeup_erlang, "0.1.2", "ad87296a092a46e03b7e9b0be7631ddcf64c790fa68a9ef5323b6cbb36affc72", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "f3f5a1ca93ce6e092d92b6d9c049bcda58a3b617a8d888f8e7231c85630e8108"}, diff --git a/projects/lexical_shared/.dialyzer_ignore.exs 
b/projects/lexical_shared/.dialyzer_ignore.exs new file mode 100644 index 000000000..e69de29bb diff --git a/projects/lexical_shared/lib/lexical/debug.ex b/projects/lexical_shared/lib/lexical/debug.ex new file mode 100644 index 000000000..0e09f8a28 --- /dev/null +++ b/projects/lexical_shared/lib/lexical/debug.ex @@ -0,0 +1,32 @@ +defmodule Lexical.Logging do + require Logger + + defmacro timed(label, do: block) do + if enabled?() do + quote do + timed_log(unquote(label), fn -> unquote(block) end) + end + else + block + end + end + + def timed_log(label, threshold_ms \\ 1, function) when is_function(function, 0) do + if enabled?() do + {elapsed_us, result} = :timer.tc(function) + elapsed_ms = elapsed_us / 1000 + + if elapsed_ms >= threshold_ms do + Logger.info("#{label} took #{Lexical.Formats.time(elapsed_us)}") + end + + result + else + function.() + end + end + + defp enabled? do + true + end +end
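For orientation, a usage sketch of the timing helper added above; the label and the `do_expensive_work/0` function are invented for the example:

```elixir
# Runs the zero-arity function, logging "indexing took ..." only when the
# work takes at least 5 ms; the function's return value is passed through.
result =
  Lexical.Logging.timed_log("indexing", 5, fn ->
    do_expensive_work()
  end)
```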
""" @spec new(Lexical.path() | Lexical.uri(), String.t(), version()) :: t - def new(maybe_uri, text, version) do + def new(maybe_uri, text, version, language_id \\ nil) do uri = DocumentPath.ensure_uri(maybe_uri) + path = DocumentPath.from_uri(uri) + + language_id = + if String.ends_with?(path, ".exs") do + "elixir-script" + else + language_id || language_id_from_path(path) + end %__MODULE__{ uri: uri, version: version, lines: Lines.new(text), - path: DocumentPath.from_uri(uri) + path: path, + language_id: language_id } end @@ -219,6 +231,28 @@ defmodule Lexical.Document do |> IO.iodata_to_binary() end + @spec language_id_from_path(Lexical.path()) :: String.t() + defp language_id_from_path(path) do + case Path.extname(path) do + ".ex" -> + "elixir" + + ".exs" -> + "elixir-script" + + ".eex" -> + "eex" + + ".heex" -> + "phoenix-heex" + + extension -> + Logger.warning("can't infer lang ID for #{path}, ext: #{extension}.") + + "unsupported (#{extension})" + end + end + # private defp line_count(%__MODULE__{} = document) do diff --git a/projects/lexical_shared/lib/lexical/document/location.ex b/projects/lexical_shared/lib/lexical/document/location.ex index afbcb0a31..6db3d322d 100644 --- a/projects/lexical_shared/lib/lexical/document/location.ex +++ b/projects/lexical_shared/lib/lexical/document/location.ex @@ -18,9 +18,9 @@ defmodule Lexical.Document.Location do } use Lexical.StructAccess - @spec new(Range.t(), Document.t()) :: t() + @spec new(Range.t(), Document.t() | String.t()) :: t() def new(%Range{} = range, %Document{} = document) do - %__MODULE__{range: range, document: document} + %__MODULE__{range: range, document: document, uri: document.uri} end def new(%Range{} = range, uri) when is_binary(uri) do diff --git a/projects/lexical_shared/lib/lexical/document/path.ex b/projects/lexical_shared/lib/lexical/document/path.ex index e8d5d3004..0412bb260 100644 --- a/projects/lexical_shared/lib/lexical/document/path.ex +++ b/projects/lexical_shared/lib/lexical/document/path.ex @@ -5,20 +5,24 @@ defmodule Lexical.Document.Path do @file_scheme "file" @type uri_or_path :: Lexical.uri() | Lexical.path() + @doc """ Given a uri or a path, either return the uri unmodified or converts the path to a uri """ + @spec ensure_uri(uri_or_path()) :: Lexical.uri() def ensure_uri("file://" <> _ = uri), do: uri - def ensure_uri(path), do: to_uri(path) + def ensure_uri("untitled:" <> _ = uri), do: uri - def ensure_path("file://" <> _ = uri), do: from_uri(uri) + def ensure_uri(path), do: to_uri(path) @doc """ Given a uri or a path, either return the path unmodified or converts the uri to a path """ - @spec ensure_uri(uri_or_path()) :: Lexical.uri() - def ensure_path(path), do: path + @spec ensure_path(uri_or_path()) :: Lexical.path() + def ensure_path("file://" <> _ = uri), do: from_uri(uri) + + def ensure_path(path) when is_binary(path), do: path @doc """ Returns path from URI in a way that handles windows file:///c%3A/... URLs correctly @@ -50,8 +54,13 @@ defmodule Lexical.Document.Path do convert_separators_to_native(path) end + # `untitled:` URIs are used for unsaved files in vscode. 
+ def from_uri(%URI{scheme: "untitled"} = uri) when uri.path !== nil do + URI.to_string(uri) + end + def from_uri(%URI{scheme: scheme}) do - raise ArgumentError, message: "unexpected URI scheme #{inspect(scheme)}" + raise ArgumentError, message: "unsupported URI scheme #{inspect(scheme)}" end def from_uri(uri) do @@ -59,11 +68,7 @@ end def absolute_from_uri(uri) do - uri |> from_uri |> Path.absname() end - - def to_uri("file://" <> _path = uri) do - uri + uri |> from_uri() |> Path.absname() end @doc """ diff --git a/projects/lexical_shared/lib/lexical/document/position.ex b/projects/lexical_shared/lib/lexical/document/position.ex index 5a47ce1f1..86c9990ee 100644 --- a/projects/lexical_shared/lib/lexical/document/position.ex +++ b/projects/lexical_shared/lib/lexical/document/position.ex @@ -70,4 +70,48 @@ defmodule Lexical.Document.Position do } end end + + @doc """ + Compares two positions. + + Returns `:gt`, `:lt`, or `:eq` depending on the location of the first + position relative to the second. + """ + @spec compare(t | {line, character}, t | {line, character}) :: :lt | :eq | :gt + def compare(%__MODULE__{} = pos1, %__MODULE__{} = pos2) do + compare({pos1.line, pos1.character}, {pos2.line, pos2.character}) + end + + def compare(%__MODULE__{} = pos1, {_, _} = pos2) do + compare({pos1.line, pos1.character}, pos2) + end + + def compare({_, _} = pos1, %__MODULE__{} = pos2) do + compare(pos1, {pos2.line, pos2.character}) + end + + def compare({l1, c1} = first, {l2, c2} = second) + when is_integer(l1) and is_integer(c1) and is_integer(l2) and is_integer(c2) do + cond do + first < second -> :lt + first > second -> :gt + true -> :eq + end + end +end + +defimpl Inspect, for: Lexical.Document.Position do + import Inspect.Algebra + + def inspect(nil, _), do: "nil" + + def inspect(pos, _) do + concat(["LxPos", to_string(pos)]) + end +end + +defimpl String.Chars, for: Lexical.Document.Position do + def to_string(pos) do + "<<#{pos.line}, #{pos.character}>>" + end end diff --git a/projects/lexical_shared/lib/lexical/document/range.ex b/projects/lexical_shared/lib/lexical/document/range.ex index b704966e2..e3ee41194 100644 --- a/projects/lexical_shared/lib/lexical/document/range.ex +++ b/projects/lexical_shared/lib/lexical/document/range.ex @@ -2,16 +2,18 @@ defmodule Lexical.Document.Range do @moduledoc """ A range in a document - A range consists of a starting and ending position and includes all text in between. + Note that a range's boundaries are cursor positions: the range includes + its start position but excludes its end position. - Note: To select an entire line, construct a range that runs from the first character on the line - to the first character on the next line. + Note: To select an entire line, construct a range that runs from the + first character on the line to the first character on the next line. ``` - whole_line = Range.new( - Position.new(1, 1), - Position.new(2, 1) - ) + whole_line = + Range.new( + Position.new(doc, 1, 1), + Position.new(doc, 2, 1) + ) ``` """ alias Lexical.Document.Position @@ -25,7 +27,39 @@ defmodule Lexical.Document.Range do use Lexical.StructAccess + @doc """ + Builds a new range. + """ def new(%Position{} = start_pos, %Position{} = end_pos) do %__MODULE__{start: start_pos, end: end_pos} end + + @doc """ + Returns whether the range contains the given position.
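+
+  For example (an illustrative sketch; `doc` stands for any open document):
+
+      range = Range.new(Position.new(doc, 1, 1), Position.new(doc, 2, 1))
+
+      Range.contains?(range, Position.new(doc, 1, 1))
+      #=> true, the start position is included
+
+      Range.contains?(range, Position.new(doc, 2, 1))
+      #=> false, the end position is excluded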
+ """ + def contains?(%__MODULE__{} = range, %Position{} = position) do + %__MODULE__{start: start_pos, end: end_pos} = range + + cond do + position.line == start_pos.line and position.line == end_pos.line -> + position.character >= start_pos.character and position.character <= end_pos.character + + position.line == start_pos.line -> + position.character >= start_pos.character + + position.line == end_pos.line -> + position.character < end_pos.character + + true -> + position.line > start_pos.line and position.line < end_pos.line + end + end +end + +defimpl Inspect, for: Lexical.Document.Range do + import Inspect.Algebra + + def inspect(range, _) do + concat(["LxRange[", to_string(range.start), "...", to_string(range.end), "]"]) + end end diff --git a/projects/lexical_shared/lib/lexical/document/store.ex b/projects/lexical_shared/lib/lexical/document/store.ex index 849ecdd3d..d305ba35d 100644 --- a/projects/lexical_shared/lib/lexical/document/store.ex +++ b/projects/lexical_shared/lib/lexical/document/store.ex @@ -1,97 +1,157 @@ defmodule Lexical.Document.Store do @moduledoc """ - A backing store for source file documents - - This implementation stores documents in ETS, and partitions read and write operations. Read operations are served - immediately by querying the ETS table, while writes go through a GenServer process (which is the owner of the ETS table). + Backing store for source file documents. """ + alias Lexical.Document + alias Lexical.ProcessCache + + use GenServer + + @type updater :: (Document.t() -> {:ok, Document.t()} | {:error, any()}) + + @type derivations :: [derivation] + @type derivation :: {derivation_key, derivation_fun} + @type derivation_key :: atom() + @type derivation_fun :: (Document.t() -> derived_value) + @type derived_value :: any() + + @type start_opts :: [start_opt] + @type start_opt :: {:derive, derivations} + defmodule State do @moduledoc false + alias Lexical.Document + alias Lexical.Document.Store + require Logger - defstruct temporary_open_refs: %{} + import Record + + defstruct open: %{}, temporary_open_refs: %{}, derivation_funs: %{} + @type t :: %__MODULE__{} - @table_name Document.Store + defrecord :open_doc, document: nil, derived: %{} + + def new(opts \\ []) do + {derivation_funs, invalid} = + opts + |> Keyword.validate!(derive: []) + |> Keyword.fetch!(:derive) + |> Enum.split_with(fn + {atom, fun} when is_atom(atom) and is_function(fun, 1) -> true + _ -> false + end) - def new do - :ets.new(@table_name, [:named_table, :set, :protected, read_concurrency: true]) + if invalid != [] do + raise ArgumentError, "invalid derive: #{inspect(invalid)}" + end - %__MODULE__{} + %__MODULE__{derivation_funs: Map.new(derivation_funs)} end - @spec fetch(Lexical.uri()) :: {:ok, Document.t()} | {:error, :not_open} - def fetch(uri) do - case ets_fetch(uri, :any) do - {:ok, _} = success -> success - :error -> {:error, :not_open} + @spec fetch(t, Lexical.uri()) :: {:ok, Document.t(), t} | {:error, :not_open} + def fetch(%__MODULE__{} = store, uri) do + case store.open do + %{^uri => open_doc(document: document)} -> {:ok, document, store} + _ -> {:error, :not_open} + end + end + + @spec fetch(t, Lexical.uri(), Store.derivation_key()) :: + {:ok, Document.t(), Store.derived_value(), t} | {:error, :not_open} + def fetch(%__MODULE__{} = store, uri, key) do + case store.open do + %{^uri => open_doc(document: document, derived: %{^key => derivation})} -> + {:ok, document, derivation, store} + + %{^uri => open_doc(document: document, derived: derived)} -> + derivation = 
derive(store, key, document) + derived = Map.put(derived, key, derivation) + store = put_open_doc(store, document, derived) + {:ok, document, derivation, store} + + _ -> + {:error, :not_open} end end - @spec save(t, Lexical.uri()) :: {:ok, t()} | {:error, :not_open} + @spec save(t, Lexical.uri()) :: {:ok, t} | {:error, :not_open} def save(%__MODULE__{} = store, uri) do - case ets_fetch(uri, :sources) do - {:ok, document} -> + case store.open do + %{^uri => open_doc(document: document, derived: derived)} -> document = Document.mark_clean(document) - ets_put(uri, :sources, document) + store = put_open_doc(store, document, derived) {:ok, store} - :error -> + _ -> {:error, :not_open} end end - @spec open(t, Lexical.uri(), String.t(), pos_integer()) :: {:ok, t} | {:error, :already_open} - def open(%__MODULE__{} = store, uri, text, version) do - case ets_fetch(uri, :sources) do - {:ok, _} -> + @spec open(t, Lexical.uri(), String.t(), pos_integer(), String.t()) :: + {:ok, t} | {:error, :already_open} + def open(%__MODULE__{temporary_open_refs: refs} = store, uri, text, version, language_id) + when is_map_key(refs, uri) do + {_, store} = + store + |> maybe_cancel_ref(uri) + |> pop_open_doc(uri) + + open(store, uri, text, version, language_id) + end + + def open(%__MODULE__{} = store, uri, text, version, language_id) do + case store.open do + %{^uri => _} -> {:error, :already_open} - :error -> - document = Document.new(uri, text, version) - ets_put(uri, :sources, document) + _ -> + document = Document.new(uri, text, version, language_id) + store = put_open_doc(store, document) {:ok, store} end end - @spec open?(Lexical.uri()) :: boolean - def open?(uri) do - ets_has_key?(uri, :any) + @spec open?(t, Lexical.uri()) :: boolean + def open?(%__MODULE__{} = store, uri) do + Map.has_key?(store.open, uri) end - @spec close(t(), Lexical.uri()) :: {:ok, t()} | {:error, :not_open} + @spec close(t, Lexical.uri()) :: {:ok, t} | {:error, :not_open} def close(%__MODULE__{} = store, uri) do - case ets_pop(uri, :sources) do - nil -> + case pop_open_doc(store, uri) do + {nil, _} -> {:error, :not_open} - _document -> - {:ok, store} + {_, store} -> + {:ok, maybe_cancel_ref(store, uri)} end end + @spec get_and_update(t, Lexical.uri(), Store.updater()) :: + {:ok, Document.t(), t} | {:error, any()} def get_and_update(%__MODULE__{} = store, uri, updater_fn) do - with {:ok, document} <- fetch(uri), - {:ok, updated_source} <- updater_fn.(document) do - ets_put(uri, :sources, updated_source) - - {:ok, updated_source, store} + with {:ok, open_doc(document: document)} <- Map.fetch(store.open, uri), + {:ok, document} <- updater_fn.(document) do + {:ok, document, put_open_doc(store, document)} else error -> normalize_error(error) end end + @spec update(t, Lexical.uri(), Store.updater()) :: {:ok, t} | {:error, any()} def update(%__MODULE__{} = store, uri, updater_fn) do - with {:ok, _, new_store} <- get_and_update(store, uri, updater_fn) do - {:ok, new_store} + with {:ok, _, store} <- get_and_update(store, uri, updater_fn) do + {:ok, store} end end - @spec open_temporarily(t(), Lexical.uri() | Path.t(), timeout()) :: - {:ok, Document.t(), t()} | {:error, term()} + @spec open_temporarily(t, Lexical.uri() | Path.t(), timeout()) :: + {:ok, Document.t(), t} | {:error, term()} def open_temporarily(%__MODULE__{} = store, path_or_uri, timeout) do uri = Document.Path.ensure_uri(path_or_uri) path = Document.Path.ensure_path(path_or_uri) @@ -100,115 +160,93 @@ defmodule Lexical.Document.Store do document = Document.new(uri, contents, 0) 
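+        # Temporary documents start at version 0; schedule_unload/2 below sends
+        # {:unload, uri} to the owning GenServer after `timeout`, and its
+        # handle_info/2 clause then evicts the document from the store.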
ref = schedule_unload(uri, timeout) - new_refs = + new_store = store - |> maybe_cancel_old_ref(uri) - |> Map.put(uri, ref) - - ets_put(uri, :temp, document) - new_store = %__MODULE__{store | temporary_open_refs: new_refs} + |> maybe_cancel_ref(uri) + |> put_ref(uri, ref) + |> put_open_doc(document) {:ok, document, new_store} end end + @spec extend_timeout(t, Lexical.uri(), timeout()) :: t def extend_timeout(%__MODULE__{} = store, uri, timeout) do case store.temporary_open_refs do %{^uri => ref} -> Process.cancel_timer(ref) new_ref = schedule_unload(uri, timeout) - new_open_refs = Map.put(store.temporary_open_refs, uri, new_ref) - %__MODULE__{store | temporary_open_refs: new_open_refs} + put_ref(store, uri, new_ref) _ -> store end end + @spec unload(t, Lexical.uri()) :: t def unload(%__MODULE__{} = store, uri) do - new_refs = Map.delete(store.temporary_open_refs, uri) - ets_delete(uri, :temp) - %__MODULE__{store | temporary_open_refs: new_refs} + {_, store} = pop_open_doc(store, uri) + maybe_cancel_ref(store, uri) end - defp maybe_cancel_old_ref(%__MODULE__{} = store, uri) do - {_, new_refs} = - Map.get_and_update(store.temporary_open_refs, uri, fn - nil -> - :pop - - old_ref when is_reference(old_ref) -> - Process.cancel_timer(old_ref) - :pop - end) - - new_refs - end - - defp schedule_unload(uri, timeout) do - Process.send_after(self(), {:unload, uri}, timeout) + defp put_open_doc(%__MODULE__{} = store, %Document{} = document, derived \\ %{}) do + put_in(store.open[document.uri], open_doc(document: document, derived: derived)) end - defp normalize_error(:error), do: {:error, :not_open} - defp normalize_error(e), do: e - - @read_types [:sources, :temp, :any] - @write_types [:sources, :temp] - defp ets_fetch(key, type) when type in @read_types do - case :ets.match(@table_name, {key, type_selector(type), :"$1"}) do - [[value]] -> {:ok, value} - _ -> :error + defp pop_open_doc(%__MODULE__{} = store, uri) do + case Map.pop(store.open, uri) do + {open_doc() = doc, open} -> {doc, %__MODULE__{store | open: open}} + {nil, _} -> {nil, store} end end - defp ets_put(key, type, value) when type in @write_types do - :ets.insert(@table_name, {key, type, value}) - :ok + defp put_ref(%__MODULE__{} = store, uri, ref) do + put_in(store.temporary_open_refs[uri], ref) end - defp ets_has_key?(key, type) when type in @read_types do - match_spec = {key, type_selector(type), :"$1"} - - case :ets.match(@table_name, match_spec) do - [] -> false - _ -> true - end - end + defp maybe_cancel_ref(%__MODULE__{} = store, uri) do + case pop_in(store.temporary_open_refs[uri]) do + {ref, store} when is_reference(ref) -> + Process.cancel_timer(ref) + store - defp ets_pop(key, type) when type in @write_types do - with {:ok, value} <- ets_fetch(key, type), - :ok <- ets_delete(key, type) do - value - else _ -> - nil + store end end - defp ets_delete(key, type) when type in @write_types do - match_spec = {key, type, :_} - :ets.match_delete(@table_name, match_spec) - :ok + defp schedule_unload(uri, timeout) do + Process.send_after(self(), {:unload, uri}, timeout) end - defp type_selector(:any), do: :_ - defp type_selector(type), do: type - end - - alias Lexical.Document - alias Lexical.ProcessCache + defp normalize_error(:error), do: {:error, :not_open} + defp normalize_error(e), do: e - @type t :: %State{} + defp derive(%__MODULE__{} = store, key, document) do + case store.derivation_funs do + %{^key => fun} -> + fun.(document) - @type updater :: (Document.t() -> {:ok, Document.t()} | {:error, any()}) + _ -> + known = 
Map.keys(store.derivation_funs) - use GenServer + raise ArgumentError, + "No derivation for #{inspect(key)}, expected one of #{inspect(known)}" + end + end + end @spec fetch(Lexical.uri()) :: {:ok, Document.t()} | {:error, :not_open} def fetch(uri) do GenServer.call(name(), {:fetch, uri}) end + @spec fetch(Lexical.uri(), derivation_key) :: + {:ok, Document.t(), derived_value} | {:error, :not_open} + def fetch(uri, key) do + GenServer.call(name(), {:fetch, uri, key}) + end + @spec save(Lexical.uri()) :: :ok | {:error, :not_open} def save(uri) do GenServer.call(name(), {:save, uri}) @@ -216,12 +254,13 @@ defmodule Lexical.Document.Store do @spec open?(Lexical.uri()) :: boolean() def open?(uri) do - State.open?(uri) + GenServer.call(name(), {:open?, uri}) end - @spec open(Lexical.uri(), String.t(), pos_integer()) :: :ok | {:error, :already_open} - def open(uri, text, version) do - GenServer.call(name(), {:open, uri, text, version}) + @spec open(Lexical.uri(), String.t(), pos_integer(), String.t() | nil) :: + :ok | {:error, :already_open} + def open(uri, text, version, language_id \\ nil) do + GenServer.call(name(), {:open, uri, text, version, language_id}) end @spec open_temporary(Lexical.uri() | Path.t()) :: @@ -229,7 +268,7 @@ {:ok, Document.t()} | {:error, term()} @spec open_temporary(Lexical.uri() | Path.t(), timeout()) :: {:ok, Document.t()} | {:error, term()} - def open_temporary(uri, timeout \\ 5000) do + def open_temporary(uri, timeout \\ 5000) when is_binary(uri) do ProcessCache.trans(uri, 50, fn -> GenServer.call(name(), {:open_temporarily, uri, timeout}) end) @@ -240,24 +279,27 @@ GenServer.call(name(), {:close, uri}) end - @spec get_and_update(Lexical.uri(), updater()) :: {:ok, Document.t()} | {:error, any()} + @spec get_and_update(Lexical.uri(), updater) :: {:ok, Document.t()} | {:error, any()} def get_and_update(uri, update_fn) do GenServer.call(name(), {:get_and_update, uri, update_fn}) end - @spec update(Lexical.uri(), updater()) :: :ok | {:error, any()} + @spec update(Lexical.uri(), updater) :: :ok | {:error, any()} def update(uri, update_fn) do GenServer.call(name(), {:update, uri, update_fn}) end - def start_link(_) do - GenServer.start_link(__MODULE__, [], name: name()) + @spec start_link(start_opts) :: GenServer.on_start() + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: name()) end - def init(_) do - {:ok, State.new()} + @impl GenServer + def init(opts) do + {:ok, State.new(opts)} end + @impl GenServer def handle_call({:save, uri}, _from, %State{} = state) do {reply, new_state} = case State.save(state, uri) do @@ -268,9 +310,9 @@ {:reply, reply, new_state} end - def handle_call({:open, uri, text, version}, _from, %State{} = state) do + def handle_call({:open, uri, text, version, language_id}, _from, %State{} = state) do {reply, new_state} = - case State.open(state, uri, text, version) do + case State.open(state, uri, text, version, language_id) do {:ok, _} = success -> success error -> {error, state} end @@ -278,15 +320,19 @@ {:reply, reply, new_state} end + def handle_call({:open?, uri}, _from, %State{} = state) do + reply = State.open?(state, uri) + {:reply, reply, state} + end + def handle_call({:open_temporarily, uri, timeout_ms}, _, %State{} = state) do {reply, new_state} = - with {:error, :not_open} <- State.fetch(uri), + with {:error, :not_open} <- State.fetch(state, uri), {:ok, document, new_state} <- State.open_temporarily(state, uri,
timeout_ms) do {{:ok, document}, new_state} else - {:ok, document} -> - new_state = State.extend_timeout(state, uri, timeout_ms) - {{:ok, document}, new_state} + {:ok, document, new_state} -> + {{:ok, document}, State.extend_timeout(new_state, uri, timeout_ms)} error -> {error, state} @@ -296,14 +342,29 @@ defmodule Lexical.Document.Store do end def handle_call({:fetch, uri}, _from, %State{} = state) do - reply = State.fetch(uri) - {:reply, reply, state} + {reply, new_state} = + case State.fetch(state, uri) do + {:ok, value, new_state} -> {{:ok, value}, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} + end + + def handle_call({:fetch, uri, key}, _from, %State{} = state) do + {reply, new_state} = + case State.fetch(state, uri, key) do + {:ok, value, derived_value, new_state} -> {{:ok, value, derived_value}, new_state} + error -> {error, state} + end + + {:reply, reply, new_state} end def handle_call({:close, uri}, _from, %State{} = state) do {reply, new_state} = case State.close(state, uri) do - {:ok, _} = success -> success + {:ok, new_state} -> {:ok, new_state} error -> {error, state} end @@ -323,18 +384,40 @@ defmodule Lexical.Document.Store do def handle_call({:update, uri, updater_fn}, _, %State{} = state) do {reply, new_state} = case State.update(state, uri, updater_fn) do - {:ok, _} = success -> success + {:ok, new_state} -> {:ok, new_state} error -> {error, state} end {:reply, reply, new_state} end + @impl GenServer def handle_info({:unload, uri}, %State{} = state) do {:noreply, State.unload(state, uri)} end + def set_entropy(entropy) do + :persistent_term.put(entropy_key(), entropy) + entropy + end + + def entropy do + case :persistent_term.get(entropy_key(), :undefined) do + :undefined -> + [:positive] + |> System.unique_integer() + |> set_entropy() + + entropy -> + entropy + end + end + def name do - {:via, :global, __MODULE__} + {:via, :global, {__MODULE__, entropy()}} + end + + defp entropy_key do + {__MODULE__, :entropy} end end diff --git a/projects/lexical_shared/lib/lexical/formats.ex b/projects/lexical_shared/lib/lexical/formats.ex index 752a15a9a..f4fc6c9ba 100644 --- a/projects/lexical_shared/lib/lexical/formats.ex +++ b/projects/lexical_shared/lib/lexical/formats.ex @@ -68,9 +68,10 @@ defmodule Lexical.Formats do string_name = Atom.to_string(module_name) if String.contains?(string_name, ".") do - module_name - |> Module.split() - |> Enum.join(".") + case string_name do + "Elixir." <> rest -> rest + other -> other + end else # erlang module_name ":#{string_name}" @@ -97,6 +98,10 @@ defmodule Lexical.Formats do end end + def mfa(module, function, arity) do + "#{module(module)}.#{function}/#{arity}" + end + defp templatize(count, template) do count_string = Integer.to_string(count) String.replace(template, "${count}", count_string) diff --git a/projects/lexical_shared/lib/lexical/process_cache.ex b/projects/lexical_shared/lib/lexical/process_cache.ex index 21a928801..9ca7bd01a 100644 --- a/projects/lexical_shared/lib/lexical/process_cache.ex +++ b/projects/lexical_shared/lib/lexical/process_cache.ex @@ -58,6 +58,12 @@ defmodule Lexical.ProcessCache do end end + def clear_keys do + keys() + |> MapSet.put(all_keys_key()) + |> Enum.each(&Process.delete/1) + end + @doc """ Retrieves and optionally sets a value in the cache. 
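+
+  A minimal illustrative sketch (`compute_value/0` is an invented function
+  standing in for any expensive computation):
+
+      ProcessCache.trans("some key", 5_000, fn -> compute_value() end)
+
+  The first call runs the function and stores its result in the process
+  dictionary; later calls within the timeout return the cached value without
+  invoking the function again.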
@@ -76,9 +82,35 @@ defmodule Lexical.ProcessCache do end end + defmacro with_cleanup(do: block) do + quote do + try do + unquote(block) + after + unquote(__MODULE__).clear_keys() + end + end + end + defp set(key, timeout_ms, compute_fn) do value = compute_fn.() + + add_key(key) Process.put(key, Entry.new(value, timeout_ms)) + value end + + defp add_key(key) do + updated_keys = MapSet.put(keys(), key) + Process.put(all_keys_key(), updated_keys) + end + + defp all_keys_key do + {__MODULE__, :all_keys} + end + + defp keys do + Process.get(all_keys_key(), MapSet.new()) + end end diff --git a/projects/lexical_shared/lib/lexical/project.ex b/projects/lexical_shared/lib/lexical/project.ex index 081ce6f52..054d172f5 100644 --- a/projects/lexical_shared/lib/lexical/project.ex +++ b/projects/lexical_shared/lib/lexical/project.ex @@ -14,8 +14,7 @@ defmodule Lexical.Project do mix_target: nil, env_variables: %{}, project_module: nil, - entropy: 1, - project_config: [] + entropy: 1 @type message :: String.t() @type restart_notification :: {:restart, Logger.level(), String.t()} @@ -47,7 +46,7 @@ defmodule Lexical.Project do end def set_project_module(%__MODULE__{} = project, module) when is_atom(module) do - %__MODULE__{project | project_module: module, project_config: module.project()} + %__MODULE__{project | project_module: module} end @doc """ @@ -85,15 +84,31 @@ defmodule Lexical.Project do project.entropy end + def config(%__MODULE__{} = project) do + config_key = {__MODULE__, name(project), :config} + + case :persistent_term.get(config_key, :not_found) do + :not_found -> + config = project.project_module.project() + :persistent_term.put(config_key, config) + config + + config -> + config + end + end + @doc """ Returns the name defined by the `project/0` function in the mix.exs file """ - def display_name(%__MODULE__{project_config: []} = project) do - folder_name(project) - end - def display_name(%__MODULE__{} = project) do - Keyword.get(project.project_config, :name, folder_name(project)) + def display_name(%__MODULE__{} = project) do + case config(project) do + [] -> + folder_name(project) + + config -> + Keyword.get(config, :name, folder_name(project)) + end end @doc """ diff --git a/apps/common/lib/lexical/text.ex b/projects/lexical_shared/lib/lexical/text.ex similarity index 100% rename from apps/common/lib/lexical/text.ex rename to projects/lexical_shared/lib/lexical/text.ex diff --git a/projects/lexical_shared/mix.exs b/projects/lexical_shared/mix.exs index 6fcabb2e0..f74392cdf 100644 --- a/projects/lexical_shared/mix.exs +++ b/projects/lexical_shared/mix.exs @@ -1,7 +1,7 @@ defmodule Lexical.Shared.MixProject do use Mix.Project @repo_url "https://github.com/lexical-lsp/lexical" - @version "0.0.1" + @version "0.5.0" def project do [ diff --git a/projects/lexical_shared/mix.lock b/projects/lexical_shared/mix.lock index efcd11d56..90de1a703 100644 --- a/projects/lexical_shared/mix.lock +++ b/projects/lexical_shared/mix.lock @@ -1,7 +1,7 @@ %{ - "dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"}, + "dialyxir": {:hex, :dialyxir, "1.4.4", "fb3ce8741edeaea59c9ae84d5cec75da00fa89fe401c72d6e047d11a61f65f70", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "cd6111e8017ccd563e65621a4d9a4a1c5cd333df30cebc7face8029cacb4eff6"}, "earmark_parser": {:hex, :earmark_parser, "1.4.32",
"fa739a0ecfa34493de19426681b23f6814573faee95dfd4b4aafe15a7b5b32c6", [:mix], [], "hexpm", "b8b0dd77d60373e77a3d7e8afa598f325e49e8663a51bcc2b88ef41838cca755"}, - "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, + "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, "ex_doc": {:hex, :ex_doc, "0.29.4", "6257ecbb20c7396b1fe5accd55b7b0d23f44b6aa18017b415cb4c2b91d997729", [:mix], [{:earmark_parser, "~> 1.4.31", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "2c6699a737ae46cb61e4ed012af931b57b699643b24dabe2400a8168414bc4f5"}, "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"}, "makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"}, diff --git a/projects/lexical_shared/test/lexical/document/path_test.exs b/projects/lexical_shared/test/lexical/document/path_test.exs new file mode 100644 index 000000000..b9b218b8e --- /dev/null +++ b/projects/lexical_shared/test/lexical/document/path_test.exs @@ -0,0 +1,214 @@ +defmodule ElixirLS.LanguageServer.SourceFile.PathTest do + use ExUnit.Case + use Patch + + import Lexical.Document.Path + + defp patch_os(os_type, fun) do + test = self() + + spawn(fn -> + patch(Lexical.Document.Path, :os_type, os_type) + + try do + rv = fun.() + send(test, {:return, rv}) + rescue + e -> + send(test, {:raise, e, __STACKTRACE__}) + end + end) + + receive do + {:return, rv} -> + rv + + {:raise, %ExUnit.AssertionError{} = e, stack} -> + new_message = "In O/S #{inspect(os_type)} #{e.message}" + reraise(%ExUnit.AssertionError{e | message: new_message}, stack) + + {:raise, error, stack} -> + reraise(error, stack) + end + end + + def with_os(:windows, fun) do + patch_os({:win32, :whatever}, fun) + end + + def with_os(:linux, fun) do + patch_os({:unix, :linux}, fun) + end + + def with_os(:macos, fun) do + patch_os({:unix, :darwin}, fun) + end + + describe "from_uri/1" do + # tests based on cases from https://github.com/microsoft/vscode-uri/blob/master/src/test/uri.test.ts + + test "unix" do + with_os(:windows, fn -> + assert from_uri("file:///some/path") == "\\some\\path" + assert from_uri("file:///some/path/") == "\\some\\path\\" + assert from_uri("file:///nodes%2B%23.ex") == "\\nodes+#.ex" + end) + + with_os(:linux, fn -> + assert from_uri("file:///some/path") == "/some/path" + assert from_uri("file:///some/path/") == "/some/path/" + assert from_uri("file:///nodes%2B%23.ex") == "/nodes+#.ex" + end) + end + + test "UNC" do + with_os(:windows, fn -> + assert from_uri("file://shares/files/c%23/p.cs") == "\\\\shares\\files\\c#\\p.cs" + + assert 
from_uri("file://monacotools1/certificates/SSL/") == + "\\\\monacotools1\\certificates\\SSL\\" + + assert from_uri("file://monacotools1/") == "\\\\monacotools1\\" + end) + + with_os(:linux, fn -> + assert from_uri("file://shares/files/c%23/p.cs") == "//shares/files/c#/p.cs" + + assert from_uri("file://monacotools1/certificates/SSL/") == + "//monacotools1/certificates/SSL/" + + assert from_uri("file://monacotools1/") == "//monacotools1/" + end) + end + + test "no `path` in URI" do + with_os(:windows, fn -> + assert from_uri("file://%2Fhome%2Fticino%2Fdesktop%2Fcpluscplus%2Ftest.cpp") == "\\" + end) + + with_os(:linux, fn -> + assert from_uri("file://%2Fhome%2Fticino%2Fdesktop%2Fcpluscplus%2Ftest.cpp") == "/" + end) + end + + test "windows drive letter" do + with_os(:windows, fn -> + assert from_uri("file:///c:/test/me") == "c:\\test\\me" + assert from_uri("file:///c%3A/test/me") == "c:\\test\\me" + assert from_uri("file:///C:/test/me/") == "c:\\test\\me\\" + assert from_uri("file:///_:/path") == "\\_:\\path" + + assert from_uri( + "file:///c:/Source/Z%C3%BCrich%20or%20Zurich%20(%CB%88zj%CA%8A%C9%99r%C9%AAk,/Code/resources/app/plugins" + ) == "c:\\Source\\Zürich or Zurich (ˈzjʊərɪk,\\Code\\resources\\app\\plugins" + end) + + with_os(:linux, fn -> + assert from_uri("file:///c:/test/me") == "/c:/test/me" + assert from_uri("file:///c%3A/test/me") == "/c:/test/me" + assert from_uri("file:///C:/test/me/") == "/C:/test/me/" + assert from_uri("file:///_:/path") == "/_:/path" + + assert from_uri( + "file:///c:/Source/Z%C3%BCrich%20or%20Zurich%20(%CB%88zj%CA%8A%C9%99r%C9%AAk,/Code/resources/app/plugins" + ) == "/c:/Source/Zürich or Zurich (ˈzjʊərɪk,/Code/resources/app/plugins" + end) + end + + test "vscode unsaved file uri" do + assert from_uri("untitled:Untitled-1") == "untitled:Untitled-1" + end + + test "unsupported uri schemas" do + assert_raise ArgumentError, fn -> + from_uri("https://elixir-lang.org") + end + + assert_raise ArgumentError, fn -> + from_uri("unsaved://343C3EE7-D575-486D-9D33-93AFFAF773BD") + end + end + end + + describe "to_uri/1" do + # tests based on cases from https://github.com/microsoft/vscode-uri/blob/master/src/test/uri.test.ts + test "unix path" do + unless windows?() do + assert "file:///nodes%2B%23.ex" == to_uri("/nodes+#.ex") + assert "file:///coding/c%23/project1" == to_uri("/coding/c#/project1") + + assert "file:///Users/jrieken/Code/_samples/18500/M%C3%B6del%20%2B%20Other%20Th%C3%AEng%C3%9F/model.js" == + to_uri("/Users/jrieken/Code/_samples/18500/Mödel + Other Thîngß/model.js") + + assert "file:///foo/%25A0.txt" == to_uri("/foo/%A0.txt") + assert "file:///foo/%252e.txt" == to_uri("/foo/%2e.txt") + end + end + + test "windows path" do + if windows?() do + drive_letter = Path.expand("/") |> String.split(":") |> hd() + assert "file:///c%3A/win/path" == to_uri("c:/win/path") + assert "file:///c%3A/win/path" == to_uri("C:/win/path") + assert "file:///c%3A/win/path" == to_uri("c:/win/path/") + assert nil == to_uri("c:\win\path") + + # this path may actually expand to other drive letter than C: (on GHA runner it expands to D:) + assert "file:///#{drive_letter}%3A/win/path" == to_uri("/c:/win/path") + + assert "file:///c%3A/win/path" == to_uri("c:\\win\\path") + assert "file:///c%3A/win/path" == to_uri("c:\\win/path") + + assert "file:///c%3A/test%20with%20%25/path" == + to_uri("c:\\test with %\\path") + + assert "file:///c%3A/test%20with%20%2525/c%23code" == + to_uri("c:\\test with %25\\c#code") + end + end + + test "relative path" do + cwd = File.cwd!() + + uri = 
to_uri("a.file") + + assert from_uri(uri) == + cwd + |> Path.join("a.file") + |> maybe_convert_path_separators() + + uri = to_uri("./foo/bar") + + assert from_uri(uri) == + cwd + |> Path.join("foo/bar") + |> maybe_convert_path_separators + end + + test "UNC path" do + if windows?() do + assert "file://sh%C3%A4res/path/c%23/plugin.json" == + to_uri("\\\\shäres\\path\\c#\\plugin.json") + + assert "file://localhost/c%24/GitDevelopment/express" == + to_uri("\\\\localhost\\c$\\GitDevelopment\\express") + end + end + end + + defp maybe_convert_path_separators(path) do + if windows?() do + String.replace(path, "/", "\\") + else + String.replace(path, "\\", "/") + end + end + + def windows? do + if match?({:win32, _}, :os.type()) do + true + else + false + end + end +end diff --git a/projects/lexical_shared/test/lexical/document/position_test.exs b/projects/lexical_shared/test/lexical/document/position_test.exs new file mode 100644 index 000000000..41561aff6 --- /dev/null +++ b/projects/lexical_shared/test/lexical/document/position_test.exs @@ -0,0 +1,45 @@ +defmodule Lexical.Document.PositionTest do + alias Lexical.Document.Line + alias Lexical.Document.Lines + alias Lexical.Document.Position + + import Line + + use ExUnit.Case, async: true + + describe "compare/2" do + test "positions on the same line" do + assert :eq = Position.compare(position(1, 10), position(1, 10)) + assert :gt = Position.compare(position(1, 11), position(1, 10)) + assert :lt = Position.compare(position(1, 9), position(1, 10)) + end + + test "position on earlier line" do + assert :lt = Position.compare(position(1, 10), position(2, 10)) + assert :lt = Position.compare(position(1, 11), position(2, 10)) + assert :lt = Position.compare(position(1, 9), position(2, 10)) + end + + test "position on later line" do + assert :gt = Position.compare(position(2, 10), position(1, 10)) + assert :gt = Position.compare(position(2, 11), position(1, 10)) + assert :gt = Position.compare(position(2, 9), position(1, 10)) + end + end + + defp position(line, character) do + stub_line = line(text: "", ending: "\n", line_number: line, ascii?: true) + + lines = + line + |> empty_lines() + |> put_in([Access.key(:lines), Access.elem(line - 1)], stub_line) + + Position.new(lines, line, character) + end + + defp empty_lines(length) do + tuple = List.to_tuple(for(x <- 1..length, do: x)) + %Lines{lines: tuple, starting_index: 1} + end +end diff --git a/projects/lexical_shared/test/lexical/document/range_test.exs b/projects/lexical_shared/test/lexical/document/range_test.exs new file mode 100644 index 000000000..1d4c8860a --- /dev/null +++ b/projects/lexical_shared/test/lexical/document/range_test.exs @@ -0,0 +1,67 @@ +defmodule Lexical.Document.RangeTest do + alias Lexical.Document.Lines + alias Lexical.Document.Position + alias Lexical.Document.Range + + import Lexical.Document.Line + + use ExUnit.Case, async: true + + describe "contains?/2" do + test "includes the start position" do + range = Range.new(position(1, 1), position(2, 1)) + assert Range.contains?(range, position(1, 1)) + end + + test "excludes the end position" do + range = Range.new(position(1, 1), position(2, 1)) + refute Range.contains?(range, position(2, 1)) + end + + test "includes position after start character of starting line" do + range = Range.new(position(1, 1), position(2, 1)) + assert Range.contains?(range, position(1, 2)) + end + + test "includes position before end character of ending line" do + range = Range.new(position(1, 1), position(2, 2)) + assert Range.contains?(range, 
position(2, 1)) + end + + test "includes position within lines" do + range = Range.new(position(1, 3), position(3, 1)) + assert Range.contains?(range, position(2, 2)) + end + + test "excludes position on a different line" do + range = Range.new(position(1, 1), position(3, 3)) + refute Range.contains?(range, position(4, 1)) + end + + test "excludes position before start character of starting line" do + range = Range.new(position(1, 2), position(2, 1)) + refute Range.contains?(range, position(1, 1)) + end + + test "excludes position after end character of ending line" do + range = Range.new(position(1, 1), position(2, 1)) + refute Range.contains?(range, position(2, 2)) + end + end + + defp position(line, character) do + stub_line = line(text: "", ending: "\n", line_number: line, ascii?: true) + + lines = + line + |> empty_lines() + |> put_in([Access.key(:lines), Access.elem(line - 1)], stub_line) + + Position.new(lines, line, character) + end + + defp empty_lines(length) do + tuple = List.to_tuple(for(x <- 1..length, do: x)) + %Lines{lines: tuple, starting_index: 1} + end +end diff --git a/projects/lexical_shared/test/lexical/document/store_test.exs b/projects/lexical_shared/test/lexical/document/store_test.exs index 935f68840..3a466c630 100644 --- a/projects/lexical_shared/test/lexical/document/store_test.exs +++ b/projects/lexical_shared/test/lexical/document/store_test.exs @@ -6,18 +6,18 @@ defmodule Lexical.Document.StoreTest do use ExUnit.Case - setup do - {:ok, _} = start_supervised(Document.Store) + def with_store(%{} = context) do + store_opts = Map.get(context, :store, []) + {:ok, _} = start_supervised({Document.Store, store_opts}) :ok end - def uri do - "file:///file.ex" - end - def with_an_open_document(_) do :ok = Document.Store.open(uri(), "hello", 1) - :ok + end + + def uri do + "file:///file.ex" end defp build_position(_, nil) do @@ -50,7 +50,33 @@ defmodule Lexical.Document.StoreTest do Edit.new(text, range) end + describe "startup" do + test "succeeds without options" do + assert {:ok, _} = start_supervised(Document.Store) + end + + test "succeeds with empty :derive" do + assert {:ok, _} = start_supervised({Document.Store, [derive: []]}) + end + + test "succeeds with valid :derive" do + valid_fun = fn _ -> :ok end + assert {:ok, _} = start_supervised({Document.Store, [derive: [valid: valid_fun]]}) + end + + test "fails with invalid :derive" do + invalid_fun = fn _, _ -> :ok end + assert {:error, _} = start_supervised({Document.Store, [derive: [invalid: invalid_fun]]}) + end + + test "fails with invalid options" do + assert {:error, _} = start_supervised({Document.Store, [invalid: []]}) + end + end + describe "a clean store" do + setup [:with_store] + test "a document can be opened" do :ok = Document.Store.open(uri(), "hello", 1) assert {:ok, file} = Document.Store.fetch(uri()) @@ -70,7 +96,7 @@ defmodule Lexical.Document.StoreTest do end describe "a document that is already open" do - setup [:with_an_open_document] + setup [:with_store, :with_an_open_document] test "can be fetched" do assert {:ok, doc} = Document.Store.fetch(uri()) @@ -148,7 +174,7 @@ defmodule Lexical.Document.StoreTest do end describe "a temp document" do - setup [:with_a_temp_document] + setup [:with_store, :with_a_temp_document] test "can be opened", ctx do assert {:ok, doc} = Document.Store.open_temporary(ctx.uri, 100) @@ -179,4 +205,61 @@ defmodule Lexical.Document.StoreTest do assert Document.Store.open?(ctx.uri) end end + + describe "derived values" do + setup context do + me = self() + + length_fun 
= fn doc -> + send(me, :length_called) + + doc + |> Document.to_string() + |> String.length() + end + + :ok = with_store(%{store: [derive: [length: length_fun]]}) + :ok = with_an_open_document(context) + end + + test "can be fetched with the document by key" do + assert {:ok, doc, 5} = Document.Store.fetch(uri(), :length) + assert Document.to_string(doc) == "hello" + end + + test "update when the document changes" do + assert :ok = + Document.Store.update(uri(), fn document -> + Document.apply_content_changes(document, 2, [ + build_change(text: "dog") + ]) + end) + + assert {:ok, doc, 3} = Document.Store.fetch(uri(), :length) + assert Document.to_string(doc) == "dog" + end + + test "are lazily computed when first fetched" do + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + assert_received :length_called + end + + test "are only computed again when the document changes" do + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + assert_received :length_called + + assert {:ok, %Document{}, 5} = Document.Store.fetch(uri(), :length) + refute_received :length_called + + assert :ok = + Document.Store.update(uri(), fn document -> + Document.apply_content_changes(document, 2, [ + build_change(text: "dog") + ]) + end) + + assert {:ok, %Document{}, 3} = Document.Store.fetch(uri(), :length) + assert_received :length_called + end + end end diff --git a/projects/lexical_shared/test/lexical/formats_test.exs b/projects/lexical_shared/test/lexical/formats_test.exs index 850f38335..194ef5dff 100644 --- a/projects/lexical_shared/test/lexical/formats_test.exs +++ b/projects/lexical_shared/test/lexical/formats_test.exs @@ -15,6 +15,14 @@ defmodule Lexical.FormatsTest do test "it correctly handles an erlang module name" do assert ":ets" == Formats.module(:ets) end + + test "it drops any `Elixir.` prefix" do + assert "Kernel.SpecialForms" == Formats.module(Elixir.Kernel.SpecialForms) + end + + test "it correctly handles an invalid elixir module" do + assert "This.Is.Not.A.Module" == Formats.module(:"This.Is.Not.A.Module") + end end describe "formatting time" do diff --git a/projects/lexical_shared/test/lexical/process_cache_test.exs b/projects/lexical_shared/test/lexical/process_cache_test.exs index 7be67522e..fe4484cb7 100644 --- a/projects/lexical_shared/test/lexical/process_cache_test.exs +++ b/projects/lexical_shared/test/lexical/process_cache_test.exs @@ -11,50 +11,99 @@ defmodule Lexical.ProcessCacheTest do {:ok, now: 1} end - test "calls the compute function" do - assert 3 == trans("my key", fn -> 3 end) - end + describe "trans/2" do + test "calls the compute function" do + assert 3 == trans("my key", fn -> 3 end) + end - test "pulls from the process cache when an entry exists" do - assert 3 == trans("my key", fn -> 3 end) - assert 3 == trans("my key", fn -> 6 end) - end + test "pulls from the process cache when an entry exists" do + assert 3 == trans("my key", fn -> 3 end) + assert 3 == trans("my key", fn -> 6 end) + end - test "times out after a given timeout", ctx do - now = ctx.now + test "times out after a given timeout", ctx do + now = ctx.now - patch(ProcessCache.Entry, :now_ts, cycle([now, now + 4999, now + 5000])) + patch(ProcessCache.Entry, :now_ts, cycle([now, now + 4999, now + 5000])) - assert 3 == trans("my key", fn -> 3 end) - assert {:ok, 3} == fetch("my key") - assert :error == fetch("my key") - end + assert 3 == trans("my key", fn -> 3 end) + assert {:ok, 3} == fetch("my key") + assert :error == fetch("my key") + end - test "calling get also clears 
the key after the timeout", ctx do - now = ctx.now + test "calling get also clears the key after the timeout", ctx do + now = ctx.now - patch(ProcessCache.Entry, :now_ts, cycle([now, now + 4999, now + 5000])) + patch(ProcessCache.Entry, :now_ts, cycle([now, now + 4999, now + 5000])) - assert 3 == trans("my key", fn -> 3 end) - assert 3 == get("my key") - assert nil == get("my key") - end + assert 3 == trans("my key", fn -> 3 end) + assert 3 == get("my key") + assert nil == get("my key") + end + + test "the timeout is configurable", ctx do + now = ctx.now + patch(ProcessCache.Entry, :now_ts, cycle([now, now + 49, now + 50])) + + assert 3 = trans("my key", 50, fn -> 3 end) + assert {:ok, 3} == fetch("my key") + assert :error == fetch("my key") + end - test "the timeout is configurable", ctx do - now = ctx.now - patch(ProcessCache.Entry, :now_ts, cycle([now, now + 49, now + 50])) + test "trans will replace an expired key", ctx do + now = ctx.now + patch(ProcessCache.Entry, :now_ts, cycle([now, now + 49, now + 50])) - assert 3 = trans("my key", 50, fn -> 3 end) - assert {:ok, 3} == fetch("my key") - assert :error == fetch("my key") + assert 3 = trans("my key", 50, fn -> 3 end) + assert 3 = trans("my key", 50, fn -> 6 end) + assert 6 = trans("my key", 50, fn -> 6 end) + end end - test "trans will replace an expired key", ctx do - now = ctx.now - patch(ProcessCache.Entry, :now_ts, cycle([now, now + 49, now + 50])) + describe "with_cleanup" do + test "cleans up after a trans call" do + with_cleanup do + trans("my_key", fn -> 3 end) + end + + assert :error = fetch("my_key") + end + + test "cleans up multiple keys" do + with_cleanup do + trans("my_key", fn -> 1 end) + trans("my_key2", fn -> 1 end) + trans("my_key3", fn -> 1 end) + end + + assert :error = fetch("my_key") + assert :error = fetch("my_key1") + assert :error = fetch("my_key2") + end + + test "cleans up even if a trans function raises" do + with_cleanup do + assert_raise RuntimeError, fn -> + with_cleanup do + trans("my_key", fn -> 1 end) + trans("my_key2", fn -> 1 end) + trans("my_key3", fn -> raise "oops" end) + end + end + + assert :error = fetch("my_key") + assert :error = fetch("my_key2") + assert :error = fetch("my_key3") + end + end + + test "returns the last value" do + result = + with_cleanup do + trans("my_key", fn -> 1 end) + 1 + end - assert 3 = trans("my key", 50, fn -> 3 end) - assert 3 = trans("my key", 50, fn -> 6 end) - assert 6 = trans("my key", 50, fn -> 6 end) + assert result == 2 + end end end diff --git a/apps/common/test/lexical/text_test.exs b/projects/lexical_shared/test/lexical/text_test.exs similarity index 100% rename from apps/common/test/lexical/text_test.exs rename to projects/lexical_shared/test/lexical/text_test.exs diff --git a/projects/lexical_test/lib/lexical/test/code_sigil.ex b/projects/lexical_test/lib/lexical/test/code_sigil.ex index 08827f281..577928bcf 100644 --- a/projects/lexical_test/lib/lexical/test/code_sigil.ex +++ b/projects/lexical_test/lib/lexical/test/code_sigil.ex @@ -1,6 +1,11 @@ defmodule Lexical.Test.CodeSigil do def sigil_q(text, opts \\ []) do - ["", first | rest] = text |> String.split("\n") + {first, rest} = + case String.split(text, "\n") do + ["", first | rest] -> {first, rest} + [first | rest] -> {first, rest} + end + base_indent = indent(first) indent_length = String.length(base_indent) diff --git a/projects/lexical_test/lib/lexical/test/cursor_support.ex b/projects/lexical_test/lib/lexical/test/cursor_support.ex index 227450e14..7936d843a 100644 --- 
a/projects/lexical_test/lib/lexical/test/cursor_support.ex +++ b/projects/lexical_test/lib/lexical/test/cursor_support.ex @@ -4,9 +4,12 @@ defmodule Lexical.Test.CursorSupport do """ alias Lexical.Document + alias Lexical.Document.Line alias Lexical.Document.Position alias Lexical.Test.PositionSupport + import Line + @default_cursor "|" @starting_line 1 @starting_column 1 @@ -62,19 +65,18 @@ - @spec pop_cursor(text :: String.t(), [opt]) :: {Position.t(), String.t() | Document.t()} - when opt: {:cursor, String.t()} | {:as, :text | :document} | {:document, String.t()} + @spec pop_cursor(text :: String.t(), [opt]) :: {Position.t() | nil, String.t() | Document.t()} + when opt: {:cursor, String.t()} | {:as, :text | :document} | {:document, String.t()} | {:default_to_end, boolean()} def pop_cursor(text, opts \\ []) do - cursor = Keyword.get(opts, :cursor, @default_cursor) as_document? = opts[:as] == :document or is_binary(opts[:document]) - {line, column} = cursor_position(text, cursor) - stripped_text = strip_cursor(text, cursor) + position = cursor_position(text, Keyword.take(opts, [:cursor, :default_to_end])) + stripped_text = strip_cursor(text, Keyword.take(opts, [:cursor])) if as_document? do uri = opts |> Keyword.get(:document, "file:///file.ex") |> Document.Path.ensure_uri() document = Document.new(uri, stripped_text, 0) - position = Position.new(document, line, column) + position = position(document, position) {position, document} else - position = PositionSupport.position(line, column) + position = position(position) {position, stripped_text} end end @@ -83,41 +85,85 @@ - Strips all instances of `cursor` from `text`. + Strips the first instance of `cursor` from `text`. """ - @spec strip_cursor(text :: String.t(), cursor :: String.t()) :: String.t() + @spec strip_cursor(text :: String.t(), opts :: keyword()) :: String.t() - def strip_cursor(text, cursor \\ @default_cursor) do - text - |> String.graphemes() - |> Enum.chunk_every(2, 1, [""]) - |> Enum.reduce([], fn - # don't strip the pipe in a `|>` operator when using the default cursor - ["|", ">"], iodata -> - [iodata, "|"] - - [^cursor, _lookahead], iodata -> - iodata - - [c, _], iodata -> - [iodata, c] + def strip_cursor(text, opts \\ []) do + cursor = Keyword.get(opts, :cursor, @default_cursor) + + {_found, iodata} = + text + |> String.graphemes() + |> Enum.chunk_every(2, 1, [""]) + |> Enum.reduce({false, []}, fn + # don't strip the pipe in a `|>` operator when using the default cursor + ["|", ">"], {found?, iodata} -> + {found?, [iodata, "|"]} + + [^cursor, _lookahead], {false, iodata} -> + {true, iodata} + + [c, _], {found?, iodata} -> + {found?, [iodata, c]} + end) + + IO.iodata_to_binary(iodata) + end + + def decorate_cursor(%Document{} = document, %Position{} = position) do + replace_line = position.line + + document.lines + |> Enum.map(fn + line(line_number: ^replace_line, text: text, ending: ending) -> + {leading, trailing} = String.split_at(text, position.character - 1) + + leading = String.pad_leading(leading, position.character - 1) + + [leading, "|", trailing, ending] + + line(text: text, ending: ending) -> + [text, ending] end) |> IO.iodata_to_binary() end - defp cursor_position(text, cursor) do - text - |> String.graphemes() - |> Enum.chunk_every(2, 1, [""]) - |> Enum.reduce_while({@starting_line, @starting_column}, fn - # don't consider the pipe in a `|>` operator when using the default cursor - ["|", ">"], {line, column} -> - {:cont, {line, column + 1}} + defp cursor_position(text, opts) do + cursor = Keyword.get(opts, :cursor, @default_cursor) + default_to_end?
= Keyword.get(opts, :default_to_end, true) - [^cursor, _], position -> - {:halt, position} + {found?, position} = + text + |> String.graphemes() + |> Enum.chunk_every(2, 1, [""]) + |> Enum.reduce_while({false, {@starting_line, @starting_column}}, fn + # don't consider the pipe in a `|>` operator when using the default cursor + ["|", ">"], {found?, {line, column}} -> + {:cont, {found?, {line, column + 1}}} - ["\n", _], {line, _column} -> - {:cont, {line + 1, @starting_column}} + [^cursor, _], {_, position} -> + {:halt, {true, position}} - _, {line, column} -> - {:cont, {line, column + 1}} - end) + ["\n", _], {found?, {line, _column}} -> + {:cont, {found?, {line + 1, @starting_column}}} + + _, {found?, {line, column}} -> + {:cont, {found?, {line, column + 1}}} + end) + + if found? or default_to_end? do + position + else + nil + end end + + defp position(%Document{} = document, {line, column}) do + Position.new(document, line, column) + end + + defp position(%Document{}, nil), do: nil + + defp position({line, column}) do + PositionSupport.position(line, column) + end + + defp position(nil), do: nil end diff --git a/projects/lexical_test/lib/lexical/test/diagnostic_support.ex b/projects/lexical_test/lib/lexical/test/diagnostic_support.ex new file mode 100644 index 000000000..8ce5a7ea1 --- /dev/null +++ b/projects/lexical_test/lib/lexical/test/diagnostic_support.ex @@ -0,0 +1,16 @@ +defmodule Lexical.Test.DiagnosticSupport do + alias Lexical.Features + + def execute_if(feature_condition) do + matched? = + Enum.all?(feature_condition, fn {feature_fn, value} -> + apply(Features, feature_fn, []) == value + end) + + if matched? do + :ok + else + :skip + end + end +end diff --git a/projects/lexical_test/lib/lexical/test/eventual_assertions.ex b/projects/lexical_test/lib/lexical/test/eventual_assertions.ex index 2c8df5418..7bd37a8ef 100644 --- a/projects/lexical_test/lib/lexical/test/eventual_assertions.ex +++ b/projects/lexical_test/lib/lexical/test/eventual_assertions.ex @@ -16,7 +16,7 @@ defmodule Lexical.Test.EventualAssertions do end defp do_eventually(func, {:=, _, [left, _right]} = assertion, timeout) do - quote do + quote generated: true do timer_ref = Process.send_after(self(), :assert_timeout, unquote(timeout)) asserter = fn -> @@ -28,7 +28,7 @@ end defp do_eventually(func, assertion, timeout) do - quote do + quote generated: true do timer_ref = Process.send_after(self(), :assert_timeout, unquote(timeout)) asserter = fn -> diff --git a/projects/lexical_test/lib/lexical/test/quiet.ex b/projects/lexical_test/lib/lexical/test/quiet.ex new file mode 100644 index 000000000..a6778570d --- /dev/null +++ b/projects/lexical_test/lib/lexical/test/quiet.ex @@ -0,0 +1,15 @@ +defmodule Lexical.Test.Quiet do + import ExUnit.CaptureIO + + def quiet(io_device \\ :stdio, fun) do + test_pid = self() + + capture_io(io_device, fn -> + send(test_pid, {:result, fun.()}) + end) + + receive do + {:result, result} -> result + end + end +end diff --git a/projects/lexical_test/lib/lexical/test/range_support.ex b/projects/lexical_test/lib/lexical/test/range_support.ex index ea9b10697..3700c4c32 100644 --- a/projects/lexical_test/lib/lexical/test/range_support.ex +++ b/projects/lexical_test/lib/lexical/test/range_support.ex @@ -1,5 +1,8 @@ defmodule Lexical.Test.RangeSupport do alias Lexical.Document + alias Lexical.Document.Position alias Lexical.Document.Range + alias Lexical.Math alias Lexical.Test.CursorSupport + alias Lexical.Text @@ -19,6 +22,10 @@
{Range.new(start_position, end_position), text} end + def pop_all_ranges(text) do + do_pop_all_ranges(text, []) + end + def decorate(%Document{} = document, %Range{} = range) do index_range = (range.start.line - 1)..(range.end.line - 1) @@ -31,15 +38,94 @@ defmodule Lexical.Test.RangeSupport do |> String.trim_trailing() end - def decorate(document_text, path \\ "/file.ex", %Range{} = range) - when is_binary(document_text) do + def decorate(document_text, path \\ "/file.ex", range) + + def decorate(document_text, path, %Range{} = range) when is_binary(document_text) do "file://#{path}" |> Document.new(document_text, 1) |> decorate(range) end + def decorate(document_text, path, position) when is_binary(document_text) do + document = Document.new("file://#{path}", document_text, 1) + range = position_to_range(document, position) + decorate(document, range) + end + + defp position_to_range(document, {line, column}) do + start_pos = Position.new(document, line, column) + end_pos = Position.new(document, line + 1, 1) + + Range.new( + start_pos, + end_pos + ) + end + + defp position_to_range(document, line_number) when is_integer(line_number) do + line_number = Math.clamp(line_number, 1, Document.size(document)) + + with {:ok, line_text} <- Document.fetch_text_at(document, line_number) do + column = Text.count_leading_spaces(line_text) + 1 + position_to_range(document, {line_number, column}) + end + end + + def extract(%Document{} = document, %Range{} = range) do + zero_based_start_character = max(range.start.character - 1, 0) + zero_based_end_character = max(range.end.character - 1, 0) + start_line = range.start.line + end_line = range.end.line + + document.lines + |> Enum.filter(fn line(line_number: number) -> + number in range.start.line..range.end.line + end) + |> Enum.map(fn + line(line_number: line_number, text: line_text) + when line_number == start_line and line_number == end_line -> + length = zero_based_end_character - zero_based_start_character + String.slice(line_text, zero_based_start_character, length) + + line(line_number: ^start_line, text: line_text, ending: ending) -> + line_length = String.length(line_text) + length = line_length - zero_based_start_character + prefix = String.slice(line_text, zero_based_start_character, length) + + [prefix, ending] + + line(line_number: ^end_line, text: line_text) -> + length = zero_based_end_character + String.slice(line_text, 0, length) + + line(text: line_text, ending: ending) -> + [line_text, ending] + end) + |> IO.iodata_to_binary() + end + + def extract(text, path \\ "/file.ex", %Range{} = range) when is_binary(text) do + "file://#{path}" + |> Document.new(text, 1) + |> extract(range) + end + defp insert_marker(text, marker, character) do {leading, trailing} = String.split_at(text, character - 1) leading <> marker <> trailing end + + defp do_pop_all_ranges(text, ranges) do + {start_position, text} = + CursorSupport.pop_cursor(text, cursor: @range_start_marker, default_to_end: false) + + {end_position, text} = + CursorSupport.pop_cursor(text, cursor: @range_end_marker, default_to_end: false) + + if start_position == nil or end_position == nil do + {Enum.reverse(ranges), text} + else + do_pop_all_ranges(text, [Range.new(start_position, end_position) | ranges]) + end + end end diff --git a/projects/lexical_test/mix.exs b/projects/lexical_test/mix.exs index c99530030..9bb5fed9a 100644 --- a/projects/lexical_test/mix.exs +++ b/projects/lexical_test/mix.exs @@ -4,7 +4,7 @@ defmodule Lexical.Test.MixProject do def project do [ app: 
:lexical_test, - version: "0.1.0", + version: "0.5.0", elixir: "~> 1.13", start_permanent: Mix.env() == :prod, deps: deps()