diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml new file mode 100644 index 00000000000..0d967879357 --- /dev/null +++ b/.github/workflows/bench.yml @@ -0,0 +1,58 @@ +name: Benchmark + +on: [pull_request] +jobs: + bench: + runs-on: ${{ matrix.os }} + + strategy: + fail-fast: false + matrix: + ghc: ['8.10.2', '8.8.4', '8.6.5'] + os: [ubuntu-latest, macOS-latest] + + steps: + - uses: actions/checkout@v2 + - run: git fetch origin master # check the master branch for benchmarking + - uses: actions/setup-haskell@v1 + with: + ghc-version: ${{ matrix.ghc }} + cabal-version: '3.2' + enable-stack: false + + - name: Cache Cabal + uses: actions/cache@v2 + with: + path: | + ~/.cabal/packages + ~/.cabal/store + key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-bench + + - run: cabal update + + - run: cabal configure --enable-benchmarks + + - name: Build + shell: bash + # Retry it three times to workaround compiler segfaults in windows + run: cabal build ghcide:benchHist || cabal build ghcide:benchHist || cabal build ghcide:benchHist + + - name: Bench + shell: bash + # run the tests without parallelism, otherwise tasty will attempt to run + # all test cases simultaneously which causes way too many hls + # instances to be spun up for the poor github actions runner to handle + run: cabal bench ghcide:benchHist + + - name: Display results + shell: bash + run: | + column -s, -t < ghcide/bench-results/results.csv | tee ghcide/bench-results/results.txt + + - name: Archive benchmarking artifacts + uses: actions/upload-artifact@v2 + with: + name: bench-results-${{ runner.os }}-${{ matrix.ghc }} + path: | + ghcide/bench-results/results.* + ghcide/bench-results/**/*.svg diff --git a/.github/workflows/nix.yml b/.github/workflows/nix.yml index 9ea6f0042ab..218983cb7cd 100644 --- a/.github/workflows/nix.yml +++ b/.github/workflows/nix.yml @@ -1,6 +1,6 @@ name: Nix -on: [push, pull_request] +on: [pull_request] jobs: nix: runs-on: ${{ matrix.os }} @@ -21,6 +21,5 @@ jobs: - uses: cachix/cachix-action@v8 with: name: haskell-language-server - extraPullNames: haskell-ghcide authToken: '${{ secrets.HLS_CACHIX_AUTH_TOKEN }}' - run: nix-shell --argstr compiler ${{ matrix.ghc }} --run "cabal update && cabal build" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index e5ee7e62bbe..f29fab2ba5c 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,6 +1,6 @@ name: Testing -on: [push, pull_request] +on: [pull_request] jobs: test: runs-on: ${{ matrix.os }} @@ -9,6 +9,7 @@ jobs: matrix: ghc: ["8.10.2", "8.10.1", "8.8.4", "8.8.3", "8.8.2", "8.6.5", "8.6.4"] os: [ubuntu-latest, macOS-latest, windows-latest] + ghc-lib: [false] exclude: - os: windows-latest ghc: "8.10.2" # broken due to https://gitlab.haskell.org/ghc/ghc/-/issues/18550 @@ -21,6 +22,10 @@ jobs: include: - os: windows-latest ghc: "8.10.2.2" # only available for windows and choco + # one ghc-lib build + - os: ubuntu-latest + ghc: '8.10.1' + ghc-lib: true steps: - uses: actions/checkout@v2 @@ -32,6 +37,9 @@ jobs: cabal-version: "3.2" enable-stack: true + - run: ./fmt.sh + name: "HLint via ./fmt.sh" + - name: Cache Cabal uses: actions/cache@v2 env: @@ -61,7 +69,14 @@ jobs: # Retry it three times to workaround compiler segfaults in windows run: cabal build || cabal build || cabal build + - name: Test ghcide + if: ${{ !matrix.ghc-lib }} + shell: bash + # run the tests without parallelism to avoid running out of memory + run: cabal test ghcide --test-options="-j1 --rerun-update" || cabal test ghcide 
--test-options="-j1 --rerun" || LSP_TEST_LOG_COLOR=0 LSP_TEST_LOG_MESSAGES=true LSP_TEST_LOG_STDERR=true cabal test ghcide --test-options="-j1 --rerun" + - name: Test func-test suite + if: ${{ !matrix.ghc-lib }} shell: bash env: HLS_TEST_EXE: hls @@ -72,6 +87,7 @@ jobs: run: cabal test func-test --test-options="-j1 --rerun-update" || cabal test func-test --test-options="-j1 --rerun --rerun-update" || cabal test func-test --test-options="-j1 --rerun" || LSP_TEST_LOG_COLOR=0 LSP_TEST_LOG_MESSAGES=true LSP_TEST_LOG_STDERR=true cabal test func-test --test-options="-j1 --rerun" - name: Test wrapper-test suite + if: ${{ !matrix.ghc-lib }} shell: bash env: HLS_TEST_EXE: hls diff --git a/.gitmodules b/.gitmodules index c8abb211bcc..7856aaec360 100644 --- a/.gitmodules +++ b/.gitmodules @@ -8,10 +8,3 @@ # Commit git commit -m "Removed submodule " # Delete the now untracked submodule files # rm -rf path_to_submodule -[submodule "ghcide"] - path = ghcide - # url = https://github.com/alanz/ghcide.git - # url = https://github.com/wz1000/ghcide.git - url = https://github.com/haskell/ghcide.git - # url = https://github.com/fendor/ghcide.git - # url = https://github.com/bubba/ghcide.git diff --git a/cabal.project b/cabal.project index 542232bfb0e..197b2f3c4a1 100644 --- a/cabal.project +++ b/cabal.project @@ -1,6 +1,7 @@ packages: ./ - ./ghcide/hie-compat + ./hie-compat + ./shake-bench ./ghcide ./hls-plugin-api ./plugins/tactics @@ -23,4 +24,16 @@ write-ghc-environment-files: never index-state: 2020-12-13T11:31:58Z -allow-newer: data-tree-print:base +allow-newer: + active:base, + data-tree-print:base, + diagrams-contrib:base, + diagrams-core:base, + diagrams-lib:base, + diagrams-postscript:base, + diagrams-svg:base, + dual-tree:base, + force-layout:base, + monoid-extras:base, + statestack:base, + svg-builder:base diff --git a/fmt.sh b/fmt.sh new file mode 100755 index 00000000000..1bd9a2ff98a --- /dev/null +++ b/fmt.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +set -eou pipefail +curl -sSL https://raw.github.com/ndmitchell/hlint/master/misc/run.sh | sh -s ghcide/src ghcide/exe ghcide/bench shake-bench/src ghcide/test/exe --with-group=extra --hint=ghcide/.hlint.yaml diff --git a/ghcide b/ghcide deleted file mode 160000 index 6de5acdf4c4..00000000000 --- a/ghcide +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 6de5acdf4c4c0d664ed6212e14614426b8adf183 diff --git a/ghcide/.azure/linux-stack.yml b/ghcide/.azure/linux-stack.yml new file mode 100644 index 00000000000..1c2a787b045 --- /dev/null +++ b/ghcide/.azure/linux-stack.yml @@ -0,0 +1,39 @@ +jobs: +- job: ghcide_stack_linux + timeoutInMinutes: 60 + pool: + vmImage: 'ubuntu-latest' + variables: + STACK_ROOT: $(Pipeline.Workspace)/.stack + steps: + - checkout: self + - task: Cache@2 + inputs: + key: stack-root-cache | $(Agent.OS) | $(Build.SourcesDirectory)/stack.yaml | $(Build.SourcesDirectory)/ghcide.cabal + path: $(STACK_ROOT) + cacheHitVar: STACK_ROOT_CACHE_RESTORED + displayName: "Cache stack root" + - task: Cache@2 + inputs: + key: stack-work-cache | $(Agent.OS) | $(Build.SourcesDirectory)/stack.yaml | $(Build.SourcesDirectory)/ghcide.cabal + path: .stack-work + cacheHitVar: STACK_WORK_CACHE_RESTORED + displayName: "Cache stack work" + - bash: | + ./fmt.sh + displayName: "HLint via ./fmt.sh" + - bash: | + sudo add-apt-repository ppa:hvr/ghc + sudo apt-get update + sudo apt-get install -y g++ gcc libc6-dev libffi-dev libgmp-dev make zlib1g-dev cabal-install-3.2 + if ! 
which stack >/dev/null 2>&1; then + curl -sSL https://get.haskellstack.org/ | sh + fi + mkdir -p $STACK_ROOT + displayName: 'Install Stack' + - bash: stack setup + displayName: 'stack setup' + - bash: cabal update # some tests use Cabal cradles + displayName: 'cabal update' + - bash: stack build --test --no-run-tests + displayName: 'stack build --test --no-run-tests' diff --git a/ghcide/.azure/windows-stack.yml b/ghcide/.azure/windows-stack.yml new file mode 100644 index 00000000000..21b99fc0d43 --- /dev/null +++ b/ghcide/.azure/windows-stack.yml @@ -0,0 +1,41 @@ +jobs: +- job: ghcide_stack_windows + timeoutInMinutes: 120 + pool: + vmImage: 'windows-2019' + variables: + STACK_ROOT: "C:\\sr" + steps: + - checkout: self + - task: Cache@2 + inputs: + key: stack-root-cache | $(Agent.OS) | $(Build.SourcesDirectory)/stack-windows.yaml | $(Build.SourcesDirectory)/ghcide.cabal + path: $(STACK_ROOT) + cacheHitVar: STACK_ROOT_CACHE_RESTORED + displayName: "Cache stack root" + - task: Cache@2 + inputs: + key: stack-work-cache | $(Agent.OS) | $(Build.SourcesDirectory)/stack-windows.yaml | $(Build.SourcesDirectory)/ghcide.cabal + path: .stack-work + cacheHitVar: STACK_WORK_CACHE_RESTORED + displayName: "Cache stack work" + - bash: | + ./fmt.sh + displayName: "HLint via ./fmt.sh" + - bash: | + curl -sSkL http://www.stackage.org/stack/windows-x86_64 -o /usr/bin/stack.zip + unzip -o /usr/bin/stack.zip -d /usr/bin/ + mkdir -p "$STACK_ROOT" + displayName: 'Install Stack' + - bash: stack setup --stack-yaml stack-windows.yaml + displayName: 'stack setup' + - bash: | + # Installing happy and alex standalone to avoid error "strip.exe: unable to rename ../*.exe; reason: File exists" + stack install happy --stack-yaml stack-windows.yaml + stack install alex --stack-yaml stack-windows.yaml + choco install -y cabal --version=$CABAL_VERSION + $(cygpath $ProgramData)/chocolatey/bin/RefreshEnv.cmd + # GHC 8.10.1 fails with ghc segfaults, using -fexternal-interpreter seems to make it working + # There are other transient errors like timeouts downloading from stackage so we retry 3 times + stack build --test --no-run-tests --stack-yaml stack-windows.yaml --ghc-options="-fexternal-interpreter" || stack build --test --no-run-tests --stack-yaml stack-windows.yaml --ghc-options="-fexternal-interpreter" || stack build --test --no-run-tests --stack-yaml stack-windows.yaml --ghc-options="-fexternal-interpreter" + displayName: 'stack build --test' diff --git a/ghcide/.editorconfig b/ghcide/.editorconfig new file mode 100644 index 00000000000..f75cf4d67c5 --- /dev/null +++ b/ghcide/.editorconfig @@ -0,0 +1,11 @@ +; This file is for unifying the coding style for different editors and IDEs. 
+; More information at https://EditorConfig.org + +root = true + +[*] +end_of_line = LF +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true diff --git a/ghcide/.ghci b/ghcide/.ghci new file mode 100644 index 00000000000..8eb094939ee --- /dev/null +++ b/ghcide/.ghci @@ -0,0 +1,29 @@ +:set -Wunused-binds -Wunused-imports -Worphans -Wunused-matches -Wincomplete-patterns + +:set -XBangPatterns +:set -XDeriveFunctor +:set -XDeriveGeneric +:set -XGeneralizedNewtypeDeriving +:set -XLambdaCase +:set -XNamedFieldPuns +:set -XOverloadedStrings +:set -XRecordWildCards +:set -XScopedTypeVariables +:set -XStandaloneDeriving +:set -XTupleSections +:set -XTypeApplications +:set -XViewPatterns + +:set -package=ghc +:set -ignore-package=ghc-lib-parser +:set -DGHC_STABLE +:set -Iinclude +:set -idist/build/autogen +:set -isrc +:set -isession-loader +:set -iexe + +:set -isrc-ghc88 +:set -idist-newstyle/build/x86_64-osx/ghc-8.8.3/ghcide-0.2.0/build/autogen + +:load Main diff --git a/ghcide/.gitignore b/ghcide/.gitignore new file mode 100644 index 00000000000..8f3e4482bf5 --- /dev/null +++ b/ghcide/.gitignore @@ -0,0 +1,18 @@ +dist/ +.stack-work/ +dist-newstyle/ +cabal.project.local +*~ +*.lock +/.tasty-rerun-log +.vscode +/.hlint-* +bench/example/ +bench-results/ +bench-temp/ +.shake/ +ghcide +ghcide-bench +ghcide-preprocessor +*.benchmark-gcStats +tags diff --git a/ghcide/.hlint.yaml b/ghcide/.hlint.yaml new file mode 100644 index 00000000000..a17e4e52ccc --- /dev/null +++ b/ghcide/.hlint.yaml @@ -0,0 +1,131 @@ +# HLint configuration file +# https://github.com/ndmitchell/hlint +########################## + +# To run HLint do: +# $ hlint --git -j4 + +# Warnings currently triggered by our code +- ignore: {name: "Use <$>"} +- ignore: {name: "Use :"} +- ignore: {name: "Redundant do"} +- ignore: {name: "Avoid lambda"} +- ignore: {name: "Use newtype instead of data"} +- ignore: {name: "Use fromMaybe"} +- ignore: {name: "Use unless"} +- ignore: {name: "Move brackets to avoid $"} +- ignore: {name: "Eta reduce"} +- ignore: {name: "Parse error"} +- ignore: {name: "Reduce duplication"} +- ignore: {name: "Use ++"} +- ignore: {name: "Use $>"} +- ignore: {name: "Use section"} +- ignore: {name: "Use record patterns"} +- ignore: {name: "Use camelCase"} +- ignore: {name: "Use uncurry"} +- ignore: {name: "Avoid lambda using `infix`"} + +# Off by default hints we like +- warn: {name: Use module export list} + +# Condemn nub and friends +- warn: {lhs: nub (sort x), rhs: Data.List.Extra.nubSort x} +- warn: {lhs: nub, rhs: Data.List.Extra.nubOrd} +- warn: {lhs: nubBy, rhs: Data.List.Extra.nubOrdBy} +- warn: {lhs: Data.List.Extra.nubOn, rhs: Data.List.Extra.nubOrdOn} + +# DA specific hints +- warn: {lhs: Data.Text.pack (DA.Pretty.renderPlain x), rhs: DA.Pretty.renderPlain x} +- warn: {lhs: Data.Text.Extended.pack (DA.Pretty.renderPlain x), rhs: DA.Pretty.renderPlain x} +- warn: {lhs: DA.Pretty.renderPlain (DA.Pretty.pretty x), rhs: DA.Pretty.renderPretty x} +- warn: {lhs: Data.Text.readFile, rhs: Data.Text.Extended.readFileUtf8} +- warn: {lhs: Data.Text.writeFile, rhs: Data.Text.Extended.writeFileUtf8} +- warn: {lhs: Data.Text.Lazy.readFile, rhs: Data.Text.Extended.readFileUtf8} +- warn: {lhs: Data.Text.Lazy.writeFile, rhs: Data.Text.Extended.writeFileUtf8} +- warn: {lhs: System.Environment.setEnv, rhs: System.Environment.Blank.setEnv} + +# Specify additional command line arguments +# +- arguments: ["--cpp-include=include"] + +- extensions: + - default: true + + # Extensions enabled 
by `bazel` and `da-ghci` by default. We ban them here + # to avoid useless pragmas piling up on the top of files. + - {name: BangPatterns, within: []} + - {name: DeriveDataTypeable, within: []} + - {name: DeriveFoldable, within: []} + - {name: DeriveFunctor, within: []} + - {name: DeriveGeneric, within: []} + - {name: DeriveTraversable, within: []} + - {name: FlexibleContexts, within: []} + - {name: GeneralizedNewtypeDeriving, within: []} + - {name: LambdaCase, within: []} + - {name: NamedFieldPuns, within: []} + - {name: PackageImports, within: []} + - {name: RecordWildCards, within: []} + - {name: ScopedTypeVariables, within: []} + - {name: StandaloneDeriving, within: []} + - {name: TupleSections, within: []} + - {name: TypeApplications, within: []} + - {name: ViewPatterns, within: []} + + # Shady extensions + - name: CPP + within: + - Development.IDE.Compat + - Development.IDE.Core.FileStore + - Development.IDE.Core.Compile + - Development.IDE.Core.Rules + - Development.IDE.GHC.Compat + - Development.IDE.GHC.Orphans + - Development.IDE.GHC.Util + - Development.IDE.Import.FindImports + - Development.IDE.LSP.Outline + - Development.IDE.Spans.Calculate + - Development.IDE.Spans.Documentation + - Development.IDE.Spans.Common + - Development.IDE.Plugin.CodeAction + - Development.IDE.Plugin.Completions + - Development.IDE.Plugin.Completions.Logic + - Main + +- flags: + - default: false + - {name: [-Wno-missing-signatures, -Wno-orphans, -Wno-overlapping-patterns, -Wno-incomplete-patterns, -Wno-missing-fields, -Wno-unused-matches]} + - {name: [-Wno-dodgy-imports,-Wno-incomplete-uni-patterns], within: [Main, Development.IDE.GHC.Compat]} +# - modules: +# - {name: [Data.Set, Data.HashSet], as: Set} # if you import Data.Set qualified, it must be as 'Set' +# - {name: Control.Arrow, within: []} # Certain modules are banned entirely +# +- functions: + # Things that are unsafe in Haskell base library + - {name: unsafeInterleaveIO, within: []} + - {name: unsafeDupablePerformIO, within: []} + - {name: unsafeCoerce, within: []} + # Things that are a bit dangerous in the GHC API + - {name: nameModule, within: []} + +# Add custom hints for this project +# +# Will suggest replacing "wibbleMany [myvar]" with "wibbleOne myvar" +# - error: {lhs: "wibbleMany [x]", rhs: wibbleOne x} + +# Turn on hints that are off by default +# +# Ban "module X(module X) where", to require a real export list +# - warn: {name: Use explicit module export list} +# +# Replace a $ b $ c with a . 
b $ c +# - group: {name: dollar, enabled: true} +# +# Generalise map to fmap, ++ to <> +# - group: {name: generalise, enabled: true} + +# Ignore some builtin hints +# - ignore: {name: Use let} +# - ignore: {name: Use const, within: SpecialModule} # Only within certain modules + +# Define some custom infix operators +# - fixity: infixr 3 ~^#^~ diff --git a/ghcide/CHANGELOG.md b/ghcide/CHANGELOG.md new file mode 100644 index 00000000000..55f1534dcb1 --- /dev/null +++ b/ghcide/CHANGELOG.md @@ -0,0 +1,210 @@ +### 0.6.0.2 (2020-12-26) +* Fix disappearing diagnostics bug (#959) - (Pepe Iborra) +* Use qualified module name from diagnostics in suggestNewImport (#945) - (Potato Hatsue) +* Disable auto extend import snippets in completions (these need a bit more work) + +### 0.6.0.1 (2020-12-13) +* Fix build with GHC 8.8.2 and 8.8.3 - (Javier Neira) +* Update old URLs still pointing to digital-asset - (Jan Hrcek) + +### 0.6.0 (2020-12-06) +* Completions: extend explicit import list automatically (#930) - (Guru Devanla) +* Completions for identifiers not in explicit import lists (#919) - (Guru Devanla) +* Completions for record fields (#900) - (Guru Devanla) +* Bugfix: add constructors to import lists correctly (#916) - (Potato Hatsue) +* Bugfix: respect qualified identifiers (#938) - (Pepe Iborra) +* Bugfix: partial `pathToId` (#926) - (Samuel Ainsworth) +* Bugfix: import suggestions when there's more than one option (#913) - (Guru Devanla) +* Bugfix: parenthesize operators when exporting (#906) - (Potato Hatsue) +* Opentelemetry traces and heapsize memory analysis (#922) - (Michalis Pardalos / Pepe Iborra) +* Make Filetargets absolute before continue using them (#914) - (fendor) +* Do not enable every "unnecessary" warning by default (#907) - (Alejandro Serrano) +* Update implicit-hie to 0.3.0 (#905) - (Avi Dessauer) + +### 0.5.0 (2020-11-07) +* Use implicit-hie-0.1.2.0 (#880) - (Javier Neira) +* Clarify and downgrade implicit-hie message (#883) - (Avi Dessauer) +* Switch back to bytecode (#873) - (wz1000) +* Add code action for remove all redundant imports (#867) - (Potato Hatsue) +* Fix pretty printer for diagnostic ranges (#871) - (Martin Huschenbett) +* Canonicalize import dirs (#870) - (Pepe Iborra) +* Do not show internal hole names (#852) - (Alejandro Serrano) +* Downgrade file watch debug log to logDebug from logInfo (#848) - (Matthew Pickering) +* Pull in local bindings (#845) - (Sandy Maguire) +* Use object code for Template Haskell, emit desugarer warnings (#836) - (wz1000) +* Fix code action for adding missing constraints to type signatures (#839) - (Jan Hrcek) +* Fix duplicated completions (#837) - (Vitalii) +* FileExists: set one watcher instead of thousands (#831) - (Michael Peyton Jones) +* Drop 8.4 support (#834) - (wz1000) +* Add GetHieAsts rule, Replace SpanInfo, add support for DocumentHighlight and scope-aware completions for local variables (#784) - (wz1000) +* Tag unused warning as such (#815) - (Alejandro Serrano) +* Update instructions for stty error in windows (#825) - (Javier Neira) +* Fix docs tooltip for base libraries on Windows (#814) - (Nick Dunets) +* Fix documentation (or source) link when html file is less specific than module (#766) - (Nick Dunets) +* Add completion tests for records. 
(#804) - (Guru Devanla) +* Restore identifiers missing from hi file (#741) - (maralorn) +* Fix import suggestions when dot is typed (#800) - (Marcelo Lazaroni) + +### 0.4.0 (2020-09-15) +* Fixes for GHC source plugins: dotpreprocessor works now - (srid) +* Use implicit-hie when no explicit hie.yaml (#782) - (Javier Neira) +* Extend position mapping with fuzzy ranges (#785) - (wz1000) +* Sort import suggestions (#793) - (Pepe Iborra) +* Save source files with HIE files (#701) - (fendor) +* Fully asynchronous request handling (#767) - (Pepe Iborra) +* Refinement holes (#748) - (Pepe Iborra) +* Fix haddock to markdown conversion (#757) - (George Thomas) +* Expose `getCompletionsLSP` to allow completions in hls (#756) - (wz1000) +* Suggestions for missing imports from local modules (#739) - (Pepe Iborra) +* Dynamically load libm on Linux for each new session (#723) - (Luke Lau) +* Use InitializeParams.rootUri for initial session setup (#713) - (shaurya gupta) +* Show documentation on hover for symbols defined in the same module (#691) - (wz1000) +* Suggest open imports (#740) - (Pepe Iborra) +* module Development.IDE (#724) - (Pepe Iborra) +* Ignore -Werror (#738) - (Pepe Iborra) +* Fix issue #710: fix suggest delete binding (#728) - (Ray Shih) +* Generate doc file URL via LSP (to fix it for Windows) (#721) - (Nick Dunets) +* Fix `.hie` file location for `.hs-boot` files (#690) - (wz1000) +* Use argsVerbose to determine log level in test mode (#717) - (Ziyang Liu) +* output which cradle files were found (#716) - (Adam Sandberg Eriksson) +* Typecheck entire project on Initial Load and typecheck reverse dependencies of a file on saving (#688) - (wz1000) + +### 0.3.0 (2020-09-02) + +* CI: remove (internal) DA Slack notifications (#750) - (Gary Verhaegen) +* Add session-loader to hie.yaml (#714) - (Luke Lau) +* Codeaction for exporting unused top-level bindings (#711) - (shaurya gupta) +* Add links to haddock and hscolour pages in documentation (#699) - (Luke Lau) +* Expose GHC.Compat module (#709) - (Pepe Iborra) +* Move session loading logic into ghcide library (#697) - (Luke Lau) +* Code action: remove redundant constraints for type signature (#692) - (Denis Frezzato) +* Fix Binary instance of Q to handle empty file paths (#707) - (Moritz Kiefer) +* Populate ms_hs_date in GetModSummary rule (#694) - (Pepe Iborra) +* Allow GHC plugins to be called with an updated StringBuffer (#698) - (Alfredo Di Napoli) +* Relax upper bounds for GHC 8.10.1 (#705) - (Pepe Iborra) +* Obtain the GHC libdir at runtime (#696) - (Luke Lau) +* Expect bench experiments to fail with Cabal (#704) - (Pepe Iborra) +* Bump lodash from 4.17.15 to 4.17.19 in /extension (#702) - (dependabot[bot]) +* Update to hie-bios 0.6.1 (#693) - (fendor) +* Backport HIE files to GHC 8.6 (#689) - (wz1000) +* Performance improvements for GetSpanInfo (#681) - (Pepe Iborra) +* Code action add default type annotation to remove `-Wtype-defaults` warning (#680) - (Serhii) +* Use a global namecache to read `.hie` files (#677) - (wz1000) +* Completions need not depend on typecheck of the current file (#670) - (Pepe Iborra) +* Fix spaninfo Haddocks for local modules (#678) - (Pepe Iborra) +* Avoid excessive retypechecking of TH codebases (#673) - (Pepe Iborra) +* Use stale information if it's available to answer requests quickly (#624) - (Matthew Pickering) +* Code action: add constraint (#653) - (Denis Frezzato) +* Make BenchHist non buildable by default and save logs (#666) - (Pepe Iborra) +* Delete unused top level binding code action (#657) - 
(Serhii) +* stack810.yaml: bump (#651) - (Domen Kozar) +* Fix debouncer for 0 delay (#662) - (Pepe Iborra) +* Interface file fixes (#645) - (Pepe Iborra) +* Retry GHC 8.10 on Windows (#661) - (Moritz Kiefer) +* Finer dependencies for GhcSessionFun (#643) - (Pepe Iborra) +* Send WorkDoneProgressEnd only when work is done (#649) - (Pepe Iborra) +* Add a note on differential benchmarks (#647) - (Pepe Iborra) +* Cache a ghc session per file of interest (#630) - (Pepe Iborra) +* Remove `Strict` from the language extensions used for code actions (#638) - (Torsten Schmits) +* Report progress when setting up cradle (#644) - (Luke Lau) +* Fix crash when writing to a Barrier more than once (#637) - (Pepe Iborra) +* Write a cabal.project file in the benchmark example (#640) - (Pepe Iborra) +* Performance analysis over time (#629) - (Pepe Iborra) +* More benchmarks (#625) - (Pepe Iborra) +* Canonicalize the locations in the cradle tests (#628) - (Luke Lau) +* Add hie.yaml.stack and use none cradle for test data (#626) - (Javier Neira) +* Fix a bug in getHiFileRule (#623) - (Pepe Iborra) +* ghc initialization error handling (#609) - (Pepe Iborra) +* Fix regression in getSpanInfoRule (#622) - (Pepe Iborra) +* Restore Shake profiling (#621) - (Pepe Iborra) +* Use a better noRange (#612) - (Neil Mitchell) +* Add back a .ghci file (#607) - (Neil Mitchell) +* #573, make haddock errors warnings with the word Haddock in front (#608) - (Neil Mitchell) +* Implement Goto Type Definition (#533) - (Matthew Pickering) +* remove unnecessary FileExists dependency in GetHiFile (#589) - (Pepe Iborra) +* ShakeSession and shakeEnqueue (#554) - (Pepe Iborra) +* Benchmark suite (#590) - (Pepe Iborra) + +### 0.2.0 (2020-06-02) + +* Multi-component support (thanks @mpickering) +* Support for GHC 8.10 (thanks @sheaf and @chshersh) +* Fix some TH issues (thanks @mpickering) +* Automatically pick up changes to cradle dependencies (e.g. cabal + files) (thanks @jinwoo) +* Track dependencies when using `qAddDependentFile` (thanks @mpickering) +* Add record fields to document symbols outline (thanks @bubba) +* Fix some space leaks (thanks @mpickering) +* Strip redundant path information from diagnostics (thanks @tek) +* Fix import suggestions for operators (thanks @eddiemundo) +* Significant reductions in memory usage by using interfaces and `.hie` files (thanks + @pepeiborra) +* Minor improvements to completions +* More comprehensive suggestions for missing imports (thanks @pepeiborra) +* Group imports in document outline (thanks @fendor) +* Upgrade to haskell-lsp-0.22 (thanks @bubba) +* Upgrade to hie-bios 0.5 (thanks @fendor) + +### 0.1.0 (2020-02-04) + +* Code action for inserting new definitions (see #309). +* Better default GC settings (see #329 and #333). +* Various performance improvements (see #322 and #384). +* Improvements to hover information (see #317 and #338). +* Support GHC 8.8.2 (see #355). +* Include keywords in completions (see #351). +* Fix some issues with aborted requests (see #353). +* Use hie-bios 0.4.0 (see #382). +* Avoid stuck progress reporting (see #400). +* Only show progress notifications after 0.1s (see #392). +* Progress reporting is now in terms of the number of files rather + than the number of shake rules (see #379). + +### 0.0.6 (2020-01-10) + +* Fix type in hover information for do-notation and list + comprehensions (see #243). +* Fix hover and goto-definition for multi-clause definitions (see #252). 
+* Upgrade to `hie-bios-0.3` (see #257) +* Upgrade to `haskell-lsp-0.19` (see #254) +* Code lenses for missing signatures are displayed even if the warning + has not been enabled. The warning itself will not be shown if it is + not enabled. (see #232) +* Define `__GHCIDE__` when running CPP to allow for `ghcide`-specific + workarounds. (see #264) +* Fix some filepath normalization issues. (see #266) +* Fix build with `shake-0.18.4` (see #272) +* Fix hover for type constructors and type classes. (see #267) +* Support custom preprocessors (see #282) +* Add support for code completions (see #227) +* Code action for removing redundant symbols from imports (see #290) +* Support document symbol requests (see #293) +* Show CPP errors as diagnostics (see #296) +* Code action for adding suggested imports (see #295) + +### 0.0.5 (2019-12-12) + +* Support for GHC plugins (see #192) +* Update to haskell-lsp 0.18 (see #203) +* Initial support for `TemplateHaskell` (see #222) +* Code lenses for missing signatures. These are only shown if + `-Wmissing-signatures` is enabled. (see #224) +* Fix path normalisation on Windows (see #225) +* Fix flickering of the progress indicator (see #230) + +### 0.0.4 (2019-10-20) + +* Add a ``--version`` cli option (thanks @jacg) +* Update to use progress reporting as defined in LSP 3.15. The VSCode + extension has also been updated and should now be making use of + this. +* Properly declare that we should support code actions. This helps + with some clients that rely on this information to enable code + actions (thanks @jacg). +* Fix a race condition caused by sharing the finder cache between + concurrent compilations. +* Avoid normalizing include dirs. This avoids issues where the same + file ends up twice in the module graph, e.g., with different casing + for drive letters. + +### 0.0.3 (2019-09-21) diff --git a/ghcide/LICENSE b/ghcide/LICENSE new file mode 100644 index 00000000000..d1f5c9033f6 --- /dev/null +++ b/ghcide/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Digital Asset (Switzerland) GmbH and/or its affiliates + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/ghcide/README.md b/ghcide/README.md new file mode 100644 index 00000000000..5ae04177e82 --- /dev/null +++ b/ghcide/README.md @@ -0,0 +1,358 @@ +# `ghcide` - A library for building Haskell IDE tooling + +Our vision is that you should build an IDE by combining: + +![vscode](https://raw.githubusercontent.com/haskell/ghcide/master/img/vscode2.png) + +* [`hie-bios`](https://github.com/mpickering/hie-bios) for determining where your files are, what are their dependencies, what extensions are enabled and so on; +* `ghcide` (i.e. this library) for defining how to type check, when to type check, and producing diagnostic messages; +* A bunch of plugins that haven't yet been written, e.g. 
[`hie-hlint`](https://github.com/ndmitchell/hlint) and [`hie-ormolu`](https://github.com/tweag/ormolu), to choose which features you want; +* [`haskell-lsp`](https://github.com/alanz/haskell-lsp) for sending those messages to a [Language Server Protocol (LSP)](https://microsoft.github.io/language-server-protocol/) server; +* An LSP client for your editor. + +There are more details about our approach [in this blog post](https://4ta.uk/p/shaking-up-the-ide). + +## Features + +`ghcide` already exports the following features via the LSP protocol: + +| Feature | LSP name | | - | - | | Display error messages (parse errors, typecheck errors, etc.) and enabled warnings. | diagnostics | | Go to definition in local package | definition | | Display type and source module of values | hover | | Remove redundant imports, replace suggested typos for values and module imports, fill type holes, insert missing type signatures, add suggested ghc extensions | codeAction (quickfix) | + + ## Limitations to Multi-Component support + +`ghcide` supports loading multiple components into the same session so that +features such as go-to definition work across components. However, there are +some limitations to this. + +1. Currently you will get much better results by manually specifying the hie.yaml file. +Until tools like cabal and stack provide the right interface to support multi-component +projects, it is always advisable to specify explicitly how your project is partitioned. +2. Cross-component features only work if you have loaded at least one file +from each component. +3. There is a known issue with three components: if A depends on B, which depends on C, +and you load A and C into the session but not B, then under certain situations you +can get strange errors about a type coming from two different places. See [this repo](https://github.com/fendor/ghcide-bad-interface-files) for +a simple reproduction of the bug. + +## Using it + +`ghcide` is not an end-user tool; [don't use `ghcide`](https://neilmitchell.blogspot.com/2020/09/dont-use-ghcide-anymore-directly.html) directly (more about the rationale [here](https://github.com/haskell/ghcide/pull/939)). + + [`haskell-language-server`](http://github.com/haskell/haskell-language-server) is an LSP server built on top of `ghcide` with additional features and a user-friendly deployment model. To get it, simply install the [Haskell extension](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) in VS Code, or download prebuilt binaries from the [haskell-language-server](https://github.com/haskell/haskell-language-server) project page. + + +The instructions below are meant for developers interested in setting up ghcide as an LSP server for testing purposes. + +### Install `ghcide` + +#### With Nix + +Note that you need to compile `ghcide` with the same `ghc` as the project you are working on. + +1. If the `ghc` you are using matches the version from `nixpkgs` (or, better, is the one from `nixpkgs`), it's easiest to use the `ghcide` from `nixpkgs`. You can do so via + ``` + nix-env -iA haskellPackages.ghcide + ``` + or e.g. including `pkgs.haskellPackages.ghcide` in your project's `shell.nix`. + Depending on your `nixpkgs` channel, that might not be the newest `ghcide`, though. + +2. If your `ghc` does not match nixpkgs, you should try the [ghcide-nix repository](https://github.com/cachix/ghcide-nix) + which provides a `ghcide` via the `haskell.nix` infrastructure. + +#### With Cabal or Stack + +First install the `ghcide` binary using `stack` or `cabal`, e.g.:
+ +1. `git clone https://github.com/haskell/ghcide.git` +2. `cd ghcide` +3. `cabal install` or `stack install` (and make sure `~/.local/bin` is on your `$PATH`) + +It's important that `ghcide` is compiled with the same compiler you use to build your projects. + +### Test `ghcide` + +Next, check that `ghcide` is capable of loading your code. Change to the project directory and run `ghcide`, which will try to load everything using the same code as the IDE, but in a way that's much easier to understand. Taking [`shake`](https://github.com/ndmitchell/shake) as an example, running `ghcide` gives some error messages and warnings before reporting at the end: + +```console +Files that failed: + * .\model\Main.hs + * .\model\Model.hs + * .\model\Test.hs + * .\model\Util.hs + * .\output\docs\Main.hs + * .\output\docs\Part_Architecture_md.hs +Completed (152 worked, 6 failed) +``` + +Of the 158 files in Shake, as of this moment, 152 can be loaded by the IDE, but 6 can't (error messages for the reasons they can't be loaded are given earlier). The failing files are all prototype work or test output, meaning I can confidently use Shake. + +The `ghcide` executable mostly relies on [`hie-bios`](https://github.com/mpickering/hie-bios) to do the difficult work of setting up your GHC environment. If it doesn't work, see [the `hie-bios` manual](https://github.com/mpickering/hie-bios#readme) to get it working. My default fallback is to figure it out by hand and create a `direct` style [`hie.yaml`](https://github.com/ndmitchell/shake/blob/master/hie.yaml) listing the command line arguments to load the project. + +If you can't get `ghcide` working outside the editor, see [this setup troubleshooting guide](docs/Setup.md). Once you have got `ghcide` working outside the editor, the next step is to pick which editor to integrate with. + +### Optimal project setup + +`ghcide` has been designed to handle projects with hundreds or thousands of modules. If `ghci` can handle it, then `ghcide` should be able to handle it. The only caveat is that this currently requires GHC >= 8.6, and that the first time a module is loaded in the editor, support files are generated in the background if they do not already exist. + +### Configuration + +`ghcide` accepts the following LSP configuration options: + +```typescript +{ + // When to check the dependents of a module + // AlwaysCheck means retypechecking them on every change + // CheckOnSave means dependent/parent modules will only be checked when you save + // "CheckOnSaveAndClose" by default + checkParents : "NeverCheck" | "CheckOnClose" | "CheckOnSaveAndClose" | "AlwaysCheck", + // Whether to check the entire project on initial load + // true by default + checkProject : boolean + +} +``` + +### Using with VS Code + +The [Haskell](https://marketplace.visualstudio.com/items?itemName=haskell.haskell) extension has a setting for ghcide. + +### Using with Atom + +You can follow the [instructions](https://github.com/moodmosaic/ide-haskell-ghcide#readme) to install with `apm`.
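If you just want to sanity-check the server without any editor, you can also drive `ghcide --lsp` from Haskell with the `lsp-test` package (the same library ghcide's own test suite uses). A minimal sketch, assuming a built `ghcide` on your `$PATH`; the project root, module name and position below are placeholders, not part of this repository:

```haskell
-- Minimal lsp-test session against ghcide; adjust the root path and module.
import Control.Monad.IO.Class (liftIO)
import Language.Haskell.LSP.Test
import Language.Haskell.LSP.Types (Position (..))
import Language.Haskell.LSP.Types.Capabilities (fullCaps)

main :: IO ()
main = runSession "ghcide --lsp" fullCaps "path/to/your/project" $ do
  -- open one module of the project (hypothetical name) and request hover info
  doc    <- openDoc "src/Lib.hs" "haskell"
  mhover <- getHover doc (Position 10 5)   -- 0-based line and column
  liftIO $ putStrLn $ case mhover of
    Nothing -> "no hover information at that position"
    Just _  -> "ghcide answered the hover request"
```

If this works, the editor integrations below should work too, since they all launch the same `ghcide --lsp` process and talk LSP to it.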
+ +### Using with Sublime Text + +* Install [LSP](https://packagecontrol.io/packages/LSP) +* Press Ctrl+Shift+P or Cmd+Shift+P in Sublime Text and search for *Preferences: LSP Settings*, then paste these settings +``` +{ + "clients": + { + "ghcide": + { + "enabled" : true, + "languageId": "haskell", + "command" : ["ghcide", "--lsp"], + "scopes" : ["source.haskell"], + "syntaxes" : ["Packages/Haskell/Haskell.sublime-syntax"] + } + } +} +``` + +### Using with Emacs + +If you don't already have [MELPA](https://melpa.org/#/) package installation configured, visit MELPA [getting started](https://melpa.org/#/getting-started) page to get set up. Then, install [`use-package`](https://melpa.org/#/use-package). + +Now you have a choice of two different Emacs packages which can be used to communicate with the `ghcide` LSP server: + ++ `lsp-ui` ++ `eglot` (requires Emacs 26.1+) + +In each case, you can enable support by adding the shown lines to your `.emacs`: + +#### lsp-ui + +```elisp +;; LSP +(use-package flycheck + :ensure t + :init + (global-flycheck-mode t)) +(use-package yasnippet + :ensure t) +(use-package lsp-mode + :ensure t + :hook (haskell-mode . lsp) + :commands lsp) +(use-package lsp-ui + :ensure t + :commands lsp-ui-mode) +(use-package lsp-haskell + :ensure t + :config + (setq lsp-haskell-process-path-hie "ghcide") + (setq lsp-haskell-process-args-hie '()) + ;; Comment/uncomment this line to see interactions between lsp client/server. + ;;(setq lsp-log-io t) +) +``` + +#### eglot + +````elisp +(use-package eglot + :ensure t + :config + (add-to-list 'eglot-server-programs '(haskell-mode . ("ghcide" "--lsp")))) +```` + +### Using with Vim/Neovim + +#### LanguageClient-neovim +Install [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim) + +Add this to your vim config: +```vim +let g:LanguageClient_rootMarkers = ['*.cabal', 'stack.yaml'] +let g:LanguageClient_serverCommands = { + \ 'rust': ['rls'], + \ 'haskell': ['ghcide', '--lsp'], + \ } +``` + +Refer to `:he LanguageClient` for more details on usage and configuration. + +#### vim-lsp +Install [vim-lsp](https://github.com/prabirshrestha/vim-lsp). + +Add this to your vim config: + +```vim +au User lsp_setup call lsp#register_server({ + \ 'name': 'ghcide', + \ 'cmd': {server_info->['/your/path/to/ghcide', '--lsp']}, + \ 'whitelist': ['haskell'], + \ }) +``` + +To verify it works move your cursor over a symbol and run `:LspHover`. + +### coc.nvim + +Install [coc.nvim](https://github.com/neoclide/coc.nvim) + +Add this to your coc-settings.json (which you can edit with :CocConfig): + +```json +{ + "languageserver": { + "haskell": { + "command": "ghcide", + "args": [ + "--lsp" + ], + "rootPatterns": [ + ".stack.yaml", + ".hie-bios", + "BUILD.bazel", + "cabal.config", + "package.yaml" + ], + "filetypes": [ + "hs", + "lhs", + "haskell" + ] + } + } +} +``` + +Here's a nice article on setting up neovim and coc: [Vim and Haskell in +2019](http://marco-lopes.com/articles/Vim-and-Haskell-in-2019/) (this is actually for haskell-ide, not ghcide) + +Here is a Docker container that pins down the build and configuration for +Neovim and ghcide on a minimal Debian 10 base system: +[docker-ghcide-neovim](https://github.com/carlohamalainen/docker-ghcide-neovim/). 
+ +### SpaceVim + +In the `autocomplete` layer, add the `autocomplete_method` option to force the use of `coc`: + +```toml +[[layers]] + name = 'autocomplete' + auto-completion-return-key-behavior = "complete" + auto-completion-tab-key-behavior = "smart" + [options] + autocomplete_method = "coc" +``` + +Add this to your coc-settings.json (which you can edit with :CocConfig): + +```json +{ + "languageserver": { + "haskell": { + "command": "ghcide", + "args": [ + "--lsp" + ], + "rootPatterns": [ + ".stack.yaml", + ".hie-bios", + "BUILD.bazel", + "cabal.config", + "package.yaml" + ], + "filetypes": [ + "hs", + "lhs", + "haskell" + ] + } + } +} +``` + +The example above describes a setup in which `ghcide` is installed +using `stack install ghcide` within a project. + +### Using with Kakoune + +Install [kak-lsp](https://github.com/ul/kak-lsp). + +Change `kak-lsp.toml` to include this: + +```toml +[language.haskell] +filetypes = ["haskell"] +roots = ["Setup.hs", "stack.yaml", "*.cabal", "cabal.project", "hie.yaml"] +command = "ghcide" +args = ["--lsp"] +``` + +## Hacking on ghcide + +To build and work on `ghcide` itself, you should use cabal; e.g., +running `cabal test` will execute the test suite. You can use `stack test` too, but +note that some tests will fail, and none of the maintainers are currently using `stack`. + +If you are using Nix, there is a Cachix nix-shell cache for all the supported platforms: `cachix use haskell-ghcide`. + +If you are using Windows, you should disable the `core.autocrlf` git setting and configure your editor to use LF line endings, either directly or by making it use the existing `.editorconfig`. + +If you are chasing down test failures, you can use the tasty-rerun feature by running tests as + + cabal test --test-options="--rerun" + +This writes a log file called `.tasty-rerun-log` of the failures, and only runs those. +See the [tasty-rerun](https://hackage.haskell.org/package/tasty-rerun-1.1.17/docs/Test-Tasty-Ingredients-Rerun.html) documentation for other options. + +If you are touching performance-sensitive code, take the time to run a differential +benchmark between HEAD and master using the benchHist script. This assumes that +"master" points to the upstream master. + +Run the benchmarks with `cabal bench`. + +It should take around 15 minutes and the results will be stored in the `bench-results` folder. To interpret the results, see the comments in the `bench/hist/Main.hs` module. + +More details are in [bench/README](bench/README.md). + + +## History and relationship to other Haskell IDEs + +The teams behind this project and the [`haskell-ide-engine`](https://github.com/haskell/haskell-ide-engine#readme) have agreed to join forces under the [`haskell-language-server` project](https://github.com/haskell/haskell-language-server); see the [original announcement](https://neilmitchell.blogspot.com/2020/01/one-haskell-ide-to-rule-them-all.html). The technical work is ongoing, with the likely model being that this project serves as the core, while plugins and integrations are kept in the [`haskell-language-server` project](https://github.com/haskell/haskell-language-server). + +The code behind `ghcide` was originally developed by [Digital Asset](https://digitalasset.com/) as part of the [DAML programming language](https://github.com/digital-asset/daml). DAML is a smart contract language targeting distributed-ledger runtimes, based on [GHC](https://www.haskell.org/ghc/) with custom language extensions.
The DAML programming language has [an IDE](https://webide.daml.com/), and work was done to separate off a reusable Haskell-only IDE (what is now `ghcide`) which the [DAML IDE then builds upon](https://github.com/digital-asset/daml/tree/master/compiler/damlc). Since that time, there have been various [non-Digital Asset contributors](https://github.com/haskell/ghcide/graphs/contributors), in addition to continued investment by Digital Asset. The project has been handed over to Haskell.org as of September 2020. + +The Haskell community [has](https://github.com/DanielG/ghc-mod) [various](https://github.com/chrisdone/intero) [IDE](https://github.com/rikvdkleij/intellij-haskell) [choices](http://leksah.org/), but the one that had been gathering momentum is [`haskell-ide-engine`](https://github.com/haskell/haskell-ide-engine#readme). Our project owes a debt of gratitude to the `haskell-ide-engine`. We reuse libraries from their ecosystem, including [`hie-bios`](https://github.com/mpickering/hie-bios#readme) (a likely future environment setup layer in `haskell-ide-engine`), [`haskell-lsp`](https://github.com/alanz/haskell-lsp#readme) and [`lsp-test`](https://github.com/bubba/lsp-test#readme) (the `haskell-ide-engine` [LSP protocol](https://microsoft.github.io/language-server-protocol/) pieces). We make heavy use of their contributions to GHC itself, in particular the work to make GHC take string buffers rather than files. + +The best summary of the architecture of `ghcide` is available [this talk](https://www.youtube.com/watch?v=cijsaeWNf2E&list=PLxxF72uPfQVRdAsvj7THoys-nVj-oc4Ss) ([slides](https://ndmitchell.com/downloads/slides-making_a_haskell_ide-07_sep_2019.pdf)), given at [MuniHac 2019](https://munihac.de/2019.html). However, since that talk the project has renamed from `hie-core` to `ghcide`, and the repo has moved to [this location](https://github.com/haskell/ghcide/). diff --git a/ghcide/azure-pipelines.yml b/ghcide/azure-pipelines.yml new file mode 100644 index 00000000000..4021f118fc0 --- /dev/null +++ b/ghcide/azure-pipelines.yml @@ -0,0 +1,18 @@ +# Build master commits +trigger: + batch: false + branches: + include: + - master + - azure* + +# Enable PR triggers that target the master branch +pr: + autoCancel: true # cancel previous builds on push + branches: + include: + - master + +jobs: + - template: ./.azure/linux-stack.yml + - template: ./.azure/windows-stack.yml diff --git a/ghcide/bench-results/.artifactignore b/ghcide/bench-results/.artifactignore new file mode 100644 index 00000000000..326f663a2b9 --- /dev/null +++ b/ghcide/bench-results/.artifactignore @@ -0,0 +1,4 @@ +ghcide +ghcide-bench +ghcide-preprocessor +*.benchmark-gcStats diff --git a/ghcide/bench/README.md b/ghcide/bench/README.md new file mode 100644 index 00000000000..d3b3da1db37 --- /dev/null +++ b/ghcide/bench/README.md @@ -0,0 +1,15 @@ + +# Benchmarks + +This folder contains two Haskell programs that work together to simplify the +performance analysis of ghcide: + +- `exe/Main.hs` - a standalone benchmark runner. Run with `stack run ghcide-bench` +- `hist/Main.hs` - a Shake script for running the benchmark suite over a set of commits. + - Run with `stack bench` or `cabal bench`, + - Requires a `ghcide-bench` binary in the PATH (usually provided by stack/cabal), + - Calls `cabal` (or `stack`, configurable) internally to build the project, + - Driven by the `config.yaml` configuration file. + By default it compares HEAD with "master" + +Further details available in the config file and the module header comments. 
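For orientation, each experiment is in essence a short `lsp-test` session run against the chosen example project. The sketch below shows only the rough shape of such an experiment (an edit followed by a hover request, repeated and timed); it is not the actual `Experiments` module, and the project root, module path, position and edit are illustrative:

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- Rough shape of an "edit then hover" experiment: open a module, then
-- repeatedly insert a trivial edit and request hover, timing the whole loop.
import Control.Monad (replicateM_)
import Control.Monad.IO.Class (liftIO)
import Data.Time.Clock (diffUTCTime, getCurrentTime)
import Language.Haskell.LSP.Test
import Language.Haskell.LSP.Types
import Language.Haskell.LSP.Types.Capabilities (fullCaps)

main :: IO ()
main = runSession "ghcide --lsp" fullCaps "path-to-example" $ do
  doc   <- openDoc "Distribution/Simple.hs" "haskell"  -- illustrative module
  start <- liftIO getCurrentTime
  replicateM_ 100 $ do   -- roughly what `samples` controls in config.yaml
    changeDoc doc [ TextDocumentContentChangeEvent
                      (Just (Range (Position 0 0) (Position 0 0))) Nothing "-- edit\n" ]
    _ <- getHover doc (Position 120 30)
    pure ()
  end <- liftIO getCurrentTime
  liftIO $ print (diffUTCTime end start)
```

The real runner (`ghcide-bench`) adds option parsing, GC statistics and CSV output on top of this basic loop.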
diff --git a/ghcide/bench/config.yaml b/ghcide/bench/config.yaml new file mode 100644 index 00000000000..ef593adbdbc --- /dev/null +++ b/ghcide/bench/config.yaml @@ -0,0 +1,59 @@ +# The number of samples to run per experiment. +# At least 100 is recommended in order to observe space leaks +samples: 100 + +buildTool: cabal + +# Output folder for the experiments +outputFolder: bench-results + +# Example project used to run the experiments +# Can either be a Hackage package (name,version) +# or a local project (path) with a valid `hie.yaml` file +examples: + # Medium-sized project without TH + - name: Cabal + version: 3.0.0.0 + module: Distribution/Simple.hs + # Small-sized project with TH + - name: haskell-lsp-types + version: 0.22.0.0 + module: src/Language/Haskell/LSP/Types/Lens.hs +# - path: path-to-example +# module: path-to-module + +# The set of experiments to execute +experiments: + - hover + - edit + - getDefinition + - "hover after edit" + - "completions after edit" + - "code actions" + - "code actions after edit" + - "documentSymbols after edit" + +# An ordered list of versions to analyze +versions: +# A version can be defined briefly: +# - +# - +# - + +# Or in extended form, where all the fields are optional: +# - : +# git: +# include: true # whether to include in comparison graphs +# parent: # version to compare with in .diff graphs + + +# - v0.0.5 +# - v0.0.6 +# - v0.1.0 +# - v0.2.0 +# - v0.3.0 +# - v0.4.0 +# - v0.5.0 +# - v0.6.0 +# - upstream: origin/master +- HEAD diff --git a/ghcide/bench/exe/Main.hs b/ghcide/bench/exe/Main.hs new file mode 100644 index 00000000000..9b9ae1fac0d --- /dev/null +++ b/ghcide/bench/exe/Main.hs @@ -0,0 +1,50 @@ +{- An automated benchmark built around the simple experiment described in: + + > https://neilmitchell.blogspot.com/2020/05/fixing-space-leaks-in-ghcide.html + + As an example project, it unpacks Cabal-3.2.0.0 in the local filesystem and + loads the module 'Distribution.Simple'. The rationale for this choice is: + + - It's convenient to download with `cabal unpack Cabal-3.2.0.0` + - It has very few dependencies, and all are already needed to build ghcide + - Distribution.Simple has 235 transitive module dependencies, so non trivial + + The experiments are sequences of lsp commands scripted using lsp-test. + A more refined approach would be to record and replay real IDE interactions, + once the replay functionality is available in lsp-test. + A more declarative approach would be to reuse ide-debug-driver: + + > https://github.com/digital-asset/daml/blob/master/compiler/damlc/ide-debug-driver/README.md + + The result of an experiment is a total duration in seconds after a preset + number of iterations. There is ample room for improvement: + - Statistical analysis to detect outliers and auto infer the number of iterations needed + - GC stats analysis (currently -S is printed as part of the experiment) + - Analyisis of performance over the commit history of the project + + How to run: + 1. `cabal exec cabal run ghcide-bench -- -- ghcide-bench-options` + 1. `stack build ghcide:ghcide-bench && stack exec ghcide-bench -- -- ghcide-bench-options` + + Note that the package database influences the response times of certain actions, + e.g. code actions, and therefore the two methods above do not necessarily + produce the same results. 
+ + -} + +{-# LANGUAGE ImplicitParams #-} + +import Control.Exception.Safe +import Experiments +import Options.Applicative + +main :: IO () +main = do + config <- execParser $ info (configP <**> helper) fullDesc + let ?config = config + + output "starting test" + + SetupResult{..} <- setup + + runBenchmarks experiments `finally` cleanUp diff --git a/ghcide/bench/hist/Main.hs b/ghcide/bench/hist/Main.hs new file mode 100644 index 00000000000..2a9956631cb --- /dev/null +++ b/ghcide/bench/hist/Main.hs @@ -0,0 +1,147 @@ +{- Bench history + + A Shake script to analyze the performance of ghcide over the git history of the project + + Driven by a config file `bench/config.yaml` containing the list of Git references to analyze. + + Builds each one of them and executes a set of experiments using the ghcide-bench suite. + + The results of the benchmarks and the analysis are recorded in the file + system with the following structure: + + bench-results + ├── + │  ├── ghc.path - path to ghc used to build the binary + │  ├── ghcide - binary for this version + ├─ + │ ├── results.csv - aggregated results for all the versions + │ └── + │   ├── .benchmark-gcStats - RTS -s output + │   ├── .csv - stats for the experiment + │   ├── .svg - Graph of bytes over elapsed time + │   ├── .diff.svg - idem, including the previous version + │   ├── .log - ghcide-bench output + │   └── results.csv - results of all the experiments for the example + ├── results.csv - aggregated results of all the experiments and versions + └── .svg - graph of bytes over elapsed time, for all the included versions + + For diff graphs, the "previous version" is the preceding entry in the list of versions + in the config file. A possible improvement is to obtain this info via `git rev-list`. + + To execute the script: + + > cabal/stack bench + + To build a specific analysis, enumerate the desired file artifacts + + > stack bench --ba "bench-results/HEAD/results.csv bench-results/HEAD/edit.diff.svg" + > cabal bench --benchmark-options "bench-results/HEAD/results.csv bench-results/HEAD/edit.diff.svg" + + -} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies#-} +{-# LANGUAGE TypeFamilies #-} +{-# OPTIONS -Wno-orphans #-} + +import Data.Foldable (find) +import Data.Yaml (FromJSON (..), decodeFileThrow) +import Development.Benchmark.Rules +import Development.Shake +import Experiments.Types (Example, exampleToOptions) +import qualified Experiments.Types as E +import GHC.Generics (Generic) +import Numeric.Natural (Natural) + + +config :: FilePath +config = "bench/config.yaml" + +-- | Read the config without dependency +readConfigIO :: FilePath -> IO (Config BuildSystem) +readConfigIO = decodeFileThrow + +instance IsExample Example where getExampleName = E.getExampleName +type instance RuleResult GetExample = Maybe Example +type instance RuleResult GetExamples = [Example] + +main :: IO () +main = shakeArgs shakeOptions {shakeChange = ChangeModtimeAndDigest} $ do + createBuildSystem $ \resource -> do + configStatic <- liftIO $ readConfigIO config + let build = outputFolder configStatic + buildRules build ghcideBuildRules + benchRules build resource (MkBenchRules (benchGhcide $ samples configStatic) "ghcide") + csvRules build + svgRules build + action $ allTargets build + +ghcideBuildRules :: MkBuildRules BuildSystem +ghcideBuildRules = MkBuildRules findGhcForBuildSystem "ghcide" buildGhcide + +-------------------------------------------------------------------------------- + +data Config buildSystem = Config + { experiments :: 
[Unescaped String], + examples :: [Example], + samples :: Natural, + versions :: [GitCommit], + -- | Output folder ('foo' works, 'foo/bar' does not) + outputFolder :: String, + buildTool :: buildSystem + } + deriving (Generic, Show) + deriving anyclass (FromJSON) + +createBuildSystem :: (Resource -> Rules a) -> Rules a +createBuildSystem userRules = do + readConfig <- newCache $ \fp -> need [fp] >> liftIO (readConfigIO fp) + + _ <- addOracle $ \GetExperiments {} -> experiments <$> readConfig config + _ <- addOracle $ \GetVersions {} -> versions <$> readConfig config + _ <- addOracle $ \GetExamples{} -> examples <$> readConfig config + _ <- addOracle $ \(GetExample name) -> find (\e -> getExampleName e == name) . examples <$> readConfig config + _ <- addOracle $ \GetBuildSystem {} -> buildTool <$> readConfig config + + benchResource <- newResource "ghcide-bench" 1 + + userRules benchResource + +-------------------------------------------------------------------------------- + +buildGhcide :: BuildSystem -> [CmdOption] -> FilePath -> Action () +buildGhcide Cabal args out = do + command_ args "cabal" + ["install" + ,"exe:ghcide" + ,"--installdir=" ++ out + ,"--install-method=copy" + ,"--overwrite-policy=always" + ,"--ghc-options=-rtsopts" + ] + +buildGhcide Stack args out = + command_ args "stack" + ["--local-bin-path=" <> out + ,"build" + ,"ghcide:ghcide" + ,"--copy-bins" + ,"--ghc-options=-rtsopts" + ] + +benchGhcide + :: Natural -> BuildSystem -> [CmdOption] -> BenchProject Example -> Action () +benchGhcide samples buildSystem args BenchProject{..} = + command_ args "ghcide-bench" $ + [ "--timeout=3000", + "-v", + "--samples=" <> show samples, + "--csv=" <> outcsv, + "--ghcide=" <> exePath, + "--select", + unescaped (unescapeExperiment experiment) + ] ++ + exampleToOptions example ++ + [ "--stack" | Stack == buildSystem + ] ++ + exeExtraArgs + diff --git a/ghcide/bench/lib/Experiments.hs b/ghcide/bench/lib/Experiments.hs new file mode 100644 index 00000000000..84ad2eaa427 --- /dev/null +++ b/ghcide/bench/lib/Experiments.hs @@ -0,0 +1,484 @@ +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE ImplicitParams #-} +{-# LANGUAGE ImpredicativeTypes #-} + +module Experiments +( Bench(..) +, BenchRun(..) +, Config(..) +, Verbosity(..) +, CabalStack(..) +, SetupResult(..) +, Example(..) 
+, experiments +, configP +, defConfig +, output +, setup +, runBench +, exampleToOptions +) where +import Control.Applicative.Combinators (skipManyTill) +import Control.Concurrent +import Control.Exception.Safe +import Control.Monad.Extra +import Control.Monad.IO.Class +import Data.Aeson (Value(Null)) +import Data.Char (isDigit) +import Data.List +import Data.Maybe +import qualified Data.Text as T +import Data.Version +import Development.IDE.Plugin.Test +import Experiments.Types +import Language.Haskell.LSP.Test +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Capabilities +import Numeric.Natural +import Options.Applicative +import System.Directory +import System.Environment.Blank (getEnv) +import System.FilePath ((), (<.>)) +import System.Process +import System.Time.Extra +import Text.ParserCombinators.ReadP (readP_to_S) + +hygienicEdit :: (?hygienicP :: Position) => TextDocumentContentChangeEvent +hygienicEdit = + TextDocumentContentChangeEvent + { _range = Just (Range ?hygienicP ?hygienicP), + _rangeLength = Nothing, + _text = " " + } + +breakingEdit :: (?identifierP :: Position) => TextDocumentContentChangeEvent +breakingEdit = + TextDocumentContentChangeEvent + { _range = Just (Range ?identifierP ?identifierP), + _rangeLength = Nothing, + _text = "a" + } + +-- | Experiments have access to these special positions: +-- - hygienicP points to a string in the target file, convenient for hygienic edits +-- - identifierP points to the middle of an identifier, convenient for goto-def, hover and completions +type HasPositions = (?hygienicP :: Position, ?identifierP :: Position) + +experiments :: [Bench] +experiments = + [ --------------------------------------------------------------------------------------- + bench "hover" 10 $ \doc -> + isJust <$> getHover doc ?identifierP, + --------------------------------------------------------------------------------------- + bench "edit" 10 $ \doc -> do + changeDoc doc [hygienicEdit] + waitForProgressDone + return True, + --------------------------------------------------------------------------------------- + bench "hover after edit" 10 $ \doc -> do + changeDoc doc [hygienicEdit] + isJust <$> getHover doc ?identifierP, + --------------------------------------------------------------------------------------- + bench "getDefinition" 10 $ \doc -> + not . null <$> getDefinitions doc ?identifierP, + --------------------------------------------------------------------------------------- + bench "documentSymbols" 100 $ + fmap (either (not . null) (not . null)) . getDocumentSymbols, + --------------------------------------------------------------------------------------- + bench "documentSymbols after edit" 100 $ \doc -> do + changeDoc doc [hygienicEdit] + either (not . null) (not . null) <$> getDocumentSymbols doc, + --------------------------------------------------------------------------------------- + bench "completions after edit" 10 $ \doc -> do + changeDoc doc [hygienicEdit] + not . null <$> getCompletions doc ?identifierP, + --------------------------------------------------------------------------------------- + benchWithSetup + "code actions" + 10 + ( \doc -> do + changeDoc doc [breakingEdit] + waitForProgressDone + return ?identifierP + ) + ( \p doc -> do + not . 
null <$> getCodeActions doc (Range p p) + ), + --------------------------------------------------------------------------------------- + benchWithSetup + "code actions after edit" + 10 + ( \doc -> do + changeDoc doc [breakingEdit] + return ?identifierP + ) + ( \p doc -> do + changeDoc doc [hygienicEdit] + waitForProgressDone + -- NOTE ghcide used to clear and reinstall the diagnostics here + -- new versions no longer do, but keep this logic around + -- to benchmark old versions sucessfully + diags <- getCurrentDiagnostics doc + when (null diags) $ + whileM (null <$> waitForDiagnostics) + not . null <$> getCodeActions doc (Range p p) + ) + ] + +--------------------------------------------------------------------------------------------- + +exampleModulePath :: HasConfig => FilePath +exampleModulePath = exampleModule (example ?config) + +examplesPath :: FilePath +examplesPath = "bench/example" + +defConfig :: Config +Success defConfig = execParserPure defaultPrefs (info configP fullDesc) [] + +quiet, verbose :: Config -> Bool +verbose = (== All) . verbosity +quiet = (== Quiet) . verbosity + +type HasConfig = (?config :: Config) + +configP :: Parser Config +configP = + Config + <$> (flag' All (short 'v' <> long "verbose") + <|> flag' Quiet (short 'q' <> long "quiet") + <|> pure Normal + ) + <*> optional (strOption (long "shake-profiling" <> metavar "PATH")) + <*> optional (strOption (long "ot-profiling" <> metavar "DIR" <> help "Enable OpenTelemetry and write eventlog for each benchmark in DIR")) + <*> strOption (long "csv" <> metavar "PATH" <> value "results.csv" <> showDefault) + <*> flag Cabal Stack (long "stack" <> help "Use stack (by default cabal is used)") + <*> many (strOption (long "ghcide-options" <> help "additional options for ghcide")) + <*> many (strOption (short 's' <> long "select" <> help "select which benchmarks to run")) + <*> optional (option auto (long "samples" <> metavar "NAT" <> help "override sampling count")) + <*> strOption (long "ghcide" <> metavar "PATH" <> help "path to ghcide" <> value "ghcide") + <*> option auto (long "timeout" <> value 60 <> help "timeout for waiting for a ghcide response") + <*> ( GetPackage <$> strOption (long "example-package-name" <> value "Cabal") + <*> moduleOption + <*> option versionP (long "example-package-version" <> value (makeVersion [3,2,0,0])) + <|> + UsePackage <$> strOption (long "example-path") + <*> moduleOption + ) + where + moduleOption = strOption (long "example-module" <> metavar "PATH" <> value "Distribution/Simple.hs") + +versionP :: ReadM Version +versionP = maybeReader $ extract . readP_to_S parseVersion + where + extract parses = listToMaybe [ res | (res,"") <- parses] + +output :: (MonadIO m, HasConfig) => String -> m () +output = if quiet?config then (\_ -> pure ()) else liftIO . putStrLn + +--------------------------------------------------------------------------------------- + +type Experiment = TextDocumentIdentifier -> Session Bool + +data Bench = forall setup. 
+ Bench + { name :: !String, + enabled :: !Bool, + samples :: !Natural, + benchSetup :: HasPositions => TextDocumentIdentifier -> Session setup, + experiment :: HasPositions => setup -> Experiment + } + +select :: HasConfig => Bench -> Bool +select Bench {name, enabled} = + enabled && (null mm || name `elem` mm) + where + mm = matches ?config + +benchWithSetup :: + String -> + Natural -> + (HasPositions => TextDocumentIdentifier -> Session p) -> + (HasPositions => p -> Experiment) -> + Bench +benchWithSetup name samples benchSetup experiment = Bench {..} + where + enabled = True + +bench :: String -> Natural -> (HasPositions => Experiment) -> Bench +bench name defSamples userExperiment = + benchWithSetup name defSamples (const $ pure ()) experiment + where + experiment () = userExperiment + +runBenchmarksFun :: HasConfig => FilePath -> [Bench] -> IO () +runBenchmarksFun dir allBenchmarks = do + let benchmarks = [ b{samples = fromMaybe (samples b) (repetitions ?config) } + | b <- allBenchmarks + , select b ] + + whenJust (otMemoryProfiling ?config) $ \eventlogDir -> + createDirectoryIfMissing True eventlogDir + + results <- forM benchmarks $ \b@Bench{name} -> + let run = runSessionWithConfig conf (cmd name dir) lspTestCaps dir + in (b,) <$> runBench run b + + -- output raw data as CSV + let headers = + [ "name" + , "success" + , "samples" + , "startup" + , "setup" + , "userTime" + , "delayedTime" + , "totalTime" + , "maxResidency" + , "allocatedBytes"] + rows = + [ [ name, + show success, + show samples, + show startup, + show runSetup', + show userWaits, + show delayedWork, + show runExperiment, + show maxResidency, + show allocations + ] + | (Bench {name, samples}, BenchRun {..}) <- results, + let runSetup' = if runSetup < 0.01 then 0 else runSetup + ] + csv = unlines $ map (intercalate ", ") (headers : rows) + writeFile (outputCSV ?config) csv + + -- print a nice table + let pads = map (maximum . map length) (transpose (headers : rowsHuman)) + paddedHeaders = zipWith pad pads headers + outputRow = putStrLn . intercalate " | " + rowsHuman = + [ [ name, + show success, + show samples, + showDuration startup, + showDuration runSetup', + showDuration userWaits, + showDuration delayedWork, + showDuration runExperiment, + showMB maxResidency, + showMB allocations + ] + | (Bench {name, samples}, BenchRun {..}) <- results, + let runSetup' = if runSetup < 0.01 then 0 else runSetup + ] + outputRow paddedHeaders + outputRow $ (map . 
map) (const '-') paddedHeaders + forM_ rowsHuman $ \row -> outputRow $ zipWith pad pads row + where + cmd name dir = + unwords $ + [ ghcide ?config, + "--lsp", + "--test", + "--cwd", + dir + ] + ++ case otMemoryProfiling ?config of + Just dir -> ["-l", "-ol" ++ (dir map (\c -> if c == ' ' then '-' else c) name <.> "eventlog")] + Nothing -> [] + ++ [ "-RTS" ] + ++ ghcideOptions ?config + ++ concat + [ ["--shake-profiling", path] | Just path <- [shakeProfiling ?config] + ] + ++ ["--verbose" | verbose ?config] + ++ ["--ot-memory-profiling" | Just _ <- [otMemoryProfiling ?config]] + lspTestCaps = + fullCaps {_window = Just $ WindowClientCapabilities $ Just True} + conf = + defaultConfig + { logStdErr = verbose ?config, + logMessages = verbose ?config, + logColor = False, + messageTimeout = timeoutLsp ?config + } + +data BenchRun = BenchRun + { startup :: !Seconds, + runSetup :: !Seconds, + runExperiment :: !Seconds, + userWaits :: !Seconds, + delayedWork :: !Seconds, + success :: !Bool, + maxResidency :: !Int, + allocations :: !Int + } + +badRun :: BenchRun +badRun = BenchRun 0 0 0 0 0 False 0 0 + +waitForProgressDone :: Session () +waitForProgressDone = + void(skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification) + +runBench :: + (?config :: Config) => + (Session BenchRun -> IO BenchRun) -> + (HasPositions => Bench) -> + IO BenchRun +runBench runSess b = handleAny (\e -> print e >> return badRun) + $ runSess + $ do + doc <- openDoc exampleModulePath "haskell" + + -- Setup the special positions used by the experiments + lastLine <- length . T.lines <$> documentContents doc + changeDoc doc [TextDocumentContentChangeEvent + { _range = Just (Range (Position lastLine 0) (Position lastLine 0)) + , _rangeLength = Nothing + , _text = T.unlines + [ "_hygienic = \"hygienic\"" + , "_identifier = _hygienic" + ] + }] + let + -- Points to a string in the target file, + -- convenient for hygienic edits + ?hygienicP = Position lastLine 15 + let + -- Points to the middle of an identifier, + -- convenient for requesting goto-def, hover and completions + ?identifierP = Position (lastLine+1) 15 + + case b of + Bench{..} -> do + (startup, _) <- duration $ do + waitForProgressDone + -- wait again, as the progress is restarted once while loading the cradle + -- make an edit, to ensure this doesn't block + changeDoc doc [hygienicEdit] + waitForProgressDone + + liftIO $ output $ "Running " <> name <> " benchmark" + (runSetup, userState) <- duration $ benchSetup doc + let loop !userWaits !delayedWork 0 = return $ Just (userWaits, delayedWork) + loop !userWaits !delayedWork n = do + (t, res) <- duration $ experiment userState doc + if not res + then return Nothing + else do + output (showDuration t) + -- Wait for the delayed actions to finish + waitId <- sendRequest (CustomClientMethod "test") WaitForShakeQueue + (td, resp) <- duration $ skipManyTill anyMessage $ responseForId waitId + case resp of + ResponseMessage{_result=Right Null} -> do + loop (userWaits+t) (delayedWork+td) (n -1) + _ -> + -- Assume a ghcide build lacking the WaitForShakeQueue command + loop (userWaits+t) delayedWork (n -1) + + (runExperiment, result) <- duration $ loop 0 0 samples + let success = isJust result + (userWaits, delayedWork) = fromMaybe (0,0) result + gcStats = escapeSpaces (name <> ".benchmark-gcStats") + + -- sleep to give ghcide a chance to GC + liftIO $ threadDelay 1100000 + + (maxResidency, allocations) <- liftIO $ + ifM (doesFileExist gcStats) + (parseMaxResidencyAndAllocations <$> readFile gcStats) + (pure 
(0,0)) + + return BenchRun {..} + +data SetupResult = SetupResult { + runBenchmarks :: [Bench] -> IO (), + -- | Path to the setup benchmark example + benchDir :: FilePath, + cleanUp :: IO () +} + +setup :: HasConfig => IO SetupResult +setup = do + alreadyExists <- doesDirectoryExist examplesPath + when alreadyExists $ removeDirectoryRecursive examplesPath + benchDir <- case example ?config of + UsePackage{..} -> return examplePath + GetPackage{..} -> do + let path = examplesPath </> package + package = exampleName <> "-" <> showVersion exampleVersion + case buildTool ?config of + Cabal -> do + callCommand $ "cabal get -v0 " <> package <> " -d " <> examplesPath + writeFile + (path </> "hie.yaml") + ("cradle: {cabal: {component: " <> exampleName <> "}}") + -- Need this in case there is a parent cabal.project somewhere + writeFile + (path </> "cabal.project") + "packages: ." + writeFile + (path </> "cabal.project.local") + "" + Stack -> do + callCommand $ "stack --silent unpack " <> package <> " --to " <> examplesPath + -- Generate the stack descriptor to match the one used to build ghcide + stack_yaml <- fromMaybe "stack.yaml" <$> getEnv "STACK_YAML" + stack_yaml_lines <- lines <$> readFile stack_yaml + writeFile (path </> stack_yaml) + (unlines $ + "packages: [.]" : + [ l + | l <- stack_yaml_lines + , any (`isPrefixOf` l) + ["resolver" + ,"allow-newer" + ,"compiler"] + ] + ) + + writeFile + (path </> "hie.yaml") + ("cradle: {stack: {component: " <> show (exampleName <> ":lib") <> "}}") + return path + + whenJust (shakeProfiling ?config) $ createDirectoryIfMissing True + + let cleanUp = case example ?config of + GetPackage{} -> removeDirectoryRecursive examplesPath + UsePackage{} -> return () + + runBenchmarks = runBenchmarksFun benchDir + + return SetupResult{..} + +-------------------------------------------------------------------------------------------- + +-- Parse the max residency and allocations in RTS -s output +parseMaxResidencyAndAllocations :: String -> (Int, Int) +parseMaxResidencyAndAllocations input = + (f "maximum residency", f "bytes allocated in the heap") + where + inps = reverse $ lines input + f label = case find (label `isInfixOf`) inps of + Just l -> read $ filter isDigit $ head $ words l + Nothing -> -1 + +escapeSpaces :: String -> String +escapeSpaces = map f + where + f ' ' = '_' + f x = x + +pad :: Int -> String -> String +pad n [] = replicate n ' ' +pad 0 _ = error "pad" +pad n (x:xx) = x : pad (n-1) xx + +showMB :: Int -> String +showMB x = show (x `div` 2^(20::Int)) <> "MB" diff --git a/ghcide/bench/lib/Experiments/Types.hs b/ghcide/bench/lib/Experiments/Types.hs new file mode 100644 index 00000000000..350f89ad949 --- /dev/null +++ b/ghcide/bench/lib/Experiments/Types.hs @@ -0,0 +1,69 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE OverloadedStrings #-} +module Experiments.Types (module Experiments.Types ) where + +import Data.Aeson +import Data.Version +import Numeric.Natural +import System.FilePath (isPathSeparator) +import Development.Shake.Classes +import GHC.Generics + +data CabalStack = Cabal | Stack + deriving (Eq, Show) + +data Verbosity = Quiet | Normal | All + deriving (Eq, Show) +data Config = Config + { verbosity :: !Verbosity, + -- For some reason, the Shake profile files are truncated and won't load + shakeProfiling :: !(Maybe FilePath), + otMemoryProfiling :: !(Maybe FilePath), + outputCSV :: !FilePath, + buildTool :: !CabalStack, + ghcideOptions :: ![String], + matches :: ![String], + repetitions :: Maybe Natural, + ghcide :: 
FilePath, + timeoutLsp :: Int, + example :: Example + } + deriving (Eq, Show) + +data Example + = GetPackage {exampleName, exampleModule :: String, exampleVersion :: Version} + | UsePackage {examplePath :: FilePath, exampleModule :: String} + deriving (Eq, Generic, Show) + deriving anyclass (Binary, Hashable, NFData) + +getExampleName :: Example -> String +getExampleName UsePackage{examplePath} = map replaceSeparator examplePath + where + replaceSeparator x + | isPathSeparator x = '_' + | otherwise = x +getExampleName GetPackage{exampleName, exampleVersion} = + exampleName <> "-" <> showVersion exampleVersion + +instance FromJSON Example where + parseJSON = withObject "example" $ \x -> do + exampleModule <- x .: "module" + path <- x .:? "path" + case path of + Just examplePath -> return UsePackage{..} + Nothing -> do + exampleName <- x .: "name" + exampleVersion <- x .: "version" + return GetPackage {..} + +exampleToOptions :: Example -> [String] +exampleToOptions GetPackage{..} = + ["--example-package-name", exampleName + ,"--example-package-version", showVersion exampleVersion + ,"--example-module", exampleModule + ] +exampleToOptions UsePackage{..} = + ["--example-path", examplePath + ,"--example-module", exampleModule + ] diff --git a/ghcide/cbits/getmodtime.c b/ghcide/cbits/getmodtime.c new file mode 100644 index 00000000000..0973b52d0d1 --- /dev/null +++ b/ghcide/cbits/getmodtime.c @@ -0,0 +1,21 @@ +// Copyright (c) 2019 The DAML Authors. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +#include <sys/stat.h> +#include <time.h> +int getmodtime(const char* pathname, time_t* sec, long* nsec) { + struct stat s; + int r = stat(pathname, &s); + if (r != 0) { + return r; + } +#ifdef __APPLE__ + *sec = s.st_mtimespec.tv_sec; + *nsec = s.st_mtimespec.tv_nsec; +#else + *sec = s.st_mtim.tv_sec; + *nsec = s.st_mtim.tv_nsec; +#endif + return 0; +} + diff --git a/ghcide/docs/Setup.md b/ghcide/docs/Setup.md new file mode 100644 index 00000000000..d53c6e24d30 --- /dev/null +++ b/ghcide/docs/Setup.md @@ -0,0 +1,145 @@ +# Setup Troubleshooting + +This page serves as a dumping ground for setup problems and their resolutions. We recommend everyone first runs `ghcide` on the console to check what files in their project load, and only then moves on to using `ghcide` through an editor (e.g. VS Code). + +## "mismatched interface file versions" + +If you see a problem such as: + +```console +File: ./test/Spec.hs +Range: 1:0-1:0 +Source: typecheck +Severity: DsError +Message: + test/Spec.hs:1:1: error: + Bad interface file: + /Users/daml/.stack/programs/x86_64-osx/ghc-8.6.4/lib/ghc-8.6.4/base-4.12.0.0/Prelude.hi + mismatched interface file versions (wanted "8065", got "8064") +``` + +The cause is that your program is configured to use a different GHC to the one you built `ghcide` with. In `ghcide` you can view the version number it was compiled with on the first line as: + +```console +ghcide version: 0.0.3 (GHC: 8.6.5) +``` + +You can see the version of GHC being used by this project in the second-last line of the output with `ghc-8.6.4/`, or in the mismatched interface versions of wanted `8065` (aka 8.6.5), got `8064` (aka 8.6.4). The solution is to use the same GHC version in both places. 
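If you want to script this check, a small standalone program along the following lines compares the two versions. It is only a sketch: it relies on `ghcide --version` and `ghc --numeric-version` being on the PATH, and on the version banner format shown above.

```haskell
-- Minimal sketch: check that the ghcide on PATH and the project's ghc agree on
-- the GHC version. The banner parsing is deliberately loose; we only test that
-- ghc's numeric version occurs somewhere in ghcide's version banner.
module Main (main) where

import Data.List (isInfixOf)
import System.Process (readProcess)

main :: IO ()
main = do
  banner     <- readProcess "ghcide" ["--version"] ""   -- e.g. "ghcide version: 0.0.3 (GHC: 8.6.5)"
  projectGhc <- filter (/= '\n') <$> readProcess "ghc" ["--numeric-version"] ""
  if projectGhc `isInfixOf` banner
    then putStrLn $ "OK: ghcide and the project both use GHC " ++ projectGhc
    else putStrLn $ "Mismatch: project GHC is " ++ projectGhc ++ ", but " ++ banner
```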
+ +## “failed to load interface for ‘…’ There are files missing” + +If you see a problem such as: + +```console +File: ./src/File/FileStream.hs +Range: 1:0-100001:0 +Source: typecheck +Severity: DsError +Message: + Program error: Failed to load interface for ‘Data.DList’ +Files that failed: + There are files missing in the ‘dlist-0.8.0.7’ package, + * ./src/File/FileStream.hs + try running 'ghc-pkg check'. + Use -v to see a list of the files searched for. +``` + +It might be caused by `ghcide` picking up the wrong cradle. In +particular, this has been observed when running in a `nix-shell` where +`ghcide` picked up the default cradle. Try setting the cradle +explicitly, e.g., to use the cabal cradle create a `hie.yaml` file +with the following content: + +``` +cradle: {cabal: {component: "mylibrary"}} +``` + +If you are using stack, find the list of names you can use: + + $ stack ide targets + mypackage:lib + mypackage:exe:mypackage-exe + mypackage:test:mypackage-test + +and create a `hie.yaml` file as follows: + + {stack: {component: "mypackage:lib"}} + +## ghc: readCreateProcess: does not exist + +On Linux: try `stack exec ghcide`` instead of `ghcide` directly. + +I was getting this in Windows: `ghcide.exe: ghc: readCreateProcess: does not exist (No such file or directory)` + +And we figured a hack around for this: + +VSCode user or workspace settings, add these: + + "hic.executablePath": "stack", + "hic.arguments": "exec ghcide -- --lsp" + +Since I use stack. Required if you don't have a `ghc` on your path. + +## Could not find module ... + +Try adding an explicit `hie.yaml` file and see if that helps. + +## Ambiguous main module + +```console +$ stack exec ghcide + +... + +ghcide: CradleError (ExitFailure 1) ["Failed to parse result of calling stack","","* * * * * * * *","The main module to load is ambiguous. Candidates are: ","1. Package `mypackage' component mypackage:exe:mypackage-exe with main-is file: /home/user/mypackage/app/Main.hs","2. Package `mypackage' component mypackage:exe:otherbin-exe with main-is file: /home/user/mypackage/app/otherbin.hs","You can specify which one to pick by: "," * Specifying targets to stack ghci e.g. stack ghci mypackage:exe:mypackage-exe"," * Specifying what the main is e.g. stack ghci --main-is mypackage:exe:mypackage-exe"," * Choosing from the candidate above [1..2]","* * * * * * * *","",": hGetLine: end of file"] +``` + +Add a `hie.yaml` file to specify the module, e.g. + + cradle: {stack: {component: "mypackage:exe:mypackage-exe"}} + +## Works in `ghcide` but not my editor + +Does `ghcide` alone work on the console? Did you first enter a Nix shell? Or run `stack exec ghcide`? If so, there are two options: + +1. Run your editor via the same mechanism, e.g. `stack exec code`. +2. Change the extension to use the executable as `stack` and the arguments as `exec -- ghcide --lsp`. + +## Issues with Nix + +If you are using packages installed by Nix, then often Nix will set `NIX_GHC_LIBDIR` to say where the libraries are installed. `ghcide` can cope with that. However, sometimes the `ghc` on your shell will actually be a shell script that sets `NIX_GHC_LIBDIR`, which `ghcide` can't find. If that happens, you need to either set `NIX_GHC_LIBDIR` (so `ghcide` can see it) or use a proper [Nix compatible wrapper](https://github.com/hercules-ci/ghcide-nix) over `ghcide`. + +## ghcide: this operation requires -fexternal-interpreter + +This can happen if you have a GHC compiled without GHC library support. 
This seems to be [the case](https://github.com/input-output-hk/haskell.nix/issues/313) with `haskell.nix` at the moment. + +## Symbol’s value as variable is void: capability + +As described [here](https://github.com/emacs-lsp/lsp-mode/issues/770#issuecomment-483540119) and [here](https://github.com/emacs-lsp/lsp-mode/issues/517#issuecomment-445448700), the default installation of `lsp-mode`, `lsp-ui`, `lsp-ui-mode` and `lsp-haskell` as described in [ghcide's "Using with Emacs" section](https://github.com/haskell/ghcide/#using-with-emacs) may result in the following error message: + +``` +Symbol’s value as variable is void: capability +``` + +This can be caused by either an old version of the Emacs package `dash`, or a cached `.elc` file for an old version. A fix consists of (re)moving the old package from ~/.emacs.d/elpa/ and installing it again, e.g. via M-x `package-list-packages` RET and M-x `package-install` RET `dash` RET. If this is not enough, + +``` +find ~/.emacs.d -name '*.elc' -exec rm {} \; +``` + +(which causes recompilation of all bytecode-compiled scripts.) + + +## Docker stack builds + +You're likely to see `ghcide: (ExitFailure 1,"","")`. Because ghcide can't get at the ghc installed inside Docker, your best bet is to `stack exec ghcide` and make sure `ghcide` is installed within the container. Full details at [issue 221](https://github.com/haskell/ghcide/issues/221). + +## stty error on Windows + Stack + +If you get an error like: + +``` +ghcide.exe: CradleError (ExitFailure 1) ["Failed to parse result of calling stack","'stty' is not recognized as an internal or external command,","operable program or batch file." +``` + +It is fixed in stack-2.3.1, so upgrading your stack installation is the recommended action. However, there is a workaround for earlier versions described here: https://github.com/haskell/haskell-ide-engine/issues/1428#issuecomment-547530794. diff --git a/ghcide/docs/opentelemetry.md b/ghcide/docs/opentelemetry.md new file mode 100644 index 00000000000..81c915a243b --- /dev/null +++ b/ghcide/docs/opentelemetry.md @@ -0,0 +1,66 @@ +# Using opentelemetry + +`ghcide` has support for opentelemetry-based tracing. This allows for tracing +the execution of the process, seeing when Shake rules fire and for how long they +run, when LSP messages are received, and (currently WIP) measuring the memory +occupancy of different objects in memory. + +## Capture opentelemetry data + +Capturing of opentelemetry data can be enabled by first building ghcide with eventlog support: + +```sh +stack build --ghc-options -eventlog +``` + +Then, you can run `ghcide`, giving it a file to dump eventlog information into. + +```sh +ghcide +RTS -l -ol ghcide.eventlog -RTS +``` + +You can also optionally enable reporting detailed memory data with `--ot-memory-profiling`: + +```sh +ghcide --ot-memory-profiling +RTS -A4G -l -ol ghcide.eventlog -RTS +``` + +*Note:* This option, while functional, is extremely slow. You will notice this because the memory graph in the output will have datapoints spaced apart by a couple of minutes. The nursery must be big enough (-A1G or larger) or the measurements will self-abort. 
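Beyond the built-in instrumentation, the same mechanism can be used to emit custom spans from your own Haskell code so that they show up alongside ghcide's spans. The sketch below is illustrative only: it assumes the `OpenTelemetry.Eventlog` interface of the `opentelemetry` package that ghcide depends on, and the exact function names and signatures may differ between versions.

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- Illustrative only: emit a custom span to the eventlog. Compile with -eventlog
-- and run with +RTS -l, then convert the eventlog as described below.
module Main (main) where

import Control.Concurrent (threadDelay)
import OpenTelemetry.Eventlog (setTag, withSpan)

main :: IO ()
main = withSpan "my-expensive-step" $ \sp -> do
  setTag sp "input" "example"   -- arbitrary key/value metadata attached to the span
  threadDelay 100000            -- stand-in for the real work being measured
```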
+ +## Viewing with tracy + +After installing `opentelemetry-extra` and `tracy`, you can view the opentelemetry output: + +```sh +eventlog-to-tracy ghcide.eventlog +``` + +If everything has been set up correctly, this should open a tracy window with the tracing data you captured. + +### Installing opentelemetry-extra + +This package includes a number of binaries for converting between the eventlog output and the formats that various opentelemetry viewers (like tracy) can display: + +```sh +cabal install opentelemetry-extra +``` + + + +### Building tracy + +1. Install the dependencies: `pkg-config` and `glfw, freetype, capstone, GTK3`, along + with their header files (`-dev` on most distros. On Arch the header + files are included with the normal packages). +2. Download tracy from https://github.com/wolfpld/tracy +3. `cd` into the directory containing the source you downloaded +4. Build the `import-chrome` and `Tracy` binaries: + ```sh + make -C profiler/build/unix release + make -C import-chrome/build/unix release + ``` +5. Copy the binaries to your `$PATH`: + ```sh + cp profiler/build/unix/Tracy-release ~/.local/bin/Tracy + cp import-chrome/build/unix/import-chrome-release ~/.local/bin/import-chrome + ``` diff --git a/ghcide/exe/Arguments.hs b/ghcide/exe/Arguments.hs new file mode 100644 index 00000000000..11b4320d82c --- /dev/null +++ b/ghcide/exe/Arguments.hs @@ -0,0 +1,41 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Arguments(Arguments(..), getArguments) where + +import Options.Applicative + + +data Arguments = Arguments + {argLSP :: Bool + ,argsCwd :: Maybe FilePath + ,argFiles :: [FilePath] + ,argsVersion :: Bool + ,argsShakeProfiling :: Maybe FilePath + ,argsOTMemoryProfiling :: Bool + ,argsTesting :: Bool + ,argsDisableKick :: Bool + ,argsThreads :: Int + ,argsVerbose :: Bool + } + +getArguments :: IO Arguments +getArguments = execParser opts + where + opts = info (arguments <**> helper) + ( fullDesc + <> progDesc "Used as a test bed to check your IDE will work" + <> header "ghcide - the core of a Haskell IDE") + +arguments :: Parser Arguments +arguments = Arguments + <$> switch (long "lsp" <> help "Start talking to an LSP server") + <*> optional (strOption $ long "cwd" <> metavar "DIR" <> help "Change to this directory") + <*> many (argument str (metavar "FILES/DIRS...")) + <*> switch (long "version" <> help "Show ghcide and GHC versions") + <*> optional (strOption $ long "shake-profiling" <> metavar "DIR" <> help "Dump profiling reports to this directory") + <*> switch (long "ot-memory-profiling" <> help "Record OpenTelemetry info to the eventlog. Needs the -l RTS flag to have an effect") + <*> switch (long "test" <> help "Enable additional lsp messages used by the testsuite") + <*> switch (long "test-no-kick" <> help "Disable kick. Useful for testing cancellation") + <*> option auto (short 'j' <> help "Number of threads (0: automatic)" <> metavar "NUM" <> value 0 <> showDefault) + <*> switch (long "verbose" <> help "Include internal events in logging output") diff --git a/ghcide/exe/Main.hs b/ghcide/exe/Main.hs new file mode 100644 index 00000000000..59dca21bb4b --- /dev/null +++ b/ghcide/exe/Main.hs @@ -0,0 +1,213 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0 +{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above +{-# LANGUAGE TemplateHaskell #-} + +module Main(main) where + +import Arguments +import Control.Concurrent.Extra +import Control.Monad.Extra +import Control.Lens ( (^.) ) +import Data.Default +import Data.List.Extra +import Data.Maybe +import qualified Data.Text as T +import qualified Data.Text.IO as T +import Data.Version +import Development.IDE.Core.Debouncer +import Development.IDE.Core.FileStore +import Development.IDE.Core.OfInterest +import Development.IDE.Core.Service +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake +import Development.IDE.Core.RuleTypes +import Development.IDE.LSP.Protocol +import Development.IDE.Types.Location +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Options +import Development.IDE.Types.Logger +import Development.IDE.Plugin +import Development.IDE.Plugin.Completions as Completions +import Development.IDE.Plugin.CodeAction as CodeAction +import Development.IDE.Plugin.Test as Test +import Development.IDE.Session (loadSession) +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Lens (params, initializationOptions) +import Development.IDE.LSP.LanguageServer +import qualified System.Directory.Extra as IO +import System.Environment +import System.IO +import System.Info +import System.Exit +import System.FilePath +import System.Time.Extra +import Paths_ghcide +import Development.GitRev +import qualified Data.HashMap.Strict as HashMap +import qualified Data.Aeson as J + +import HIE.Bios.Cradle +import Development.IDE (action) +import Text.Printf +import Development.IDE.Core.Tracing +import Development.IDE.Types.Shake (Key(Key)) + +ghcideVersion :: IO String +ghcideVersion = do + path <- getExecutablePath + let gitHashSection = case $(gitHash) of + x | x == "UNKNOWN" -> "" + x -> " (GIT hash: " <> x <> ")" + return $ "ghcide version: " <> showVersion version + <> " (GHC: " <> showVersion compilerVersion + <> ") (PATH: " <> path <> ")" + <> gitHashSection + +main :: IO () +main = do + -- WARNING: If you write to stdout before runLanguageServer + -- then the language server will not work + Arguments{..} <- getArguments + + if argsVersion then ghcideVersion >>= putStrLn >> exitSuccess + else hPutStrLn stderr {- see WARNING above -} =<< ghcideVersion + + -- lock to avoid overlapping output on stdout + lock <- newLock + let logger p = Logger $ \pri msg -> when (pri >= p) $ withLock lock $ + T.putStrLn $ T.pack ("[" ++ upper (show pri) ++ "] ") <> msg + + whenJust argsCwd IO.setCurrentDirectory + + dir <- IO.getCurrentDirectory + command <- makeLspCommandId "typesignature.add" + + let plugins = Completions.plugin <> CodeAction.plugin + <> if argsTesting then Test.plugin else mempty + onInitialConfiguration :: InitializeRequest -> Either T.Text LspConfig + onInitialConfiguration x = case x ^. params . initializationOptions of + Nothing -> Right defaultLspConfig + Just v -> case J.fromJSON v of + J.Error err -> Left $ T.pack err + J.Success a -> Right a + onConfigurationChange = const $ Left "Updating Not supported" + options = def { LSP.executeCommandCommands = Just [command] + , LSP.completionTriggerCharacters = Just "." + } + + if argLSP then do + t <- offsetTime + hPutStrLn stderr "Starting LSP server..." 
+ hPutStrLn stderr "If you are seeing this in a terminal, you probably should have run ghcide WITHOUT the --lsp option!" + runLanguageServer options (pluginHandler plugins) onInitialConfiguration onConfigurationChange $ \getLspId event vfs caps wProg wIndefProg getConfig rootPath -> do + t <- t + hPutStrLn stderr $ "Started LSP server in " ++ showDuration t + sessionLoader <- loadSession $ fromMaybe dir rootPath + config <- fromMaybe defaultLspConfig <$> getConfig + let options = (defaultIdeOptions sessionLoader) + { optReportProgress = clientSupportsProgress caps + , optShakeProfiling = argsShakeProfiling + , optOTMemoryProfiling = IdeOTMemoryProfiling argsOTMemoryProfiling + , optTesting = IdeTesting argsTesting + , optThreads = argsThreads + , optCheckParents = checkParents config + , optCheckProject = checkProject config + } + logLevel = if argsVerbose then minBound else Info + debouncer <- newAsyncDebouncer + let rules = do + -- install the main and ghcide-plugin rules + mainRule + pluginRules plugins + -- install the kick action, which triggers a typecheck on every + -- Shake database restart, i.e. on every user edit. + unless argsDisableKick $ + action kick + initialise caps rules + getLspId event wProg wIndefProg (logger logLevel) debouncer options vfs + else do + -- GHC produces messages with UTF8 in them, so make sure the terminal doesn't error + hSetEncoding stdout utf8 + hSetEncoding stderr utf8 + + putStrLn $ "Ghcide setup tester in " ++ dir ++ "." + putStrLn "Report bugs at https://github.com/haskell/ghcide/issues" + + putStrLn $ "\nStep 1/4: Finding files to test in " ++ dir + files <- expandFiles (argFiles ++ ["." | null argFiles]) + -- LSP works with absolute file paths, so try and behave similarly + files <- nubOrd <$> mapM IO.canonicalizePath files + putStrLn $ "Found " ++ show (length files) ++ " files" + + putStrLn "\nStep 2/4: Looking for hie.yaml files that control setup" + cradles <- mapM findCradle files + let ucradles = nubOrd cradles + let n = length ucradles + putStrLn $ "Found " ++ show n ++ " cradle" ++ ['s' | n /= 1] + when (n > 0) $ putStrLn $ " (" ++ intercalate ", " (catMaybes ucradles) ++ ")" + putStrLn "\nStep 3/4: Initializing the IDE" + vfs <- makeVFSHandle + debouncer <- newAsyncDebouncer + let dummyWithProg _ _ f = f (const (pure ())) + sessionLoader <- loadSession dir + let options = (defaultIdeOptions sessionLoader) + { optShakeProfiling = argsShakeProfiling + -- , optOTMemoryProfiling = IdeOTMemoryProfiling argsOTMemoryProfiling + , optTesting = IdeTesting argsTesting + , optThreads = argsThreads + , optCheckParents = NeverCheck + , optCheckProject = CheckProject False + } + logLevel = if argsVerbose then minBound else Info + ide <- initialise def mainRule (pure $ IdInt 0) (showEvent lock) dummyWithProg (const (const id)) (logger logLevel) debouncer options vfs + + putStrLn "\nStep 4/4: Type checking the files" + setFilesOfInterest ide $ HashMap.fromList $ map ((, OnDisk) . toNormalizedFilePath') files + results <- runAction "User TypeCheck" ide $ uses TypeCheck (map toNormalizedFilePath' files) + _results <- runAction "GetHie" ide $ uses GetHieAst (map toNormalizedFilePath' files) + _results <- runAction "GenerateCore" ide $ uses GenerateCore (map toNormalizedFilePath' files) + let (worked, failed) = partition fst $ zip (map isJust results) files + when (failed /= []) $ + putStr $ unlines $ "Files that failed:" : map ((++) " * " . 
snd) failed + + let nfiles xs = let n = length xs in if n == 1 then "1 file" else show n ++ " files" + putStrLn $ "\nCompleted (" ++ nfiles worked ++ " worked, " ++ nfiles failed ++ " failed)" + + when argsOTMemoryProfiling $ do + let valuesRef = state $ shakeExtras ide + values <- readVar valuesRef + let consoleObserver Nothing = return $ \size -> printf "Total: %.2fMB\n" (fromIntegral @Int @Double size / 1e6) + consoleObserver (Just k) = return $ \size -> printf " - %s: %.2fKB\n" (show k) (fromIntegral @Int @Double size / 1e3) + + printf "# Shake value store contents(%d):\n" (length values) + let keys = nub + $ Key GhcSession : Key GhcSessionDeps + : [ k | (_,k) <- HashMap.keys values, k /= Key GhcSessionIO] + ++ [Key GhcSessionIO] + measureMemory (logger logLevel) [keys] consoleObserver valuesRef + + unless (null failed) (exitWith $ ExitFailure (length failed)) + +{-# ANN main ("HLint: ignore Use nubOrd" :: String) #-} + +expandFiles :: [FilePath] -> IO [FilePath] +expandFiles = concatMapM $ \x -> do + b <- IO.doesFileExist x + if b then return [x] else do + let recurse "." = True + recurse x | "." `isPrefixOf` takeFileName x = False -- skip .git etc + recurse x = takeFileName x `notElem` ["dist","dist-newstyle"] -- cabal directories + files <- filter (\x -> takeExtension x `elem` [".hs",".lhs"]) <$> IO.listFilesInside (return . recurse) x + when (null files) $ + fail $ "Couldn't find any .hs/.lhs files inside directory: " ++ x + return files + +-- | Print an LSP event. +showEvent :: Lock -> FromServerMessage -> IO () +showEvent _ (EventFileDiagnostics _ []) = return () +showEvent lock (EventFileDiagnostics (toNormalizedFilePath' -> file) diags) = + withLock lock $ T.putStrLn $ showDiagnosticsColored $ map (file,ShowDiag,) diags +showEvent lock e = withLock lock $ print e diff --git a/ghcide/ghcide.cabal b/ghcide/ghcide.cabal new file mode 100644 index 00000000000..b0d99e7188b --- /dev/null +++ b/ghcide/ghcide.cabal @@ -0,0 +1,409 @@ +cabal-version: 1.20 +build-type: Simple +category: Development +name: ghcide +version: 0.6.0.2 +license: Apache-2.0 +license-file: LICENSE +author: Digital Asset and Ghcide contributors +maintainer: Ghcide contributors +copyright: Digital Asset and Ghcide contributors 2018-2020 +synopsis: The core of an IDE +description: + A library for building Haskell IDE's on top of the GHC API. 
+homepage: https://github.com/haskell/ghcide#readme +bug-reports: https://github.com/haskell/ghcide/issues +tested-with: GHC>=8.6.5 +extra-source-files: include/ghc-api-version.h README.md CHANGELOG.md + test/data/hover/*.hs + test/data/multi/cabal.project + test/data/multi/hie.yaml + test/data/multi/a/a.cabal + test/data/multi/a/*.hs + test/data/multi/b/b.cabal + test/data/multi/b/*.hs + +source-repository head + type: git + location: https://github.com/haskell/ghcide.git + +flag ghc-lib + description: build against ghc-lib instead of the ghc package + default: False + manual: True + +library + default-language: Haskell2010 + build-depends: + aeson, + array, + async, + base == 4.*, + binary, + bytestring, + case-insensitive, + containers, + data-default, + deepseq, + directory, + extra, + fuzzy, + filepath, + fingertree, + Glob, + haddock-library >= 1.8, + hashable, + haskell-lsp-types == 0.22.*, + haskell-lsp == 0.22.*, + hie-compat, + mtl, + network-uri, + parallel, + prettyprinter-ansi-terminal, + prettyprinter-ansi-terminal, + prettyprinter, + regex-tdfa >= 1.3.1.0, + rope-utf16-splay, + safe, + safe-exceptions, + shake >= 0.18.4, + sorted-list, + stm, + syb, + text, + time, + transformers, + unordered-containers >= 0.2.10.0, + utf8-string, + hslogger, + opentelemetry >=0.6.1, + heapsize ==0.3.* + if flag(ghc-lib) + build-depends: + ghc-lib >= 8.8, + ghc-lib-parser >= 8.8 + cpp-options: -DGHC_LIB + else + build-depends: + ghc-boot-th, + ghc-boot, + ghc >= 8.6, + -- These dependencies are used by Development.IDE.Session and are + -- Haskell specific. So don't use them when building with -fghc-lib! + ghc-check >=0.5.0.1, + ghc-paths, + cryptohash-sha1 >=0.11.100 && <0.12, + hie-bios >= 0.7.1 && < 0.8.0, + implicit-hie-cradle >= 0.3.0.2 && < 0.4, + base16-bytestring >=0.1.1 && <0.2 + if os(windows) + build-depends: + Win32 + else + build-depends: + unix + c-sources: + cbits/getmodtime.c + + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + OverloadedStrings + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns + + hs-source-dirs: + src + include-dirs: + include + exposed-modules: + Development.IDE + Development.IDE.Compat + Development.IDE.Core.Debouncer + Development.IDE.Core.FileStore + Development.IDE.Core.IdeConfiguration + Development.IDE.Core.OfInterest + Development.IDE.Core.PositionMapping + Development.IDE.Core.Preprocessor + Development.IDE.Core.Rules + Development.IDE.Core.RuleTypes + Development.IDE.Core.Service + Development.IDE.Core.Shake + Development.IDE.Core.Tracing + Development.IDE.GHC.Compat + Development.IDE.GHC.Error + Development.IDE.GHC.Orphans + Development.IDE.GHC.Util + Development.IDE.Import.DependencyInformation + Development.IDE.LSP.HoverDefinition + Development.IDE.LSP.LanguageServer + Development.IDE.LSP.Outline + Development.IDE.LSP.Protocol + Development.IDE.LSP.Server + Development.IDE.Spans.Common + Development.IDE.Spans.Documentation + Development.IDE.Spans.AtPoint + Development.IDE.Spans.LocalBindings + Development.IDE.Types.Diagnostics + Development.IDE.Types.Exports + Development.IDE.Types.KnownTargets + Development.IDE.Types.Location + Development.IDE.Types.Logger + Development.IDE.Types.Options + Development.IDE.Types.Shake + Development.IDE.Plugin + Development.IDE.Plugin.Completions + Development.IDE.Plugin.CodeAction + Development.IDE.Plugin.Test + + -- Unfortunately, we cannot use 
loadSession with ghc-lib since hie-bios uses + -- the real GHC library and the types are incompatible. Furthermore, when + -- building with ghc-lib we need to make this Haskell agnostic, so no + -- hie-bios! + -- We also put these modules into a separate hs-source-dirs so we can avoid + -- compiling them at all if ghc-lib is not set + if !flag(ghc-lib) + hs-source-dirs: + session-loader + exposed-modules: + Development.IDE.Session + other-modules: + Development.IDE.Session.VersionCheck + other-modules: + Development.IDE.Core.Compile + Development.IDE.Core.FileExists + Development.IDE.GHC.CPP + Development.IDE.GHC.Warnings + Development.IDE.Import.FindImports + Development.IDE.LSP.Notifications + Development.IDE.Plugin.CodeAction.PositionIndexed + Development.IDE.Plugin.CodeAction.Rules + Development.IDE.Plugin.CodeAction.RuleTypes + Development.IDE.Plugin.Completions.Logic + Development.IDE.Plugin.Completions.Types + Development.IDE.Types.Action + ghc-options: -Wall -Wno-name-shadowing -Wincomplete-uni-patterns + +executable ghcide-test-preprocessor + default-language: Haskell2010 + hs-source-dirs: test/preprocessor + ghc-options: -Wall -Wno-name-shadowing + main-is: Main.hs + build-depends: + base == 4.* + +benchmark benchHist + type: exitcode-stdio-1.0 + default-language: Haskell2010 + ghc-options: -Wall -Wno-name-shadowing -threaded + main-is: Main.hs + hs-source-dirs: bench/hist bench/lib + other-modules: Experiments.Types + build-tool-depends: + ghcide:ghcide-bench + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns + + build-depends: + aeson, + base == 4.*, + shake-bench == 0.1.*, + directory, + filepath, + shake, + text, + yaml + +executable ghcide + if flag(ghc-lib) + buildable: False + default-language: Haskell2010 + hs-source-dirs: exe + ghc-options: + -threaded + -Wall + -Wincomplete-uni-patterns + -Wno-name-shadowing + -- allow user RTS overrides + -rtsopts + -- disable idle GC + -- disable parallel GC + -- increase nursery size + "-with-rtsopts=-I0 -qg -A128M" + main-is: Main.hs + build-depends: + aeson, + base == 4.*, + data-default, + directory, + extra, + filepath, + gitrev, + hashable, + haskell-lsp, + haskell-lsp-types, + heapsize, + hie-bios, + ghcide, + lens, + optparse-applicative, + text, + unordered-containers + other-modules: + Arguments + Paths_ghcide + + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + OverloadedStrings + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns + +test-suite ghcide-tests + if flag(ghc-lib) + buildable: False + type: exitcode-stdio-1.0 + default-language: Haskell2010 + build-tool-depends: + ghcide:ghcide, + ghcide:ghcide-test-preprocessor + build-depends: + aeson, + base, + binary, + bytestring, + containers, + directory, + extra, + filepath, + -------------------------------------------------------------- + -- The MIN_GHC_API_VERSION macro relies on MIN_VERSION pragmas + -- which require depending on ghc. So the tests need to depend + -- on ghc if they need to use MIN_GHC_API_VERSION. Maybe a + -- better solution can be found, but this is a quick solution + -- which works for now. 
+ ghc, + -------------------------------------------------------------- + ghcide, + ghc-typelits-knownnat, + haddock-library, + haskell-lsp, + haskell-lsp-types, + network-uri, + lens, + lsp-test >= 0.11.0.6 && < 0.12, + optparse-applicative, + process, + QuickCheck, + quickcheck-instances, + rope-utf16-splay, + safe, + safe-exceptions, + shake, + tasty, + tasty-expected-failure, + tasty-hunit, + tasty-quickcheck, + tasty-rerun, + text + if (impl(ghc >= 8.6)) + build-depends: + record-dot-preprocessor, + record-hasfield + hs-source-dirs: test/cabal test/exe test/src bench/lib + include-dirs: include + ghc-options: -threaded -Wall -Wno-name-shadowing -O0 + main-is: Main.hs + other-modules: + Development.IDE.Test + Development.IDE.Test.Runfiles + Experiments + Experiments.Types + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + OverloadedStrings + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns + +executable ghcide-bench + default-language: Haskell2010 + build-tool-depends: + ghcide:ghcide + build-depends: + aeson, + base, + bytestring, + containers, + directory, + extra, + filepath, + ghcide, + lsp-test >= 0.11.0.2 && < 0.12, + optparse-applicative, + process, + safe-exceptions, + shake, + text + hs-source-dirs: bench/lib bench/exe + include-dirs: include + ghc-options: -threaded -Wall -Wno-name-shadowing -rtsopts + main-is: Main.hs + other-modules: + Experiments + Experiments.Types + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + OverloadedStrings + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns diff --git a/ghcide/img/vscode2.png b/ghcide/img/vscode2.png new file mode 100644 index 00000000000..f17de0aa88c Binary files /dev/null and b/ghcide/img/vscode2.png differ diff --git a/ghcide/include/ghc-api-version.h b/ghcide/include/ghc-api-version.h new file mode 100644 index 00000000000..92580a12f80 --- /dev/null +++ b/ghcide/include/ghc-api-version.h @@ -0,0 +1,12 @@ +#ifndef GHC_API_VERSION_H +#define GHC_API_VERSION_H + +#ifdef GHC_LIB +#define MIN_GHC_API_VERSION(x,y,z) MIN_VERSION_ghc_lib(x,y,z) +#define GHC_API_VERSION VERSION_ghc_lib +#else +#define MIN_GHC_API_VERSION(x,y,z) MIN_VERSION_ghc(x,y,z) +#define GHC_API_VERSION VERSION_ghc +#endif + +#endif diff --git a/ghcide/session-loader/Development/IDE/Session.hs b/ghcide/session-loader/Development/IDE/Session.hs new file mode 100644 index 00000000000..6b266100630 --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session.hs @@ -0,0 +1,778 @@ +{-# LANGUAGE TypeFamilies #-} + +{-| +The logic for setting up a ghcide session by tapping into hie-bios. +-} +module Development.IDE.Session + (SessionLoadingOptions(..) + ,defaultLoadingOptions + ,loadSession + ,loadSessionWithOptions + ) where + +-- Unfortunately, we cannot use loadSession with ghc-lib since hie-bios uses +-- the real GHC library and the types are incompatible. Furthermore, when +-- building with ghc-lib we need to make this Haskell agnostic, so no hie-bios! 
+ +import Control.Concurrent.Async +import Control.Concurrent.Extra +import Control.Exception.Safe +import Control.Monad +import Control.Monad.Extra +import Control.Monad.IO.Class +import qualified Crypto.Hash.SHA1 as H +import qualified Data.ByteString.Char8 as B +import qualified Data.HashMap.Strict as HM +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Data.Aeson +import Data.Bifunctor +import qualified Data.ByteString.Base16 as B16 +import Data.Either.Extra +import Data.Function +import Data.Hashable +import Data.List +import Data.IORef +import Data.Maybe +import Data.Time.Clock +import Data.Version +import Development.IDE.Core.Shake +import Development.IDE.Core.RuleTypes +import Development.IDE.GHC.Compat hiding (Target, TargetModule, TargetFile) +import qualified Development.IDE.GHC.Compat as GHC +import Development.IDE.GHC.Util +import Development.IDE.Session.VersionCheck +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Logger +import Development.IDE.Types.Options +import Development.Shake (Action) +import GHC.Check +import qualified HIE.Bios as HieBios +import HIE.Bios.Environment hiding (getCacheDir) +import HIE.Bios.Types +import Hie.Implicit.Cradle (loadImplicitHieCradle) +import Language.Haskell.LSP.Core +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import System.Directory +import qualified System.Directory.Extra as IO +import System.FilePath +import System.Info +import System.IO + +import GHCi +import HscTypes (ic_dflags, hsc_IC, hsc_dflags, hsc_NC) +import Linker +import Module +import NameCache +import Packages +import Control.Exception (evaluate) +import Data.Void + + +data CacheDirs = CacheDirs + { hiCacheDir, hieCacheDir, oCacheDir :: Maybe FilePath} + +data SessionLoadingOptions = SessionLoadingOptions + { findCradle :: FilePath -> IO (Maybe FilePath) + , loadCradle :: FilePath -> IO (HieBios.Cradle Void) + -- | Given the project name and a set of command line flags, + -- return the path for storing generated GHC artifacts, + -- or 'Nothing' to respect the cradle setting + , getCacheDirs :: String -> [String] -> IO CacheDirs + } + +defaultLoadingOptions :: SessionLoadingOptions +defaultLoadingOptions = SessionLoadingOptions + {findCradle = HieBios.findCradle + ,loadCradle = HieBios.loadCradle + ,getCacheDirs = getCacheDirsDefault + } + +-- | Given a root directory, return a Shake 'Action' which setups an +-- 'IdeGhcSession' given a file. +-- Some of the many things this does: +-- +-- * Find the cradle for the file +-- * Get the session options, +-- * Get the GHC lib directory +-- * Make sure the GHC compiletime and runtime versions match +-- * Restart the Shake session +-- +-- This is the key function which implements multi-component support. All +-- components mapping to the same hie.yaml file are mapped to the same +-- HscEnv which is updated as new components are discovered. 
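+--
+-- Callers typically invoke this once per project root (ghcide's own Main does so)
+-- and re-run the returned 'Action' from Shake rules. The cradle location for each
+-- file is memoised, so repeated lookups for files under the same hie.yaml are cheap.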
+loadSession :: FilePath -> IO (Action IdeGhcSession) +loadSession = loadSessionWithOptions defaultLoadingOptions + +loadSessionWithOptions :: SessionLoadingOptions -> FilePath -> IO (Action IdeGhcSession) +loadSessionWithOptions SessionLoadingOptions{..} dir = do + -- Mapping from hie.yaml file to HscEnv, one per hie.yaml file + hscEnvs <- newVar Map.empty :: IO (Var HieMap) + -- Mapping from a Filepath to HscEnv + fileToFlags <- newVar Map.empty :: IO (Var FlagsMap) + -- Version of the mappings above + version <- newVar 0 + let returnWithVersion fun = IdeGhcSession fun <$> liftIO (readVar version) + let invalidateShakeCache = do + modifyVar_ version (return . succ) + -- This caches the mapping from Mod.hs -> hie.yaml + cradleLoc <- liftIO $ memoIO $ \v -> do + res <- findCradle v + -- Sometimes we get C:, sometimes we get c:, and sometimes we get a relative path + -- try and normalise that + -- e.g. see https://github.com/haskell/ghcide/issues/126 + res' <- traverse makeAbsolute res + return $ normalise <$> res' + + dummyAs <- async $ return (error "Uninitialised") + runningCradle <- newVar dummyAs :: IO (Var (Async (IdeResult HscEnvEq,[FilePath]))) + + return $ do + extras@ShakeExtras{logger, eventer, restartShakeSession, + withIndefiniteProgress, ideNc, knownTargetsVar + } <- getShakeExtras + + IdeOptions{ optTesting = IdeTesting optTesting + , optCheckProject = CheckProject checkProject + , optCustomDynFlags + , optExtensions + } <- getIdeOptions + + -- populate the knownTargetsVar with all the + -- files in the project so that `knownFiles` can learn about them and + -- we can generate a complete module graph + let extendKnownTargets newTargets = do + knownTargets <- forM newTargets $ \TargetDetails{..} -> + case targetTarget of + TargetFile f -> pure (targetTarget, [f]) + TargetModule _ -> do + found <- filterM (IO.doesFileExist . fromNormalizedFilePath) targetLocations + return (targetTarget, found) + modifyVar_ knownTargetsVar $ traverseHashed $ \known -> do + let known' = HM.unionWith (<>) known $ HM.fromList knownTargets + when (known /= known') $ + logDebug logger $ "Known files updated: " <> + T.pack(show $ (HM.map . map) fromNormalizedFilePath known') + evaluate known' + + -- Create a new HscEnv from a hieYaml root and a set of options + -- If the hieYaml file already has an HscEnv, the new component is + -- combined with the components in the old HscEnv into a new HscEnv + -- which contains the union. + let packageSetup :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) + -> IO (HscEnv, ComponentInfo, [ComponentInfo]) + packageSetup (hieYaml, cfp, opts, libDir) = do + -- Parse DynFlags for the newly discovered component + hscEnv <- emptyHscEnv ideNc libDir + (df, targets) <- evalGhcEnv hscEnv $ + first optCustomDynFlags <$> setOptions opts (hsc_dflags hscEnv) + let deps = componentDependencies opts ++ maybeToList hieYaml + dep_info <- getDependencyInfo deps + -- Now lookup to see whether we are combining with an existing HscEnv + -- or making a new one. The lookup returns the HscEnv and a list of + -- information about other components loaded into the HscEnv + -- (unitId, DynFlag, Targets) + modifyVar hscEnvs $ \m -> do + -- Just deps if there's already an HscEnv + -- Nothing is it's the first time we are making an HscEnv + let oldDeps = Map.lookup hieYaml m + let -- Add the raw information about this component to the list + -- We will modify the unitId and DynFlags used for + -- compilation but these are the true source of + -- information. 
+ new_deps = RawComponentInfo (thisInstalledUnitId df) df targets cfp opts dep_info + : maybe [] snd oldDeps + -- Get all the unit-ids for things in this component + inplace = map rawComponentUnitId new_deps + + new_deps' <- forM new_deps $ \RawComponentInfo{..} -> do + -- Remove all inplace dependencies from package flags for + -- components in this HscEnv + let (df2, uids) = removeInplacePackages inplace rawComponentDynFlags + let prefix = show rawComponentUnitId + -- See Note [Avoiding bad interface files] + let hscComponents = sort $ map show uids + cacheDirOpts = hscComponents ++ componentOptions opts + cacheDirs <- liftIO $ getCacheDirs prefix cacheDirOpts + processed_df <- setCacheDirs logger cacheDirs df2 + -- The final component information, mostly the same but the DynFlags don't + -- contain any packages which are also loaded + -- into the same component. + pure $ ComponentInfo rawComponentUnitId + processed_df + uids + rawComponentTargets + rawComponentFP + rawComponentCOptions + rawComponentDependencyInfo + -- Make a new HscEnv, we have to recompile everything from + -- scratch again (for now) + -- It's important to keep the same NameCache though for reasons + -- that I do not fully understand + logInfo logger (T.pack ("Making new HscEnv" ++ show inplace)) + hscEnv <- emptyHscEnv ideNc libDir + newHscEnv <- + -- Add the options for the current component to the HscEnv + evalGhcEnv hscEnv $ do + _ <- setSessionDynFlags df + getSession + + -- Modify the map so the hieYaml now maps to the newly created + -- HscEnv + -- Returns + -- . the new HscEnv so it can be used to modify the + -- FilePath -> HscEnv map (fileToFlags) + -- . The information for the new component which caused this cache miss + -- . The modified information (without -inplace flags) for + -- existing packages + pure (Map.insert hieYaml (newHscEnv, new_deps) m, (newHscEnv, head new_deps', tail new_deps')) + + + let session :: (Maybe FilePath, NormalizedFilePath, ComponentOptions, FilePath) + -> IO (IdeResult HscEnvEq,[FilePath]) + session args@(hieYaml, _cfp, _opts, _libDir) = do + (hscEnv, new, old_deps) <- packageSetup args + + -- Whenever we spin up a session on Linux, dynamically load libm.so.6 + -- in. We need this in case the binary is statically linked, in which + -- case the interactive session will fail when trying to load + -- ghc-prim, which happens whenever Template Haskell is being + -- evaluated or haskell-language-server's eval plugin tries to run + -- some code. If the binary is dynamically linked, then this will have + -- no effect. + -- See https://github.com/haskell/haskell-language-server/issues/221 + when (os == "linux") $ do + initObjLinker hscEnv + res <- loadDLL hscEnv "libm.so.6" + case res of + Nothing -> pure () + Just err -> hPutStrLn stderr $ + "Error dynamically loading libm.so.6:\n" <> err + + -- Make a map from unit-id to DynFlags, this is used when trying to + -- resolve imports. (especially PackageImports) + let uids = map (\ci -> (componentUnitId ci, componentDynFlags ci)) (new : old_deps) + + -- For each component, now make a new HscEnvEq which contains the + -- HscEnv for the hie.yaml file but the DynFlags for that component + + -- New HscEnv for the component in question, returns the new HscEnvEq and + -- a mapping from FilePath to the newly created HscEnvEq. 
+ let new_cache = newComponentCache logger optExtensions hieYaml _cfp hscEnv uids + (cs, res) <- new_cache new + -- Modified cache targets for everything else in the hie.yaml file + -- which now uses the same EPS and so on + cached_targets <- concatMapM (fmap fst . new_cache) old_deps + + let all_targets = cs ++ cached_targets + + modifyVar_ fileToFlags $ \var -> do + pure $ Map.insert hieYaml (HM.fromList (concatMap toFlagsMap all_targets)) var + + extendKnownTargets all_targets + + -- Invalidate all the existing GhcSession build nodes by restarting the Shake session + invalidateShakeCache + restartShakeSession [] + + -- Typecheck all files in the project on startup + unless (null cs || not checkProject) $ do + cfps' <- liftIO $ filterM (IO.doesFileExist . fromNormalizedFilePath) (concatMap targetLocations cs) + void $ shakeEnqueue extras $ mkDelayedAction "InitialLoad" Debug $ void $ do + mmt <- uses GetModificationTime cfps' + let cs_exist = catMaybes (zipWith (<$) cfps' mmt) + modIfaces <- uses GetModIface cs_exist + -- update exports map + extras <- getShakeExtras + let !exportsMap' = createExportsMap $ mapMaybe (fmap hirModIface) modIfaces + liftIO $ modifyVar_ (exportsMap extras) $ evaluate . (exportsMap' <>) + + return (second Map.keys res) + + let consultCradle :: Maybe FilePath -> FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + consultCradle hieYaml cfp = do + lfp <- flip makeRelative cfp <$> getCurrentDirectory + logInfo logger $ T.pack ("Consulting the cradle for " <> show lfp) + + when (isNothing hieYaml) $ eventer $ notifyUserImplicitCradle lfp + + cradle <- maybe (loadImplicitHieCradle $ addTrailingPathSeparator dir) loadCradle hieYaml + + when optTesting $ eventer $ notifyCradleLoaded lfp + + -- Display a user friendly progress message here: They probably don't know what a cradle is + let progMsg = "Setting up " <> T.pack (takeBaseName (cradleRootDir cradle)) + <> " (for " <> T.pack lfp <> ")" + eopts <- withIndefiniteProgress progMsg NotCancellable $ + cradleToOptsAndLibDir cradle cfp + + logDebug logger $ T.pack ("Session loading result: " <> show eopts) + case eopts of + -- The cradle gave us some options so get to work turning them + -- into and HscEnv. + Right (opts, libDir) -> do + installationCheck <- ghcVersionChecker libDir + case installationCheck of + InstallationNotFound{..} -> + error $ "GHC installation not found in libdir: " <> libdir + InstallationMismatch{..} -> + return (([renderPackageSetupException cfp GhcVersionMismatch{..}], Nothing),[]) + InstallationChecked _compileTime _ghcLibCheck -> + session (hieYaml, toNormalizedFilePath' cfp, opts, libDir) + -- Failure case, either a cradle error or the none cradle + Left err -> do + dep_info <- getDependencyInfo (maybeToList hieYaml) + let ncfp = toNormalizedFilePath' cfp + let res = (map (renderCradleError ncfp) err, Nothing) + modifyVar_ fileToFlags $ \var -> do + pure $ Map.insertWith HM.union hieYaml (HM.singleton ncfp (res, dep_info)) var + return (res,[]) + + -- This caches the mapping from hie.yaml + Mod.hs -> [String] + -- Returns the Ghc session and the cradle dependencies + let sessionOpts :: (Maybe FilePath, FilePath) + -> IO (IdeResult HscEnvEq, [FilePath]) + sessionOpts (hieYaml, file) = do + v <- fromMaybe HM.empty . 
Map.lookup hieYaml <$> readVar fileToFlags + cfp <- canonicalizePath file + case HM.lookup (toNormalizedFilePath' cfp) v of + Just (opts, old_di) -> do + deps_ok <- checkDependencyInfo old_di + if not deps_ok + then do + -- If the dependencies are out of date then clear both caches and start + -- again. + modifyVar_ fileToFlags (const (return Map.empty)) + -- Keep the same name cache + modifyVar_ hscEnvs (return . Map.adjust (\(h, _) -> (h, [])) hieYaml ) + consultCradle hieYaml cfp + else return (opts, Map.keys old_di) + Nothing -> consultCradle hieYaml cfp + + -- The main function which gets options for a file. We only want one of these running + -- at a time. Therefore the IORef contains the currently running cradle, if we try + -- to get some more options then we wait for the currently running action to finish + -- before attempting to do so. + let getOptions :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + getOptions file = do + hieYaml <- cradleLoc file + sessionOpts (hieYaml, file) `catch` \e -> + return (([renderPackageSetupException file e], Nothing),[]) + + returnWithVersion $ \file -> do + opts <- liftIO $ join $ mask_ $ modifyVar runningCradle $ \as -> do + -- If the cradle is not finished, then wait for it to finish. + void $ wait as + as <- async $ getOptions file + return (as, wait as) + pure opts + +-- | Run the specific cradle on a specific FilePath via hie-bios. +-- This then builds dependencies or whatever based on the cradle, gets the +-- GHC options/dynflags needed for the session and the GHC library directory + +cradleToOptsAndLibDir :: Show a => Cradle a -> FilePath + -> IO (Either [CradleError] (ComponentOptions, FilePath)) +cradleToOptsAndLibDir cradle file = do + -- Start off by getting the session options + let showLine s = hPutStrLn stderr ("> " ++ s) + hPutStrLn stderr $ "Output from setting up the cradle " <> show cradle + cradleRes <- runCradle (cradleOptsProg cradle) showLine file + case cradleRes of + CradleSuccess r -> do + -- Now get the GHC lib dir + libDirRes <- getRuntimeGhcLibDir cradle + case libDirRes of + -- This is the successful path + CradleSuccess libDir -> pure (Right (r, libDir)) + CradleFail err -> return (Left [err]) + -- For the None cradle perhaps we still want to report an Info + -- message about the fact that the file is being ignored. + CradleNone -> return (Left []) + + CradleFail err -> return (Left [err]) + -- Same here + CradleNone -> return (Left []) + +emptyHscEnv :: IORef NameCache -> FilePath -> IO HscEnv +emptyHscEnv nc libDir = do + env <- runGhc (Just libDir) getSession + initDynLinker env + pure $ setNameCache nc env{ hsc_dflags = (hsc_dflags env){useUnicode = True } } + +data TargetDetails = TargetDetails + { + targetTarget :: !Target, + targetEnv :: !(IdeResult HscEnvEq), + targetDepends :: !DependencyInfo, + targetLocations :: ![NormalizedFilePath] + } + +fromTargetId :: [FilePath] -- ^ import paths + -> [String] -- ^ extensions to consider + -> TargetId + -> IdeResult HscEnvEq + -> DependencyInfo + -> IO [TargetDetails] +-- For a target module we consider all the import paths +fromTargetId is exts (GHC.TargetModule mod) env dep = do + let fps = [i moduleNameSlashes mod -<.> ext <> boot + | ext <- exts + , i <- is + , boot <- ["", "-boot"] + ] + locs <- mapM (fmap toNormalizedFilePath' . 
canonicalizePath) fps + return [TargetDetails (TargetModule mod) env dep locs] +-- For a 'TargetFile' we consider all the possible module names +fromTargetId _ _ (GHC.TargetFile f _) env deps = do + nf <- toNormalizedFilePath' <$> canonicalizePath f + return [TargetDetails (TargetFile nf) env deps [nf]] + +toFlagsMap :: TargetDetails -> [(NormalizedFilePath, (IdeResult HscEnvEq, DependencyInfo))] +toFlagsMap TargetDetails{..} = + [ (l, (targetEnv, targetDepends)) | l <- targetLocations] + + +setNameCache :: IORef NameCache -> HscEnv -> HscEnv +setNameCache nc hsc = hsc { hsc_NC = nc } + +-- | Create a mapping from FilePaths to HscEnvEqs +newComponentCache + :: Logger + -> [String] -- File extensions to consider + -> Maybe FilePath -- Path to cradle + -> NormalizedFilePath -- Path to file that caused the creation of this component + -> HscEnv + -> [(InstalledUnitId, DynFlags)] + -> ComponentInfo + -> IO ( [TargetDetails], (IdeResult HscEnvEq, DependencyInfo)) +newComponentCache logger exts cradlePath cfp hsc_env uids ci = do + let df = componentDynFlags ci + let hscEnv' = hsc_env { hsc_dflags = df + , hsc_IC = (hsc_IC hsc_env) { ic_dflags = df } } + + let newFunc = maybe newHscEnvEqPreserveImportPaths newHscEnvEq cradlePath + henv <- newFunc hscEnv' uids + let targetEnv = ([], Just henv) + targetDepends = componentDependencyInfo ci + res = (targetEnv, targetDepends) + logDebug logger ("New Component Cache HscEnvEq: " <> T.pack (show res)) + + let mk t = fromTargetId (importPaths df) exts (targetId t) targetEnv targetDepends + ctargets <- concatMapM mk (componentTargets ci) + + -- A special target for the file which caused this wonderful + -- component to be created. In case the cradle doesn't list all the targets for + -- the component, in which case things will be horribly broken anyway. + -- Otherwise, we will immediately attempt to reload this module which + -- causes an infinite loop and high CPU usage. + let special_target = TargetDetails (TargetFile cfp) targetEnv targetDepends [componentFP ci] + return (special_target:ctargets, res) + +{- Note [Avoiding bad interface files] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Originally, we set the cache directory for the various components once +on the first occurrence of the component. +This works fine if these components have no references to each other, +but you have components that depend on each other, the interface files are +updated for each component. +After restarting the session and only opening the component that depended +on the other, suddenly the interface files of this component are stale. +However, from the point of view of `ghcide`, they do not look stale, +thus, not regenerated and the IDE shows weird errors such as: +``` +typecheckIface +Declaration for Rep_ClientRunFlags +Axiom branches Rep_ClientRunFlags: + Failed to load interface for ‘Distribution.Simple.Flag’ + Use -v to see a list of the files searched for. +``` +and +``` +expectJust checkFamInstConsistency +CallStack (from HasCallStack): + error, called at compiler\\utils\\Maybes.hs:55:27 in ghc:Maybes + expectJust, called at compiler\\typecheck\\FamInst.hs:461:30 in ghc:FamInst +``` + +To mitigate this, we set the cache directory for each component dependent +on the components of the current `HscEnv`, additionally to the component options +of the respective components. +Assume two components, c1, c2, where c2 depends on c1, and the options of the +respective components are co1, co2. 
+If we want to load component c2, followed by c1, we set the cache directory for +each component in this way: + + * Load component c2 + * (Cache Directory State) + - name of c2 + co2 + * Load component c1 + * (Cache Directory State) + - name of c2 + name of c1 + co2 + - name of c2 + name of c1 + co1 + +Overall, we created three cache directories. If we opened c1 first, then we +create a fourth cache directory. +This makes sure that interface files are always correctly updated. + +Since this causes a lot of recompilation, we only update the cache-directory, +if the dependencies of a component have really changed. +E.g. when you load two executables, they can not depend on each other. They +should be filtered out, such that we dont have to re-compile everything. +-} + +-- | Set the cache-directory based on the ComponentOptions and a list of +-- internal packages. +-- For the exact reason, see Note [Avoiding bad interface files]. +setCacheDirs :: MonadIO m => Logger -> CacheDirs -> DynFlags -> m DynFlags +setCacheDirs logger CacheDirs{..} dflags = do + liftIO $ logInfo logger $ "Using interface files cache dir: " <> T.pack cacheDir + pure $ dflags + & maybe id setHiDir hiCacheDir + & maybe id setHieDir hieCacheDir + & maybe id setODir oCacheDir + + +renderCradleError :: NormalizedFilePath -> CradleError -> FileDiagnostic +renderCradleError nfp (CradleError _ _ec t) = + ideErrorWithSource (Just "cradle") (Just DsError) nfp (T.unlines (map T.pack t)) + +-- See Note [Multi Cradle Dependency Info] +type DependencyInfo = Map.Map FilePath (Maybe UTCTime) +type HieMap = Map.Map (Maybe FilePath) (HscEnv, [RawComponentInfo]) +type FlagsMap = Map.Map (Maybe FilePath) (HM.HashMap NormalizedFilePath (IdeResult HscEnvEq, DependencyInfo)) + +-- This is pristine information about a component +data RawComponentInfo = RawComponentInfo + { rawComponentUnitId :: InstalledUnitId + -- | Unprocessed DynFlags. Contains inplace packages such as libraries. + -- We do not want to use them unprocessed. + , rawComponentDynFlags :: DynFlags + -- | All targets of this components. + , rawComponentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , rawComponentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , rawComponentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. See Note [Multi Cradle Dependency Info]. + , rawComponentDependencyInfo :: DependencyInfo + } + +-- This is processed information about the component, in particular the dynflags will be modified. +data ComponentInfo = ComponentInfo + { componentUnitId :: InstalledUnitId + -- | Processed DynFlags. Does not contain inplace packages such as local + -- libraries. Can be used to actually load this Component. + , componentDynFlags :: DynFlags + -- | Internal units, such as local libraries, that this component + -- is loaded with. These have been extracted from the original + -- ComponentOptions. + , _componentInternalUnits :: [InstalledUnitId] + -- | All targets of this components. + , componentTargets :: [GHC.Target] + -- | Filepath which caused the creation of this component + , componentFP :: NormalizedFilePath + -- | Component Options used to load the component. + , _componentCOptions :: ComponentOptions + -- | Maps cradle dependencies, such as `stack.yaml`, or `.cabal` file + -- to last modification time. 
See Note [Multi Cradle Dependency Info] + , componentDependencyInfo :: DependencyInfo + } + +-- | Check if any dependency has been modified lately. +checkDependencyInfo :: DependencyInfo -> IO Bool +checkDependencyInfo old_di = do + di <- getDependencyInfo (Map.keys old_di) + return (di == old_di) + +-- Note [Multi Cradle Dependency Info] +-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +-- Why do we implement our own file modification tracking here? +-- The primary reason is that the custom caching logic is quite complicated and going into shake +-- adds even more complexity and more indirection. I did try for about 5 hours to work out how to +-- use shake rules rather than IO but eventually gave up. + +-- | Computes a mapping from a filepath to its latest modification date. +-- See Note [Multi Cradle Dependency Info] why we do this ourselves instead +-- of letting shake take care of it. +getDependencyInfo :: [FilePath] -> IO DependencyInfo +getDependencyInfo fs = Map.fromList <$> mapM do_one fs + + where + tryIO :: IO a -> IO (Either IOException a) + tryIO = try + + do_one :: FilePath -> IO (FilePath, Maybe UTCTime) + do_one fp = (fp,) . eitherToMaybe <$> tryIO (getModificationTime fp) + +-- | This function removes all the -package flags which refer to packages we +-- are going to deal with ourselves. For example, if a executable depends +-- on a library component, then this function will remove the library flag +-- from the package flags for the executable +-- +-- There are several places in GHC (for example the call to hptInstances in +-- tcRnImports) which assume that all modules in the HPT have the same unit +-- ID. Therefore we create a fake one and give them all the same unit id. +removeInplacePackages :: [InstalledUnitId] -> DynFlags -> (DynFlags, [InstalledUnitId]) +removeInplacePackages us df = (df { packageFlags = ps + , thisInstalledUnitId = fake_uid }, uids) + where + (uids, ps) = partitionEithers (map go (packageFlags df)) + fake_uid = toInstalledUnitId (stringToUnitId "fake_uid") + go p@(ExposePackage _ (UnitIdArg u) _) = if toInstalledUnitId u `elem` us + then Left (toInstalledUnitId u) + else Right p + go p = Right p + +-- | Memoize an IO function, with the characteristics: +-- +-- * If multiple people ask for a result simultaneously, make sure you only compute it once. +-- +-- * If there are exceptions, repeatedly reraise them. +-- +-- * If the caller is aborted (async exception) finish computing it anyway. +memoIO :: Ord a => (a -> IO b) -> IO (a -> IO b) +memoIO op = do + ref <- newVar Map.empty + return $ \k -> join $ mask_ $ modifyVar ref $ \mp -> + case Map.lookup k mp of + Nothing -> do + res <- onceFork $ op k + return (Map.insert k res mp, res) + Just res -> return (mp, res) + +-- | Throws if package flags are unsatisfiable +setOptions :: GhcMonad m => ComponentOptions -> DynFlags -> m (DynFlags, [GHC.Target]) +setOptions (ComponentOptions theOpts compRoot _) dflags = do + (dflags', targets') <- addCmdOpts theOpts dflags + let targets = makeTargetsAbsolute compRoot targets' + let dflags'' = + disableWarningsAsErrors $ + -- disabled, generated directly by ghcide instead + flip gopt_unset Opt_WriteInterface $ + -- disabled, generated directly by ghcide instead + -- also, it can confuse the interface stale check + dontWriteHieFiles $ + setIgnoreInterfacePragmas $ + setLinkerOptions $ + disableOptimisation $ + setUpTypedHoles $ + makeDynFlagsAbsolute compRoot dflags' + -- initPackages parses the -package flags and + -- sets up the visibility for each component. 
+ -- Throws if a -package flag cannot be satisfied. + (final_df, _) <- liftIO $ wrapPackageSetupException $ initPackages dflags'' + return (final_df, targets) + + +-- we don't want to generate object code so we compile to bytecode +-- (HscInterpreted) which implies LinkInMemory +-- HscInterpreted +setLinkerOptions :: DynFlags -> DynFlags +setLinkerOptions df = df { + ghcLink = LinkInMemory + , hscTarget = HscNothing + , ghcMode = CompManager + } + +setIgnoreInterfacePragmas :: DynFlags -> DynFlags +setIgnoreInterfacePragmas df = + gopt_set (gopt_set df Opt_IgnoreInterfacePragmas) Opt_IgnoreOptimChanges + +disableOptimisation :: DynFlags -> DynFlags +disableOptimisation df = updOptLevel 0 df + +setHiDir :: FilePath -> DynFlags -> DynFlags +setHiDir f d = + -- override user settings to avoid conflicts leading to recompilation + d { hiDir = Just f} + +setODir :: FilePath -> DynFlags -> DynFlags +setODir f d = + -- override user settings to avoid conflicts leading to recompilation + d { objectDir = Just f} + +getCacheDirsDefault :: String -> [String] -> IO CacheDirs +getCacheDirsDefault prefix opts = do + dir <- Just <$> getXdgDirectory XdgCache (cacheDir prefix ++ "-" ++ opts_hash) + return $ CacheDirs dir dir dir + where + -- Create a unique folder per set of different GHC options, assuming that each different set of + -- GHC options will create incompatible interface files. + opts_hash = B.unpack $ B16.encode $ H.finalize $ H.updates H.init (map B.pack opts) + +-- | Sub directory for the cache path +cacheDir :: String +cacheDir = "ghcide" + +notifyUserImplicitCradle:: FilePath -> FromServerMessage +notifyUserImplicitCradle fp = + NotShowMessage $ + NotificationMessage "2.0" WindowShowMessage $ ShowMessageParams MtInfo $ + "No [cradle](https://github.com/mpickering/hie-bios#hie-bios) found for " + <> T.pack fp <> + ".\n Proceeding with [implicit cradle](https://hackage.haskell.org/package/implicit-hie).\n\ + \You should ignore this message, unless you see a 'Multi Cradle: No prefixes matched' error." + +notifyCradleLoaded :: FilePath -> FromServerMessage +notifyCradleLoaded fp = + NotCustomServer $ + NotificationMessage "2.0" (CustomServerMethod cradleLoadedMethod) $ + toJSON fp + +cradleLoadedMethod :: T.Text +cradleLoadedMethod = "ghcide/cradle/loaded" + +---------------------------------------------------------------------------------------------------- + +data PackageSetupException + = PackageSetupException + { message :: !String + } + | GhcVersionMismatch + { compileTime :: !Version + , runTime :: !Version + } + | PackageCheckFailed !NotCompatibleReason + deriving (Eq, Show, Typeable) + +instance Exception PackageSetupException + +-- | Wrap any exception as a 'PackageSetupException' +wrapPackageSetupException :: IO a -> IO a +wrapPackageSetupException = handleAny $ \case + e | Just (pkgE :: PackageSetupException) <- fromException e -> throwIO pkgE + e -> (throwIO . PackageSetupException . show) e + +showPackageSetupException :: PackageSetupException -> String +showPackageSetupException GhcVersionMismatch{..} = unwords + ["ghcide compiled against GHC" + ,showVersion compileTime + ,"but currently using" + ,showVersion runTime + ,"\nThis is unsupported, ghcide must be compiled with the same GHC version as the project." + ] +showPackageSetupException PackageSetupException{..} = unwords + [ "ghcide compiled by GHC", showVersion compilerVersion + , "failed to load packages:", message <> "." 
+ , "\nPlease ensure that ghcide is compiled with the same GHC installation as the project."] +showPackageSetupException (PackageCheckFailed PackageVersionMismatch{..}) = unwords + ["ghcide compiled with package " + , packageName <> "-" <> showVersion compileTime + ,"but project uses package" + , packageName <> "-" <> showVersion runTime + ,"\nThis is unsupported, ghcide must be compiled with the same GHC installation as the project." + ] +showPackageSetupException (PackageCheckFailed BasePackageAbiMismatch{..}) = unwords + ["ghcide compiled with base-" <> showVersion compileTime <> "-" <> compileTimeAbi + ,"but project uses base-" <> showVersion compileTime <> "-" <> runTimeAbi + ,"\nThis is unsupported, ghcide must be compiled with the same GHC installation as the project." + ] + +renderPackageSetupException :: FilePath -> PackageSetupException -> (NormalizedFilePath, ShowDiagnostic, Diagnostic) +renderPackageSetupException fp e = + ideErrorWithSource (Just "cradle") (Just DsError) (toNormalizedFilePath' fp) (T.pack $ showPackageSetupException e) diff --git a/ghcide/session-loader/Development/IDE/Session/VersionCheck.hs b/ghcide/session-loader/Development/IDE/Session/VersionCheck.hs new file mode 100644 index 00000000000..f15e765e8ea --- /dev/null +++ b/ghcide/session-loader/Development/IDE/Session/VersionCheck.hs @@ -0,0 +1,17 @@ +{-# LANGUAGE TemplateHaskell #-} + +-- | This module exists to circumvent a compile time exception on Windows with +-- Stack and GHC 8.10.1. It's just been pulled out from Development.IDE.Session. +-- See https://github.com/haskell/ghcide/pull/697 +module Development.IDE.Session.VersionCheck (ghcVersionChecker) where + +import Data.Maybe +import GHC.Check +-- Only use this for checking against the compile time GHC libDir! +-- Use getRuntimeGhcLibDir from hie-bios instead for everything else +-- otherwise binaries will not be distributable since paths will be baked into them +import qualified GHC.Paths +import System.Environment + +ghcVersionChecker :: GhcVersionChecker +ghcVersionChecker = $$(makeGhcVersionChecker (fromMaybe GHC.Paths.libdir <$> lookupEnv "NIX_GHC_LIBDIR")) diff --git a/ghcide/src/Development/IDE.hs b/ghcide/src/Development/IDE.hs new file mode 100644 index 00000000000..59da23941ab --- /dev/null +++ b/ghcide/src/Development/IDE.hs @@ -0,0 +1,44 @@ +module Development.IDE +( + -- TODO It would be much nicer to enumerate all the exports + -- and organize them in sections + module X + +) where + +import Development.IDE.Core.RuleTypes as X +import Development.IDE.Core.Rules as X + (getAtPoint + ,getDefinition + ,getParsedModule + ,getTypeDefinition + ) +import Development.IDE.Core.FileExists as X + (getFileExists) +import Development.IDE.Core.FileStore as X + (getFileContents) +import Development.IDE.Core.IdeConfiguration as X + (IdeConfiguration(..) 
+ ,isWorkspaceFile) +import Development.IDE.Core.OfInterest as X (getFilesOfInterest) +import Development.IDE.Core.Service as X (runAction) +import Development.IDE.Core.Shake as X + ( IdeState, + shakeExtras, + ShakeExtras, + IdeRule, + define, defineEarlyCutoff, + use, useNoFile, uses, useWithStale, useWithStaleFast, useWithStaleFast', + FastResult(..), + use_, useNoFile_, uses_, useWithStale_, + ideLogger, + actionLogger, + IdeAction(..), runIdeAction + ) +import Development.IDE.GHC.Error as X +import Development.IDE.GHC.Util as X +import Development.IDE.Plugin as X +import Development.IDE.Types.Diagnostics as X +import Development.IDE.Types.Location as X +import Development.IDE.Types.Logger as X +import Development.Shake as X (Action, action, Rules, RuleResult) diff --git a/ghcide/src/Development/IDE/Compat.hs b/ghcide/src/Development/IDE/Compat.hs new file mode 100644 index 00000000000..30c8b7d88c1 --- /dev/null +++ b/ghcide/src/Development/IDE/Compat.hs @@ -0,0 +1,19 @@ +{-# LANGUAGE CPP #-} +module Development.IDE.Compat + ( + getProcessID + ) where + +#ifdef mingw32_HOST_OS + +import qualified System.Win32.Process as P (getCurrentProcessId) +getProcessID :: IO Int +getProcessID = fromIntegral <$> P.getCurrentProcessId + +#else + +import qualified System.Posix.Process as P (getProcessID) +getProcessID :: IO Int +getProcessID = fromIntegral <$> P.getProcessID + +#endif diff --git a/ghcide/src/Development/IDE/Core/Compile.hs b/ghcide/src/Development/IDE/Core/Compile.hs new file mode 100644 index 00000000000..86401c2c9f5 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Compile.hs @@ -0,0 +1,766 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE CPP #-} +#include "ghc-api-version.h" + +-- | Based on https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/API. +-- Given a list of paths to find libraries, and a file to compile, produce a list of 'CoreModule' values. +module Development.IDE.Core.Compile + ( TcModuleResult(..) + , RunSimplifier(..) 
+ , compileModule + , parseModule + , typecheckModule + , computePackageDeps + , addRelativeImport + , mkHiFileResultCompile + , mkHiFileResultNoCompile + , generateObjectCode + , generateByteCode + , generateHieAsts + , writeHieFile + , writeHiFile + , getModSummaryFromImports + , loadHieFile + , loadInterface + , loadModulesHome + , setupFinderCache + , getDocsBatch + , lookupName + ) where + +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Preprocessor +import Development.IDE.Core.Shake +import Development.IDE.GHC.Error +import Development.IDE.GHC.Warnings +import Development.IDE.Types.Diagnostics +import Development.IDE.GHC.Orphans() +import Development.IDE.GHC.Util +import Development.IDE.Types.Options +import Development.IDE.Types.Location + +import Language.Haskell.LSP.Types (DiagnosticTag(..)) + +import LoadIface (loadModuleInterface) +import DriverPhases +import HscTypes +import DriverPipeline hiding (unP) + +import qualified Parser +import Lexer +#if MIN_GHC_API_VERSION(8,10,0) +import Control.DeepSeq (force, rnf) +#else +import Control.DeepSeq (rnf) +import ErrUtils +#endif + +import Finder +import Development.IDE.GHC.Compat hiding (parseModule, typecheckModule, writeHieFile) +import qualified Development.IDE.GHC.Compat as GHC +import qualified Development.IDE.GHC.Compat as Compat +import GhcMonad +import GhcPlugins as GHC hiding (fst3, (<>)) +import HscMain (makeSimpleDetails, hscDesugar, hscTypecheckRename, hscSimplify, hscGenHardCode, hscInteractive) +import MkIface +import StringBuffer as SB +import TcRnMonad +import TcIface (typecheckIface) +import TidyPgm + +import Control.Exception.Safe +import Control.Monad.Extra +import Control.Monad.Except +import Control.Monad.Trans.Except +import Data.Bifunctor (first, second) +import qualified Data.ByteString as BS +import qualified Data.Text as T +import Data.IORef +import Data.List.Extra +import Data.Maybe +import qualified Data.Map.Strict as Map +import System.FilePath +import System.Directory +import System.IO.Extra +import Control.Exception (evaluate) +import TcEnv (tcLookup) +import Data.Time (UTCTime, getCurrentTime) +import Linker (unload) +import qualified GHC.LanguageExtensions as LangExt +import PrelNames +import HeaderInfo +import Maybes (orElse) + +-- | Given a string buffer, return the string (after preprocessing) and the 'ParsedModule'. 
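Most of the functions exported above return an `IdeResult`, which, as the code in this module shows, pairs a list of diagnostics with a `Maybe` payload so that callers still receive diagnostics when compilation fails. A hedged sketch of a caller of `parseModule` (the driver function and its output are invented):

```haskell
-- Hypothetical driver; parseModule is the function defined just below.
summariseParse :: IdeOptions -> HscEnv -> FilePath -> ModSummary -> IO ()
summariseParse opts env fp ms = do
  (diags, mbParsed) <- parseModule opts env fp ms
  putStrLn $ fp <> ": " <> show (length diags) <> " diagnostic(s)"
  case mbParsed of
    Nothing  -> putStrLn "  parse failed; see diagnostics"
    Just _pm -> putStrLn "  parsed successfully"
```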
+parseModule + :: IdeOptions + -> HscEnv + -> FilePath + -> ModSummary + -> IO (IdeResult ParsedModule) +parseModule IdeOptions{..} env filename ms = + fmap (either (, Nothing) id) $ + runExceptT $ do + (diag, modu) <- parseFileContents env optPreprocessor filename ms + return (diag, Just modu) + + +-- | Given a package identifier, what packages does it depend on +computePackageDeps + :: HscEnv + -> InstalledUnitId + -> IO (Either [FileDiagnostic] [InstalledUnitId]) +computePackageDeps env pkg = do + let dflags = hsc_dflags env + case lookupInstalledPackage dflags pkg of + Nothing -> return $ Left [ideErrorText (toNormalizedFilePath' noFilePath) $ + T.pack $ "unknown package: " ++ show pkg] + Just pkgInfo -> return $ Right $ depends pkgInfo + +typecheckModule :: IdeDefer + -> HscEnv + -> [Linkable] -- ^ linkables not to unload + -> ParsedModule + -> IO (IdeResult TcModuleResult) +typecheckModule (IdeDefer defer) hsc keep_lbls pm = do + fmap (either (,Nothing) id) $ + catchSrcErrors (hsc_dflags hsc) "typecheck" $ do + + let modSummary = pm_mod_summary pm + dflags = ms_hspp_opts modSummary + + modSummary' <- initPlugins hsc modSummary + (warnings, tcm) <- withWarnings "typecheck" $ \tweak -> + tcRnModule hsc keep_lbls $ enableTopLevelWarnings + $ demoteIfDefer pm{pm_mod_summary = tweak modSummary'} + let errorPipeline = unDefer . hideDiag dflags . tagDiag + diags = map errorPipeline warnings + deferedError = any fst diags + return (map snd diags, Just $ tcm{tmrDeferedError = deferedError}) + where + demoteIfDefer = if defer then demoteTypeErrorsToWarnings else id + +tcRnModule :: HscEnv -> [Linkable] -> ParsedModule -> IO TcModuleResult +tcRnModule hsc_env keep_lbls pmod = do + let ms = pm_mod_summary pmod + hsc_env_tmp = hsc_env { hsc_dflags = ms_hspp_opts ms } + + unload hsc_env_tmp keep_lbls + (tc_gbl_env, mrn_info) <- + hscTypecheckRename hsc_env_tmp ms $ + HsParsedModule { hpm_module = parsedSource pmod, + hpm_src_files = pm_extra_src_files pmod, + hpm_annotations = pm_annotations pmod } + let rn_info = case mrn_info of + Just x -> x + Nothing -> error "no renamed info tcRnModule" + pure (TcModuleResult pmod rn_info tc_gbl_env False) + +mkHiFileResultNoCompile :: HscEnv -> TcModuleResult -> IO HiFileResult +mkHiFileResultNoCompile session tcm = do + let hsc_env_tmp = session { hsc_dflags = ms_hspp_opts ms } + ms = pm_mod_summary $ tmrParsed tcm + tcGblEnv = tmrTypechecked tcm + details <- makeSimpleDetails hsc_env_tmp tcGblEnv + sf <- finalSafeMode (ms_hspp_opts ms) tcGblEnv +#if MIN_GHC_API_VERSION(8,10,0) + iface <- mkIfaceTc session sf details tcGblEnv +#else + (iface, _) <- mkIfaceTc session Nothing sf details tcGblEnv +#endif + let mod_info = HomeModInfo iface details Nothing + pure $! HiFileResult ms mod_info + +mkHiFileResultCompile + :: HscEnv + -> TcModuleResult + -> ModGuts + -> LinkableType -- ^ use object code or byte code? 
+ -> IO (IdeResult HiFileResult) +mkHiFileResultCompile session' tcm simplified_guts ltype = catchErrs $ do + let session = session' { hsc_dflags = ms_hspp_opts ms } + ms = pm_mod_summary $ tmrParsed tcm + -- give variables unique OccNames + (guts, details) <- tidyProgram session simplified_guts + + let genLinkable = case ltype of + ObjectLinkable -> generateObjectCode + BCOLinkable -> generateByteCode + + (diags, linkable) <- genLinkable session ms guts +#if MIN_GHC_API_VERSION(8,10,0) + let !partial_iface = force (mkPartialIface session details simplified_guts) + final_iface <- mkFullIface session partial_iface +#else + (final_iface,_) <- mkIface session Nothing details simplified_guts +#endif + let mod_info = HomeModInfo final_iface details linkable + pure (diags, Just $! HiFileResult ms mod_info) + + where + dflags = hsc_dflags session' + source = "compile" + catchErrs x = x `catches` + [ Handler $ return . (,Nothing) . diagFromGhcException source dflags + , Handler $ return . (,Nothing) . diagFromString source DsError (noSpan "") + . (("Error during " ++ T.unpack source) ++) . show @SomeException + ] + +initPlugins :: HscEnv -> ModSummary -> IO ModSummary +initPlugins session modSummary = do + dflags <- liftIO $ initializePlugins session $ ms_hspp_opts modSummary + return modSummary{ms_hspp_opts = dflags} + +-- | Whether we should run the -O0 simplifier when generating core. +-- +-- This is required for template Haskell to work but we disable this in DAML. +-- See #256 +newtype RunSimplifier = RunSimplifier Bool + +-- | Compile a single type-checked module to a 'CoreModule' value, or +-- provide errors. +compileModule + :: RunSimplifier + -> HscEnv + -> ModSummary + -> TcGblEnv + -> IO (IdeResult ModGuts) +compileModule (RunSimplifier simplify) session ms tcg = + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags session) "compile" $ do + (warnings,desugared_guts) <- withWarnings "compile" $ \tweak -> do + let ms' = tweak ms + session' = session{ hsc_dflags = ms_hspp_opts ms'} + desugar <- hscDesugar session' ms' tcg + if simplify + then do + plugins <- readIORef (tcg_th_coreplugins tcg) + hscSimplify session' plugins desugar + else pure desugar + return (map snd warnings, desugared_guts) + +generateObjectCode :: HscEnv -> ModSummary -> CgGuts -> IO (IdeResult Linkable) +generateObjectCode session summary guts = do + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags session) "object" $ do + let dot_o = ml_obj_file (ms_location summary) + mod = ms_mod summary + fp = replaceExtension dot_o "s" + createDirectoryIfMissing True (takeDirectory fp) + (warnings, dot_o_fp) <- + withWarnings "object" $ \_tweak -> do + let summary' = _tweak summary + session' = session { hsc_dflags = (ms_hspp_opts summary') { outputFile = Just dot_o }} + (outputFilename, _mStub, _foreign_files) <- hscGenHardCode session' guts +#if MIN_GHC_API_VERSION(8,10,0) + (ms_location summary') +#else + summary' +#endif + fp + compileFile session' StopLn (outputFilename, Just (As False)) + let unlinked = DotO dot_o_fp + -- Need time to be the modification time for recompilation checking + t <- liftIO $ getModificationTime dot_o_fp + let linkable = LM t mod [unlinked] + + pure (map snd warnings, linkable) + +generateByteCode :: HscEnv -> ModSummary -> CgGuts -> IO (IdeResult Linkable) +generateByteCode hscEnv summary guts = do + fmap (either (, Nothing) (second Just)) $ + catchSrcErrors (hsc_dflags hscEnv) "bytecode" $ do + (warnings, (_, bytecode, sptEntries)) <- + withWarnings 
"bytecode" $ \_tweak -> do + let summary' = _tweak summary + session = hscEnv { hsc_dflags = ms_hspp_opts summary' } + hscInteractive session guts +#if MIN_GHC_API_VERSION(8,10,0) + (ms_location summary') +#else + summary' +#endif + let unlinked = BCOs bytecode sptEntries + time <- liftIO getCurrentTime + let linkable = LM time (ms_mod summary) [unlinked] + + pure (map snd warnings, linkable) + +demoteTypeErrorsToWarnings :: ParsedModule -> ParsedModule +demoteTypeErrorsToWarnings = + (update_pm_mod_summary . update_hspp_opts) demoteTEsToWarns where + + demoteTEsToWarns :: DynFlags -> DynFlags + -- convert the errors into warnings, and also check the warnings are enabled + demoteTEsToWarns = (`wopt_set` Opt_WarnDeferredTypeErrors) + . (`wopt_set` Opt_WarnTypedHoles) + . (`wopt_set` Opt_WarnDeferredOutOfScopeVariables) + . (`gopt_set` Opt_DeferTypeErrors) + . (`gopt_set` Opt_DeferTypedHoles) + . (`gopt_set` Opt_DeferOutOfScopeVariables) + +enableTopLevelWarnings :: ParsedModule -> ParsedModule +enableTopLevelWarnings = + (update_pm_mod_summary . update_hspp_opts) + ((`wopt_set` Opt_WarnMissingPatternSynonymSignatures) . + (`wopt_set` Opt_WarnMissingSignatures)) + -- the line below would show also warnings for let bindings without signature + -- ((`wopt_set` Opt_WarnMissingSignatures) . (`wopt_set` Opt_WarnMissingLocalSignatures))) + +update_hspp_opts :: (DynFlags -> DynFlags) -> ModSummary -> ModSummary +update_hspp_opts up ms = ms{ms_hspp_opts = up $ ms_hspp_opts ms} + +update_pm_mod_summary :: (ModSummary -> ModSummary) -> ParsedModule -> ParsedModule +update_pm_mod_summary up pm = + pm{pm_mod_summary = up $ pm_mod_summary pm} + +unDefer :: (WarnReason, FileDiagnostic) -> (Bool, FileDiagnostic) +unDefer (Reason Opt_WarnDeferredTypeErrors , fd) = (True, upgradeWarningToError fd) +unDefer (Reason Opt_WarnTypedHoles , fd) = (True, upgradeWarningToError fd) +unDefer (Reason Opt_WarnDeferredOutOfScopeVariables, fd) = (True, upgradeWarningToError fd) +unDefer ( _ , fd) = (False, fd) + +upgradeWarningToError :: FileDiagnostic -> FileDiagnostic +upgradeWarningToError (nfp, sh, fd) = + (nfp, sh, fd{_severity = Just DsError, _message = warn2err $ _message fd}) where + warn2err :: T.Text -> T.Text + warn2err = T.intercalate ": error:" . T.splitOn ": warning:" + +hideDiag :: DynFlags -> (WarnReason, FileDiagnostic) -> (WarnReason, FileDiagnostic) +hideDiag originalFlags (Reason warning, (nfp, _sh, fd)) + | not (wopt warning originalFlags) + = (Reason warning, (nfp, HideDiag, fd)) +hideDiag _originalFlags t = t + +-- | Warnings which lead to a diagnostic tag +unnecessaryDeprecationWarningFlags :: [WarningFlag] +unnecessaryDeprecationWarningFlags + = [ Opt_WarnUnusedTopBinds + , Opt_WarnUnusedLocalBinds + , Opt_WarnUnusedPatternBinds + , Opt_WarnUnusedImports + , Opt_WarnUnusedMatches + , Opt_WarnUnusedTypePatterns + , Opt_WarnUnusedForalls +#if MIN_GHC_API_VERSION(8,10,0) + , Opt_WarnUnusedRecordWildcards +#endif + , Opt_WarnInaccessibleCode + , Opt_WarnWarningsDeprecations + ] + +-- | Add a unnecessary/deprecated tag to the required diagnostics. 
+tagDiag :: (WarnReason, FileDiagnostic) -> (WarnReason, FileDiagnostic) +tagDiag (Reason warning, (nfp, sh, fd)) + | Just tag <- requiresTag warning + = (Reason warning, (nfp, sh, fd { _tags = addTag tag (_tags fd) })) + where + requiresTag :: WarningFlag -> Maybe DiagnosticTag + requiresTag Opt_WarnWarningsDeprecations + = Just DtDeprecated + requiresTag wflag -- deprecation was already considered above + | wflag `elem` unnecessaryDeprecationWarningFlags + = Just DtUnnecessary + requiresTag _ = Nothing + addTag :: DiagnosticTag -> Maybe (List DiagnosticTag) -> Maybe (List DiagnosticTag) + addTag t Nothing = Just (List [t]) + addTag t (Just (List ts)) = Just (List (t : ts)) +-- other diagnostics are left unaffected +tagDiag t = t + +addRelativeImport :: NormalizedFilePath -> ModuleName -> DynFlags -> DynFlags +addRelativeImport fp modu dflags = dflags + {importPaths = nubOrd $ maybeToList (moduleImportPath fp modu) ++ importPaths dflags} + +atomicFileWrite :: FilePath -> (FilePath -> IO a) -> IO () +atomicFileWrite targetPath write = do + let dir = takeDirectory targetPath + createDirectoryIfMissing True dir + (tempFilePath, cleanUp) <- newTempFileWithin dir + (write tempFilePath >> renameFile tempFilePath targetPath) `onException` cleanUp + +generateHieAsts :: HscEnv -> TcModuleResult -> IO ([FileDiagnostic], Maybe (HieASTs Type)) +generateHieAsts hscEnv tcm = + handleGenerationErrors' dflags "extended interface generation" $ runHsc hscEnv $ + Just <$> GHC.enrichHie (tcg_binds $ tmrTypechecked tcm) (tmrRenamed tcm) + where + dflags = hsc_dflags hscEnv + +writeHieFile :: HscEnv -> ModSummary -> [GHC.AvailInfo] -> HieASTs Type -> BS.ByteString -> IO [FileDiagnostic] +writeHieFile hscEnv mod_summary exports ast source = + handleGenerationErrors dflags "extended interface write/compression" $ do + hf <- runHsc hscEnv $ + GHC.mkHieFile' mod_summary exports ast source + atomicFileWrite targetPath $ flip GHC.writeHieFile hf + where + dflags = hsc_dflags hscEnv + mod_location = ms_location mod_summary + targetPath = Compat.ml_hie_file mod_location + +writeHiFile :: HscEnv -> HiFileResult -> IO [FileDiagnostic] +writeHiFile hscEnv tc = + handleGenerationErrors dflags "interface generation" $ do + atomicFileWrite targetPath $ \fp -> + writeIfaceFile dflags fp modIface + where + modIface = hm_iface $ hirHomeMod tc + targetPath = ml_hi_file $ ms_location $ hirModSummary tc + dflags = hsc_dflags hscEnv + +handleGenerationErrors :: DynFlags -> T.Text -> IO () -> IO [FileDiagnostic] +handleGenerationErrors dflags source action = + action >> return [] `catches` + [ Handler $ return . diagFromGhcException source dflags + , Handler $ return . diagFromString source DsError (noSpan "") + . (("Error during " ++ T.unpack source) ++) . show @SomeException + ] + +handleGenerationErrors' :: DynFlags -> T.Text -> IO (Maybe a) -> IO ([FileDiagnostic], Maybe a) +handleGenerationErrors' dflags source action = + fmap ([],) action `catches` + [ Handler $ return . (,Nothing) . diagFromGhcException source dflags + , Handler $ return . (,Nothing) . diagFromString source DsError (noSpan "") + . (("Error during " ++ T.unpack source) ++) . show @SomeException + ] + +-- | Initialise the finder cache, dependencies should be topologically +-- sorted. +setupFinderCache :: [ModSummary] -> HscEnv -> IO HscEnv +setupFinderCache mss session = do + + -- Make modules available for others that import them, + -- by putting them in the finder cache. + let ims = map (InstalledModule (thisInstalledUnitId $ hsc_dflags session) . 
moduleName . ms_mod) mss + ifrs = zipWith (\ms -> InstalledFound (ms_location ms)) mss ims + -- set the target and module graph in the session + graph = mkModuleGraph mss + + -- We have to create a new IORef here instead of modifying the existing IORef as + -- it is shared between concurrent compilations. + prevFinderCache <- readIORef $ hsc_FC session + let newFinderCache = + foldl' + (\fc (im, ifr) -> GHC.extendInstalledModuleEnv fc im ifr) prevFinderCache + $ zip ims ifrs + newFinderCacheVar <- newIORef $! newFinderCache + + pure $ session { hsc_FC = newFinderCacheVar, hsc_mod_graph = graph } + + +-- | Load modules, quickly. Input doesn't need to be desugared. +-- A module must be loaded before dependent modules can be typechecked. +-- This variant of loadModuleHome will *never* cause recompilation, it just +-- modifies the session. +-- The order modules are loaded is important when there are hs-boot files. +-- In particular you should make sure to load the .hs version of a file after the +-- .hs-boot version. +loadModulesHome + :: [HomeModInfo] + -> HscEnv + -> HscEnv +loadModulesHome mod_infos e = + e { hsc_HPT = addListToHpt (hsc_HPT e) [(mod_name x, x) | x <- mod_infos] + , hsc_type_env_var = Nothing } + where + mod_name = moduleName . mi_module . hm_iface + +withBootSuffix :: HscSource -> ModLocation -> ModLocation +withBootSuffix HsBootFile = addBootSuffixLocnOut +withBootSuffix _ = id + +-- | Given a buffer, env and filepath, produce a module summary by parsing only the imports. +-- Runs preprocessors as needed. +getModSummaryFromImports + :: HscEnv + -> FilePath + -> UTCTime + -> Maybe SB.StringBuffer + -> ExceptT [FileDiagnostic] IO (ModSummary,[LImportDecl GhcPs]) +getModSummaryFromImports env fp modTime contents = do + (contents, dflags) <- preprocessor env fp contents + + -- The warns will hopefully be reported when we actually parse the module + (_warns, L main_loc hsmod) <- parseHeader dflags fp contents + + -- Copied from `HeaderInfo.getImports`, but we also need to keep the parsed imports + let mb_mod = hsmodName hsmod + imps = hsmodImports hsmod + + mod = fmap unLoc mb_mod `orElse` mAIN_NAME + + (src_idecls, ord_idecls) = partition (ideclSource.unLoc) imps + + -- GHC.Prim doesn't exist physically, so don't go looking for it. + ordinary_imps = filter ((/= moduleName gHC_PRIM) . unLoc + . ideclName . 
unLoc) + ord_idecls + + implicit_prelude = xopt LangExt.ImplicitPrelude dflags + implicit_imports = mkPrelImports mod main_loc + implicit_prelude imps + convImport (L _ i) = (fmap sl_fs (ideclPkgQual i) + , ideclName i) + + srcImports = map convImport src_idecls + textualImports = map convImport (implicit_imports ++ ordinary_imps) + + allImps = implicit_imports ++ imps + + -- Force bits that might keep the string buffer and DynFlags alive unnecessarily + liftIO $ evaluate $ rnf srcImports + liftIO $ evaluate $ rnf textualImports + + modLoc <- liftIO $ mkHomeModLocation dflags mod fp + + let modl = mkModule (thisPackage dflags) mod + sourceType = if "-boot" `isSuffixOf` takeExtension fp then HsBootFile else HsSrcFile + summary = + ModSummary + { ms_mod = modl +#if MIN_GHC_API_VERSION(8,8,0) + , ms_hie_date = Nothing +#endif + , ms_hs_date = modTime + , ms_hsc_src = sourceType + -- The contents are used by the GetModSummary rule + , ms_hspp_buf = Just contents + , ms_hspp_file = fp + , ms_hspp_opts = dflags + , ms_iface_date = Nothing + , ms_location = withBootSuffix sourceType modLoc + , ms_obj_date = Nothing + , ms_parsed_mod = Nothing + , ms_srcimps = srcImports + , ms_textual_imps = textualImports + } + return (summary, allImps) + +-- | Parse only the module header +parseHeader + :: Monad m + => DynFlags -- ^ flags to use + -> FilePath -- ^ the filename (for source locations) + -> SB.StringBuffer -- ^ Haskell module source text (full Unicode is supported) + -> ExceptT [FileDiagnostic] m ([FileDiagnostic], Located(HsModule GhcPs)) +parseHeader dflags filename contents = do + let loc = mkRealSrcLoc (mkFastString filename) 1 1 + case unP Parser.parseHeader (mkPState dflags contents loc) of +#if MIN_GHC_API_VERSION(8,10,0) + PFailed pst -> + throwE $ diagFromErrMsgs "parser" dflags $ getErrorMessages pst dflags +#else + PFailed _ locErr msgErr -> + throwE $ diagFromErrMsg "parser" dflags $ mkPlainErrMsg dflags locErr msgErr +#endif + POk pst rdr_module -> do + let (warns, errs) = getMessages pst dflags + -- Just because we got a `POk`, it doesn't mean there + -- weren't errors! To clarify, the GHC parser + -- distinguishes between fatal and non-fatal + -- errors. Non-fatal errors are the sort that don't + -- prevent parsing from continuing (that is, a parse + -- tree can still be produced despite the error so that + -- further errors/warnings can be collected). Fatal + -- errors are those from which a parse tree just can't + -- be produced. + unless (null errs) $ + throwE $ diagFromErrMsgs "parser" dflags errs + + let warnings = diagFromErrMsgs "parser" dflags warns + return (warnings, rdr_module) + +-- | Given a buffer, flags, and file path, produce a +-- parsed module (or errors) and any parse warnings. 
Does not run any preprocessors +-- ModSummary must contain the (preprocessed) contents of the buffer +parseFileContents + :: HscEnv + -> (GHC.ParsedSource -> IdePreprocessedSource) + -> FilePath -- ^ the filename (for source locations) + -> ModSummary + -> ExceptT [FileDiagnostic] IO ([FileDiagnostic], ParsedModule) +parseFileContents env customPreprocessor filename ms = do + let loc = mkRealSrcLoc (mkFastString filename) 1 1 + dflags = ms_hspp_opts ms + contents = fromJust $ ms_hspp_buf ms + case unP Parser.parseModule (mkPState dflags contents loc) of +#if MIN_GHC_API_VERSION(8,10,0) + PFailed pst -> throwE $ diagFromErrMsgs "parser" dflags $ getErrorMessages pst dflags +#else + PFailed _ locErr msgErr -> + throwE $ diagFromErrMsg "parser" dflags $ mkPlainErrMsg dflags locErr msgErr +#endif + POk pst rdr_module -> + let hpm_annotations = + (Map.fromListWith (++) $ annotations pst, + Map.fromList ((noSrcSpan,comment_q pst) + :annotations_comments pst)) + (warns, errs) = getMessages pst dflags + in + do + -- Just because we got a `POk`, it doesn't mean there + -- weren't errors! To clarify, the GHC parser + -- distinguishes between fatal and non-fatal + -- errors. Non-fatal errors are the sort that don't + -- prevent parsing from continuing (that is, a parse + -- tree can still be produced despite the error so that + -- further errors/warnings can be collected). Fatal + -- errors are those from which a parse tree just can't + -- be produced. + unless (null errs) $ + throwE $ diagFromErrMsgs "parser" dflags errs + + -- Ok, we got here. It's safe to continue. + let IdePreprocessedSource preproc_warns errs parsed = customPreprocessor rdr_module + + unless (null errs) $ + throwE $ diagFromStrings "parser" DsError errs + + let preproc_warnings = diagFromStrings "parser" DsWarning preproc_warns + parsed' <- liftIO $ applyPluginsParsedResultAction env dflags ms hpm_annotations parsed + + -- To get the list of extra source files, we take the list + -- that the parser gave us, + -- - eliminate files beginning with '<'. gcc likes to use + -- pseudo-filenames like "" and "" + -- - normalise them (eliminate differences between ./f and f) + -- - filter out the preprocessed source file + -- - filter out anything beginning with tmpdir + -- - remove duplicates + -- - filter out the .hs/.lhs source filename if we have one + -- + let n_hspp = normalise filename + srcs0 = nubOrd $ filter (not . (tmpDir dflags `isPrefixOf`)) + $ filter (/= n_hspp) + $ map normalise + $ filter (not . isPrefixOf "<") + $ map unpackFS + $ srcfiles pst + srcs1 = case ml_hs_file (ms_location ms) of + Just f -> filter (/= normalise f) srcs0 + Nothing -> srcs0 + + -- sometimes we see source files from earlier + -- preprocessing stages that cannot be found, so just + -- filter them out: + srcs2 <- liftIO $ filterM doesFileExist srcs1 + + let pm = + ParsedModule { + pm_mod_summary = ms + , pm_parsed_source = parsed' + , pm_extra_src_files = srcs2 + , pm_annotations = hpm_annotations + } + warnings = diagFromErrMsgs "parser" dflags warns + pure (warnings ++ preproc_warnings, pm) + +loadHieFile :: Compat.NameCacheUpdater -> FilePath -> IO GHC.HieFile +loadHieFile ncu f = do + GHC.hie_file_result <$> GHC.readHieFile ncu f + +-- | Retuns an up-to-date module interface, regenerating if needed. +-- Assumes file exists. 
+-- Requires the 'HscEnv' to be set up with dependencies +loadInterface + :: MonadIO m => HscEnv + -> ModSummary + -> SourceModified + -> Maybe LinkableType + -> (Maybe LinkableType -> m ([FileDiagnostic], Maybe HiFileResult)) -- ^ Action to regenerate an interface + -> m ([FileDiagnostic], Maybe HiFileResult) +loadInterface session ms sourceMod linkableNeeded regen = do + res <- liftIO $ checkOldIface session ms sourceMod Nothing + case res of + (UpToDate, Just iface) + -- If the module used TH splices when it was last + -- compiled, then the recompilation check is not + -- accurate enough (https://gitlab.haskell.org/ghc/ghc/-/issues/481) + -- and we must ignore + -- it. However, if the module is stable (none of + -- the modules it depends on, directly or + -- indirectly, changed), then we *can* skip + -- recompilation. This is why the SourceModified + -- type contains SourceUnmodifiedAndStable, and + -- it's pretty important: otherwise ghc --make + -- would always recompile TH modules, even if + -- nothing at all has changed. Stability is just + -- the same check that make is doing for us in + -- one-shot mode. + | not (mi_used_th iface) || SourceUnmodifiedAndStable == sourceMod + -> do + linkable <- case linkableNeeded of + Just ObjectLinkable -> liftIO $ findObjectLinkableMaybe (ms_mod ms) (ms_location ms) + _ -> pure Nothing + + -- We don't need to regenerate if the object is up do date, or we don't need one + let objUpToDate = isNothing linkableNeeded || case linkable of + Nothing -> False + Just (LM obj_time _ _) -> obj_time > ms_hs_date ms + if objUpToDate + then do + hmi <- liftIO $ mkDetailsFromIface session iface linkable + return ([], Just $ HiFileResult ms hmi) + else regen linkableNeeded + (_reason, _) -> regen linkableNeeded + +mkDetailsFromIface :: HscEnv -> ModIface -> Maybe Linkable -> IO HomeModInfo +mkDetailsFromIface session iface linkable = do + details <- liftIO $ fixIO $ \details -> do + let hsc' = session { hsc_HPT = addToHpt (hsc_HPT session) (moduleName $ mi_module iface) (HomeModInfo iface details linkable) } + initIfaceLoad hsc' (typecheckIface iface) + return (HomeModInfo iface details linkable) + +-- | Non-interactive, batch version of 'InteractiveEval.getDocs'. +-- The interactive paths create problems in ghc-lib builds +--- and leads to fun errors like "Cannot continue after interface file error". +getDocsBatch + :: HscEnv + -> Module -- ^ a moudle where the names are in scope + -> [Name] + -> IO [Either String (Maybe HsDocString, Map.Map Int HsDocString)] +getDocsBatch hsc_env _mod _names = do + ((_warns,errs), res) <- initTc hsc_env HsSrcFile False _mod fakeSpan $ forM _names $ \name -> + case nameModule_maybe name of + Nothing -> return (Left $ NameHasNoModule name) + Just mod -> do + ModIface { mi_doc_hdr = mb_doc_hdr + , mi_decl_docs = DeclDocMap dmap + , mi_arg_docs = ArgDocMap amap + } <- loadModuleInterface "getModuleInterface" mod + if isNothing mb_doc_hdr && Map.null dmap && Map.null amap + then pure (Left (NoDocsInIface mod $ compiled name)) + else pure (Right ( Map.lookup name dmap + , Map.findWithDefault Map.empty name amap)) + case res of + Just x -> return $ map (first prettyPrint) x + Nothing -> throwErrors errs + where + throwErrors = liftIO . throwIO . mkSrcErr + compiled n = + -- TODO: Find a more direct indicator. 
+ case nameSrcLoc n of + RealSrcLoc {} -> False + UnhelpfulLoc {} -> True + +fakeSpan :: RealSrcSpan +fakeSpan = realSrcLocSpan $ mkRealSrcLoc (fsLit "") 1 1 + +-- | Non-interactive, batch version of 'InteractiveEval.lookupNames'. +-- The interactive paths create problems in ghc-lib builds +--- and leads to fun errors like "Cannot continue after interface file error". +lookupName :: HscEnv + -> Module -- ^ A module where the Names are in scope + -> Name + -> IO (Maybe TyThing) +lookupName hsc_env mod name = do + (_messages, res) <- initTc hsc_env HsSrcFile False mod fakeSpan $ do + tcthing <- tcLookup name + case tcthing of + AGlobal thing -> return thing + ATcId{tct_id=id} -> return (AnId id) + _ -> panic "tcRnLookupName'" + return res diff --git a/ghcide/src/Development/IDE/Core/Debouncer.hs b/ghcide/src/Development/IDE/Core/Debouncer.hs new file mode 100644 index 00000000000..7eb46aa92bf --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Debouncer.hs @@ -0,0 +1,57 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.Core.Debouncer + ( Debouncer + , registerEvent + , newAsyncDebouncer + , noopDebouncer + ) where + +import Control.Concurrent.Extra +import Control.Concurrent.Async +import Control.Exception +import Control.Monad.Extra +import Data.Hashable +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as Map +import System.Time.Extra + +-- | A debouncer can be used to avoid triggering many events +-- (e.g. diagnostics) for the same key (e.g. the same file) +-- within a short timeframe. This is accomplished +-- by delaying each event for a given time. If another event +-- is registered for the same key within that timeframe, +-- only the new event will fire. +-- +-- We abstract over the debouncer used so we an use a proper debouncer in the IDE but disable +-- debouncing in the DAML CLI compiler. +newtype Debouncer k = Debouncer { registerEvent :: Seconds -> k -> IO () -> IO () } + +-- | Debouncer used in the IDE that delays events as expected. +newAsyncDebouncer :: (Eq k, Hashable k) => IO (Debouncer k) +newAsyncDebouncer = Debouncer . asyncRegisterEvent <$> newVar Map.empty + +-- | Register an event that will fire after the given delay if no other event +-- for the same key gets registered until then. +-- +-- If there is a pending event for the same key, the pending event will be killed. +-- Events are run unmasked so it is up to the user of `registerEvent` +-- to mask if required. +asyncRegisterEvent :: (Eq k, Hashable k) => Var (HashMap k (Async ())) -> Seconds -> k -> IO () -> IO () +asyncRegisterEvent d 0 k fire = do + modifyVar_ d $ \m -> mask_ $ do + whenJust (Map.lookup k m) cancel + pure $ Map.delete k m + fire +asyncRegisterEvent d delay k fire = modifyVar_ d $ \m -> mask_ $ do + whenJust (Map.lookup k m) cancel + a <- asyncWithUnmask $ \unmask -> unmask $ do + sleep delay + fire + modifyVar_ d (pure . Map.delete k) + pure $ Map.insert k a m + +-- | Debouncer used in the DAML CLI compiler that emits events immediately. 
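-- Unlike the asynchronous debouncer above, the registered action runs
-- immediately.  A minimal usage sketch of the API (the key and the IO action
-- here are made up, purely for illustration):
--
-- > do d <- newAsyncDebouncer
-- >    registerEvent d 0.1 ("A.hs" :: FilePath) (putStrLn "publish diagnostics for A.hs")
--
-- If another event for "A.hs" is registered within those 0.1 seconds, the
-- pending one is cancelled and only the newest action fires; 'noopDebouncer'
-- skips the delay and the bookkeeping entirely.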
+noopDebouncer :: Debouncer k +noopDebouncer = Debouncer $ \_ _ a -> a diff --git a/ghcide/src/Development/IDE/Core/FileExists.hs b/ghcide/src/Development/IDE/Core/FileExists.hs new file mode 100644 index 00000000000..098fd97fd3f --- /dev/null +++ b/ghcide/src/Development/IDE/Core/FileExists.hs @@ -0,0 +1,229 @@ +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +module Development.IDE.Core.FileExists + ( fileExistsRules + , modifyFileExists + , getFileExists + , watchedGlobs + ) +where + +import Control.Concurrent.Extra +import Control.Exception +import Control.Monad.Extra +import Data.Binary +import qualified Data.ByteString as BS +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Maybe +import Development.IDE.Core.FileStore +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Shake +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import Development.Shake +import Development.Shake.Classes +import GHC.Generics +import Language.Haskell.LSP.Types.Capabilities +import qualified System.Directory as Dir +import qualified System.FilePath.Glob as Glob + +{- Note [File existence cache and LSP file watchers] +Some LSP servers provide the ability to register file watches with the client, which will then notify +us of file changes. Some clients can do this more efficiently than us, or generally it's a tricky +problem + +Here we use this to maintain a quick lookup cache of file existence. How this works is: +- On startup, if the client supports it we ask it to watch some files (see below). +- When those files are created or deleted (we can also see change events, but we don't +care since we're only caching existence here) we get a notification from the client. +- The notification handler calls 'modifyFileExists' to update our cache. + +This means that the cache will only ever work for the files we have set up a watcher for. +So we pick the set that we mostly care about and which are likely to change existence +most often: the source files of the project (as determined by the source extensions +we're configured to care about). + +For all other files we fall back to the slow path. + +There are a few failure modes to think about: + +1. The client doesn't send us the notifications we asked for. + +There's not much we can do in this case: the whole point is to rely on the client so +we don't do the checking ourselves. If the client lets us down, we will just be wrong. + +2. Races between registering watchers, getting notifications, and file changes. + +If a file changes status between us asking for notifications and the client actually +setting up the notifications, we might not get told about it. But this is a relatively +small race window around startup, so we just don't worry about it. + +3. Using the fast path for files that we aren't watching. + +In this case we will fall back to the slow path, but cache that result forever (since +it won't get invalidated by a client notification). To prevent this we guard the +fast path by a check that the path also matches our watching patterns. +-} + +-- See Note [File existence cache and LSP file watchers] +-- | A map for tracking the file existence. +-- If a path maps to 'True' then it exists; if it maps to 'False' then it doesn't exist'; and +-- if it's not in the map then we don't know. 
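-- For illustration, with @m :: FileExistsMap@ (file names shortened):
--
-- > HashMap.lookup (toNormalizedFilePath' "A.hs") m == Just True    -- watched, known to exist
-- > HashMap.lookup (toNormalizedFilePath' "B.hs") m == Just False   -- watched, known to be missing
-- > HashMap.lookup (toNormalizedFilePath' "C.hs") m == Nothing      -- unknown: fall back to the slow path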
+type FileExistsMap = (HashMap NormalizedFilePath Bool) + +-- | A wrapper around a mutable 'FileExistsState' +newtype FileExistsMapVar = FileExistsMapVar (Var FileExistsMap) + +instance IsIdeGlobal FileExistsMapVar + +-- | Grab the current global value of 'FileExistsMap' without acquiring a dependency +getFileExistsMapUntracked :: Action FileExistsMap +getFileExistsMapUntracked = do + FileExistsMapVar v <- getIdeGlobalAction + liftIO $ readVar v + +-- | Modify the global store of file exists. +modifyFileExists :: IdeState -> [(NormalizedFilePath, Bool)] -> IO () +modifyFileExists state changes = do + FileExistsMapVar var <- getIdeGlobalState state + changesMap <- evaluate $ HashMap.fromList changes + -- Masked to ensure that the previous values are flushed together with the map update + mask $ \_ -> do + -- update the map + modifyVar_ var $ evaluate . HashMap.union changesMap + -- See Note [Invalidating file existence results] + -- flush previous values + mapM_ (deleteValue state GetFileExists . fst) changes + +------------------------------------------------------------------------------------- + +type instance RuleResult GetFileExists = Bool + +data GetFileExists = GetFileExists + deriving (Eq, Show, Typeable, Generic) + +instance NFData GetFileExists +instance Hashable GetFileExists +instance Binary GetFileExists + +-- | Returns True if the file exists +-- Note that a file is not considered to exist unless it is saved to disk. +-- In particular, VFS existence is not enough. +-- Consider the following example: +-- 1. The file @A.hs@ containing the line @import B@ is added to the files of interest +-- Since @B.hs@ is neither open nor exists, GetLocatedImports finds Nothing +-- 2. The editor creates a new buffer @B.hs@ +-- Unless the editor also sends a @DidChangeWatchedFile@ event, ghcide will not pick it up +-- Most editors, e.g. VSCode, only send the event when the file is saved to disk. +getFileExists :: NormalizedFilePath -> Action Bool +getFileExists fp = use_ GetFileExists fp + +{- Note [Which files should we watch?] +The watcher system gives us a lot of flexibility: we can set multiple watchers, and they can all watch on glob +patterns. + +We used to have a quite precise system, where we would register a watcher for a single file path only (and always) +when we actually looked to see if it existed. The downside of this is that it sends a *lot* of notifications +to the client (thousands on a large project), and this could lock up some clients like emacs +(https://github.com/emacs-lsp/lsp-mode/issues/2165). + +Now we take the opposite approach: we register a single, quite general watcher that looks for all files +with a predefined set of extensions. The consequences are: +- The client will have to watch more files. This is usually not too bad, since the pattern is a single glob, +and the clients typically call out to an optimized implementation of file watching that understands globs. +- The client will send us a lot more notifications. This isn't too bad in practice, since although +we're watching a lot of files in principle, they don't get created or destroyed that often. +- We won't ever hit the fast lookup path for files which aren't in our watch pattern, since the only way +files get into our map is when the client sends us a notification about them because we're watching them. +This is fine so long as we're watching the files we check most often, i.e. source files. +-} + +-- | The list of file globs that we ask the client to watch. 
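-- For example, if 'optExtensions' were @["hs", "lhs"]@ (an assumption here;
-- the actual value comes from the 'IdeOptions' in use), this yields
--
-- > ["**/*.hs", "**/*.hs-boot", "**/*.lhs", "**/*.lhs-boot"]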
+watchedGlobs :: IdeOptions -> [String] +watchedGlobs opts = [ "**/*." ++ extIncBoot | ext <- optExtensions opts, extIncBoot <- [ext, ext ++ "-boot"]] + +-- | Installs the 'getFileExists' rules. +-- Provides a fast implementation if client supports dynamic watched files. +-- Creates a global state as a side effect in that case. +fileExistsRules :: ClientCapabilities -> VFSHandle -> Rules () +fileExistsRules ClientCapabilities{_workspace} vfs = do + -- Create the global always, although it should only be used if we have fast rules. + -- But there's a chance someone will send unexpected notifications anyway, + -- e.g. https://github.com/haskell/ghcide/issues/599 + addIdeGlobal . FileExistsMapVar =<< liftIO (newVar []) + + extras <- getShakeExtrasRules + opts <- liftIO $ getIdeOptionsIO extras + let globs = watchedGlobs opts + + case () of + _ | Just WorkspaceClientCapabilities{_didChangeWatchedFiles} <- _workspace + , Just DidChangeWatchedFilesClientCapabilities{_dynamicRegistration} <- _didChangeWatchedFiles + , Just True <- _dynamicRegistration + -> fileExistsRulesFast globs vfs + | otherwise -> fileExistsRulesSlow vfs + +-- Requires an lsp client that provides WatchedFiles notifications, but assumes that this has already been checked. +fileExistsRulesFast :: [String] -> VFSHandle -> Rules () +fileExistsRulesFast globs vfs = + let patterns = fmap Glob.compile globs + fpMatches fp = any (\p -> Glob.match p fp) patterns + in defineEarlyCutoff $ \GetFileExists file -> do + isWf <- isWorkspaceFile file + if isWf && fpMatches (fromNormalizedFilePath file) + then fileExistsFast vfs file + else fileExistsSlow vfs file + +{- Note [Invalidating file existence results] +We have two mechanisms for getting file existence information: +- The file existence cache +- The VFS lookup + +Both of these affect the results of the 'GetFileExists' rule, so we need to make sure it +is invalidated properly when things change. + +For the file existence cache, we manually flush the results of 'GetFileExists' when we +modify it (i.e. when a notification comes from the client). This is faster than using +'alwaysRerun' in the 'fileExistsFast', and we need it to be as fast as possible. + +For the VFS lookup, however, we won't get prompted to flush the result, so instead +we use 'alwaysRerun'. +-} + +fileExistsFast :: VFSHandle -> NormalizedFilePath -> Action (Maybe BS.ByteString, ([a], Maybe Bool)) +fileExistsFast vfs file = do + -- Could in principle use 'alwaysRerun' here, but it's too slwo, See Note [Invalidating file existence results] + mp <- getFileExistsMapUntracked + + let mbFilesWatched = HashMap.lookup file mp + exist <- case mbFilesWatched of + Just exist -> pure exist + -- We don't know about it: use the slow route. + -- Note that we do *not* call 'fileExistsSlow', as that would trigger 'alwaysRerun'. 
+ Nothing -> liftIO $ getFileExistsVFS vfs file + pure (summarizeExists exist, ([], Just exist)) + +summarizeExists :: Bool -> Maybe BS.ByteString +summarizeExists x = Just $ if x then BS.singleton 1 else BS.empty + +fileExistsRulesSlow :: VFSHandle -> Rules () +fileExistsRulesSlow vfs = + defineEarlyCutoff $ \GetFileExists file -> fileExistsSlow vfs file + +fileExistsSlow :: VFSHandle -> NormalizedFilePath -> Action (Maybe BS.ByteString, ([a], Maybe Bool)) +fileExistsSlow vfs file = do + -- See Note [Invalidating file existence results] + alwaysRerun + exist <- liftIO $ getFileExistsVFS vfs file + pure (summarizeExists exist, ([], Just exist)) + +getFileExistsVFS :: VFSHandle -> NormalizedFilePath -> IO Bool +getFileExistsVFS vfs file = do + -- we deliberately and intentionally wrap the file as an FilePath WITHOUT mkAbsolute + -- so that if the file doesn't exist, is on a shared drive that is unmounted etc we get a properly + -- cached 'No' rather than an exception in the wrong place + handle (\(_ :: IOException) -> return False) $ + (isJust <$> getVirtualFile vfs (filePathToUri' file)) ||^ + Dir.doesFileExist (fromNormalizedFilePath file) diff --git a/ghcide/src/Development/IDE/Core/FileStore.hs b/ghcide/src/Development/IDE/Core/FileStore.hs new file mode 100644 index 00000000000..90696406093 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/FileStore.hs @@ -0,0 +1,247 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeFamilies #-} + +module Development.IDE.Core.FileStore( + getFileContents, + getVirtualFile, + setFileModified, + setSomethingModified, + fileStoreRules, + modificationTime, + typecheckParents, + VFSHandle, + makeVFSHandle, + makeLSPVFSHandle, + isFileOfInterestRule + ) where + +import Development.IDE.GHC.Orphans() +import Development.IDE.Core.Shake +import Control.Concurrent.Extra +import qualified Data.Map.Strict as Map +import qualified Data.HashMap.Strict as HM +import Data.Maybe +import qualified Data.Text as T +import Control.Monad.Extra +import Development.Shake +import Development.Shake.Classes +import Control.Exception +import Data.Either.Extra +import Data.Int (Int64) +import Data.Time +import System.IO.Error +import qualified Data.ByteString.Char8 as BS +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.Core.OfInterest (getFilesOfInterest) +import Development.IDE.Core.RuleTypes +import Development.IDE.Types.Options +import qualified Data.Rope.UTF16 as Rope +import Development.IDE.Import.DependencyInformation + +#ifdef mingw32_HOST_OS +import qualified System.Directory as Dir +#else +import Data.Time.Clock.System (systemToUTCTime, SystemTime(MkSystemTime)) +import Foreign.Ptr +import Foreign.C.String +import Foreign.C.Types +import Foreign.Marshal (alloca) +import Foreign.Storable +import qualified System.Posix.Error as Posix +#endif + +import qualified Development.IDE.Types.Logger as L + +import Language.Haskell.LSP.Core +import Language.Haskell.LSP.VFS + +-- | haskell-lsp manages the VFS internally and automatically so we cannot use +-- the builtin VFS without spawning up an LSP server. To be able to test things +-- like `setBufferModified` we abstract over the VFS implementation. 
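-- A rough sketch of how a test might drive this abstraction (the module name
-- and contents are made up; @for_@ is 'Data.Foldable.for_'):
--
-- > do vfs <- makeVFSHandle
-- >    for_ (setVirtualFileContents vfs) $ \set ->
-- >      set (filePathToUri' (toNormalizedFilePath' "A.hs")) (Just "module A where")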
+data VFSHandle = VFSHandle + { getVirtualFile :: NormalizedUri -> IO (Maybe VirtualFile) + -- ^ get the contents of a virtual file + , setVirtualFileContents :: Maybe (NormalizedUri -> Maybe T.Text -> IO ()) + -- ^ set a specific file to a value. If Nothing then we are ignoring these + -- signals anyway so can just say something was modified + } + +instance IsIdeGlobal VFSHandle + +makeVFSHandle :: IO VFSHandle +makeVFSHandle = do + vfsVar <- newVar (1, Map.empty) + pure VFSHandle + { getVirtualFile = \uri -> do + (_nextVersion, vfs) <- readVar vfsVar + pure $ Map.lookup uri vfs + , setVirtualFileContents = Just $ \uri content -> + modifyVar_ vfsVar $ \(nextVersion, vfs) -> pure $ (nextVersion + 1, ) $ + case content of + Nothing -> Map.delete uri vfs + -- The second version number is only used in persistFileVFS which we do not use so we set it to 0. + Just content -> Map.insert uri (VirtualFile nextVersion 0 (Rope.fromText content)) vfs + } + +makeLSPVFSHandle :: LspFuncs c -> VFSHandle +makeLSPVFSHandle lspFuncs = VFSHandle + { getVirtualFile = getVirtualFileFunc lspFuncs + , setVirtualFileContents = Nothing + } + + +isFileOfInterestRule :: Rules () +isFileOfInterestRule = defineEarlyCutoff $ \IsFileOfInterest f -> do + filesOfInterest <- getFilesOfInterest + let res = maybe NotFOI IsFOI $ f `HM.lookup` filesOfInterest + return (Just $ BS.pack $ show $ hash res, ([], Just res)) + +getModificationTimeRule :: VFSHandle -> Rules () +getModificationTimeRule vfs = + defineEarlyCutoff $ \(GetModificationTime_ missingFileDiags) file -> do + let file' = fromNormalizedFilePath file + let wrap time@(l,s) = (Just $ BS.pack $ show time, ([], Just $ ModificationTime l s)) + alwaysRerun + mbVirtual <- liftIO $ getVirtualFile vfs $ filePathToUri' file + case mbVirtual of + Just (virtualFileVersion -> ver) -> + pure (Just $ BS.pack $ show ver, ([], Just $ VFSVersion ver)) + Nothing -> liftIO $ fmap wrap (getModTime file') + `catch` \(e :: IOException) -> do + let err | isDoesNotExistError e = "File does not exist: " ++ file' + | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e + diag = ideErrorText file (T.pack err) + if isDoesNotExistError e && not missingFileDiags + then return (Nothing, ([], Nothing)) + else return (Nothing, ([diag], Nothing)) + +-- Dir.getModificationTime is surprisingly slow since it performs +-- a ton of conversions. Since we do not actually care about +-- the format of the time, we can get away with something cheaper. +-- For now, we only try to do this on Unix systems where it seems to get the +-- time spent checking file modifications (which happens on every change) +-- from > 0.5s to ~0.15s. +-- We might also want to try speeding this up on Windows at some point. +-- TODO leverage DidChangeWatchedFile lsp notifications on clients that +-- support them, as done for GetFileExists +getModTime :: FilePath -> IO (Int64, Int64) +getModTime f = +#ifdef mingw32_HOST_OS + do time <- Dir.getModificationTime f + let !day = fromInteger $ toModifiedJulianDay $ utctDay time + !dayTime = fromInteger $ diffTimeToPicoseconds $ utctDayTime time + pure (day, dayTime) +#else + withCString f $ \f' -> + alloca $ \secPtr -> + alloca $ \nsecPtr -> do + Posix.throwErrnoPathIfMinus1Retry_ "getmodtime" f $ c_getModTime f' secPtr nsecPtr + CTime sec <- peek secPtr + CLong nsec <- peek nsecPtr + pure (sec, nsec) + +-- Sadly even unix’s getFileStatus + modificationTimeHiRes is still about twice as slow +-- as doing the FFI call ourselves :(. 
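-- The meaning of the returned pair therefore differs per platform: on Windows
-- it is (modified Julian day, picoseconds within the day), while on this POSIX
-- path it is (seconds, nanoseconds) since the epoch, e.g.
--
-- > internalTimeToUTCTime 1600000000 0   -- 2020-09-13 12:26:40 UTC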
+foreign import ccall "getmodtime" c_getModTime :: CString -> Ptr CTime -> Ptr CLong -> IO Int +#endif + +modificationTime :: FileVersion -> Maybe UTCTime +modificationTime VFSVersion{} = Nothing +modificationTime (ModificationTime large small) = Just $ internalTimeToUTCTime large small + +internalTimeToUTCTime :: Int64 -> Int64 -> UTCTime +internalTimeToUTCTime large small = +#ifdef mingw32_HOST_OS + UTCTime (ModifiedJulianDay $ fromIntegral large) (picosecondsToDiffTime $ fromIntegral small) +#else + systemToUTCTime $ MkSystemTime large (fromIntegral small) +#endif + +getFileContentsRule :: VFSHandle -> Rules () +getFileContentsRule vfs = + define $ \GetFileContents file -> do + -- need to depend on modification time to introduce a dependency with Cutoff + time <- use_ GetModificationTime file + res <- liftIO $ ideTryIOException file $ do + mbVirtual <- getVirtualFile vfs $ filePathToUri' file + pure $ Rope.toText . _text <$> mbVirtual + case res of + Left err -> return ([err], Nothing) + Right contents -> return ([], Just (time, contents)) + +ideTryIOException :: NormalizedFilePath -> IO a -> IO (Either FileDiagnostic a) +ideTryIOException fp act = + mapLeft + (\(e :: IOException) -> ideErrorText fp $ T.pack $ show e) + <$> try act + +-- | Returns the modification time and the contents. +-- For VFS paths, the modification time is the current time. +getFileContents :: NormalizedFilePath -> Action (UTCTime, Maybe T.Text) +getFileContents f = do + (fv, txt) <- use_ GetFileContents f + modTime <- case modificationTime fv of + Just t -> pure t + Nothing -> do + foi <- use_ IsFileOfInterest f + liftIO $ case foi of + IsFOI Modified -> getCurrentTime + _ -> do + (large,small) <- getModTime $ fromNormalizedFilePath f + pure $ internalTimeToUTCTime large small + return (modTime, txt) + +fileStoreRules :: VFSHandle -> Rules () +fileStoreRules vfs = do + addIdeGlobal vfs + getModificationTimeRule vfs + getFileContentsRule vfs + isFileOfInterestRule + +-- | Note that some buffer for a specific file has been modified but not +-- with what changes. +setFileModified :: IdeState + -> Bool -- ^ Was the file saved? + -> NormalizedFilePath + -> IO () +setFileModified state saved nfp = do + ideOptions <- getIdeOptionsIO $ shakeExtras state + let checkParents = case optCheckParents ideOptions of + AlwaysCheck -> True + CheckOnSaveAndClose -> saved + _ -> False + VFSHandle{..} <- getIdeGlobalState state + when (isJust setVirtualFileContents) $ + fail "setFileModified can't be called on this type of VFSHandle" + shakeRestart state [] + when checkParents $ + typecheckParents state nfp + +typecheckParents :: IdeState -> NormalizedFilePath -> IO () +typecheckParents state nfp = void $ shakeEnqueue (shakeExtras state) parents + where parents = mkDelayedAction "ParentTC" L.Debug (typecheckParentsAction nfp) + +typecheckParentsAction :: NormalizedFilePath -> Action () +typecheckParentsAction nfp = do + revs <- transitiveReverseDependencies nfp <$> useNoFile_ GetModuleGraph + logger <- logger <$> getShakeExtras + let log = L.logInfo logger . T.pack + case revs of + Nothing -> liftIO $ log $ "Could not identify reverse dependencies for " ++ show nfp + Just rs -> do + liftIO $ (log $ "Typechecking reverse dependencies for " ++ show nfp ++ ": " ++ show revs) + `catch` \(e :: SomeException) -> log (show e) + () <$ uses GetModIface rs + +-- | Note that some buffer somewhere has been modified, but don't say what. 
+-- Only valid if the virtual file system was initialised by LSP, as that +-- independently tracks which files are modified. +setSomethingModified :: IdeState -> IO () +setSomethingModified state = do + VFSHandle{..} <- getIdeGlobalState state + when (isJust setVirtualFileContents) $ + fail "setSomethingModified can't be called on this type of VFSHandle" + void $ shakeRestart state [] diff --git a/ghcide/src/Development/IDE/Core/IdeConfiguration.hs b/ghcide/src/Development/IDE/Core/IdeConfiguration.hs new file mode 100644 index 00000000000..d42322556d7 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/IdeConfiguration.hs @@ -0,0 +1,91 @@ +{-# LANGUAGE DuplicateRecordFields #-} +module Development.IDE.Core.IdeConfiguration + ( IdeConfiguration(..) + , registerIdeConfiguration + , getIdeConfiguration + , parseConfiguration + , parseWorkspaceFolder + , isWorkspaceFile + , modifyWorkspaceFolders + , modifyClientSettings + , getClientSettings + ) +where + +import Control.Concurrent.Extra +import Control.Monad +import Data.Hashable (Hashed, hashed, unhashed) +import Data.HashSet (HashSet, singleton) +import Data.Text (Text, isPrefixOf) +import Data.Aeson.Types (Value) +import Development.IDE.Core.Shake +import Development.IDE.Types.Location +import Development.Shake +import Language.Haskell.LSP.Types +import System.FilePath (isRelative) + +-- | Lsp client relevant configuration details +data IdeConfiguration = IdeConfiguration + { workspaceFolders :: HashSet NormalizedUri + , clientSettings :: Hashed (Maybe Value) + } + deriving (Show) + +newtype IdeConfigurationVar = IdeConfigurationVar {unIdeConfigurationRef :: Var IdeConfiguration} + +instance IsIdeGlobal IdeConfigurationVar + +registerIdeConfiguration :: ShakeExtras -> IdeConfiguration -> IO () +registerIdeConfiguration extras = + addIdeGlobalExtras extras . IdeConfigurationVar <=< newVar + +getIdeConfiguration :: Action IdeConfiguration +getIdeConfiguration = + getIdeGlobalAction >>= liftIO . readVar . unIdeConfigurationRef + +parseConfiguration :: InitializeParams -> IdeConfiguration +parseConfiguration InitializeParams {..} = + IdeConfiguration {..} + where + workspaceFolders = + foldMap (singleton . toNormalizedUri) _rootUri + <> (foldMap . foldMap) + (singleton . parseWorkspaceFolder) + _workspaceFolders + clientSettings = hashed _initializationOptions + +parseWorkspaceFolder :: WorkspaceFolder -> NormalizedUri +parseWorkspaceFolder = + toNormalizedUri . Uri . (_uri :: WorkspaceFolder -> Text) + +modifyWorkspaceFolders + :: IdeState -> (HashSet NormalizedUri -> HashSet NormalizedUri) -> IO () +modifyWorkspaceFolders ide f = modifyIdeConfiguration ide f' + where f' (IdeConfiguration ws initOpts) = IdeConfiguration (f ws) initOpts + +modifyClientSettings + :: IdeState -> (Maybe Value -> Maybe Value) -> IO () +modifyClientSettings ide f = modifyIdeConfiguration ide f' + where f' (IdeConfiguration ws clientSettings) = + IdeConfiguration ws (hashed . f . unhashed $ clientSettings) + +modifyIdeConfiguration + :: IdeState -> (IdeConfiguration -> IdeConfiguration) -> IO () +modifyIdeConfiguration ide f = do + IdeConfigurationVar var <- getIdeGlobalState ide + modifyVar_ var (pure . f) + +isWorkspaceFile :: NormalizedFilePath -> Action Bool +isWorkspaceFile file = + if isRelative (fromNormalizedFilePath file) + then return True + else do + IdeConfiguration {..} <- getIdeConfiguration + let toText = getUri . 
fromNormalizedUri + return $ + any + (\root -> toText root `isPrefixOf` toText (filePathToUri' file)) + workspaceFolders + +getClientSettings :: Action (Maybe Value) +getClientSettings = unhashed . clientSettings <$> getIdeConfiguration \ No newline at end of file diff --git a/ghcide/src/Development/IDE/Core/OfInterest.hs b/ghcide/src/Development/IDE/Core/OfInterest.hs new file mode 100644 index 00000000000..d3bef5f1c24 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/OfInterest.hs @@ -0,0 +1,104 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE FlexibleInstances #-} + +-- | Utilities and state for the files of interest - those which are currently +-- open in the editor. The useful function is 'getFilesOfInterest'. +module Development.IDE.Core.OfInterest( + ofInterestRules, + getFilesOfInterest, setFilesOfInterest, modifyFilesOfInterest, + kick, FileOfInterestStatus(..) + ) where + +import Control.Concurrent.Extra +import Data.Binary +import Data.Hashable +import Control.DeepSeq +import GHC.Generics +import Data.Typeable +import qualified Data.ByteString.UTF8 as BS +import Control.Exception +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import qualified Data.Text as T +import Data.Tuple.Extra +import Development.Shake +import Control.Monad (void) + +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Logger +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake +import Data.Maybe (catMaybes) + +newtype OfInterestVar = OfInterestVar (Var (HashMap NormalizedFilePath FileOfInterestStatus)) +instance IsIdeGlobal OfInterestVar + +type instance RuleResult GetFilesOfInterest = HashMap NormalizedFilePath FileOfInterestStatus + +data GetFilesOfInterest = GetFilesOfInterest + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetFilesOfInterest +instance NFData GetFilesOfInterest +instance Binary GetFilesOfInterest + + +-- | The rule that initialises the files of interest state. +ofInterestRules :: Rules () +ofInterestRules = do + addIdeGlobal . OfInterestVar =<< liftIO (newVar HashMap.empty) + defineEarlyCutoff $ \GetFilesOfInterest _file -> assert (null $ fromNormalizedFilePath _file) $ do + alwaysRerun + filesOfInterest <- getFilesOfInterestUntracked + pure (Just $ BS.fromString $ show filesOfInterest, ([], Just filesOfInterest)) + + +-- | Get the files that are open in the IDE. +getFilesOfInterest :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getFilesOfInterest = useNoFile_ GetFilesOfInterest + + + +------------------------------------------------------------ +-- Exposed API + +-- | Set the files-of-interest - not usually necessary or advisable. +-- The LSP client will keep this information up to date. +setFilesOfInterest :: IdeState -> HashMap NormalizedFilePath FileOfInterestStatus -> IO () +setFilesOfInterest state files = modifyFilesOfInterest state (const files) + +getFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus) +getFilesOfInterestUntracked = do + OfInterestVar var <- getIdeGlobalAction + liftIO $ readVar var + +-- | Modify the files-of-interest - not usually necessary or advisable. +-- The LSP client will keep this information up to date. 
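-- For illustration, marking a buffer as modified in memory (the file name is
-- made up; @ide :: IdeState@):
--
-- > modifyFilesOfInterest ide (HashMap.insert (toNormalizedFilePath' "A.hs") Modified)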
+modifyFilesOfInterest + :: IdeState + -> (HashMap NormalizedFilePath FileOfInterestStatus -> HashMap NormalizedFilePath FileOfInterestStatus) + -> IO () +modifyFilesOfInterest state f = do + OfInterestVar var <- getIdeGlobalState state + files <- modifyVar var $ pure . dupe . f + logDebug (ideLogger state) $ "Set files of interest to: " <> T.pack (show $ HashMap.toList files) + +-- | Typecheck all the files of interest. +-- Could be improved +kick :: Action () +kick = do + files <- HashMap.keys <$> getFilesOfInterest + ShakeExtras{progressUpdate} <- getShakeExtras + liftIO $ progressUpdate KickStarted + + -- Update the exports map for the project + (results, ()) <- par (uses GenerateCore files) (void $ uses GetHieAst files) + ShakeExtras{exportsMap} <- getShakeExtras + let mguts = catMaybes results + !exportsMap' = createExportsMapMg mguts + liftIO $ modifyVar_ exportsMap $ evaluate . (exportsMap' <>) + + liftIO $ progressUpdate KickCompleted diff --git a/ghcide/src/Development/IDE/Core/PositionMapping.hs b/ghcide/src/Development/IDE/Core/PositionMapping.hs new file mode 100644 index 00000000000..5cb867e8538 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/PositionMapping.hs @@ -0,0 +1,160 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +module Development.IDE.Core.PositionMapping + ( PositionMapping(..) + , PositionResult(..) + , lowerRange + , upperRange + , positionResultToMaybe + , fromCurrentPosition + , toCurrentPosition + , PositionDelta(..) + , addDelta + , mkDelta + , toCurrentRange + , fromCurrentRange + , applyChange + , zeroMapping + -- toCurrent and fromCurrent are mainly exposed for testing + , toCurrent + , fromCurrent + ) where + +import Control.Monad +import qualified Data.Text as T +import Language.Haskell.LSP.Types +import Data.List + +-- | Either an exact position, or the range of text that was substituted +data PositionResult a + = PositionRange -- ^ Fields need to be non-strict otherwise bind is exponential + { unsafeLowerRange :: a + , unsafeUpperRange :: a } + | PositionExact !a + deriving (Eq,Ord,Show,Functor) + +lowerRange :: PositionResult a -> a +lowerRange (PositionExact a) = a +lowerRange (PositionRange lower _) = lower + +upperRange :: PositionResult a -> a +upperRange (PositionExact a) = a +upperRange (PositionRange _ upper) = upper + +positionResultToMaybe :: PositionResult a -> Maybe a +positionResultToMaybe (PositionExact a) = Just a +positionResultToMaybe _ = Nothing + +instance Applicative PositionResult where + pure = PositionExact + (PositionExact f) <*> a = fmap f a + (PositionRange f g) <*> (PositionExact a) = PositionRange (f a) (g a) + (PositionRange f g) <*> (PositionRange lower upper) = PositionRange (f lower) (g upper) + +instance Monad PositionResult where + (PositionExact a) >>= f = f a + (PositionRange lower upper) >>= f = PositionRange lower' upper' + where + lower' = lowerRange $ f lower + upper' = upperRange $ f upper + +-- The position delta is the difference between two versions +data PositionDelta = PositionDelta + { toDelta :: !(Position -> PositionResult Position) + , fromDelta :: !(Position -> PositionResult Position) + } + +fromCurrentPosition :: PositionMapping -> Position -> Maybe Position +fromCurrentPosition (PositionMapping pm) = positionResultToMaybe . fromDelta pm + +toCurrentPosition :: PositionMapping -> Position -> Maybe Position +toCurrentPosition (PositionMapping pm) = positionResultToMaybe . 
toDelta pm + +-- A position mapping is the difference from the current version to +-- a specific version +newtype PositionMapping = PositionMapping PositionDelta + +toCurrentRange :: PositionMapping -> Range -> Maybe Range +toCurrentRange mapping (Range a b) = + Range <$> toCurrentPosition mapping a <*> toCurrentPosition mapping b + +fromCurrentRange :: PositionMapping -> Range -> Maybe Range +fromCurrentRange mapping (Range a b) = + Range <$> fromCurrentPosition mapping a <*> fromCurrentPosition mapping b + +zeroMapping :: PositionMapping +zeroMapping = PositionMapping idDelta + +-- | Compose two position mappings. Composes in the same way as function +-- composition (ie the second argument is applyed to the position first). +composeDelta :: PositionDelta + -> PositionDelta + -> PositionDelta +composeDelta (PositionDelta to1 from1) (PositionDelta to2 from2) = + PositionDelta (to1 <=< to2) + (from1 >=> from2) + +idDelta :: PositionDelta +idDelta = PositionDelta pure pure + +-- | Convert a set of changes into a delta from k to k + 1 +mkDelta :: [TextDocumentContentChangeEvent] -> PositionDelta +mkDelta cs = foldl' applyChange idDelta cs + +-- | Add a new delta onto a Mapping k n to make a Mapping (k - 1) n +addDelta :: PositionDelta -> PositionMapping -> PositionMapping +addDelta delta (PositionMapping pm) = PositionMapping (composeDelta delta pm) + +applyChange :: PositionDelta -> TextDocumentContentChangeEvent -> PositionDelta +applyChange PositionDelta{..} (TextDocumentContentChangeEvent (Just r) _ t) = PositionDelta + { toDelta = toCurrent r t <=< toDelta + , fromDelta = fromDelta <=< fromCurrent r t + } +applyChange posMapping _ = posMapping + +toCurrent :: Range -> T.Text -> Position -> PositionResult Position +toCurrent (Range start@(Position startLine startColumn) end@(Position endLine endColumn)) t (Position line column) + | line < startLine || line == startLine && column < startColumn = + -- Position is before the change and thereby unchanged. + PositionExact $ Position line column + | line > endLine || line == endLine && column >= endColumn = + -- Position is after the change so increase line and column number + -- as necessary. + PositionExact $ newLine `seq` newColumn `seq` Position newLine newColumn + | otherwise = PositionRange start end + -- Position is in the region that was changed. + where + lineDiff = linesNew - linesOld + linesNew = T.count "\n" t + linesOld = endLine - startLine + newEndColumn + | linesNew == 0 = startColumn + T.length t + | otherwise = T.length $ T.takeWhileEnd (/= '\n') t + newColumn + | line == endLine = column + newEndColumn - endColumn + | otherwise = column + newLine = line + lineDiff + +fromCurrent :: Range -> T.Text -> Position -> PositionResult Position +fromCurrent (Range start@(Position startLine startColumn) end@(Position endLine endColumn)) t (Position line column) + | line < startLine || line == startLine && column < startColumn = + -- Position is before the change and thereby unchanged + PositionExact $ Position line column + | line > newEndLine || line == newEndLine && column >= newEndColumn = + -- Position is after the change so increase line and column number + -- as necessary. + PositionExact $ newLine `seq` newColumn `seq` Position newLine newColumn + | otherwise = PositionRange start end + -- Position is in the region that was changed. 
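-- A worked example for 'toCurrent' (and, mirrored, for 'fromCurrent'):
-- replacing the range (1,2)-(1,4) with the three-character text "xyz" grows
-- that line by one column, so a position after the edit shifts right by one,
-- while a position inside the edited range can only be mapped to the whole
-- substituted region:
--
-- > toCurrent (Range (Position 1 2) (Position 1 4)) "xyz" (Position 1 6)
-- >   == PositionExact (Position 1 7)
-- > toCurrent (Range (Position 1 2) (Position 1 4)) "xyz" (Position 1 3)
-- >   == PositionRange (Position 1 2) (Position 1 4)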
+ where + lineDiff = linesNew - linesOld + linesNew = T.count "\n" t + linesOld = endLine - startLine + newEndLine = endLine + lineDiff + newEndColumn + | linesNew == 0 = startColumn + T.length t + | otherwise = T.length $ T.takeWhileEnd (/= '\n') t + newColumn + | line == newEndLine = column - (newEndColumn - endColumn) + | otherwise = column + newLine = line - lineDiff diff --git a/ghcide/src/Development/IDE/Core/Preprocessor.hs b/ghcide/src/Development/IDE/Core/Preprocessor.hs new file mode 100644 index 00000000000..0f12c6fcac4 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Preprocessor.hs @@ -0,0 +1,227 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.Core.Preprocessor + ( preprocessor + ) where + +import Development.IDE.GHC.CPP +import Development.IDE.GHC.Orphans() +import Development.IDE.GHC.Compat +import GhcMonad +import StringBuffer as SB + +import Data.List.Extra +import System.FilePath +import System.IO.Extra +import Data.Char +import qualified HeaderInfo as Hdr +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.GHC.Error +import SysTools (Option (..), runUnlit, runPp) +import Control.Monad.Trans.Except +import qualified GHC.LanguageExtensions as LangExt +import Data.Maybe +import Control.Exception.Safe (catch, throw) +import Data.IORef (IORef, modifyIORef, newIORef, readIORef) +import Data.Text (Text) +import qualified Data.Text as T +import Outputable (showSDoc) +import Control.DeepSeq (NFData(rnf)) +import Control.Exception (evaluate) +import HscTypes (HscEnv(hsc_dflags)) + + +-- | Given a file and some contents, apply any necessary preprocessors, +-- e.g. unlit/cpp. Return the resulting buffer and the DynFlags it implies. 
+preprocessor :: HscEnv -> FilePath -> Maybe StringBuffer -> ExceptT [FileDiagnostic] IO (StringBuffer, DynFlags) +preprocessor env filename mbContents = do + -- Perform unlit + (isOnDisk, contents) <- + if isLiterate filename then do + let dflags = hsc_dflags env + newcontent <- liftIO $ runLhs dflags filename mbContents + return (False, newcontent) + else do + contents <- liftIO $ maybe (hGetStringBuffer filename) return mbContents + let isOnDisk = isNothing mbContents + return (isOnDisk, contents) + + -- Perform cpp + dflags <- ExceptT $ parsePragmasIntoDynFlags env filename contents + (isOnDisk, contents, dflags) <- + if not $ xopt LangExt.Cpp dflags then + return (isOnDisk, contents, dflags) + else do + cppLogs <- liftIO $ newIORef [] + contents <- ExceptT + $ (Right <$> (runCpp dflags {log_action = logAction cppLogs} filename + $ if isOnDisk then Nothing else Just contents)) + `catch` + ( \(e :: GhcException) -> do + logs <- readIORef cppLogs + case diagsFromCPPLogs filename (reverse logs) of + [] -> throw e + diags -> return $ Left diags + ) + dflags <- ExceptT $ parsePragmasIntoDynFlags env filename contents + return (False, contents, dflags) + + -- Perform preprocessor + if not $ gopt Opt_Pp dflags then + return (contents, dflags) + else do + contents <- liftIO $ runPreprocessor dflags filename $ if isOnDisk then Nothing else Just contents + dflags <- ExceptT $ parsePragmasIntoDynFlags env filename contents + return (contents, dflags) + where + logAction :: IORef [CPPLog] -> LogAction + logAction cppLogs dflags _reason severity srcSpan _style msg = do + let log = CPPLog severity srcSpan $ T.pack $ showSDoc dflags msg + modifyIORef cppLogs (log :) + + +data CPPLog = CPPLog Severity SrcSpan Text + deriving (Show) + + +data CPPDiag + = CPPDiag + { cdRange :: Range, + cdSeverity :: Maybe DiagnosticSeverity, + cdMessage :: [Text] + } + deriving (Show) + + +diagsFromCPPLogs :: FilePath -> [CPPLog] -> [FileDiagnostic] +diagsFromCPPLogs filename logs = + map (\d -> (toNormalizedFilePath' filename, ShowDiag, cppDiagToDiagnostic d)) $ + go [] logs + where + -- On errors, CPP calls logAction with a real span for the initial log and + -- then additional informational logs with `UnhelpfulSpan`. Collect those + -- informational log messages and attaches them to the initial log message. + go :: [CPPDiag] -> [CPPLog] -> [CPPDiag] + go acc [] = reverse $ map (\d -> d {cdMessage = reverse $ cdMessage d}) acc + go acc (CPPLog sev (RealSrcSpan span) msg : logs) = + let diag = CPPDiag (realSrcSpanToRange span) (toDSeverity sev) [msg] + in go (diag : acc) logs + go (diag : diags) (CPPLog _sev (UnhelpfulSpan _) msg : logs) = + go (diag {cdMessage = msg : cdMessage diag} : diags) logs + go [] (CPPLog _sev (UnhelpfulSpan _) _msg : logs) = go [] logs + cppDiagToDiagnostic :: CPPDiag -> Diagnostic + cppDiagToDiagnostic d = + Diagnostic + { _range = cdRange d, + _severity = cdSeverity d, + _code = Nothing, + _source = Just "CPP", + _message = T.unlines $ cdMessage d, + _relatedInformation = Nothing, + _tags = Nothing + } + + +isLiterate :: FilePath -> Bool +isLiterate x = takeExtension x `elem` [".lhs",".lhs-boot"] + + +-- | This reads the pragma information directly from the provided buffer. 
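-- For example, a buffer beginning with
--
-- > {-# LANGUAGE CPP #-}
-- > {-# OPTIONS_GHC -Wall #-}
--
-- produces a 'DynFlags' with the @Cpp@ extension and @-Wall@ enabled on top of
-- the session's flags, which is what the caller above uses to decide whether
-- to run CPP and the custom preprocessor at all.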
+parsePragmasIntoDynFlags + :: HscEnv + -> FilePath + -> SB.StringBuffer + -> IO (Either [FileDiagnostic] DynFlags) +parsePragmasIntoDynFlags env fp contents = catchSrcErrors dflags0 "pragmas" $ do + let opts = Hdr.getOptions dflags0 contents fp + + -- Force bits that might keep the dflags and stringBuffer alive unnecessarily + evaluate $ rnf opts + + (dflags, _, _) <- parseDynamicFilePragma dflags0 opts + dflags' <- initializePlugins env dflags + return $ disableWarningsAsErrors dflags' + where dflags0 = hsc_dflags env + +-- | Run (unlit) literate haskell preprocessor on a file, or buffer if set +runLhs :: DynFlags -> FilePath -> Maybe SB.StringBuffer -> IO SB.StringBuffer +runLhs dflags filename contents = withTempDir $ \dir -> do + let fout = dir takeFileName filename <.> "unlit" + filesrc <- case contents of + Nothing -> return filename + Just cnts -> do + let fsrc = dir takeFileName filename <.> "literate" + withBinaryFile fsrc WriteMode $ \h -> + hPutStringBuffer h cnts + return fsrc + unlit filesrc fout + SB.hGetStringBuffer fout + where + unlit filein fileout = SysTools.runUnlit dflags (args filein fileout) + args filein fileout = [ + SysTools.Option "-h" + , SysTools.Option (escape filename) -- name this file + , SysTools.FileOption "" filein -- input file + , SysTools.FileOption "" fileout ] -- output file + -- taken from ghc's DriverPipeline.hs + escape ('\\':cs) = '\\':'\\': escape cs + escape ('\"':cs) = '\\':'\"': escape cs + escape ('\'':cs) = '\\':'\'': escape cs + escape (c:cs) = c : escape cs + escape [] = [] + +-- | Run CPP on a file +runCpp :: DynFlags -> FilePath -> Maybe SB.StringBuffer -> IO SB.StringBuffer +runCpp dflags filename contents = withTempDir $ \dir -> do + let out = dir takeFileName filename <.> "out" + dflags <- pure $ addOptP "-D__GHCIDE__" dflags + + case contents of + Nothing -> do + -- Happy case, file is not modified, so run CPP on it in-place + -- which also makes things like relative #include files work + -- and means location information is correct + doCpp dflags True filename out + liftIO $ SB.hGetStringBuffer out + + Just contents -> do + -- Sad path, we have to create a version of the path in a temp dir + -- __FILE__ macro is wrong, ignoring that for now (likely not a real issue) + + -- Relative includes aren't going to work, so we fix that by adding to the include path. + dflags <- return $ addIncludePathsQuote (takeDirectory filename) dflags + + -- Location information is wrong, so we fix that by patching it afterwards. + let inp = dir "___GHCIDE_MAGIC___" + withBinaryFile inp WriteMode $ \h -> + hPutStringBuffer h contents + doCpp dflags True inp out + + -- Fix up the filename in lines like: + -- # 1 "C:/Temp/extra-dir-914611385186/___GHCIDE_MAGIC___" + let tweak x + | Just x <- stripPrefix "# " x + , "___GHCIDE_MAGIC___" `isInfixOf` x + , let num = takeWhile (not . isSpace) x + -- important to use /, and never \ for paths, even on Windows, since then C escapes them + -- and GHC gets all confused + = "# " <> num <> " \"" <> map (\x -> if isPathSeparator x then '/' else x) filename <> "\"" + | otherwise = x + stringToStringBuffer . unlines . map tweak . 
lines <$> readFileUTF8' out + + +-- | Run a preprocessor on a file +runPreprocessor :: DynFlags -> FilePath -> Maybe SB.StringBuffer -> IO SB.StringBuffer +runPreprocessor dflags filename contents = withTempDir $ \dir -> do + let out = dir takeFileName filename <.> "out" + inp <- case contents of + Nothing -> return filename + Just contents -> do + let inp = dir takeFileName filename <.> "hs" + withBinaryFile inp WriteMode $ \h -> + hPutStringBuffer h contents + return inp + runPp dflags [SysTools.Option filename, SysTools.Option inp, SysTools.FileOption "" out] + SB.hGetStringBuffer out diff --git a/ghcide/src/Development/IDE/Core/RuleTypes.hs b/ghcide/src/Development/IDE/Core/RuleTypes.hs new file mode 100644 index 00000000000..86bf2a75c97 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/RuleTypes.hs @@ -0,0 +1,400 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE DerivingStrategies #-} + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. +-- +module Development.IDE.Core.RuleTypes( + module Development.IDE.Core.RuleTypes + ) where + +import Control.DeepSeq +import Data.Aeson.Types (Value) +import Data.Binary +import Development.IDE.Import.DependencyInformation +import Development.IDE.GHC.Compat hiding (HieFileResult) +import Development.IDE.GHC.Util +import Development.IDE.Types.KnownTargets +import Data.Hashable +import Data.Typeable +import qualified Data.Set as S +import qualified Data.Map as M +import Development.Shake +import GHC.Generics (Generic) + +import Module (InstalledUnitId) +import HscTypes (ModGuts, hm_iface, HomeModInfo, hm_linkable) + +import Development.IDE.Spans.Common +import Development.IDE.Spans.LocalBindings +import Development.IDE.Import.FindImports (ArtifactsLocation) +import Data.ByteString (ByteString) +import Language.Haskell.LSP.Types (NormalizedFilePath) +import TcRnMonad (TcGblEnv) +import qualified Data.ByteString.Char8 as BS +import Development.IDE.Types.Options (IdeGhcSession) +import Data.Text (Text) +import Data.Int (Int64) + +data LinkableType = ObjectLinkable | BCOLinkable + deriving (Eq,Ord,Show) + +-- NOTATION +-- Foo+ means Foo for the dependencies +-- Foo* means Foo for me and Foo+ + +-- | The parse tree for the file using GetFileContents +type instance RuleResult GetParsedModule = ParsedModule + +-- | The dependency information produced by following the imports recursively. +-- This rule will succeed even if there is an error, e.g., a module could not be located, +-- a module could not be parsed or an import cycle. +type instance RuleResult GetDependencyInformation = DependencyInformation + +-- | Transitive module and pkg dependencies based on the information produced by GetDependencyInformation. +-- This rule is also responsible for calling ReportImportCycles for each file in the transitive closure. 
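-- As with every key in this module, the 'RuleResult' instance below fixes the
-- value type the rule computes, so callers get back a typed result, e.g.
-- (illustrative; 'use_' is the rule-lookup helper used elsewhere in ghcide,
-- for instance in 'getFileExists'):
--
-- > deps <- use_ GetDependencies file   -- deps :: TransitiveDependencies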
+type instance RuleResult GetDependencies = TransitiveDependencies + +type instance RuleResult GetModuleGraph = DependencyInformation + +data GetKnownTargets = GetKnownTargets + deriving (Show, Generic, Eq, Ord) +instance Hashable GetKnownTargets +instance NFData GetKnownTargets +instance Binary GetKnownTargets +type instance RuleResult GetKnownTargets = KnownTargets + +-- | Convert to Core, requires TypeCheck* +type instance RuleResult GenerateCore = ModGuts + +data GenerateCore = GenerateCore + deriving (Eq, Show, Typeable, Generic) +instance Hashable GenerateCore +instance NFData GenerateCore +instance Binary GenerateCore + +data GetImportMap = GetImportMap + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetImportMap +instance NFData GetImportMap +instance Binary GetImportMap + +type instance RuleResult GetImportMap = ImportMap +newtype ImportMap = ImportMap + { importMap :: M.Map ModuleName NormalizedFilePath -- ^ Where are the modules imported by this file located? + } deriving stock Show + deriving newtype NFData + +-- | Contains the typechecked module and the OrigNameCache entry for +-- that module. +data TcModuleResult = TcModuleResult + { tmrParsed :: ParsedModule + , tmrRenamed :: RenamedSource + , tmrTypechecked :: TcGblEnv + , tmrDeferedError :: !Bool -- ^ Did we defer any type errors for this module? + } +instance Show TcModuleResult where + show = show . pm_mod_summary . tmrParsed + +instance NFData TcModuleResult where + rnf = rwhnf + +tmrModSummary :: TcModuleResult -> ModSummary +tmrModSummary = pm_mod_summary . tmrParsed + +data HiFileResult = HiFileResult + { hirModSummary :: !ModSummary + -- Bang patterns here are important to stop the result retaining + -- a reference to a typechecked module + , hirHomeMod :: !HomeModInfo + -- ^ Includes the Linkable iff we need object files + } + +hiFileFingerPrint :: HiFileResult -> ByteString +hiFileFingerPrint hfr = ifaceBS <> linkableBS + where + ifaceBS = fingerprintToBS . getModuleHash . hirModIface $ hfr -- will always be two bytes + linkableBS = case hm_linkable $ hirHomeMod hfr of + Nothing -> "" + Just l -> BS.pack $ show $ linkableTime l + +hirModIface :: HiFileResult -> ModIface +hirModIface = hm_iface . hirHomeMod + +instance NFData HiFileResult where + rnf = rwhnf + +instance Show HiFileResult where + show = show . hirModSummary + +-- | Save the uncompressed AST here, we compress it just before writing to disk +data HieAstResult + = HAR + { hieModule :: Module + , hieAst :: !(HieASTs Type) + , refMap :: RefMap + -- ^ Lazy because its value only depends on the hieAst, which is bundled in this type + -- Lazyness can't cause leaks here because the lifetime of `refMap` will be the same + -- as that of `hieAst` + } + +instance NFData HieAstResult where + rnf (HAR m hf _rm) = rnf m `seq` rwhnf hf + +instance Show HieAstResult where + show = show . hieModule + +-- | The type checked version of this file, requires TypeCheck+ +type instance RuleResult TypeCheck = TcModuleResult + +-- | The uncompressed HieAST +type instance RuleResult GetHieAst = HieAstResult + +-- | A IntervalMap telling us what is in scope at each point +type instance RuleResult GetBindings = Bindings + +data DocAndKindMap = DKMap {getDocMap :: !DocMap, getKindMap :: !KindMap} +instance NFData DocAndKindMap where + rnf (DKMap a b) = rwhnf a `seq` rwhnf b + +instance Show DocAndKindMap where + show = const "docmap" + +type instance RuleResult GetDocMap = DocAndKindMap + +-- | A GHC session that we reuse. 
+type instance RuleResult GhcSession = HscEnvEq + +-- | A GHC session preloaded with all the dependencies +type instance RuleResult GhcSessionDeps = HscEnvEq + +-- | Resolve the imports in a module to the file path of a module +-- in the same package or the package id of another package. +type instance RuleResult GetLocatedImports = ([(Located ModuleName, Maybe ArtifactsLocation)], S.Set InstalledUnitId) + +-- | This rule is used to report import cycles. It depends on GetDependencyInformation. +-- We cannot report the cycles directly from GetDependencyInformation since +-- we can only report diagnostics for the current file. +type instance RuleResult ReportImportCycles = () + +-- | Read the module interface file from disk. Throws an error for VFS files. +-- This is an internal rule, use 'GetModIface' instead. +type instance RuleResult GetModIfaceFromDisk = HiFileResult + +-- | Get a module interface details, either from an interface file or a typechecked module +type instance RuleResult GetModIface = HiFileResult + +-- | Get a module interface details, without the Linkable +-- For better early cuttoff +type instance RuleResult GetModIfaceWithoutLinkable = HiFileResult + +-- | Get the contents of a file, either dirty (if the buffer is modified) or Nothing to mean use from disk. +type instance RuleResult GetFileContents = (FileVersion, Maybe Text) + +-- The Shake key type for getModificationTime queries +data GetModificationTime = GetModificationTime_ + { missingFileDiagnostics :: Bool + -- ^ If false, missing file diagnostics are not reported + } + deriving (Show, Generic) + +instance Eq GetModificationTime where + -- Since the diagnostics are not part of the answer, the query identity is + -- independent from the 'missingFileDiagnostics' field + _ == _ = True + +instance Hashable GetModificationTime where + -- Since the diagnostics are not part of the answer, the query identity is + -- independent from the 'missingFileDiagnostics' field + hashWithSalt salt _ = salt + +instance NFData GetModificationTime +instance Binary GetModificationTime + +pattern GetModificationTime :: GetModificationTime +pattern GetModificationTime = GetModificationTime_ {missingFileDiagnostics=True} + +-- | Get the modification time of a file. +type instance RuleResult GetModificationTime = FileVersion + +data FileVersion + = VFSVersion !Int + | ModificationTime + !Int64 -- ^ Large unit (platform dependent, do not make assumptions) + !Int64 -- ^ Small unit (platform dependent, do not make assumptions) + deriving (Show, Generic) + +instance NFData FileVersion + +vfsVersion :: FileVersion -> Maybe Int +vfsVersion (VFSVersion i) = Just i +vfsVersion ModificationTime{} = Nothing + +data GetFileContents = GetFileContents + deriving (Eq, Show, Generic) +instance Hashable GetFileContents +instance NFData GetFileContents +instance Binary GetFileContents + + +data FileOfInterestStatus = OnDisk | Modified + deriving (Eq, Show, Typeable, Generic) +instance Hashable FileOfInterestStatus +instance NFData FileOfInterestStatus +instance Binary FileOfInterestStatus + +data IsFileOfInterestResult = NotFOI | IsFOI FileOfInterestStatus + deriving (Eq, Show, Typeable, Generic) +instance Hashable IsFileOfInterestResult +instance NFData IsFileOfInterestResult +instance Binary IsFileOfInterestResult + +type instance RuleResult IsFileOfInterest = IsFileOfInterestResult + +-- | Generate a ModSummary that has enough information to be used to get .hi and .hie files. 
+-- without needing to parse the entire source +type instance RuleResult GetModSummary = (ModSummary,[LImportDecl GhcPs]) + +-- | Generate a ModSummary with the timestamps elided, +-- for more successful early cutoff +type instance RuleResult GetModSummaryWithoutTimestamps = (ModSummary,[LImportDecl GhcPs]) + +data GetParsedModule = GetParsedModule + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetParsedModule +instance NFData GetParsedModule +instance Binary GetParsedModule + +data GetLocatedImports = GetLocatedImports + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetLocatedImports +instance NFData GetLocatedImports +instance Binary GetLocatedImports + +-- | Does this module need to be compiled? +type instance RuleResult NeedsCompilation = Bool + +data NeedsCompilation = NeedsCompilation + deriving (Eq, Show, Typeable, Generic) +instance Hashable NeedsCompilation +instance NFData NeedsCompilation +instance Binary NeedsCompilation + +data GetDependencyInformation = GetDependencyInformation + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetDependencyInformation +instance NFData GetDependencyInformation +instance Binary GetDependencyInformation + +data GetModuleGraph = GetModuleGraph + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModuleGraph +instance NFData GetModuleGraph +instance Binary GetModuleGraph + +data ReportImportCycles = ReportImportCycles + deriving (Eq, Show, Typeable, Generic) +instance Hashable ReportImportCycles +instance NFData ReportImportCycles +instance Binary ReportImportCycles + +data GetDependencies = GetDependencies + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetDependencies +instance NFData GetDependencies +instance Binary GetDependencies + +data TypeCheck = TypeCheck + deriving (Eq, Show, Typeable, Generic) +instance Hashable TypeCheck +instance NFData TypeCheck +instance Binary TypeCheck + +data GetDocMap = GetDocMap + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetDocMap +instance NFData GetDocMap +instance Binary GetDocMap + +data GetHieAst = GetHieAst + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetHieAst +instance NFData GetHieAst +instance Binary GetHieAst + +data GetBindings = GetBindings + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetBindings +instance NFData GetBindings +instance Binary GetBindings + +data GhcSession = GhcSession + deriving (Eq, Show, Typeable, Generic) +instance Hashable GhcSession +instance NFData GhcSession +instance Binary GhcSession + +data GhcSessionDeps = GhcSessionDeps deriving (Eq, Show, Typeable, Generic) +instance Hashable GhcSessionDeps +instance NFData GhcSessionDeps +instance Binary GhcSessionDeps + +data GetModIfaceFromDisk = GetModIfaceFromDisk + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModIfaceFromDisk +instance NFData GetModIfaceFromDisk +instance Binary GetModIfaceFromDisk + +data GetModIface = GetModIface + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModIface +instance NFData GetModIface +instance Binary GetModIface + +data GetModIfaceWithoutLinkable = GetModIfaceWithoutLinkable + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModIfaceWithoutLinkable +instance NFData GetModIfaceWithoutLinkable +instance Binary GetModIfaceWithoutLinkable + +data IsFileOfInterest = IsFileOfInterest + deriving (Eq, Show, Typeable, Generic) +instance Hashable IsFileOfInterest +instance NFData IsFileOfInterest +instance Binary IsFileOfInterest + +data 
GetModSummaryWithoutTimestamps = GetModSummaryWithoutTimestamps + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModSummaryWithoutTimestamps +instance NFData GetModSummaryWithoutTimestamps +instance Binary GetModSummaryWithoutTimestamps + +data GetModSummary = GetModSummary + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetModSummary +instance NFData GetModSummary +instance Binary GetModSummary + +-- | Get the vscode client settings stored in the ide state +data GetClientSettings = GetClientSettings + deriving (Eq, Show, Typeable, Generic) +instance Hashable GetClientSettings +instance NFData GetClientSettings +instance Binary GetClientSettings + +type instance RuleResult GetClientSettings = Hashed (Maybe Value) + +-- A local rule type to get caching. We want to use newCache, but it has +-- thread killed exception issues, so we lift it to a full rule. +-- https://github.com/digital-asset/daml/pull/2808#issuecomment-529639547 +type instance RuleResult GhcSessionIO = IdeGhcSession + +data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Typeable, Generic) +instance Hashable GhcSessionIO +instance NFData GhcSessionIO +instance Binary GhcSessionIO diff --git a/ghcide/src/Development/IDE/Core/Rules.hs b/ghcide/src/Development/IDE/Core/Rules.hs new file mode 100644 index 00000000000..9ad5a705cf4 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Rules.hs @@ -0,0 +1,969 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE DuplicateRecordFields #-} +#include "ghc-api-version.h" + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. 
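Editor's note: the Hashed wrapper used for GetClientSettings is what keeps the rule cheap to depend on — equality on a Hashed value consults the cached hash before touching the underlying JSON. A small sketch using the real Data.Hashable API (the function names here are illustrative):

import Data.Aeson (Value)
import Data.Hashable (Hashed, hashed, unhashed, hash)

-- Wrap freshly received settings; the hash is computed once, up front.
cacheSettings :: Maybe Value -> Hashed (Maybe Value)
cacheSettings = hashed

-- Cheap fingerprint, reusable as an early-cutoff key.
settingsFingerprint :: Hashed (Maybe Value) -> Int
settingsFingerprint = hash

-- Recover the raw settings when a plugin needs to inspect them.
rawSettings :: Hashed (Maybe Value) -> Maybe Value
rawSettings = unhashed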
+-- +module Development.IDE.Core.Rules( + IdeState, GetDependencies(..), GetParsedModule(..), TransitiveDependencies(..), + Priority(..), GhcSessionIO(..), GetClientSettings(..), + priorityTypeCheck, + priorityGenerateCore, + priorityFilesOfInterest, + runAction, useE, useNoFileE, usesE, + toIdeResult, + defineNoFile, + defineEarlyCutOffNoFile, + mainRule, + getAtPoint, + getDefinition, + getTypeDefinition, + highlightAtPoint, + getDependencies, + getParsedModule, + ) where + +import Fingerprint + +import Data.Binary hiding (get, put) +import Data.Tuple.Extra +import Control.Monad.Extra +import Control.Monad.Trans.Class +import Control.Monad.Trans.Maybe +import Development.IDE.Core.Compile +import Development.IDE.Core.OfInterest +import Development.IDE.Types.Options +import Development.IDE.Spans.Documentation +import Development.IDE.Spans.LocalBindings +import Development.IDE.Import.DependencyInformation +import Development.IDE.Import.FindImports +import Development.IDE.Core.FileExists +import Development.IDE.Core.FileStore (modificationTime, getFileContents) +import Development.IDE.Types.Diagnostics as Diag +import Development.IDE.Types.Location +import Development.IDE.GHC.Compat hiding (parseModule, typecheckModule, writeHieFile, TargetModule, TargetFile) +import Development.IDE.GHC.Util +import Data.Either.Extra +import qualified Development.IDE.Types.Logger as L +import Data.Maybe +import Data.Foldable +import qualified Data.IntMap.Strict as IntMap +import Data.IntMap.Strict (IntMap) +import Data.List +import qualified Data.Set as Set +import qualified Data.Map as M +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import Development.IDE.GHC.Error +import Development.Shake hiding (Diagnostic) +import Development.IDE.Core.RuleTypes +import qualified Data.ByteString.Char8 as BS +import Development.IDE.Core.PositionMapping +import Language.Haskell.LSP.Types (DocumentHighlight (..)) + +import qualified GHC.LanguageExtensions as LangExt +import HscTypes hiding (TargetModule, TargetFile) +import GHC.Generics(Generic) + +import qualified Development.IDE.Spans.AtPoint as AtPoint +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.Shake.Classes hiding (get, put) +import Control.Monad.Trans.Except (runExceptT) +import Data.ByteString (ByteString) +import Control.Concurrent.Async (concurrently) +import System.Time.Extra +import Control.Monad.Reader +import System.Directory ( getModificationTime ) +import Control.Exception + +import Control.Monad.State +import FastString (FastString(uniq)) +import qualified HeaderInfo as Hdr +import Data.Time (UTCTime(..)) +import Data.Hashable +import qualified Data.HashSet as HashSet +import qualified Data.HashMap.Strict as HM +import TcRnMonad (tcg_dependent_files) +import Data.IORef +import Control.Concurrent.Extra +import Module + +-- | This is useful for rules to convert rules that can only produce errors or +-- a result into the more general IdeResult type that supports producing +-- warnings while also producing a result. +toIdeResult :: Either [FileDiagnostic] v -> IdeResult v +toIdeResult = either (, Nothing) (([],) . Just) + +-- | useE is useful to implement functions that aren’t rules but need shortcircuiting +-- e.g. getDefinition. +useE :: IdeRule k v => k -> NormalizedFilePath -> MaybeT IdeAction (v, PositionMapping) +useE k = MaybeT . 
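Editor's note: a sketch of how a rule body typically uses 'toIdeResult' — run a check that yields either diagnostics or a value and convert it into the pair that 'define' expects. The ValidateFile key is hypothetical and would need the usual Hashable/NFData/Binary deriving plus a RuleResult instance fixing its result to Int:

validateFileRule :: Rules ()
validateFileRule = define $ \ValidateFile file -> do
    contents <- getSourceFileSource file          -- helper defined later in this module
    let checked :: Either [FileDiagnostic] Int
        checked = Right (BS.length contents)      -- stand-in for a real validation
    pure (toIdeResult checked)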
useWithStaleFast k + +useNoFileE :: IdeRule k v => IdeState -> k -> MaybeT IdeAction v +useNoFileE _ide k = fst <$> useE k emptyFilePath + +usesE :: IdeRule k v => k -> [NormalizedFilePath] -> MaybeT IdeAction [(v,PositionMapping)] +usesE k = MaybeT . fmap sequence . mapM (useWithStaleFast k) + +defineNoFile :: IdeRule k v => (k -> Action v) -> Rules () +defineNoFile f = define $ \k file -> do + if file == emptyFilePath then do res <- f k; return ([], Just res) else + fail $ "Rule " ++ show k ++ " should always be called with the empty string for a file" + +defineEarlyCutOffNoFile :: IdeRule k v => (k -> Action (ByteString, v)) -> Rules () +defineEarlyCutOffNoFile f = defineEarlyCutoff $ \k file -> do + if file == emptyFilePath then do (hash, res) <- f k; return (Just hash, ([], Just res)) else + fail $ "Rule " ++ show k ++ " should always be called with the empty string for a file" + + +------------------------------------------------------------ +-- Exposed API + +-- | Get all transitive file dependencies of a given module. +-- Does not include the file itself. +getDependencies :: NormalizedFilePath -> Action (Maybe [NormalizedFilePath]) +getDependencies file = fmap transitiveModuleDeps <$> use GetDependencies file + +-- | Try to get hover text for the name under point. +getAtPoint :: NormalizedFilePath -> Position -> IdeAction (Maybe (Maybe Range, [T.Text])) +getAtPoint file pos = fmap join $ runMaybeT $ do + ide <- ask + opts <- liftIO $ getIdeOptionsIO ide + + (hieAst -> hf, mapping) <- useE GetHieAst file + dkMap <- lift $ maybe (DKMap mempty mempty) fst <$> (runMaybeT $ useE GetDocMap file) + + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + return $ AtPoint.atPoint opts hf dkMap pos' + +-- | Goto Definition. +getDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe Location) +getDefinition file pos = runMaybeT $ do + ide <- ask + opts <- liftIO $ getIdeOptionsIO ide + (HAR _ hf _ , mapping) <- useE GetHieAst file + (ImportMap imports, _) <- useE GetImportMap file + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + AtPoint.gotoDefinition (getHieFile ide file) opts imports hf pos' + +getTypeDefinition :: NormalizedFilePath -> Position -> IdeAction (Maybe [Location]) +getTypeDefinition file pos = runMaybeT $ do + ide <- ask + opts <- liftIO $ getIdeOptionsIO ide + (hieAst -> hf, mapping) <- useE GetHieAst file + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + AtPoint.gotoTypeDefinition (getHieFile ide file) opts hf pos' + +highlightAtPoint :: NormalizedFilePath -> Position -> IdeAction (Maybe [DocumentHighlight]) +highlightAtPoint file pos = runMaybeT $ do + (HAR _ hf rf,mapping) <- useE GetHieAst file + !pos' <- MaybeT (return $ fromCurrentPosition mapping pos) + AtPoint.documentHighlight hf rf pos' + +getHieFile + :: ShakeExtras + -> NormalizedFilePath -- ^ file we're editing + -> Module -- ^ module dep we want info for + -> MaybeT IdeAction (HieFile, FilePath) -- ^ hie stuff for the module +getHieFile ide file mod = do + TransitiveDependencies {transitiveNamedModuleDeps} <- fst <$> useE GetDependencies file + case find (\x -> nmdModuleName x == moduleName mod) transitiveNamedModuleDeps of + Just NamedModuleDep{nmdFilePath=nfp} -> do + let modPath = fromNormalizedFilePath nfp + hieFile <- getHomeHieFile nfp + return (hieFile, modPath) + _ -> getPackageHieFile ide mod file + +getHomeHieFile :: NormalizedFilePath -> MaybeT IdeAction HieFile +getHomeHieFile f = do + ms <- fst . 
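Editor's note: the two helpers just defined make project-global rules easy to write. A sketch of one such rule; GetToolVersions, its instances and its content are hypothetical:

data GetToolVersions = GetToolVersions
    deriving (Eq, Show, Typeable, Generic)
instance Hashable GetToolVersions
instance NFData   GetToolVersions
instance Binary   GetToolVersions
type instance RuleResult GetToolVersions = [String]

getToolVersionsRule :: Rules ()
getToolVersionsRule = defineNoFile $ \GetToolVersions -> do
    opts <- getIdeOptions
    -- stand-in content; defineNoFile wraps this into ([], Just result)
    pure [show (optShakeFiles opts)]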
fst <$> useE GetModSummaryWithoutTimestamps f + let normal_hie_f = toNormalizedFilePath' hie_f + hie_f = ml_hie_file $ ms_location ms + + mbHieTimestamp <- either (\(_ :: IOException) -> Nothing) Just <$> (liftIO $ try $ getModificationTime hie_f) + srcTimestamp <- MaybeT (either (\(_ :: IOException) -> Nothing) Just <$> (liftIO $ try $ getModificationTime $ fromNormalizedFilePath f)) + liftIO $ print (mbHieTimestamp, srcTimestamp, hie_f, normal_hie_f) + let isUpToDate + | Just d <- mbHieTimestamp = d > srcTimestamp + | otherwise = False + + if isUpToDate + then do + ncu <- mkUpdater + hf <- liftIO $ whenMaybe isUpToDate (loadHieFile ncu hie_f) + MaybeT $ return hf + else do + wait <- lift $ delayedAction $ mkDelayedAction "OutOfDateHie" L.Info $ do + hsc <- hscEnv <$> use_ GhcSession f + pm <- use_ GetParsedModule f + (_, mtm)<- typeCheckRuleDefinition hsc pm + mapM_ (getHieAstRuleDefinition f hsc) mtm -- Write the HiFile to disk + _ <- MaybeT $ liftIO $ timeout 1 wait + ncu <- mkUpdater + liftIO $ loadHieFile ncu hie_f + +getSourceFileSource :: NormalizedFilePath -> Action BS.ByteString +getSourceFileSource nfp = do + (_, msource) <- getFileContents nfp + case msource of + Nothing -> liftIO $ BS.readFile (fromNormalizedFilePath nfp) + Just source -> pure $ T.encodeUtf8 source + +getPackageHieFile :: ShakeExtras + -> Module -- ^ Package Module to load .hie file for + -> NormalizedFilePath -- ^ Path of home module importing the package module + -> MaybeT IdeAction (HieFile, FilePath) +getPackageHieFile ide mod file = do + pkgState <- hscEnv . fst <$> useE GhcSession file + IdeOptions {..} <- liftIO $ getIdeOptionsIO ide + let unitId = moduleUnitId mod + case lookupPackageConfig unitId pkgState of + Just pkgConfig -> do + -- 'optLocateHieFile' returns Nothing if the file does not exist + hieFile <- liftIO $ optLocateHieFile optPkgLocationOpts pkgConfig mod + path <- liftIO $ optLocateSrcFile optPkgLocationOpts pkgConfig mod + case (hieFile, path) of + (Just hiePath, Just modPath) -> do + -- deliberately loaded outside the Shake graph + -- to avoid dependencies on non-workspace files + ncu <- mkUpdater + MaybeT $ liftIO $ Just . (, modPath) <$> loadHieFile ncu hiePath + _ -> MaybeT $ return Nothing + _ -> MaybeT $ return Nothing + +-- | Parse the contents of a daml file. +getParsedModule :: NormalizedFilePath -> Action (Maybe ParsedModule) +getParsedModule file = use GetParsedModule file + +------------------------------------------------------------ +-- Rules +-- These typically go from key to value and are oracles. + +priorityTypeCheck :: Priority +priorityTypeCheck = Priority 0 + +priorityGenerateCore :: Priority +priorityGenerateCore = Priority (-1) + +priorityFilesOfInterest :: Priority +priorityFilesOfInterest = Priority (-2) + +-- | IMPORTANT FOR HLINT INTEGRATION: +-- We currently parse the module both with and without Opt_Haddock, and +-- return the one with Haddocks if it -- succeeds. However, this may not work +-- for hlint, and we might need to save the one without haddocks too. 
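Editor's note: the interesting part of getHomeHieFile is its freshness check — a .hie file is only trusted when it is strictly newer than the source it was produced from. The same check in isolation, as a simplified sketch (requires ScopedTypeVariables, which the code above already relies on; unlike the original it treats a missing source as "out of date" instead of aborting):

import Control.Exception (IOException, try)
import System.Directory (getModificationTime)

newerThan :: FilePath -> FilePath -> IO Bool
newerThan produced source = do
    mProd <- either (\(_ :: IOException) -> Nothing) Just <$> try (getModificationTime produced)
    mSrc  <- either (\(_ :: IOException) -> Nothing) Just <$> try (getModificationTime source)
    pure $ case (mProd, mSrc) of
        (Just p, Just s) -> p > s
        _                -> False   -- missing artefact or source: treat as out of date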
+-- See https://github.com/haskell/ghcide/pull/350#discussion_r370878197 +-- and https://github.com/mpickering/ghcide/pull/22#issuecomment-625070490 +getParsedModuleRule :: Rules () +getParsedModuleRule = defineEarlyCutoff $ \GetParsedModule file -> do + (ms, _) <- use_ GetModSummary file + sess <- use_ GhcSession file + let hsc = hscEnv sess + opt <- getIdeOptions + + let dflags = ms_hspp_opts ms + mainParse = getParsedModuleDefinition hsc opt file ms + + -- Parse again (if necessary) to capture Haddock parse errors + res@(_, (_,pmod)) <- if gopt Opt_Haddock dflags + then + liftIO mainParse + else do + let haddockParse = getParsedModuleDefinition hsc opt file (withOptHaddock ms) + + -- parse twice, with and without Haddocks, concurrently + -- we cannot ignore Haddock parse errors because files of + -- non-interest are always parsed with Haddocks + -- If we can parse Haddocks, might as well use them + -- + -- HLINT INTEGRATION: might need to save the other parsed module too + ((fp,(diags,res)),(fph,(diagsh,resh))) <- liftIO $ concurrently mainParse haddockParse + + -- Merge haddock and regular diagnostics so we can always report haddock + -- parse errors + let diagsM = mergeParseErrorsHaddock diags diagsh + case resh of + Just _ + | HaddockParse <- optHaddockParse opt + -> pure (fph, (diagsM, resh)) + -- If we fail to parse haddocks, report the haddock diagnostics as well and + -- return the non-haddock parse. + -- This seems to be the correct behaviour because the Haddock flag is added + -- by us and not the user, so our IDE shouldn't stop working because of it. + _ -> pure (fp, (diagsM, res)) + -- Add dependencies on included files + _ <- uses GetModificationTime $ map toNormalizedFilePath' (maybe [] pm_extra_src_files pmod) + pure res + +withOptHaddock :: ModSummary -> ModSummary +withOptHaddock ms = ms{ms_hspp_opts= gopt_set (ms_hspp_opts ms) Opt_Haddock} + + +-- | Given some normal parse errors (first) and some from Haddock (second), merge them. +-- Ignore Haddock errors that are in both. Demote Haddock-only errors to warnings. +mergeParseErrorsHaddock :: [FileDiagnostic] -> [FileDiagnostic] -> [FileDiagnostic] +mergeParseErrorsHaddock normal haddock = normal ++ + [ (a,b,c{_severity = Just DsWarning, _message = fixMessage $ _message c}) + | (a,b,c) <- haddock, Diag._range c `Set.notMember` locations] + where + locations = Set.fromList $ map (Diag._range . thd3) normal + + fixMessage x | "parse error " `T.isPrefixOf` x = "Haddock " <> x + | otherwise = "Haddock: " <> x + +getParsedModuleDefinition :: HscEnv -> IdeOptions -> NormalizedFilePath -> ModSummary -> IO (Maybe ByteString, ([FileDiagnostic], Maybe ParsedModule)) +getParsedModuleDefinition packageState opt file ms = do + let fp = fromNormalizedFilePath file + (diag, res) <- parseModule opt packageState fp ms + case res of + Nothing -> pure (Nothing, (diag, Nothing)) + Just modu -> do + mbFingerprint <- traverse (fmap fingerprintToBS . 
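Editor's note: the core of getParsedModuleRule is "parse both variants at once and prefer the richer one". Reduced to plain IO with stand-in parsers (a sketch; the real rule also consults optHaddockParse and merges the two sets of diagnostics):

import Control.Applicative ((<|>))
import Control.Concurrent.Async (concurrently)

preferHaddock :: IO (Maybe a) -> IO (Maybe a) -> IO (Maybe a)
preferHaddock plainParse haddockParse = do
    (p, h) <- concurrently plainParse haddockParse
    -- use the Haddock-enabled parse if it succeeded, otherwise fall back
    pure (h <|> p)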
fingerprintFromStringBuffer) (ms_hspp_buf ms) + pure (mbFingerprint, (diag, Just modu)) + +getLocatedImportsRule :: Rules () +getLocatedImportsRule = + define $ \GetLocatedImports file -> do + (ms,_) <- use_ GetModSummaryWithoutTimestamps file + targets <- useNoFile_ GetKnownTargets + let imports = [(False, imp) | imp <- ms_textual_imps ms] ++ [(True, imp) | imp <- ms_srcimps ms] + env_eq <- use_ GhcSession file + let env = hscEnvWithImportPaths env_eq + let import_dirs = deps env_eq + let dflags = hsc_dflags env + isImplicitCradle = isNothing $ envImportPaths env_eq + dflags <- return $ if isImplicitCradle + then addRelativeImport file (moduleName $ ms_mod ms) dflags + else dflags + opt <- getIdeOptions + let getTargetExists modName nfp + | isImplicitCradle = getFileExists nfp + | HM.member (TargetModule modName) targets + || HM.member (TargetFile nfp) targets + = getFileExists nfp + | otherwise = return False + (diags, imports') <- fmap unzip $ forM imports $ \(isSource, (mbPkgName, modName)) -> do + diagOrImp <- locateModule dflags import_dirs (optExtensions opt) getTargetExists modName mbPkgName isSource + case diagOrImp of + Left diags -> pure (diags, Left (modName, Nothing)) + Right (FileImport path) -> pure ([], Left (modName, Just path)) + Right (PackageImport pkgId) -> liftIO $ do + diagsOrPkgDeps <- computePackageDeps env pkgId + case diagsOrPkgDeps of + Left diags -> pure (diags, Right Nothing) + Right pkgIds -> pure ([], Right $ Just $ pkgId : pkgIds) + let (moduleImports, pkgImports) = partitionEithers imports' + case sequence pkgImports of + Nothing -> pure (concat diags, Nothing) + Just pkgImports -> pure (concat diags, Just (moduleImports, Set.fromList $ concat pkgImports)) + +type RawDepM a = StateT (RawDependencyInformation, IntMap ArtifactsLocation) Action a + +execRawDepM :: Monad m => StateT (RawDependencyInformation, IntMap a1) m a2 -> m (RawDependencyInformation, IntMap a1) +execRawDepM act = + execStateT act + ( RawDependencyInformation IntMap.empty emptyPathIdMap IntMap.empty + , IntMap.empty + ) + +-- | Given a target file path, construct the raw dependency results by following +-- imports recursively. +rawDependencyInformation :: [NormalizedFilePath] -> Action RawDependencyInformation +rawDependencyInformation fs = do + (rdi, ss) <- execRawDepM (mapM_ go fs) + let bm = IntMap.foldrWithKey (updateBootMap rdi) IntMap.empty ss + return (rdi { rawBootMap = bm }) + where + go :: NormalizedFilePath -- ^ Current module being processed + -> StateT (RawDependencyInformation, IntMap ArtifactsLocation) Action FilePathId + go f = do + -- First check to see if we have already processed the FilePath + -- If we have, just return its Id but don't update any of the state. + -- Otherwise, we need to process its imports. + checkAlreadyProcessed f $ do + msum <- lift $ fmap fst <$> use GetModSummaryWithoutTimestamps f + let al = modSummaryToArtifactsLocation f msum + -- Get a fresh FilePathId for the new file + fId <- getFreshFid al + -- Adding an edge to the bootmap so we can make sure to + -- insert boot nodes before the real files. 
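Editor's note: rawDependencyInformation below is a memoised depth-first traversal — the StateT carries the accumulating RawDependencyInformation (including its path-to-id map), so revisiting a file returns its existing FilePathId instead of recursing again. The shape of that technique on a plain graph, as a self-contained sketch:

import qualified Data.Map.Strict as Map
import Control.Monad.State

-- Assign each reachable node a stable Int id, visiting every node once.
number :: Ord k => (k -> [k]) -> k -> State (Map.Map k Int) Int
number children k = do
    seen <- get
    case Map.lookup k seen of
        Just i  -> pure i                    -- already processed: reuse the id
        Nothing -> do
            let i = Map.size seen
            modify (Map.insert k i)          -- mark before recursing (handles cycles)
            mapM_ (number children) (children k)
            pure i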
+ addBootMap al fId + -- Try to parse the imports of the file + importsOrErr <- lift $ use GetLocatedImports f + case importsOrErr of + Nothing -> do + -- File doesn't parse so add the module as a failure into the + -- dependency information, continue processing the other + -- elements in the queue + modifyRawDepInfo (insertImport fId (Left ModuleParseError)) + return fId + Just (modImports, pkgImports) -> do + -- Get NFPs of the imports which have corresponding files + -- Imports either come locally from a file or from a package. + let (no_file, with_file) = splitImports modImports + (mns, ls) = unzip with_file + -- Recursively process all the imports we just learnt about + -- and get back a list of their FilePathIds + fids <- mapM (go . artifactFilePath) ls + -- Associate together the ModuleName with the FilePathId + let moduleImports' = map (,Nothing) no_file ++ zip mns (map Just fids) + -- Insert into the map the information about this modules + -- imports. + modifyRawDepInfo $ insertImport fId (Right $ ModuleImports moduleImports' pkgImports) + return fId + + + checkAlreadyProcessed :: NormalizedFilePath -> RawDepM FilePathId -> RawDepM FilePathId + checkAlreadyProcessed nfp k = do + (rawDepInfo, _) <- get + maybe k return (lookupPathToId (rawPathIdMap rawDepInfo) nfp) + + modifyRawDepInfo :: (RawDependencyInformation -> RawDependencyInformation) -> RawDepM () + modifyRawDepInfo f = modify (first f) + + addBootMap :: ArtifactsLocation -> FilePathId -> RawDepM () + addBootMap al fId = + modify (\(rd, ss) -> (rd, if isBootLocation al + then IntMap.insert (getFilePathId fId) al ss + else ss)) + + getFreshFid :: ArtifactsLocation -> RawDepM FilePathId + getFreshFid al = do + (rawDepInfo, ss) <- get + let (fId, path_map) = getPathId al (rawPathIdMap rawDepInfo) + -- Insert the File into the bootmap if it's a boot module + let rawDepInfo' = rawDepInfo { rawPathIdMap = path_map } + put (rawDepInfo', ss) + return fId + + -- Split in (package imports, local imports) + splitImports :: [(Located ModuleName, Maybe ArtifactsLocation)] + -> ([Located ModuleName], [(Located ModuleName, ArtifactsLocation)]) + splitImports = foldr splitImportsLoop ([],[]) + + splitImportsLoop (imp, Nothing) (ns, ls) = (imp:ns, ls) + splitImportsLoop (imp, Just artifact) (ns, ls) = (ns, (imp,artifact) : ls) + + updateBootMap pm boot_mod_id ArtifactsLocation{..} bm = + if not artifactIsSource + then + let msource_mod_id = lookupPathToId (rawPathIdMap pm) (toNormalizedFilePath' $ dropBootSuffix $ fromNormalizedFilePath artifactFilePath) + in case msource_mod_id of + Just source_mod_id -> insertBootId source_mod_id (FilePathId boot_mod_id) bm + Nothing -> bm + else bm + + dropBootSuffix :: FilePath -> FilePath + dropBootSuffix hs_src = reverse . drop (length @[] "-boot") . 
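Editor's note: dropBootSuffix below removes a trailing "-boot" by reversing twice and assumes the suffix is present (it is only called for boot locations). An equivalent, arguably clearer formulation with Data.List.Extra, shown as a sketch — note it leaves paths without the suffix untouched rather than truncating them:

import Data.List.Extra (stripSuffix)
import Data.Maybe (fromMaybe)

dropBootSuffix' :: FilePath -> FilePath
dropBootSuffix' fp = fromMaybe fp (stripSuffix "-boot" fp)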
reverse $ hs_src + +getDependencyInformationRule :: Rules () +getDependencyInformationRule = + define $ \GetDependencyInformation file -> do + rawDepInfo <- rawDependencyInformation [file] + pure ([], Just $ processDependencyInformation rawDepInfo) + +reportImportCyclesRule :: Rules () +reportImportCyclesRule = + define $ \ReportImportCycles file -> fmap (\errs -> if null errs then ([], Just ()) else (errs, Nothing)) $ do + DependencyInformation{..} <- use_ GetDependencyInformation file + let fileId = pathToId depPathIdMap file + case IntMap.lookup (getFilePathId fileId) depErrorNodes of + Nothing -> pure [] + Just errs -> do + let cycles = mapMaybe (cycleErrorInFile fileId) (toList errs) + -- Convert cycles of files into cycles of module names + forM cycles $ \(imp, files) -> do + modNames <- forM files $ \fileId -> do + let file = idToPath depPathIdMap fileId + getModuleName file + pure $ toDiag imp $ sort modNames + where cycleErrorInFile f (PartOfCycle imp fs) + | f `elem` fs = Just (imp, fs) + cycleErrorInFile _ _ = Nothing + toDiag imp mods = (fp , ShowDiag , ) $ Diagnostic + { _range = rng + , _severity = Just DsError + , _source = Just "Import cycle detection" + , _message = "Cyclic module dependency between " <> showCycle mods + , _code = Nothing + , _relatedInformation = Nothing + , _tags = Nothing + } + where rng = fromMaybe noRange $ srcSpanToRange (getLoc imp) + fp = toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename (getLoc imp) + getModuleName file = do + ms <- fst <$> use_ GetModSummaryWithoutTimestamps file + pure (moduleNameString . moduleName . ms_mod $ ms) + showCycle mods = T.intercalate ", " (map T.pack mods) + +-- returns all transitive dependencies in topological order. +-- NOTE: result does not include the argument file. +getDependenciesRule :: Rules () +getDependenciesRule = + defineEarlyCutoff $ \GetDependencies file -> do + depInfo <- use_ GetDependencyInformation file + let allFiles = reachableModules depInfo + _ <- uses_ ReportImportCycles allFiles + opts <- getIdeOptions + let mbFingerprints = map (fingerprintString . fromNormalizedFilePath) allFiles <$ optShakeFiles opts + return (fingerprintToBS . fingerprintFingerprints <$> mbFingerprints, ([], transitiveDeps depInfo file)) + +getHieAstsRule :: Rules () +getHieAstsRule = + define $ \GetHieAst f -> do + tmr <- use_ TypeCheck f + hsc <- hscEnv <$> use_ GhcSession f + getHieAstRuleDefinition f hsc tmr + +getHieAstRuleDefinition :: NormalizedFilePath -> HscEnv -> TcModuleResult -> Action (IdeResult HieAstResult) +getHieAstRuleDefinition f hsc tmr = do + (diags, masts) <- liftIO $ generateHieAsts hsc tmr + + isFoi <- use_ IsFileOfInterest f + diagsWrite <- case isFoi of + IsFOI Modified -> pure [] + _ | Just asts <- masts -> do + source <- getSourceFileSource f + liftIO $ writeHieFile hsc (tmrModSummary tmr) (tcg_exports $ tmrTypechecked tmr) asts source + _ -> pure [] + + let refmap = generateReferencesMap . getAsts <$> masts + pure (diags <> diagsWrite, HAR (ms_mod $ tmrModSummary tmr) <$> masts <*> refmap) + +getImportMapRule :: Rules() +getImportMapRule = define $ \GetImportMap f -> do + im <- use GetLocatedImports f + let mkImports (fileImports, _) = M.fromList $ mapMaybe (\(m, mfp) -> (unLoc m,) . artifactFilePath <$> mfp) fileImports + pure ([], ImportMap . 
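Editor's note: the early-cutoff key produced by getDependenciesRule (when optShakeFiles is set) is just a combined fingerprint of the dependency paths, so dependants are only invalidated when the set of files changes. The combining step in isolation, using GHC's fingerprint API; fingerprintToBS is the helper from this codebase that serialises the result for Shake:

import GHC.Fingerprint (Fingerprint, fingerprintString, fingerprintFingerprints)

pathsFingerprint :: [FilePath] -> Fingerprint
pathsFingerprint = fingerprintFingerprints . map fingerprintString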
mkImports <$> im) + +getBindingsRule :: Rules () +getBindingsRule = + define $ \GetBindings f -> do + har <- use_ GetHieAst f + pure ([], Just $ bindings $ refMap har) + +getDocMapRule :: Rules () +getDocMapRule = + define $ \GetDocMap file -> do + -- Stale data for the scenario where a broken module has previously typechecked + -- but we never generated a DocMap for it + (tmrTypechecked -> tc, _) <- useWithStale_ TypeCheck file + (hscEnv -> hsc, _) <- useWithStale_ GhcSessionDeps file + (refMap -> rf, _) <- useWithStale_ GetHieAst file + +-- When possible, rely on the haddocks embedded in our interface files +-- This creates problems on ghc-lib, see comment on 'getDocumentationTryGhc' +#if !defined(GHC_LIB) + let parsedDeps = [] +#else + deps <- fromMaybe (TransitiveDependencies [] [] []) <$> use GetDependencies file + let tdeps = transitiveModuleDeps deps + parsedDeps <- uses_ GetParsedModule tdeps +#endif + + dkMap <- liftIO $ mkDocMap hsc parsedDeps rf tc + return ([],Just dkMap) + +-- Typechecks a module. +typeCheckRule :: Rules () +typeCheckRule = define $ \TypeCheck file -> do + pm <- use_ GetParsedModule file + hsc <- hscEnv <$> use_ GhcSessionDeps file + typeCheckRuleDefinition hsc pm + +knownFilesRule :: Rules () +knownFilesRule = defineEarlyCutOffNoFile $ \GetKnownTargets -> do + alwaysRerun + fs <- knownTargets + pure (BS.pack (show $ hash fs), unhashed fs) + +getModuleGraphRule :: Rules () +getModuleGraphRule = defineNoFile $ \GetModuleGraph -> do + fs <- toKnownFiles <$> useNoFile_ GetKnownTargets + rawDepInfo <- rawDependencyInformation (HashSet.toList fs) + pure $ processDependencyInformation rawDepInfo + +-- This is factored out so it can be directly called from the GetModIface +-- rule. Directly calling this rule means that on the initial load we can +-- garbage collect all the intermediate typechecked modules rather than +-- retain the information forever in the shake graph. +typeCheckRuleDefinition + :: HscEnv + -> ParsedModule + -> Action (IdeResult TcModuleResult) +typeCheckRuleDefinition hsc pm = do + setPriority priorityTypeCheck + IdeOptions { optDefer = defer } <- getIdeOptions + + linkables_to_keep <- currentLinkables + + addUsageDependencies $ liftIO $ + typecheckModule defer hsc linkables_to_keep pm + where + addUsageDependencies :: Action (a, Maybe TcModuleResult) -> Action (a, Maybe TcModuleResult) + addUsageDependencies a = do + r@(_, mtc) <- a + forM_ mtc $ \tc -> do + used_files <- liftIO $ readIORef $ tcg_dependent_files $ tmrTypechecked tc + void $ uses_ GetModificationTime (map toNormalizedFilePath' used_files) + return r + +-- | Get all the linkables stored in the graph, i.e. the ones we *do not* need to unload. +-- Doesn't actually contain the code, since we don't need it to unload +currentLinkables :: Action [Linkable] +currentLinkables = do + compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction + hm <- liftIO $ readVar compiledLinkables + pure $ map go $ moduleEnvToList hm + where + go (mod, time) = LM time mod [] + +loadGhcSession :: Rules () +loadGhcSession = do + -- This function should always be rerun because it tracks changes + -- to the version of the collection of HscEnv's. 
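Editor's note: knownFilesRule above, and several rules elsewhere in this file (GhcSessionIO, GetClientSettings, NeedsCompilation), all use the same early-cutoff trick — render a hash of the result as the ByteString cutoff key so downstream rules only rerun when the value actually changed. The essence, as a sketch:

import Data.Hashable (Hashable, hash)
import qualified Data.ByteString.Char8 as BS

hashCutoff :: Hashable a => a -> BS.ByteString
hashCutoff = BS.pack . show . hash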
+ defineEarlyCutOffNoFile $ \GhcSessionIO -> do + alwaysRerun + opts <- getIdeOptions + res <- optGhcSession opts + + let fingerprint = hash (sessionVersion res) + return (BS.pack (show fingerprint), res) + + defineEarlyCutoff $ \GhcSession file -> do + IdeGhcSession{loadSessionFun} <- useNoFile_ GhcSessionIO + (val,deps) <- liftIO $ loadSessionFun $ fromNormalizedFilePath file + + -- add the deps to the Shake graph + let addDependency fp = do + let nfp = toNormalizedFilePath' fp + itExists <- getFileExists nfp + when itExists $ void $ use_ GetModificationTime nfp + mapM_ addDependency deps + + opts <- getIdeOptions + let cutoffHash = + case optShakeFiles opts of + -- optShakeFiles is only set in the DAML case. + -- https://github.com/haskell/ghcide/pull/522#discussion_r428622915 + Just {} -> "" + -- Hash the HscEnvEq returned so cutoff if it didn't change + -- from last time + Nothing -> BS.pack (show (hash (snd val))) + return (Just cutoffHash, val) + + define $ \GhcSessionDeps file -> ghcSessionDepsDefinition file + +ghcSessionDepsDefinition :: NormalizedFilePath -> Action (IdeResult HscEnvEq) +ghcSessionDepsDefinition file = do + env <- use_ GhcSession file + let hsc = hscEnv env + (ms,_) <- use_ GetModSummaryWithoutTimestamps file + deps <- use_ GetDependencies file + let tdeps = transitiveModuleDeps deps + uses_th_qq = + xopt LangExt.TemplateHaskell dflags || xopt LangExt.QuasiQuotes dflags + dflags = ms_hspp_opts ms + ifaces <- if uses_th_qq + then uses_ GetModIface tdeps + else uses_ GetModIfaceWithoutLinkable tdeps + + -- Currently GetDependencies returns things in topological order so A comes before B if A imports B. + -- We need to reverse this as GHC gets very unhappy otherwise and complains about broken interfaces. + -- Long-term we might just want to change the order returned by GetDependencies + let inLoadOrder = reverse (map hirHomeMod ifaces) + + session' <- liftIO $ loadModulesHome inLoadOrder <$> setupFinderCache (map hirModSummary ifaces) hsc + + res <- liftIO $ newHscEnvEqWithImportPaths (envImportPaths env) session' [] + return ([], Just res) + +getModIfaceFromDiskRule :: Rules () +getModIfaceFromDiskRule = defineEarlyCutoff $ \GetModIfaceFromDisk f -> do + (ms,_) <- use_ GetModSummary f + (diags_session, mb_session) <- ghcSessionDepsDefinition f + case mb_session of + Nothing -> return (Nothing, (diags_session, Nothing)) + Just session -> do + sourceModified <- use_ IsHiFileStable f + linkableType <- getLinkableType f + r <- loadInterface (hscEnv session) ms sourceModified linkableType (regenerateHiFile session f ms) + case r of + (diags, Just x) -> do + let fp = Just (hiFileFingerPrint x) + return (fp, (diags <> diags_session, Just x)) + (diags, Nothing) -> return (Nothing, (diags ++ diags_session, Nothing)) + +isHiFileStableRule :: Rules () +isHiFileStableRule = defineEarlyCutoff $ \IsHiFileStable f -> do + (ms,_) <- use_ GetModSummaryWithoutTimestamps f + let hiFile = toNormalizedFilePath' + $ ml_hi_file $ ms_location ms + mbHiVersion <- use GetModificationTime_{missingFileDiagnostics=False} hiFile + modVersion <- use_ GetModificationTime f + sourceModified <- case mbHiVersion of + Nothing -> pure SourceModified + Just x -> + if modificationTime x < modificationTime modVersion + then pure SourceModified + else do + (fileImports, _) <- use_ GetLocatedImports f + let imports = fmap artifactFilePath . 
snd <$> fileImports + deps <- uses_ IsHiFileStable (catMaybes imports) + pure $ if all (== SourceUnmodifiedAndStable) deps + then SourceUnmodifiedAndStable + else SourceUnmodified + return (Just (BS.pack $ show sourceModified), ([], Just sourceModified)) + +getModSummaryRule :: Rules () +getModSummaryRule = do + defineEarlyCutoff $ \GetModSummary f -> do + session <- hscEnv <$> use_ GhcSession f + let dflags = hsc_dflags session + (modTime, mFileContent) <- getFileContents f + let fp = fromNormalizedFilePath f + modS <- liftIO $ runExceptT $ + getModSummaryFromImports session fp modTime (textToStringBuffer <$> mFileContent) + case modS of + Right res@(ms,_) -> do + let fingerPrint = hash (computeFingerprint f (fromJust $ ms_hspp_buf ms) dflags ms, hashUTC modTime) + return ( Just (BS.pack $ show fingerPrint) , ([], Just res)) + Left diags -> return (Nothing, (diags, Nothing)) + + defineEarlyCutoff $ \GetModSummaryWithoutTimestamps f -> do + ms <- use GetModSummary f + case ms of + Just res@(msWithTimestamps,_) -> do + let ms = msWithTimestamps { + ms_hs_date = error "use GetModSummary instead of GetModSummaryWithoutTimestamps", + ms_hspp_buf = error "use GetModSummary instead of GetModSummaryWithoutTimestamps" + } + dflags <- hsc_dflags . hscEnv <$> use_ GhcSession f + let fp = BS.pack $ show $ hash (computeFingerprint f (fromJust $ ms_hspp_buf msWithTimestamps) dflags ms) + return (Just fp, ([], Just res)) + Nothing -> return (Nothing, ([], Nothing)) + where + -- Compute a fingerprint from the contents of `ModSummary`, + -- eliding the timestamps and other non relevant fields. + computeFingerprint f sb dflags ModSummary{..} = + let fingerPrint = + ( moduleNameString (moduleName ms_mod) + , ms_hspp_file + , map unLoc opts + , ml_hs_file ms_location + , fingerPrintImports ms_srcimps + , fingerPrintImports ms_textual_imps + ) + fingerPrintImports = map (fmap uniq *** (moduleNameString . unLoc)) + opts = Hdr.getOptions dflags sb (fromNormalizedFilePath f) + in fingerPrint + + hashUTC UTCTime{..} = (fromEnum utctDay, fromEnum utctDayTime) + + +generateCore :: RunSimplifier -> NormalizedFilePath -> Action (IdeResult ModGuts) +generateCore runSimplifier file = do + packageState <- hscEnv <$> use_ GhcSessionDeps file + tm <- use_ TypeCheck file + setPriority priorityGenerateCore + liftIO $ compileModule runSimplifier packageState (tmrModSummary tm) (tmrTypechecked tm) + +generateCoreRule :: Rules () +generateCoreRule = + define $ \GenerateCore -> generateCore (RunSimplifier True) + +getModIfaceRule :: Rules () +getModIfaceRule = defineEarlyCutoff $ \GetModIface f -> do +#if !defined(GHC_LIB) + fileOfInterest <- use_ IsFileOfInterest f + res@(_,(_,mhmi)) <- case fileOfInterest of + IsFOI status -> do + -- Never load from disk for files of interest + tmr <- use_ TypeCheck f + linkableType <- getLinkableType f + hsc <- hscEnv <$> use_ GhcSessionDeps f + let compile = fmap ([],) $ use GenerateCore f + (diags, !hiFile) <- compileToObjCodeIfNeeded hsc linkableType compile tmr + let fp = hiFileFingerPrint <$> hiFile + hiDiags <- case hiFile of + Just hiFile + | OnDisk <- status + , not (tmrDeferedError tmr) -> liftIO $ writeHiFile hsc hiFile + _ -> pure [] + return (fp, (diags++hiDiags, hiFile)) + NotFOI -> do + hiFile <- use GetModIfaceFromDisk f + let fp = hiFileFingerPrint <$> hiFile + return (fp, ([], hiFile)) + + -- Record the linkable so we know not to unload it + whenJust (hm_linkable . 
hirHomeMod =<< mhmi) $ \(LM time mod _) -> do + compiledLinkables <- getCompiledLinkables <$> getIdeGlobalAction + liftIO $ modifyVar_ compiledLinkables $ \old -> pure $ extendModuleEnv old mod time + pure res +#else + tm <- use_ TypeCheck f + hsc <- hscEnv <$> use_ GhcSessionDeps f + (diags, !hiFile) <- liftIO $ compileToObjCodeIfNeeded hsc Nothing (error "can't compile with ghc-lib") tm + let fp = hiFileFingerPrint <$> hiFile + return (fp, (diags, hiFile)) +#endif + +getModIfaceWithoutLinkableRule :: Rules () +getModIfaceWithoutLinkableRule = defineEarlyCutoff $ \GetModIfaceWithoutLinkable f -> do + mhfr <- use GetModIface f + let mhfr' = fmap (\x -> x{ hirHomeMod = (hirHomeMod x){ hm_linkable = Just (error msg) } }) mhfr + msg = "tried to look at linkable for GetModIfaceWithoutLinkable for " ++ show f + pure (fingerprintToBS . getModuleHash . hirModIface <$> mhfr', ([],mhfr')) + +regenerateHiFile :: HscEnvEq -> NormalizedFilePath -> ModSummary -> Maybe LinkableType -> Action ([FileDiagnostic], Maybe HiFileResult) +regenerateHiFile sess f ms compNeeded = do + let hsc = hscEnv sess + opt <- getIdeOptions + + -- Embed haddocks in the interface file + (_, (diags, mb_pm)) <- liftIO $ getParsedModuleDefinition hsc opt f (withOptHaddock ms) + (diags, mb_pm) <- case mb_pm of + Just _ -> return (diags, mb_pm) + Nothing -> do + -- if parsing fails, try parsing again with Haddock turned off + (_, (diagsNoHaddock, mb_pm)) <- liftIO $ getParsedModuleDefinition hsc opt f ms + return (mergeParseErrorsHaddock diagsNoHaddock diags, mb_pm) + case mb_pm of + Nothing -> return (diags, Nothing) + Just pm -> do + -- Invoke typechecking directly to update it without incurring a dependency + -- on the parsed module and the typecheck rules + (diags', mtmr) <- typeCheckRuleDefinition hsc pm + case mtmr of + Nothing -> pure (diags', Nothing) + Just tmr -> do + + -- compile writes .o file + let compile = compileModule (RunSimplifier True) hsc (pm_mod_summary pm) $ tmrTypechecked tmr + + -- Bang pattern is important to avoid leaking 'tmr' + (diags'', !res) <- liftIO $ compileToObjCodeIfNeeded hsc compNeeded compile tmr + + -- Write hi file + hiDiags <- case res of + Just hiFile + | not $ tmrDeferedError tmr -> + liftIO $ writeHiFile hsc hiFile + _ -> pure [] + + -- Write hie file + (gDiags, masts) <- liftIO $ generateHieAsts hsc tmr + source <- getSourceFileSource f + wDiags <- forM masts $ \asts -> + liftIO $ writeHieFile hsc (tmrModSummary tmr) (tcg_exports $ tmrTypechecked tmr) asts source + + return (diags <> diags' <> diags'' <> hiDiags <> gDiags <> concat wDiags, res) + + +type CompileMod m = m (IdeResult ModGuts) + +-- | HscEnv should have deps included already +compileToObjCodeIfNeeded :: MonadIO m => HscEnv -> Maybe LinkableType -> CompileMod m -> TcModuleResult -> m (IdeResult HiFileResult) +compileToObjCodeIfNeeded hsc Nothing _ tmr = liftIO $ do + res <- mkHiFileResultNoCompile hsc tmr + pure ([], Just $! res) +compileToObjCodeIfNeeded hsc (Just linkableType) getGuts tmr = do + (diags, mguts) <- getGuts + case mguts of + Nothing -> pure (diags, Nothing) + Just guts -> do + (diags', !res) <- liftIO $ mkHiFileResultCompile hsc tmr guts linkableType + pure (diags++diags', res) + +getClientSettingsRule :: Rules () +getClientSettingsRule = defineEarlyCutOffNoFile $ \GetClientSettings -> do + alwaysRerun + settings <- clientSettings <$> getIdeConfiguration + return (BS.pack . show . 
hash $ settings, settings) + +-- | For now we always use bytecode +getLinkableType :: NormalizedFilePath -> Action (Maybe LinkableType) +getLinkableType f = do + needsComp <- use_ NeedsCompilation f + pure $ if needsComp then Just BCOLinkable else Nothing + +needsCompilationRule :: Rules () +needsCompilationRule = defineEarlyCutoff $ \NeedsCompilation file -> do + -- It's important to use stale data here to avoid wasted work. + -- if NeedsCompilation fails for a module M its result will be under-approximated + -- to False in its dependencies. However, if M actually used TH, this will + -- cause a re-evaluation of GetModIface for all dependencies + -- (since we don't need to generate object code anymore). + -- Once M is fixed we will discover that we actually needed all the object code + -- that we just threw away, and thus have to recompile all dependencies once + -- again, this time keeping the object code. + (ms,_) <- fst <$> useWithStale_ GetModSummaryWithoutTimestamps file + -- A file needs object code if it uses TemplateHaskell or any file that depends on it uses TemplateHaskell + res <- + if uses_th_qq ms + then pure True + else do + graph <- useNoFile GetModuleGraph + case graph of + -- Treat as False if some reverse dependency header fails to parse + Nothing -> pure False + Just depinfo -> case immediateReverseDependencies file depinfo of + -- If we fail to get immediate reverse dependencies, fail with an error message + Nothing -> fail $ "Failed to get the immediate reverse dependencies of " ++ show file + Just revdeps -> anyM (fmap (fromMaybe False) . use NeedsCompilation) revdeps + + pure (Just $ BS.pack $ show $ hash res, ([], Just res)) + where + uses_th_qq (ms_hspp_opts -> dflags) = + xopt LangExt.TemplateHaskell dflags || xopt LangExt.QuasiQuotes dflags + +-- | Tracks which linkables are current, so we don't need to unload them +newtype CompiledLinkables = CompiledLinkables { getCompiledLinkables :: Var (ModuleEnv UTCTime) } +instance IsIdeGlobal CompiledLinkables + +-- | A rule that wires per-file rules together +mainRule :: Rules () +mainRule = do + linkables <- liftIO $ newVar emptyModuleEnv + addIdeGlobal $ CompiledLinkables linkables + getParsedModuleRule + getLocatedImportsRule + getDependencyInformationRule + reportImportCyclesRule + getDependenciesRule + typeCheckRule + getDocMapRule + loadGhcSession + getModIfaceFromDiskRule + getModIfaceRule + getModIfaceWithoutLinkableRule + getModSummaryRule + isHiFileStableRule + getModuleGraphRule + knownFilesRule + getClientSettingsRule + getHieAstsRule + getBindingsRule + needsCompilationRule + generateCoreRule + getImportMapRule + +-- | Given the path to a module src file, this rule returns True if the +-- corresponding `.hi` file is stable, that is, if it is newer +-- than the src file, and all its dependencies are stable too. +data IsHiFileStable = IsHiFileStable + deriving (Eq, Show, Typeable, Generic) +instance Hashable IsHiFileStable +instance NFData IsHiFileStable +instance Binary IsHiFileStable + +type instance RuleResult IsHiFileStable = SourceModified diff --git a/ghcide/src/Development/IDE/Core/Service.hs b/ghcide/src/Development/IDE/Core/Service.hs new file mode 100644 index 00000000000..e43a8658a69 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Service.hs @@ -0,0 +1,87 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
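Editor's note: mainRule above is the composition point — an embedder (or a test) passes it, possibly combined with extra rules, to 'initialise' in Service.hs below. A sketch of such a combination, reusing the hypothetical no-file rule from the earlier sketch:

allRules :: Rules ()
allRules = do
    mainRule
    getToolVersionsRule   -- hypothetical extra rule sketched earlier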
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE RankNTypes #-} + +-- | A Shake implementation of the compiler service, built +-- using the "Shaker" abstraction layer for in-memory use. +-- +module Development.IDE.Core.Service( + getIdeOptions, getIdeOptionsIO, + IdeState, initialise, shutdown, + runAction, + writeProfile, + getDiagnostics, + ideLogger, + updatePositionMapping, + ) where + +import Data.Maybe +import Development.IDE.Types.Options (IdeOptions(..)) +import Development.IDE.Core.Debouncer +import Development.IDE.Core.FileStore (VFSHandle, fileStoreRules) +import Development.IDE.Core.FileExists (fileExistsRules) +import Development.IDE.Core.OfInterest +import Development.IDE.Types.Logger as Logger +import Development.Shake +import qualified Language.Haskell.LSP.Messages as LSP +import qualified Language.Haskell.LSP.Types as LSP +import qualified Language.Haskell.LSP.Types.Capabilities as LSP + +import Development.IDE.Core.Shake +import Control.Monad + + + +------------------------------------------------------------ +-- Exposed API + +-- | Initialise the Compiler Service. +initialise :: LSP.ClientCapabilities + -> Rules () + -> IO LSP.LspId + -> (LSP.FromServerMessage -> IO ()) + -> WithProgressFunc + -> WithIndefiniteProgressFunc + -> Logger + -> Debouncer LSP.NormalizedUri + -> IdeOptions + -> VFSHandle + -> IO IdeState +initialise caps mainRule getLspId toDiags wProg wIndefProg logger debouncer options vfs = + shakeOpen + getLspId + toDiags + wProg + wIndefProg + caps + logger + debouncer + (optShakeProfiling options) + (optReportProgress options) + (optTesting options) + shakeOptions + { shakeThreads = optThreads options + , shakeFiles = fromMaybe "/dev/null" (optShakeFiles options) + } $ do + addIdeGlobal $ GlobalIdeOptions options + fileStoreRules vfs + ofInterestRules + fileExistsRules caps vfs + mainRule + +writeProfile :: IdeState -> FilePath -> IO () +writeProfile = shakeProfile + +-- | Shutdown the Compiler Service. +shutdown :: IdeState -> IO () +shutdown = shakeShut + +-- This will return as soon as the result of the action is +-- available. There might still be other rules running at this point, +-- e.g., the ofInterestRule. +runAction :: String -> IdeState -> Action a -> IO a +runAction herald ide act = + join $ shakeEnqueue (shakeExtras ide) (mkDelayedAction herald Logger.Info act) diff --git a/ghcide/src/Development/IDE/Core/Shake.hs b/ghcide/src/Development/IDE/Core/Shake.hs new file mode 100644 index 00000000000..7d5a9eca5a9 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Shake.hs @@ -0,0 +1,1110 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecursiveDo #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE ConstraintKinds #-} + +-- | A Shake implementation of the compiler service. +-- +-- There are two primary locations where data lives, and both of +-- these contain much the same data: +-- +-- * The Shake database (inside 'shakeDb') stores a map of shake keys +-- to shake values. In our case, these are all of type 'Q' to 'A'. +-- During a single run all the values in the Shake database are consistent +-- so are used in conjunction with each other, e.g. in 'uses'. +-- +-- * The 'Values' type stores a map of keys to values. 
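Editor's note: once 'initialise' has produced an IdeState, 'runAction' is the bridge from plain IO into the build graph — it enqueues the Action and blocks until it has run. A sketch of a caller; the herald string is arbitrary and only used for logging:

fetchParse :: IdeState -> NormalizedFilePath -> IO (Maybe ParsedModule)
fetchParse ide file = runAction "example/parse" ide (getParsedModule file)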
These values are +-- always stored as real Haskell values, whereas Shake serialises all 'A' values +-- between runs. To deserialise a Shake value, we just consult Values. +module Development.IDE.Core.Shake( + IdeState, shakeExtras, + ShakeExtras(..), getShakeExtras, getShakeExtrasRules, + KnownTargets, Target(..), toKnownFiles, + IdeRule, IdeResult, + GetModificationTime(GetModificationTime, GetModificationTime_, missingFileDiagnostics), + shakeOpen, shakeShut, + shakeRestart, + shakeEnqueue, + shakeProfile, + use, useNoFile, uses, useWithStaleFast, useWithStaleFast', delayedAction, + FastResult(..), + use_, useNoFile_, uses_, + useWithStale, usesWithStale, + useWithStale_, usesWithStale_, + define, defineEarlyCutoff, defineOnDisk, needOnDisk, needOnDisks, + getDiagnostics, + getHiddenDiagnostics, + IsIdeGlobal, addIdeGlobal, addIdeGlobalExtras, getIdeGlobalState, getIdeGlobalAction, + getIdeGlobalExtras, + getIdeOptions, + getIdeOptionsIO, + GlobalIdeOptions(..), + garbageCollect, + knownTargets, + setPriority, + sendEvent, + ideLogger, + actionLogger, + FileVersion(..), + Priority(..), + updatePositionMapping, + deleteValue, + OnDiskRule(..), + WithProgressFunc, WithIndefiniteProgressFunc, + ProgressEvent(..), + DelayedAction, mkDelayedAction, + IdeAction(..), runIdeAction, + mkUpdater, + -- Exposed for testing. + Q(..), + ) where + +import Development.Shake hiding (ShakeValue, doesFileExist, Info) +import Development.Shake.Database +import Development.Shake.Classes +import Development.Shake.Rule +import qualified Data.HashMap.Strict as HMap +import qualified Data.Map.Strict as Map +import qualified Data.ByteString.Char8 as BS +import Data.Dynamic +import Data.Maybe +import Data.Map.Strict (Map) +import Data.List.Extra (partition, takeEnd) +import qualified Data.Set as Set +import qualified Data.Text as T +import Data.Tuple.Extra +import Data.Unique +import Development.IDE.Core.Debouncer +import Development.IDE.GHC.Compat (NameCacheUpdater(..), upNameCache ) +import Development.IDE.GHC.Orphans () +import Development.IDE.Core.PositionMapping +import Development.IDE.Core.RuleTypes +import Development.IDE.Types.Action +import Development.IDE.Types.Logger hiding (Priority) +import Development.IDE.Types.KnownTargets +import Development.IDE.Types.Shake +import qualified Development.IDE.Types.Logger as Logger +import Language.Haskell.LSP.Diagnostics +import qualified Data.SortedList as SL +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import Control.Concurrent.Async +import Control.Concurrent.Extra +import Control.Concurrent.STM (readTVar, writeTVar, newTVarIO, atomically) +import Control.DeepSeq +import Control.Exception.Extra +import System.Time.Extra +import Data.Typeable +import qualified Language.Haskell.LSP.Core as LSP +import qualified Language.Haskell.LSP.Messages as LSP +import qualified Language.Haskell.LSP.Types as LSP +import System.FilePath hiding (makeRelative) +import qualified Development.Shake as Shake +import Control.Monad.Extra +import Data.Time +import GHC.Generics +import System.IO.Unsafe +import Language.Haskell.LSP.Types +import qualified Control.Monad.STM as STM +import Control.Monad.IO.Class +import Control.Monad.Reader +import Control.Monad.Trans.Maybe +import Data.Traversable +import Data.Hashable +import Development.IDE.Core.Tracing + +import Data.IORef +import NameCache +import UniqSupply +import PrelInfo +import Language.Haskell.LSP.Types.Capabilities 
+import OpenTelemetry.Eventlog + +-- information we stash inside the shakeExtra field +data ShakeExtras = ShakeExtras + {eventer :: LSP.FromServerMessage -> IO () + ,debouncer :: Debouncer NormalizedUri + ,logger :: Logger + ,globals :: Var (HMap.HashMap TypeRep Dynamic) + ,state :: Var Values + ,diagnostics :: Var DiagnosticStore + ,hiddenDiagnostics :: Var DiagnosticStore + ,publishedDiagnostics :: Var (HMap.HashMap NormalizedUri [Diagnostic]) + -- ^ This represents the set of diagnostics that we have published. + -- Due to debouncing not every change might get published. + ,positionMapping :: Var (HMap.HashMap NormalizedUri (Map TextDocumentVersion (PositionDelta, PositionMapping))) + -- ^ Map from a text document version to a PositionMapping that describes how to map + -- positions in a version of that document to positions in the latest version + -- First mapping is delta from previous version and second one is an + -- accumlation of all previous mappings. + ,inProgress :: Var (HMap.HashMap NormalizedFilePath Int) + -- ^ How many rules are running for each file + ,progressUpdate :: ProgressEvent -> IO () + -- ^ The generator for unique Lsp identifiers + ,ideTesting :: IdeTesting + -- ^ Whether to enable additional lsp messages used by the test suite for checking invariants + ,session :: MVar ShakeSession + -- ^ Used in the GhcSession rule to forcefully restart the session after adding a new component + ,withProgress :: WithProgressFunc + -- ^ Report progress about some long running operation (on top of the progress shown by 'lspShakeProgress') + ,withIndefiniteProgress :: WithIndefiniteProgressFunc + -- ^ Same as 'withProgress', but for processes that do not report the percentage complete + ,restartShakeSession :: [DelayedAction ()] -> IO () + ,ideNc :: IORef NameCache + -- | A mapping of module name to known target (or candidate targets, if missing) + ,knownTargetsVar :: Var (Hashed KnownTargets) + -- | A mapping of exported identifiers for local modules. Updated on kick + ,exportsMap :: Var ExportsMap + -- | A work queue for actions added via 'runInShakeSession' + ,actionQueue :: ActionQueue + ,clientCapabilities :: ClientCapabilities + } + +type WithProgressFunc = forall a. + T.Text -> LSP.ProgressCancellable -> ((LSP.Progress -> IO ()) -> IO a) -> IO a +type WithIndefiniteProgressFunc = forall a. + T.Text -> LSP.ProgressCancellable -> IO a -> IO a + +data ProgressEvent + = KickStarted + | KickCompleted + +getShakeExtras :: Action ShakeExtras +getShakeExtras = do + Just x <- getShakeExtra @ShakeExtras + return x + +getShakeExtrasRules :: Rules ShakeExtras +getShakeExtrasRules = do + Just x <- getShakeExtraRules @ShakeExtras + return x + +class Typeable a => IsIdeGlobal a where + +addIdeGlobal :: IsIdeGlobal a => a -> Rules () +addIdeGlobal x = do + extras <- getShakeExtrasRules + liftIO $ addIdeGlobalExtras extras x + +addIdeGlobalExtras :: IsIdeGlobal a => ShakeExtras -> a -> IO () +addIdeGlobalExtras ShakeExtras{globals} x@(typeOf -> ty) = + liftIO $ modifyVar_ globals $ \mp -> case HMap.lookup ty mp of + Just _ -> errorIO $ "Internal error, addIdeGlobalExtras, got the same type twice for " ++ show ty + Nothing -> return $! HMap.insert ty (toDyn x) mp + + +getIdeGlobalExtras :: forall a . 
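Editor's note: IsIdeGlobal below is how per-server mutable state is threaded around without adding fields to ShakeExtras (CompiledLinkables in Rules.hs is a real instance of the pattern). A sketch of declaring and using one; the ActiveRequests type is hypothetical:

newtype ActiveRequests = ActiveRequests (Var Int)
instance IsIdeGlobal ActiveRequests

registerActiveRequests :: Rules ()
registerActiveRequests = do
    var <- liftIO $ newVar 0
    addIdeGlobal (ActiveRequests var)

bumpActiveRequests :: Action ()
bumpActiveRequests = do
    ActiveRequests var <- getIdeGlobalAction
    liftIO $ modifyVar_ var (pure . succ)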
IsIdeGlobal a => ShakeExtras -> IO a +getIdeGlobalExtras ShakeExtras{globals} = do + let typ = typeRep (Proxy :: Proxy a) + x <- HMap.lookup (typeRep (Proxy :: Proxy a)) <$> readVar globals + case x of + Just x + | Just x <- fromDynamic x -> pure x + | otherwise -> errorIO $ "Internal error, getIdeGlobalExtras, wrong type for " ++ show typ ++ " (got " ++ show (dynTypeRep x) ++ ")" + Nothing -> errorIO $ "Internal error, getIdeGlobalExtras, no entry for " ++ show typ + +getIdeGlobalAction :: forall a . IsIdeGlobal a => Action a +getIdeGlobalAction = liftIO . getIdeGlobalExtras =<< getShakeExtras + +getIdeGlobalState :: forall a . IsIdeGlobal a => IdeState -> IO a +getIdeGlobalState = getIdeGlobalExtras . shakeExtras + + +newtype GlobalIdeOptions = GlobalIdeOptions IdeOptions +instance IsIdeGlobal GlobalIdeOptions + +getIdeOptions :: Action IdeOptions +getIdeOptions = do + GlobalIdeOptions x <- getIdeGlobalAction + return x + +getIdeOptionsIO :: ShakeExtras -> IO IdeOptions +getIdeOptionsIO ide = do + GlobalIdeOptions x <- getIdeGlobalExtras ide + return x + +-- | Return the most recent, potentially stale, value and a PositionMapping +-- for the version of that value. +lastValueIO :: ShakeExtras -> NormalizedFilePath -> Value v -> IO (Maybe (v, PositionMapping)) +lastValueIO ShakeExtras{positionMapping} file v = do + allMappings <- liftIO $ readVar positionMapping + pure $ case v of + Succeeded ver v -> Just (v, mappingForVersion allMappings file ver) + Stale ver v -> Just (v, mappingForVersion allMappings file ver) + Failed -> Nothing + +-- | Return the most recent, potentially stale, value and a PositionMapping +-- for the version of that value. +lastValue :: NormalizedFilePath -> Value v -> Action (Maybe (v, PositionMapping)) +lastValue file v = do + s <- getShakeExtras + liftIO $ lastValueIO s file v + +valueVersion :: Value v -> Maybe TextDocumentVersion +valueVersion = \case + Succeeded ver _ -> Just ver + Stale ver _ -> Just ver + Failed -> Nothing + +mappingForVersion + :: HMap.HashMap NormalizedUri (Map TextDocumentVersion (a, PositionMapping)) + -> NormalizedFilePath + -> TextDocumentVersion + -> PositionMapping +mappingForVersion allMappings file ver = + maybe zeroMapping snd $ + Map.lookup ver =<< + HMap.lookup (filePathToUri' file) allMappings + +type IdeRule k v = + ( Shake.RuleResult k ~ v + , Shake.ShakeValue k + , Show v + , Typeable v + , NFData v + ) + +-- | A live Shake session with the ability to enqueue Actions for running. +-- Keeps the 'ShakeDatabase' open, so at most one 'ShakeSession' per database. +newtype ShakeSession = ShakeSession + { cancelShakeSession :: IO () + -- ^ Closes the Shake session + } + +-- | A Shake database plus persistent store. Can be thought of as storing +-- mappings from @(FilePath, k)@ to @RuleResult k@. 
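Editor's note: the IdeRule constraint below is what every key sketched earlier has to satisfy before it can be passed to use/define — Shake.ShakeValue covers Eq, Hashable, Binary, NFData, Show and Typeable for the key, and the RuleResult instance fixes the value type. Tying it back to the hypothetical GetLineCount key from the RuleTypes sketch:

lineCount :: NormalizedFilePath -> Action (Maybe Int)
lineCount = use GetLineCount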
+data IdeState = IdeState + {shakeDb :: ShakeDatabase + ,shakeSession :: MVar ShakeSession + ,shakeClose :: IO () + ,shakeExtras :: ShakeExtras + ,shakeProfileDir :: Maybe FilePath + ,stopProgressReporting :: IO () + } + + + +-- This is debugging code that generates a series of profiles, if the Boolean is true +shakeDatabaseProfile :: Maybe FilePath -> ShakeDatabase -> IO (Maybe FilePath) +shakeDatabaseProfile mbProfileDir shakeDb = + for mbProfileDir $ \dir -> do + count <- modifyVar profileCounter $ \x -> let !y = x+1 in return (y,y) + let file = "ide-" ++ profileStartTime ++ "-" ++ takeEnd 5 ("0000" ++ show count) <.> "html" + shakeProfileDatabase shakeDb $ dir file + return (dir file) + +{-# NOINLINE profileStartTime #-} +profileStartTime :: String +profileStartTime = unsafePerformIO $ formatTime defaultTimeLocale "%Y%m%d-%H%M%S" <$> getCurrentTime + +{-# NOINLINE profileCounter #-} +profileCounter :: Var Int +profileCounter = unsafePerformIO $ newVar 0 + +setValues :: IdeRule k v + => Var Values + -> k + -> NormalizedFilePath + -> Value v + -> IO () +setValues state key file val = modifyVar_ state $ \vals -> do + -- Force to make sure the old HashMap is not retained + evaluate $ HMap.insert (file, Key key) (fmap toDyn val) vals + +-- | Delete the value stored for a given ide build key +deleteValue + :: (Typeable k, Hashable k, Eq k, Show k) + => IdeState + -> k + -> NormalizedFilePath + -> IO () +deleteValue IdeState{shakeExtras = ShakeExtras{state}} key file = modifyVar_ state $ \vals -> + evaluate $ HMap.delete (file, Key key) vals + +-- | We return Nothing if the rule has not run and Just Failed if it has failed to produce a value. +getValues :: forall k v. IdeRule k v => Var Values -> k -> NormalizedFilePath -> IO (Maybe (Value v)) +getValues state key file = do + vs <- readVar state + case HMap.lookup (file, Key key) vs of + Nothing -> pure Nothing + Just v -> do + let r = fmap (fromJust . fromDynamic @v) v + -- Force to make sure we do not retain a reference to the HashMap + -- and we blow up immediately if the fromJust should fail + -- (which would be an internal error). + evaluate (r `seqValue` Just r) + +-- | Get all the files in the project +knownTargets :: Action (Hashed KnownTargets) +knownTargets = do + ShakeExtras{knownTargetsVar} <- getShakeExtras + liftIO $ readVar knownTargetsVar + +-- | Seq the result stored in the Shake value. This only +-- evaluates the value to WHNF not NF. We take care of the latter +-- elsewhere and doing it twice is expensive. +seqValue :: Value v -> b -> b +seqValue v b = case v of + Succeeded ver v -> rnf ver `seq` v `seq` b + Stale ver v -> rnf ver `seq` v `seq` b + Failed -> b + +-- | Open a 'IdeState', should be shut using 'shakeShut'. 
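Editor's note: setValues and getValues below both call 'evaluate' before returning — forcing the freshly built structure stops an unevaluated thunk from keeping the previous HashMap (or the lazily projected Dynamic) alive. The pattern in isolation, as a sketch:

import Control.Exception (evaluate)
import Control.Concurrent.Extra (Var, modifyVar_)
import Data.Hashable (Hashable)
import qualified Data.HashMap.Strict as HMap

insertForced :: (Eq k, Hashable k) => Var (HMap.HashMap k v) -> k -> v -> IO ()
insertForced var k v =
    -- evaluate the new map to WHNF before storing it back in the Var
    modifyVar_ var $ \m -> evaluate (HMap.insert k v m)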
+shakeOpen :: IO LSP.LspId + -> (LSP.FromServerMessage -> IO ()) -- ^ diagnostic handler + -> WithProgressFunc + -> WithIndefiniteProgressFunc + -> ClientCapabilities + -> Logger + -> Debouncer NormalizedUri + -> Maybe FilePath + -> IdeReportProgress + -> IdeTesting + -> ShakeOptions + -> Rules () + -> IO IdeState +shakeOpen getLspId eventer withProgress withIndefiniteProgress clientCapabilities logger debouncer + shakeProfileDir (IdeReportProgress reportProgress) ideTesting@(IdeTesting testing) opts rules = mdo + + inProgress <- newVar HMap.empty + us <- mkSplitUniqSupply 'r' + ideNc <- newIORef (initNameCache us knownKeyNames) + (shakeExtras, stopProgressReporting) <- do + globals <- newVar HMap.empty + state <- newVar HMap.empty + diagnostics <- newVar mempty + hiddenDiagnostics <- newVar mempty + publishedDiagnostics <- newVar mempty + positionMapping <- newVar HMap.empty + knownTargetsVar <- newVar $ hashed HMap.empty + let restartShakeSession = shakeRestart ideState + let session = shakeSession + mostRecentProgressEvent <- newTVarIO KickCompleted + let progressUpdate = atomically . writeTVar mostRecentProgressEvent + progressAsync <- async $ + when reportProgress $ + progressThread mostRecentProgressEvent inProgress + exportsMap <- newVar mempty + + actionQueue <- newQueue + + pure (ShakeExtras{..}, cancel progressAsync) + (shakeDbM, shakeClose) <- + shakeOpenDatabase + opts { shakeExtra = addShakeExtra shakeExtras $ shakeExtra opts } + rules + shakeDb <- shakeDbM + initSession <- newSession shakeExtras shakeDb [] + shakeSession <- newMVar initSession + let ideState = IdeState{..} + + IdeOptions{ optOTMemoryProfiling = IdeOTMemoryProfiling otProfilingEnabled } <- getIdeOptionsIO shakeExtras + when otProfilingEnabled $ + startTelemetry logger $ state shakeExtras + + return ideState + where + -- The progress thread is a state machine with two states: + -- 1. Idle + -- 2. Reporting a kick event + -- And two transitions, modelled by 'ProgressEvent': + -- 1. KickCompleted - transitions from Reporting into Idle + -- 2. KickStarted - transitions from Idle into Reporting + progressThread mostRecentProgressEvent inProgress = progressLoopIdle + where + progressLoopIdle = do + atomically $ do + v <- readTVar mostRecentProgressEvent + case v of + KickCompleted -> STM.retry + KickStarted -> return () + asyncReporter <- async lspShakeProgress + progressLoopReporting asyncReporter + progressLoopReporting asyncReporter = do + atomically $ do + v <- readTVar mostRecentProgressEvent + case v of + KickStarted -> STM.retry + KickCompleted -> return () + cancel asyncReporter + progressLoopIdle + + lspShakeProgress = do + -- first sleep a bit, so we only show progress messages if it's going to take + -- a "noticable amount of time" (we often expect a thread kill to arrive before the sleep finishes) + unless testing $ sleep 0.1 + lspId <- getLspId + u <- ProgressTextToken . T.pack . show . 
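Editor's note: the progress thread above alternates between two states by blocking on the TVar until the event flips. Its blocking step reduces to this STM skeleton (a sketch; the real ProgressEvent has no Eq instance, so the loop pattern-matches instead):

import Control.Concurrent.STM (TVar, readTVar, atomically)
import qualified Control.Monad.STM as STM
import Control.Monad (when)

waitUntil :: Eq a => TVar a -> a -> IO ()
waitUntil var expected = atomically $ do
    v <- readTVar var
    when (v /= expected) STM.retry   -- block until another thread writes 'expected'

With that helper, progressLoopIdle is essentially "wait for KickStarted, spawn the reporter" and progressLoopReporting is "wait for KickCompleted, cancel it".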
hashUnique <$> newUnique + eventer $ LSP.ReqWorkDoneProgressCreate $ + LSP.fmServerWorkDoneProgressCreateRequest lspId $ + LSP.WorkDoneProgressCreateParams { _token = u } + bracket_ (start u) (stop u) (loop u Nothing) + where + start id = eventer $ LSP.NotWorkDoneProgressBegin $ + LSP.fmServerWorkDoneProgressBeginNotification + LSP.ProgressParams + { _token = id + , _value = WorkDoneProgressBeginParams + { _title = "Processing" + , _cancellable = Nothing + , _message = Nothing + , _percentage = Nothing + } + } + stop id = eventer $ LSP.NotWorkDoneProgressEnd $ + LSP.fmServerWorkDoneProgressEndNotification + LSP.ProgressParams + { _token = id + , _value = WorkDoneProgressEndParams + { _message = Nothing + } + } + sample = 0.1 + loop id prev = do + sleep sample + current <- readVar inProgress + let done = length $ filter (== 0) $ HMap.elems current + let todo = HMap.size current + let next = Just $ T.pack $ show done <> "/" <> show todo + when (next /= prev) $ + eventer $ LSP.NotWorkDoneProgressReport $ + LSP.fmServerWorkDoneProgressReportNotification + LSP.ProgressParams + { _token = id + , _value = LSP.WorkDoneProgressReportParams + { _cancellable = Nothing + , _message = next + , _percentage = Nothing + } + } + loop id next + +shakeProfile :: IdeState -> FilePath -> IO () +shakeProfile IdeState{..} = shakeProfileDatabase shakeDb + +shakeShut :: IdeState -> IO () +shakeShut IdeState{..} = withMVar shakeSession $ \runner -> do + -- Shake gets unhappy if you try to close when there is a running + -- request so we first abort that. + void $ cancelShakeSession runner + shakeClose + stopProgressReporting + + +-- | This is a variant of withMVar where the first argument is run unmasked and if it throws +-- an exception, the previous value is restored while the second argument is executed masked. +withMVar' :: MVar a -> (a -> IO b) -> (b -> IO (a, c)) -> IO c +withMVar' var unmasked masked = uninterruptibleMask $ \restore -> do + a <- takeMVar var + b <- restore (unmasked a) `onException` putMVar var a + (a', c) <- masked b + putMVar var a' + pure c + + +mkDelayedAction :: String -> Logger.Priority -> Action a -> DelayedAction a +mkDelayedAction = DelayedAction Nothing + +-- | These actions are run asynchronously after the current action is +-- finished running. For example, to trigger a key build after a rule +-- has already finished as is the case with useWithStaleFast +delayedAction :: DelayedAction a -> IdeAction (IO a) +delayedAction a = do + extras <- ask + liftIO $ shakeEnqueue extras a + +-- | Restart the current 'ShakeSession' with the given system actions. +-- Any actions running in the current session will be aborted, +-- but actions added via 'shakeEnqueue' will be requeued. +shakeRestart :: IdeState -> [DelayedAction ()] -> IO () +shakeRestart IdeState{..} acts = + withMVar' + shakeSession + (\runner -> do + (stopTime,()) <- duration (cancelShakeSession runner) + res <- shakeDatabaseProfile shakeProfileDir shakeDb + let profile = case res of + Just fp -> ", profile saved at " <> fp + _ -> "" + let msg = T.pack $ "Restarting build session (aborting the previous one took " + ++ showDuration stopTime ++ profile ++ ")" + logDebug (logger shakeExtras) msg + notifyTestingLogMessage shakeExtras msg + ) + -- It is crucial to be masked here, otherwise we can get killed + -- between spawning the new thread and updating shakeSession. 
+ -- See https://github.com/haskell/ghcide/issues/79 + (\() -> do + (,()) <$> newSession shakeExtras shakeDb acts) + +notifyTestingLogMessage :: ShakeExtras -> T.Text -> IO () +notifyTestingLogMessage extras msg = do + (IdeTesting isTestMode) <- optTesting <$> getIdeOptionsIO extras + let notif = LSP.NotLogMessage $ LSP.NotificationMessage "2.0" LSP.WindowLogMessage + $ LSP.LogMessageParams LSP.MtLog msg + when isTestMode $ eventer extras notif + + +-- | Enqueue an action in the existing 'ShakeSession'. +-- Returns a computation to block until the action is run, propagating exceptions. +-- Assumes a 'ShakeSession' is available. +-- +-- Appropriate for user actions other than edits. +shakeEnqueue :: ShakeExtras -> DelayedAction a -> IO (IO a) +shakeEnqueue ShakeExtras{actionQueue, logger} act = do + (b, dai) <- instantiateDelayedAction act + atomically $ pushQueue dai actionQueue + let wait' b = + waitBarrier b `catches` + [ Handler(\BlockedIndefinitelyOnMVar -> + fail $ "internal bug: forever blocked on MVar for " <> + actionName act) + , Handler (\e@AsyncCancelled -> do + logPriority logger Debug $ T.pack $ actionName act <> " was cancelled" + + atomically $ abortQueue dai actionQueue + throw e) + ] + return (wait' b >>= either throwIO return) + +-- | Set up a new 'ShakeSession' with a set of initial actions +-- Will crash if there is an existing 'ShakeSession' running. +newSession :: ShakeExtras -> ShakeDatabase -> [DelayedActionInternal] -> IO ShakeSession +newSession extras@ShakeExtras{..} shakeDb acts = do + reenqueued <- atomically $ peekInProgress actionQueue + let + -- A daemon-like action used to inject additional work + -- Runs actions from the work queue sequentially + pumpActionThread otSpan = do + d <- liftIO $ atomically $ popQueue actionQueue + void $ parallel [run otSpan d, pumpActionThread otSpan] + + -- TODO figure out how to thread the otSpan into defineEarlyCutoff + run _otSpan d = do + start <- liftIO offsetTime + getAction d + liftIO $ atomically $ doneQueue d actionQueue + runTime <- liftIO start + let msg = T.pack $ "finish: " ++ actionName d + ++ " (took " ++ showDuration runTime ++ ")" + liftIO $ do + logPriority logger (actionPriority d) msg + notifyTestingLogMessage extras msg + + workRun restore = withSpan "Shake session" $ \otSpan -> do + let acts' = pumpActionThread otSpan : map (run otSpan) (reenqueued ++ acts) + res <- try @SomeException (restore $ shakeRunDatabase shakeDb acts') + let res' = case res of + Left e -> "exception: " <> displayException e + Right _ -> "completed" + let msg = T.pack $ "Finishing build session(" ++ res' ++ ")" + return $ do + logDebug logger msg + notifyTestingLogMessage extras msg + + -- Do the work in a background thread + workThread <- asyncWithUnmask workRun + + -- run the wrap up in a separate thread since it contains interruptible + -- commands (and we are not using uninterruptible mask) + _ <- async $ join $ wait workThread + + -- Cancelling is required to flush the Shake database when either + -- the filesystem or the Ghc configuration have changed + let cancelShakeSession :: IO () + cancelShakeSession = cancel workThread + + pure (ShakeSession{..}) + +instantiateDelayedAction + :: DelayedAction a + -> IO (Barrier (Either SomeException a), DelayedActionInternal) +instantiateDelayedAction (DelayedAction _ s p a) = do + u <- newUnique + b <- newBarrier + let a' = do + -- work gets reenqueued when the Shake session is restarted + -- it can happen that a work item finished just as it was reenqueud + -- in that case, skipping 
the work is fine
+            alreadyDone <- liftIO $ isJust <$> waitBarrierMaybe b
+            unless alreadyDone $ do
+                x <- actionCatch @SomeException (Right <$> a) (pure . Left)
+                -- ignore exceptions if the barrier has been filled concurrently
+                liftIO $ void $ try @SomeException $ signalBarrier b x
+        d' = DelayedAction (Just u) s p a'
+    return (b, d')
+
+getDiagnostics :: IdeState -> IO [FileDiagnostic]
+getDiagnostics IdeState{shakeExtras = ShakeExtras{diagnostics}} = do
+    val <- readVar diagnostics
+    return $ getAllDiagnostics val
+
+getHiddenDiagnostics :: IdeState -> IO [FileDiagnostic]
+getHiddenDiagnostics IdeState{shakeExtras = ShakeExtras{hiddenDiagnostics}} = do
+    val <- readVar hiddenDiagnostics
+    return $ getAllDiagnostics val
+
+-- | Clear the results for all files that do not match the given predicate.
+garbageCollect :: (NormalizedFilePath -> Bool) -> Action ()
+garbageCollect keep = do
+    ShakeExtras{state, diagnostics,hiddenDiagnostics,publishedDiagnostics,positionMapping} <- getShakeExtras
+    liftIO $
+        do newState <- modifyVar state $ \values -> do
+               values <- evaluate $ HMap.filterWithKey (\(file, _) _ -> keep file) values
+               return $! dupe values
+           modifyVar_ diagnostics $ \diags -> return $! filterDiagnostics keep diags
+           modifyVar_ hiddenDiagnostics $ \hdiags -> return $! filterDiagnostics keep hdiags
+           modifyVar_ publishedDiagnostics $ \diags -> return $! HMap.filterWithKey (\uri _ -> keep (fromUri uri)) diags
+           let versionsForFile =
+                   HMap.fromListWith Set.union $
+                   mapMaybe (\((file, _key), v) -> (filePathToUri' file,) . Set.singleton <$> valueVersion v) $
+                   HMap.toList newState
+           modifyVar_ positionMapping $ \mappings -> return $! filterVersionMap versionsForFile mappings
+
+-- | Define a new Rule without early cutoff
+define
+    :: IdeRule k v
+    => (k -> NormalizedFilePath -> Action (IdeResult v)) -> Rules ()
+define op = defineEarlyCutoff $ \k v -> (Nothing,) <$> op k v
+
+-- | Request a Rule result if available
+use :: IdeRule k v
+    => k -> NormalizedFilePath -> Action (Maybe v)
+use key file = head <$> uses key [file]
+
+-- | Request a Rule result; if it is not available, return the last computed result, if any, which may be stale
+useWithStale :: IdeRule k v
+    => k -> NormalizedFilePath -> Action (Maybe (v, PositionMapping))
+useWithStale key file = head <$> usesWithStale key [file]
+
+-- | Request a Rule result; if it is not available, return the last computed result, which may be stale.
+-- Errors out if none is available.
+useWithStale_ :: IdeRule k v
+    => k -> NormalizedFilePath -> Action (v, PositionMapping)
+useWithStale_ key file = head <$> usesWithStale_ key [file]
+
+-- | Plural version of 'useWithStale_'
+usesWithStale_ :: IdeRule k v => k -> [NormalizedFilePath] -> Action [(v, PositionMapping)]
+usesWithStale_ key files = do
+    res <- usesWithStale key files
+    case sequence res of
+        Nothing -> liftIO $ throwIO $ BadDependency (show key)
+        Just v -> return v
+
+newtype IdeAction a = IdeAction { runIdeActionT :: (ReaderT ShakeExtras IO) a }
+    deriving newtype (MonadReader ShakeExtras, MonadIO, Functor, Applicative, Monad)
+
+-- | IdeActions are used when we want to return a result immediately, even if it
+-- is stale. Useful for UI actions like hover and completion where we don't want
+-- to block.
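To make the comment above concrete, here is a hedged sketch of the non-blocking pattern, built from runIdeAction and useWithStaleFast defined just below; quickLookup is an invented name:

    -- Hypothetical hover-style helper: return whatever value is cached right now,
    -- possibly stale, instead of waiting for a fresh build of the key.
    quickLookup :: IdeRule k v => k -> ShakeExtras -> NormalizedFilePath -> IO (Maybe (v, PositionMapping))
    quickLookup key extras file =
        runIdeAction "quickLookup" extras (useWithStaleFast key file)

Because useWithStaleFast also enqueues a refresh of the key, a later identical request observes an up-to-date value.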
+runIdeAction :: String -> ShakeExtras -> IdeAction a -> IO a +runIdeAction _herald s i = runReaderT (runIdeActionT i) s + +askShake :: IdeAction ShakeExtras +askShake = ask + +mkUpdater :: MaybeT IdeAction NameCacheUpdater +mkUpdater = do + ref <- lift $ ideNc <$> askShake + pure $ NCU (upNameCache ref) + +-- | A (maybe) stale result now, and an up to date one later +data FastResult a = FastResult { stale :: Maybe (a,PositionMapping), uptoDate :: IO (Maybe a) } + +-- | Lookup value in the database and return with the stale value immediately +-- Will queue an action to refresh the value. +-- Might block the first time the rule runs, but never blocks after that. +useWithStaleFast :: IdeRule k v => k -> NormalizedFilePath -> IdeAction (Maybe (v, PositionMapping)) +useWithStaleFast key file = stale <$> useWithStaleFast' key file + +-- | Same as useWithStaleFast but lets you wait for an up to date result +useWithStaleFast' :: IdeRule k v => k -> NormalizedFilePath -> IdeAction (FastResult v) +useWithStaleFast' key file = do + -- This lookup directly looks up the key in the shake database and + -- returns the last value that was computed for this key without + -- checking freshness. + + -- Async trigger the key to be built anyway because we want to + -- keep updating the value in the key. + wait <- delayedAction $ mkDelayedAction ("C:" ++ show key) Debug $ use key file + + s@ShakeExtras{state} <- askShake + r <- liftIO $ getValues state key file + liftIO $ case r of + -- block for the result if we haven't computed before + Nothing -> do + a <- wait + r <- getValues state key file + case r of + Nothing -> return $ FastResult Nothing (pure a) + Just v -> do + res <- lastValueIO s file v + pure $ FastResult res (pure a) + -- Otherwise, use the computed value even if it's out of date. + Just v -> do + res <- lastValueIO s file v + pure $ FastResult res wait + +useNoFile :: IdeRule k v => k -> Action (Maybe v) +useNoFile key = use key emptyFilePath + +use_ :: IdeRule k v => k -> NormalizedFilePath -> Action v +use_ key file = head <$> uses_ key [file] + +useNoFile_ :: IdeRule k v => k -> Action v +useNoFile_ key = use_ key emptyFilePath + +uses_ :: IdeRule k v => k -> [NormalizedFilePath] -> Action [v] +uses_ key files = do + res <- uses key files + case sequence res of + Nothing -> liftIO $ throwIO $ BadDependency (show key) + Just v -> return v + + +-- | When we depend on something that reported an error, and we fail as a direct result, throw BadDependency +-- which short-circuits the rest of the action +data BadDependency = BadDependency String deriving Show +instance Exception BadDependency + +isBadDependency :: SomeException -> Bool +isBadDependency x + | Just (x :: ShakeException) <- fromException x = isBadDependency $ shakeExceptionInner x + | Just (_ :: BadDependency) <- fromException x = True + | otherwise = False + +newtype Q k = Q (k, NormalizedFilePath) + deriving newtype (Eq, Hashable, NFData) + +instance Binary k => Binary (Q k) where + put (Q (k, fp)) = put (k, fp) + get = do + (k, fp) <- get + -- The `get` implementation of NormalizedFilePath + -- does not handle empty file paths so we + -- need to handle this ourselves here. + pure (Q (k, toNormalizedFilePath' fp)) + +instance Show k => Show (Q k) where + show (Q (k, file)) = show k ++ "; " ++ fromNormalizedFilePath file + +-- | Invariant: the 'v' must be in normal form (fully evaluated). 
+-- Otherwise we keep repeatedly 'rnf'ing values taken from the Shake database +newtype A v = A (Value v) + deriving Show + +instance NFData (A v) where rnf (A v) = v `seq` () + +-- In the Shake database we only store one type of key/result pairs, +-- namely Q (question) / A (answer). +type instance RuleResult (Q k) = A (RuleResult k) + + +-- | Plural version of 'use' +uses :: IdeRule k v + => k -> [NormalizedFilePath] -> Action [Maybe v] +uses key files = map (\(A value) -> currentValue value) <$> apply (map (Q . (key,)) files) + +-- | Return the last computed result which might be stale. +usesWithStale :: IdeRule k v + => k -> [NormalizedFilePath] -> Action [Maybe (v, PositionMapping)] +usesWithStale key files = do + values <- map (\(A value) -> value) <$> apply (map (Q . (key,)) files) + zipWithM lastValue files values + +-- | Define a new Rule with early cutoff +defineEarlyCutoff + :: IdeRule k v + => (k -> NormalizedFilePath -> Action (Maybe BS.ByteString, IdeResult v)) + -> Rules () +defineEarlyCutoff op = addBuiltinRule noLint noIdentity $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> otTracedAction key file isSuccess $ do + extras@ShakeExtras{state, inProgress} <- getShakeExtras + -- don't do progress for GetFileExists, as there are lots of non-nodes for just that one key + (if show key == "GetFileExists" then id else withProgressVar inProgress file) $ do + val <- case old of + Just old | mode == RunDependenciesSame -> do + v <- liftIO $ getValues state key file + case v of + -- No changes in the dependencies and we have + -- an existing result. + Just v -> return $ Just $ RunResult ChangedNothing old $ A v + _ -> return Nothing + _ -> return Nothing + case val of + Just res -> return res + Nothing -> do + (bs, (diags, res)) <- actionCatch + (do v <- op key file; liftIO $ evaluate $ force v) $ + \(e :: SomeException) -> pure (Nothing, ([ideErrorText file $ T.pack $ show e | not $ isBadDependency e],Nothing)) + modTime <- liftIO $ (currentValue =<<) <$> getValues state GetModificationTime file + (bs, res) <- case res of + Nothing -> do + staleV <- liftIO $ getValues state key file + pure $ case staleV of + Nothing -> (toShakeValue ShakeResult bs, Failed) + Just v -> case v of + Succeeded ver v -> (toShakeValue ShakeStale bs, Stale ver v) + Stale ver v -> (toShakeValue ShakeStale bs, Stale ver v) + Failed -> (toShakeValue ShakeResult bs, Failed) + Just v -> pure (maybe ShakeNoCutoff ShakeResult bs, Succeeded (vfsVersion =<< modTime) v) + liftIO $ setValues state key file res + updateFileDiagnostics file (Key key) extras $ map (\(_,y,z) -> (y,z)) diags + let eq = case (bs, fmap decodeShakeValue old) of + (ShakeResult a, Just (ShakeResult b)) -> a == b + (ShakeStale a, Just (ShakeStale b)) -> a == b + -- If we do not have a previous result + -- or we got ShakeNoCutoff we always return False. + _ -> False + return $ RunResult + (if eq then ChangedRecomputeSame else ChangedRecomputeDiff) + (encodeShakeValue bs) $ + A res + where + withProgressVar :: (Eq a, Hashable a) => Var (HMap.HashMap a Int) -> a -> Action b -> Action b + withProgressVar var file = actionBracket (f succ) (const $ f pred) . const + -- This functions are deliberately eta-expanded to avoid space leaks. + -- Do not remove the eta-expansion without profiling a session with at + -- least 1000 modifications. 
+ where f shift = modifyVar_ var $ \x -> evaluate $ HMap.insertWith (\_ x -> shift x) file (shift 0) x + +isSuccess :: RunResult (A v) -> Bool +isSuccess (RunResult _ _ (A Failed)) = False +isSuccess _ = True + +-- | Rule type, input file +data QDisk k = QDisk k NormalizedFilePath + deriving (Eq, Generic) + +instance Hashable k => Hashable (QDisk k) + +instance NFData k => NFData (QDisk k) + +instance Binary k => Binary (QDisk k) + +instance Show k => Show (QDisk k) where + show (QDisk k file) = + show k ++ "; " ++ fromNormalizedFilePath file + +type instance RuleResult (QDisk k) = Bool + +data OnDiskRule = OnDiskRule + { getHash :: Action BS.ByteString + -- This is used to figure out if the state on disk corresponds to the state in the Shake + -- database and we can therefore avoid rerunning. Often this can just be the file hash but + -- in some cases we can be more aggressive, e.g., for GHC interface files this can be the ABI hash which + -- is more stable than the hash of the interface file. + -- An empty bytestring indicates that the state on disk is invalid, e.g., files are missing. + -- We do not use a Maybe since we have to deal with encoding things into a ByteString anyway in the Shake DB. + , runRule :: Action (IdeResult BS.ByteString) + -- The actual rule code which produces the new hash (or Nothing if the rule failed) and the diagnostics. + } + +-- This is used by the DAML compiler for incremental builds. Right now this is not used by +-- ghcide itself but that might change in the future. +-- The reason why this code lives in ghcide and in particular in this module is that it depends quite heavily on +-- the internals of this module that we do not want to expose. +defineOnDisk + :: (Shake.ShakeValue k, RuleResult k ~ ()) + => (k -> NormalizedFilePath -> OnDiskRule) + -> Rules () +defineOnDisk act = addBuiltinRule noLint noIdentity $ + \(QDisk key file) (mbOld :: Maybe BS.ByteString) mode -> do + extras <- getShakeExtras + let OnDiskRule{..} = act key file + let validateHash h + | BS.null h = Nothing + | otherwise = Just h + let runAct = actionCatch runRule $ + \(e :: SomeException) -> pure ([ideErrorText file $ T.pack $ displayException e | not $ isBadDependency e], Nothing) + case mbOld of + Nothing -> do + (diags, mbHash) <- runAct + updateFileDiagnostics file (Key key) extras $ map (\(_,y,z) -> (y,z)) diags + pure $ RunResult ChangedRecomputeDiff (fromMaybe "" mbHash) (isJust mbHash) + Just old -> do + current <- validateHash <$> (actionCatch getHash $ \(_ :: SomeException) -> pure "") + if mode == RunDependenciesSame && Just old == current && not (BS.null old) + then + -- None of our dependencies changed, we’ve had a successful run before and + -- the state on disk matches the state in the Shake database. 
+ pure $ RunResult ChangedNothing (fromMaybe "" current) (isJust current) + else do + (diags, mbHash) <- runAct + updateFileDiagnostics file (Key key) extras $ map (\(_,y,z) -> (y,z)) diags + let change + | mbHash == Just old = ChangedRecomputeSame + | otherwise = ChangedRecomputeDiff + pure $ RunResult change (fromMaybe "" mbHash) (isJust mbHash) + +needOnDisk :: (Shake.ShakeValue k, RuleResult k ~ ()) => k -> NormalizedFilePath -> Action () +needOnDisk k file = do + successfull <- apply1 (QDisk k file) + liftIO $ unless successfull $ throwIO $ BadDependency (show k) + +needOnDisks :: (Shake.ShakeValue k, RuleResult k ~ ()) => k -> [NormalizedFilePath] -> Action () +needOnDisks k files = do + successfulls <- apply $ map (QDisk k) files + liftIO $ unless (and successfulls) $ throwIO $ BadDependency (show k) + +toShakeValue :: (BS.ByteString -> ShakeValue) -> Maybe BS.ByteString -> ShakeValue +toShakeValue = maybe ShakeNoCutoff + +data ShakeValue + = ShakeNoCutoff + -- ^ This is what we use when we get Nothing from + -- a rule. + | ShakeResult !BS.ByteString + -- ^ This is used both for `Failed` + -- as well as `Succeeded`. + | ShakeStale !BS.ByteString + deriving (Generic, Show) + +instance NFData ShakeValue + +encodeShakeValue :: ShakeValue -> BS.ByteString +encodeShakeValue = \case + ShakeNoCutoff -> BS.empty + ShakeResult r -> BS.cons 'r' r + ShakeStale r -> BS.cons 's' r + +decodeShakeValue :: BS.ByteString -> ShakeValue +decodeShakeValue bs = case BS.uncons bs of + Nothing -> ShakeNoCutoff + Just (x, xs) + | x == 'r' -> ShakeResult xs + | x == 's' -> ShakeStale xs + | otherwise -> error $ "Failed to parse shake value " <> show bs + + +updateFileDiagnostics :: MonadIO m + => NormalizedFilePath + -> Key + -> ShakeExtras + -> [(ShowDiagnostic,Diagnostic)] -- ^ current results + -> m () +updateFileDiagnostics fp k ShakeExtras{diagnostics, hiddenDiagnostics, publishedDiagnostics, state, debouncer, eventer} current = liftIO $ do + modTime <- (currentValue =<<) <$> getValues state GetModificationTime fp + let (currentShown, currentHidden) = partition ((== ShowDiag) . fst) current + uri = filePathToUri' fp + ver = vfsVersion =<< modTime + updateDiagnosticsWithForcing new store = do + store' <- evaluate $ setStageDiagnostics uri ver (T.pack $ show k) new store + new' <- evaluate $ getUriDiagnostics uri store' + return (store', new') + mask_ $ do + -- Mask async exceptions to ensure that updated diagnostics are always + -- published. Otherwise, we might never publish certain diagnostics if + -- an exception strikes between modifyVar but before + -- publishDiagnosticsNotification. + newDiags <- modifyVar diagnostics $ updateDiagnosticsWithForcing $ map snd currentShown + _ <- modifyVar hiddenDiagnostics $ updateDiagnosticsWithForcing $ map snd currentHidden + let uri = filePathToUri' fp + let delay = if null newDiags then 0.1 else 0 + registerEvent debouncer delay uri $ do + mask_ $ modifyVar_ publishedDiagnostics $ \published -> do + let lastPublish = HMap.lookupDefault [] uri published + when (lastPublish /= newDiags) $ + eventer $ publishDiagnosticsNotification (fromNormalizedUri uri) newDiags + pure $! 
HMap.insert uri newDiags published + +publishDiagnosticsNotification :: Uri -> [Diagnostic] -> LSP.FromServerMessage +publishDiagnosticsNotification uri diags = + LSP.NotPublishDiagnostics $ + LSP.NotificationMessage "2.0" LSP.TextDocumentPublishDiagnostics $ + LSP.PublishDiagnosticsParams uri (List diags) + +newtype Priority = Priority Double + +setPriority :: Priority -> Action () +setPriority (Priority p) = reschedule p + +sendEvent :: LSP.FromServerMessage -> Action () +sendEvent e = do + ShakeExtras{eventer} <- getShakeExtras + liftIO $ eventer e + +ideLogger :: IdeState -> Logger +ideLogger IdeState{shakeExtras=ShakeExtras{logger}} = logger + +actionLogger :: Action Logger +actionLogger = do + ShakeExtras{logger} <- getShakeExtras + return logger + + +getDiagnosticsFromStore :: StoreItem -> [Diagnostic] +getDiagnosticsFromStore (StoreItem _ diags) = concatMap SL.fromSortedList $ Map.elems diags + + +-- | Sets the diagnostics for a file and compilation step +-- if you want to clear the diagnostics call this with an empty list +setStageDiagnostics + :: NormalizedUri + -> TextDocumentVersion -- ^ the time that the file these diagnostics originate from was last edited + -> T.Text + -> [LSP.Diagnostic] + -> DiagnosticStore + -> DiagnosticStore +setStageDiagnostics uri ver stage diags ds = newDiagsStore where + -- When 'ver' is a new version, updateDiagnostics throws away diagnostics from all stages + -- This interacts bady with early cutoff, so we make sure to preserve diagnostics + -- from other stages when calling updateDiagnostics + -- But this means that updateDiagnostics cannot be called concurrently + -- for different stages anymore + updatedDiags = Map.insert (Just stage) (SL.toSortedList diags) oldDiags + oldDiags = case HMap.lookup uri ds of + Just (StoreItem _ byStage) -> byStage + _ -> Map.empty + newDiagsStore = updateDiagnostics ds uri ver updatedDiags + + +getAllDiagnostics :: + DiagnosticStore -> + [FileDiagnostic] +getAllDiagnostics = + concatMap (\(k,v) -> map (fromUri k,ShowDiag,) $ getDiagnosticsFromStore v) . HMap.toList + +getUriDiagnostics :: + NormalizedUri -> + DiagnosticStore -> + [LSP.Diagnostic] +getUriDiagnostics uri ds = + maybe [] getDiagnosticsFromStore $ + HMap.lookup uri ds + +filterDiagnostics :: + (NormalizedFilePath -> Bool) -> + DiagnosticStore -> + DiagnosticStore +filterDiagnostics keep = + HMap.filterWithKey (\uri _ -> maybe True (keep . toNormalizedFilePath') $ uriToFilePath' $ fromNormalizedUri uri) + +filterVersionMap + :: HMap.HashMap NormalizedUri (Set.Set TextDocumentVersion) + -> HMap.HashMap NormalizedUri (Map TextDocumentVersion a) + -> HMap.HashMap NormalizedUri (Map TextDocumentVersion a) +filterVersionMap = + HMap.intersectionWith $ \versionsToKeep versionMap -> Map.restrictKeys versionMap versionsToKeep + +updatePositionMapping :: IdeState -> VersionedTextDocumentIdentifier -> List TextDocumentContentChangeEvent -> IO () +updatePositionMapping IdeState{shakeExtras = ShakeExtras{positionMapping}} VersionedTextDocumentIdentifier{..} (List changes) = do + modifyVar_ positionMapping $ \allMappings -> do + let uri = toNormalizedUri _uri + let mappingForUri = HMap.lookupDefault Map.empty uri allMappings + let (_, updatedMapping) = + -- Very important to use mapAccum here so that the tails of + -- each mapping can be shared, otherwise quadratic space is + -- used which is evident in long running sessions. 
+ Map.mapAccumRWithKey (\acc _k (delta, _) -> let new = addDelta delta acc in (new, (delta, acc))) + zeroMapping + (Map.insert _version (shared_change, zeroMapping) mappingForUri) + pure $! HMap.insert uri updatedMapping allMappings + where + shared_change = mkDelta changes diff --git a/ghcide/src/Development/IDE/Core/Tracing.hs b/ghcide/src/Development/IDE/Core/Tracing.hs new file mode 100644 index 00000000000..698115585a7 --- /dev/null +++ b/ghcide/src/Development/IDE/Core/Tracing.hs @@ -0,0 +1,183 @@ +{-# LANGUAGE DataKinds #-} +module Development.IDE.Core.Tracing + ( otTracedHandler + , otTracedAction + , startTelemetry + , measureMemory + , getInstrumentCached + ) +where + +import Control.Concurrent.Async (Async, async) +import Control.Concurrent.Extra (Var, modifyVar_, newVar, + readVar, threadDelay) +import Control.Exception (evaluate) +import Control.Exception.Safe (catch, SomeException) +import Control.Monad (unless, forM_, forever, (>=>)) +import Control.Monad.Extra (whenJust) +import Control.Seq (r0, seqList, seqTuple2, using) +import Data.Dynamic (Dynamic) +import qualified Data.HashMap.Strict as HMap +import Data.IORef (modifyIORef', newIORef, + readIORef, writeIORef) +import Data.List (nub) +import Data.String (IsString (fromString)) +import Development.IDE.Core.RuleTypes (GhcSession (GhcSession), + GhcSessionDeps (GhcSessionDeps), + GhcSessionIO (GhcSessionIO)) +import Development.IDE.Types.Logger (logInfo, Logger, logDebug) +import Development.IDE.Types.Shake (Key (..), Value, Values) +import Development.Shake (Action, actionBracket, liftIO) +import Foreign.Storable (Storable (sizeOf)) +import HeapSize (recursiveSize, runHeapsize) +import Language.Haskell.LSP.Types (NormalizedFilePath, + fromNormalizedFilePath) +import Numeric.Natural (Natural) +import OpenTelemetry.Eventlog (addEvent, beginSpan, endSpan, + mkValueObserver, observe, + setTag, withSpan, withSpan_) + +-- | Trace a handler using OpenTelemetry. Adds various useful info into tags in the OpenTelemetry span. +otTracedHandler + :: String -- ^ Message type + -> String -- ^ Message label + -> IO a + -> IO a +otTracedHandler requestType label act = + let !name = + if null label + then requestType + else requestType <> ":" <> show label + -- Add an event so all requests can be quickly seen in the viewer without searching + in withSpan (fromString name) (\sp -> addEvent sp "" (fromString $ name <> " received") >> act) + +-- | Trace a Shake action using opentelemetry. +otTracedAction + :: Show k + => k -- ^ The Action's Key + -> NormalizedFilePath -- ^ Path to the file the action was run for + -> (a -> Bool) -- ^ Did this action succeed? + -> Action a -- ^ The action + -> Action a +otTracedAction key file success act = actionBracket + (do + sp <- beginSpan (fromString (show key)) + setTag sp "File" (fromString $ fromNormalizedFilePath file) + return sp + ) + endSpan + (\sp -> do + res <- act + unless (success res) $ setTag sp "error" "1" + return res) + +startTelemetry :: Logger -> Var Values -> IO () +startTelemetry logger stateRef = do + instrumentFor <- getInstrumentCached + mapCountInstrument <- mkValueObserver "values map count" + + _ <- regularly (1 * seconds) $ + withSpan_ "Measure length" $ + readVar stateRef + >>= observe mapCountInstrument . 
length + + _ <- regularly (1 * seconds) $ do + values <- readVar stateRef + let keys = nub + $ Key GhcSession : Key GhcSessionDeps + : [ k | (_,k) <- HMap.keys values + -- do GhcSessionIO last since it closes over stateRef itself + , k /= Key GhcSessionIO] + ++ [Key GhcSessionIO] + !groupedForSharing <- evaluate (keys `using` seqList r0) + measureMemory logger [groupedForSharing] instrumentFor stateRef + `catch` \(e::SomeException) -> + logInfo logger ("MEMORY PROFILING ERROR: " <> fromString (show e)) + return () + where + seconds = 1000000 + + regularly :: Int -> IO () -> IO (Async ()) + regularly delay act = async $ forever (act >> threadDelay delay) + +{-# ANN startTelemetry ("HLint: ignore Use nubOrd" :: String) #-} + +type OurValueObserver = Int -> IO () + +getInstrumentCached :: IO (Maybe Key -> IO OurValueObserver) +getInstrumentCached = do + instrumentMap <- newVar HMap.empty + mapBytesInstrument <- mkValueObserver "value map size_bytes" + + let instrumentFor k = do + mb_inst <- HMap.lookup k <$> readVar instrumentMap + case mb_inst of + Nothing -> do + instrument <- mkValueObserver (fromString (show k ++ " size_bytes")) + modifyVar_ instrumentMap (return . HMap.insert k instrument) + return $ observe instrument + Just v -> return $ observe v + return $ maybe (return $ observe mapBytesInstrument) instrumentFor + +whenNothing :: IO () -> IO (Maybe a) -> IO () +whenNothing act mb = mb >>= f + where f Nothing = act + f Just{} = return () + +measureMemory + :: Logger + -> [[Key]] -- ^ Grouping of keys for the sharing-aware analysis + -> (Maybe Key -> IO OurValueObserver) + -> Var Values + -> IO () +measureMemory logger groups instrumentFor stateRef = withSpan_ "Measure Memory" $ do + values <- readVar stateRef + valuesSizeRef <- newIORef $ Just 0 + let !groupsOfGroupedValues = groupValues values + logDebug logger "STARTING MEMORY PROFILING" + forM_ groupsOfGroupedValues $ \groupedValues -> do + keepGoing <- readIORef valuesSizeRef + whenJust keepGoing $ \_ -> + whenNothing (writeIORef valuesSizeRef Nothing) $ + repeatUntilJust 3 $ do + -- logDebug logger (fromString $ show $ map fst groupedValues) + runHeapsize 25000000 $ + forM_ groupedValues $ \(k,v) -> withSpan ("Measure " <> (fromString $ show k)) $ \sp -> do + acc <- liftIO $ newIORef 0 + observe <- liftIO $ instrumentFor $ Just k + mapM_ (recursiveSize >=> \x -> liftIO (modifyIORef' acc (+ x))) v + size <- liftIO $ readIORef acc + let !byteSize = sizeOf (undefined :: Word) * size + setTag sp "size" (fromString (show byteSize ++ " bytes")) + () <- liftIO $ observe byteSize + liftIO $ modifyIORef' valuesSizeRef (fmap (+ byteSize)) + + mbValuesSize <- readIORef valuesSizeRef + case mbValuesSize of + Just valuesSize -> do + observe <- instrumentFor Nothing + observe valuesSize + logDebug logger "MEMORY PROFILING COMPLETED" + Nothing -> + logInfo logger "Memory profiling could not be completed: increase the size of your nursery (+RTS -Ax) and try again" + + where + groupValues :: Values -> [ [(Key, [Value Dynamic])] ] + groupValues values = + let !groupedValues = + [ [ (k, vv) + | k <- groupKeys + , let vv = [ v | ((_,k'), v) <- HMap.toList values , k == k'] + ] + | groupKeys <- groups + ] + -- force the spine of the nested lists + in groupedValues `using` seqList (seqList (seqTuple2 r0 (seqList r0))) + +repeatUntilJust :: Monad m => Natural -> m (Maybe a) -> m (Maybe a) +repeatUntilJust 0 _ = return Nothing +repeatUntilJust nattempts action = do + res <- action + case res of + Nothing -> repeatUntilJust (nattempts-1) action + Just{} 
-> return res diff --git a/ghcide/src/Development/IDE/GHC/CPP.hs b/ghcide/src/Development/IDE/GHC/CPP.hs new file mode 100644 index 00000000000..afdab484d79 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/CPP.hs @@ -0,0 +1,228 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +-- Copied from https://github.com/ghc/ghc/blob/master/compiler/main/DriverPipeline.hs on 14 May 2019 +-- Requested to be exposed at https://gitlab.haskell.org/ghc/ghc/merge_requests/944. +-- Update the above MR got merged to master on 31 May 2019. When it becomes avialable to ghc-lib, this file can be removed. + +{- HLINT ignore -} -- since copied from upstream + +{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation, BangPatterns, MultiWayIf #-} +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} +#include "ghc-api-version.h" + +----------------------------------------------------------------------------- +-- +-- GHC Driver +-- +-- (c) The University of Glasgow 2005 +-- +----------------------------------------------------------------------------- + +module Development.IDE.GHC.CPP(doCpp, addOptP) +where + +import Development.IDE.GHC.Compat +import Packages +import SysTools +import Module +import Panic +import FileCleanup +#if MIN_GHC_API_VERSION(8,8,2) +import LlvmCodeGen (llvmVersionList) +#elif MIN_GHC_API_VERSION(8,8,0) +import LlvmCodeGen (LlvmVersion (..)) +#endif +#if MIN_GHC_API_VERSION (8,10,0) +import Fingerprint +import ToolSettings +#endif + +import System.Directory +import System.FilePath +import Control.Monad +import System.Info +import Data.List ( intercalate ) +import Data.Maybe +import Data.Version + + + +doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO () +doCpp dflags raw input_fn output_fn = do + let hscpp_opts = picPOpts dflags + let cmdline_include_paths = includePaths dflags + + pkg_include_dirs <- getPackageIncludePath dflags [] + let include_paths_global = foldr (\ x xs -> ("-I" ++ x) : xs) [] + (includePathsGlobal cmdline_include_paths ++ pkg_include_dirs) + let include_paths_quote = foldr (\ x xs -> ("-iquote" ++ x) : xs) [] + (includePathsQuote cmdline_include_paths) + let include_paths = include_paths_quote ++ include_paths_global + + let verbFlags = getVerbFlags dflags + + let cpp_prog args | raw = SysTools.runCpp dflags args +#if MIN_GHC_API_VERSION(8,10,0) + | otherwise = SysTools.runCc Nothing +#else + | otherwise = SysTools.runCc +#endif + dflags (SysTools.Option "-E" : args) + + let target_defs = + -- NEIL: Patched to use System.Info instead of constants from CPP + [ "-D" ++ os ++ "_BUILD_OS", + "-D" ++ arch ++ "_BUILD_ARCH", + "-D" ++ os ++ "_HOST_OS", + "-D" ++ arch ++ "_HOST_ARCH" ] + -- remember, in code we *compile*, the HOST is the same our TARGET, + -- and BUILD is the same as our HOST. 
+ + let sse_defs = + [ "-D__SSE__" | isSseEnabled dflags ] ++ + [ "-D__SSE2__" | isSse2Enabled dflags ] ++ + [ "-D__SSE4_2__" | isSse4_2Enabled dflags ] + + let avx_defs = + [ "-D__AVX__" | isAvxEnabled dflags ] ++ + [ "-D__AVX2__" | isAvx2Enabled dflags ] ++ + [ "-D__AVX512CD__" | isAvx512cdEnabled dflags ] ++ + [ "-D__AVX512ER__" | isAvx512erEnabled dflags ] ++ + [ "-D__AVX512F__" | isAvx512fEnabled dflags ] ++ + [ "-D__AVX512PF__" | isAvx512pfEnabled dflags ] + + backend_defs <- getBackendDefs dflags + + let th_defs = [ "-D__GLASGOW_HASKELL_TH__" ] + -- Default CPP defines in Haskell source + ghcVersionH <- getGhcVersionPathName dflags + let hsSourceCppOpts = [ "-include", ghcVersionH ] + + -- MIN_VERSION macros + let uids = explicitPackages (pkgState dflags) + pkgs = catMaybes (map (lookupPackage dflags) uids) + mb_macro_include <- + if not (null pkgs) && gopt Opt_VersionMacros dflags + then do macro_stub <- newTempName dflags TFL_CurrentModule "h" + writeFile macro_stub (generatePackageVersionMacros pkgs) + -- Include version macros for every *exposed* package. + -- Without -hide-all-packages and with a package database + -- size of 1000 packages, it takes cpp an estimated 2 + -- milliseconds to process this file. See #10970 + -- comment 8. + return [SysTools.FileOption "-include" macro_stub] + else return [] + + cpp_prog ( map SysTools.Option verbFlags + ++ map SysTools.Option include_paths + ++ map SysTools.Option hsSourceCppOpts + ++ map SysTools.Option target_defs + ++ map SysTools.Option backend_defs + ++ map SysTools.Option th_defs + ++ map SysTools.Option hscpp_opts + ++ map SysTools.Option sse_defs + ++ map SysTools.Option avx_defs + ++ mb_macro_include + -- Set the language mode to assembler-with-cpp when preprocessing. This + -- alleviates some of the C99 macro rules relating to whitespace and the hash + -- operator, which we tend to abuse. Clang in particular is not very happy + -- about this. + ++ [ SysTools.Option "-x" + , SysTools.Option "assembler-with-cpp" + , SysTools.Option input_fn + -- We hackily use Option instead of FileOption here, so that the file + -- name is not back-slashed on Windows. cpp is capable of + -- dealing with / in filenames, so it works fine. Furthermore + -- if we put in backslashes, cpp outputs #line directives + -- with *double* backslashes. And that in turn means that + -- our error messages get double backslashes in them. + -- In due course we should arrange that the lexer deals + -- with these \\ escapes properly. 
+                       , SysTools.Option "-o"
+                       , SysTools.FileOption "" output_fn
+                       ])
+
+getBackendDefs :: DynFlags -> IO [String]
+getBackendDefs dflags | hscTarget dflags == HscLlvm = do
+    llvmVer <- figureLlvmVersion dflags
+    return $ case llvmVer of
+#if MIN_GHC_API_VERSION(8,8,2)
+               Just v
+                | [m] <- llvmVersionList v -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (m, 0) ]
+                | m:n:_ <- llvmVersionList v -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (m, n) ]
+#elif MIN_GHC_API_VERSION(8,8,0)
+               Just (LlvmVersion n) -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (n,0) ]
+               Just (LlvmVersionOld m n) -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (m,n) ]
+#else
+               Just n -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format n ]
+#endif
+               _ -> []
+  where
+    format (major, minor)
+      | minor >= 100 = error "getBackendDefs: Unsupported minor version"
+      | otherwise = show $ (100 * major + minor :: Int) -- Contract is Int
+
+getBackendDefs _ =
+    return []
+
+addOptP :: String -> DynFlags -> DynFlags
+#if MIN_GHC_API_VERSION (8,10,0)
+addOptP f = alterToolSettings $ \s -> s
+          { toolSettings_opt_P = f : toolSettings_opt_P s
+          , toolSettings_opt_P_fingerprint = fingerprintStrings (f : toolSettings_opt_P s)
+          }
+  where
+    fingerprintStrings ss = fingerprintFingerprints $ map fingerprintString ss
+    alterToolSettings f dynFlags = dynFlags { toolSettings = f (toolSettings dynFlags) }
+#else
+addOptP opt = onSettings (onOptP (opt:))
+  where
+    onSettings f x = x{settings = f $ settings x}
+    onOptP f x = x{sOpt_P = f $ sOpt_P x}
+#endif
+
+-- ---------------------------------------------------------------------------
+-- Macros (cribbed from Cabal)
+
+generatePackageVersionMacros :: [PackageConfig] -> String
+generatePackageVersionMacros pkgs = concat
+    -- Do not add any C-style comments. See #3389.
+    [ generateMacros "" pkgname version
+    | pkg <- pkgs
+    , let version = packageVersion pkg
+          pkgname = map fixchar (packageNameString pkg)
+    ]
+
+fixchar :: Char -> Char
+fixchar '-' = '_'
+fixchar c = c
+
+generateMacros :: String -> String -> Version -> String
+generateMacros prefix name version =
+    concat
+    ["#define ", prefix, "VERSION_",name," ",show (showVersion version),"\n"
+    ,"#define MIN_", prefix, "VERSION_",name,"(major1,major2,minor) (\\\n"
+    ,"  (major1) < ",major1," || \\\n"
+    ,"  (major1) == ",major1," && (major2) < ",major2," || \\\n"
+    ,"  (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")"
+    ,"\n\n"
+    ]
+  where
+    (major1:major2:minor:_) = map show (versionBranch version ++ repeat 0)
+
+
+-- | Find out path to @ghcversion.h@ file
+getGhcVersionPathName :: DynFlags -> IO FilePath
+getGhcVersionPathName dflags = do
+  candidates <- case ghcVersionFile dflags of
+    Just path -> return [path]
+    Nothing -> (map (</> "ghcversion.h")) <$>
+               (getPackageIncludePath dflags [toInstalledUnitId rtsUnitId])
+
+  found <- filterM doesFileExist candidates
+  case found of
+      [] -> throwGhcExceptionIO (InstallationError
+                                    ("ghcversion.h missing; tried: "
+                                      ++ intercalate ", " candidates))
+      (x:_) -> return x
diff --git a/ghcide/src/Development/IDE/GHC/Compat.hs b/ghcide/src/Development/IDE/GHC/Compat.hs
new file mode 100644
index 00000000000..8091bdb9c1f
--- /dev/null
+++ b/ghcide/src/Development/IDE/GHC/Compat.hs
@@ -0,0 +1,285 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE PatternSynonyms #-} +{-# OPTIONS -Wno-dodgy-imports -Wno-incomplete-uni-patterns #-} +#include "ghc-api-version.h" + +-- | Attempt at hiding the GHC version differences we can. +module Development.IDE.GHC.Compat( + HieFileResult(..), + HieFile(..), + NameCacheUpdater(..), + hieExportNames, + mkHieFile, + mkHieFile', + enrichHie, + RefMap, + writeHieFile, + readHieFile, + supportsHieFiles, + setHieDir, + dontWriteHieFiles, +#if !MIN_GHC_API_VERSION(8,8,0) + ml_hie_file, + addBootSuffixLocnOut, +#endif + hPutStringBuffer, + addIncludePathsQuote, + getModuleHash, + getPackageName, + setUpTypedHoles, + GHC.ModLocation, + Module.addBootSuffix, + pattern ModLocation, + pattern ExposePackage, + HasSrcSpan, + getLoc, + upNameCache, + disableWarningsAsErrors, + AvailInfo, + tcg_exports, + +#if MIN_GHC_API_VERSION(8,10,0) + module GHC.Hs.Extension, + module LinkerTypes, +#else + module HsExtension, + noExtField, + linkableTime, +#endif + + module GHC, + module DynFlags, + initializePlugins, + applyPluginsParsedResultAction, + module Compat.HieTypes, + module Compat.HieUtils, + + ) where + +#if MIN_GHC_API_VERSION(8,10,0) +import LinkerTypes +#endif + +import StringBuffer +import qualified DynFlags +import DynFlags hiding (ExposePackage) +import Fingerprint (Fingerprint) +import qualified Module +import Packages +import Data.IORef +import HscTypes +import NameCache +import qualified Data.ByteString as BS +import MkIface +import TcRnTypes +import Compat.HieAst (mkHieFile,enrichHie) +import Compat.HieBin +import Compat.HieTypes +import Compat.HieUtils + +#if MIN_GHC_API_VERSION(8,10,0) +import GHC.Hs.Extension +#else +import HsExtension +#endif + +import qualified GHC +import GHC hiding ( + ModLocation, + HasSrcSpan, + lookupName, + getLoc + ) +import Avail +#if MIN_GHC_API_VERSION(8,8,0) +import Data.List (foldl') +#else +import Data.List (foldl', isSuffixOf) +#endif + +import DynamicLoading +import Plugins (Plugin(parsedResultAction), withPlugins) +import Data.Map.Strict (Map) + +#if !MIN_GHC_API_VERSION(8,8,0) +import System.FilePath ((-<.>)) +#endif + +#if !MIN_GHC_API_VERSION(8,8,0) +import qualified EnumSet + +import System.IO +import Foreign.ForeignPtr + + +hPutStringBuffer :: Handle -> StringBuffer -> IO () +hPutStringBuffer hdl (StringBuffer buf len cur) + = withForeignPtr (plusForeignPtr buf cur) $ \ptr -> + hPutBuf hdl ptr len + +#endif + +#if !MIN_GHC_API_VERSION(8,10,0) +noExtField :: NoExt +noExtField = noExt +#endif + +supportsHieFiles :: Bool +supportsHieFiles = True + +hieExportNames :: HieFile -> [(SrcSpan, Name)] +hieExportNames = nameListFromAvails . 
hie_exports + +#if !MIN_GHC_API_VERSION(8,8,0) +ml_hie_file :: GHC.ModLocation -> FilePath +ml_hie_file ml + | "boot" `isSuffixOf ` ml_hi_file ml = ml_hi_file ml -<.> ".hie-boot" + | otherwise = ml_hi_file ml -<.> ".hie" +#endif + +upNameCache :: IORef NameCache -> (NameCache -> (NameCache, c)) -> IO c +#if !MIN_GHC_API_VERSION(8,8,0) +upNameCache ref upd_fn + = atomicModifyIORef' ref upd_fn +#else +upNameCache = updNameCache +#endif + + +type RefMap = Map Identifier [(Span, IdentifierDetails Type)] + +mkHieFile' :: ModSummary + -> [AvailInfo] + -> HieASTs Type + -> BS.ByteString + -> Hsc HieFile +mkHieFile' ms exports asts src = do + let Just src_file = ml_hs_file $ ms_location ms + (asts',arr) = compressTypes asts + return $ HieFile + { hie_hs_file = src_file + , hie_module = ms_mod ms + , hie_types = arr + , hie_asts = asts' + -- mkIfaceExports sorts the AvailInfos for stability + , hie_exports = mkIfaceExports exports + , hie_hs_src = src + } + +addIncludePathsQuote :: FilePath -> DynFlags -> DynFlags +addIncludePathsQuote path x = x{includePaths = f $ includePaths x} + where f i = i{includePathsQuote = path : includePathsQuote i} + +pattern ModLocation :: Maybe FilePath -> FilePath -> FilePath -> GHC.ModLocation +pattern ModLocation a b c <- +#if MIN_GHC_API_VERSION(8,8,0) + GHC.ModLocation a b c _ where ModLocation a b c = GHC.ModLocation a b c "" +#else + GHC.ModLocation a b c where ModLocation a b c = GHC.ModLocation a b c +#endif + +setHieDir :: FilePath -> DynFlags -> DynFlags +setHieDir _f d = +#if MIN_GHC_API_VERSION(8,8,0) + d { hieDir = Just _f} +#else + d +#endif + +dontWriteHieFiles :: DynFlags -> DynFlags +dontWriteHieFiles d = +#if MIN_GHC_API_VERSION(8,8,0) + gopt_unset d Opt_WriteHie +#else + d +#endif + +setUpTypedHoles ::DynFlags -> DynFlags +setUpTypedHoles df + = flip gopt_unset Opt_AbstractRefHoleFits -- too spammy +#if MIN_GHC_API_VERSION(8,8,0) + $ flip gopt_unset Opt_ShowDocsOfHoleFits -- not used +#endif + $ flip gopt_unset Opt_ShowMatchesOfHoleFits -- nice but broken (forgets module qualifiers) + $ flip gopt_unset Opt_ShowProvOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeAppOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeAppVarsOfHoleFits -- not used + $ flip gopt_unset Opt_ShowTypeOfHoleFits -- massively simplifies parsing + $ flip gopt_set Opt_SortBySubsumHoleFits -- very nice and fast enough in most cases + $ flip gopt_unset Opt_SortValidHoleFits + $ flip gopt_unset Opt_UnclutterValidHoleFits + $ df + { refLevelHoleFits = Just 1 -- becomes slow at higher levels + , maxRefHoleFits = Just 10 -- quantity does not impact speed + , maxValidHoleFits = Nothing -- quantity does not impact speed + } + + +nameListFromAvails :: [AvailInfo] -> [(SrcSpan, Name)] +nameListFromAvails as = + map (\n -> (nameSrcSpan n, n)) (concatMap availNames as) + +#if MIN_GHC_API_VERSION(8,8,0) + +type HasSrcSpan = GHC.HasSrcSpan +getLoc :: HasSrcSpan a => a -> SrcSpan +getLoc = GHC.getLoc + +#else + +class HasSrcSpan a where + getLoc :: a -> SrcSpan +instance HasSrcSpan Name where + getLoc = nameSrcSpan +instance HasSrcSpan (GenLocated SrcSpan a) where + getLoc = GHC.getLoc + +-- | Add the @-boot@ suffix to all output file paths associated with the +-- module, not including the input file itself +addBootSuffixLocnOut :: GHC.ModLocation -> GHC.ModLocation +addBootSuffixLocnOut locn + = locn { ml_hi_file = Module.addBootSuffix (ml_hi_file locn) + , ml_obj_file = Module.addBootSuffix (ml_obj_file locn) + } +#endif + +getModuleHash :: ModIface -> Fingerprint +#if 
MIN_GHC_API_VERSION(8,10,0) +getModuleHash = mi_mod_hash . mi_final_exts +#else +getModuleHash = mi_mod_hash +#endif + +getPackageName :: DynFlags -> Module.InstalledUnitId -> Maybe PackageName +getPackageName dfs i = packageName <$> lookupPackage dfs (Module.DefiniteUnitId (Module.DefUnitId i)) + +disableWarningsAsErrors :: DynFlags -> DynFlags +disableWarningsAsErrors df = + flip gopt_unset Opt_WarnIsError $ foldl' wopt_unset_fatal df [toEnum 0 ..] + +#if !MIN_GHC_API_VERSION(8,8,0) +wopt_unset_fatal :: DynFlags -> WarningFlag -> DynFlags +wopt_unset_fatal dfs f + = dfs { fatalWarningFlags = EnumSet.delete f (fatalWarningFlags dfs) } +#endif + +applyPluginsParsedResultAction :: HscEnv -> DynFlags -> ModSummary -> ApiAnns -> ParsedSource -> IO ParsedSource +applyPluginsParsedResultAction env dflags ms hpm_annotations parsed = do + -- Apply parsedResultAction of plugins + let applyPluginAction p opts = parsedResultAction p opts ms + fmap hpm_module $ + runHsc env $ withPlugins dflags applyPluginAction + (HsParsedModule parsed [] hpm_annotations) + +pattern ExposePackage :: String -> PackageArg -> ModRenaming -> PackageFlag +-- https://github.com/facebook/fbghc +#ifdef __FACEBOOK_HASKELL__ +pattern ExposePackage s a mr <- DynFlags.ExposePackage s a _ mr +#else +pattern ExposePackage s a mr = DynFlags.ExposePackage s a mr +#endif diff --git a/ghcide/src/Development/IDE/GHC/Error.hs b/ghcide/src/Development/IDE/GHC/Error.hs new file mode 100644 index 00000000000..14caa1174cd --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Error.hs @@ -0,0 +1,195 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +module Development.IDE.GHC.Error + ( + -- * Producing Diagnostic values + diagFromErrMsgs + , diagFromErrMsg + , diagFromString + , diagFromStrings + , diagFromGhcException + , catchSrcErrors + + -- * utilities working with spans + , srcSpanToLocation + , srcSpanToRange + , realSrcSpanToRange + , realSrcLocToPosition + , srcSpanToFilename + , zeroSpan + , realSpan + , isInsideSrcSpan + , noSpan + + -- * utilities working with severities + , toDSeverity + ) where + +import Development.IDE.Types.Diagnostics as D +import qualified Data.Text as T +import Data.Maybe +import Development.IDE.Types.Location +import Development.IDE.GHC.Orphans() +import qualified FastString as FS +import GHC +import Bag +import HscTypes +import Panic +import ErrUtils +import SrcLoc +import qualified Outputable as Out + + + +diagFromText :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> T.Text -> FileDiagnostic +diagFromText diagSource sev loc msg = (toNormalizedFilePath' $ fromMaybe noFilePath $ srcSpanToFilename loc,ShowDiag,) + Diagnostic + { _range = fromMaybe noRange $ srcSpanToRange loc + , _severity = Just sev + , _source = Just diagSource -- not shown in the IDE, but useful for ghcide developers + , _message = msg + , _code = Nothing + , _relatedInformation = Nothing + , _tags = Nothing + } + +-- | Produce a GHC-style error from a source span and a message. 
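To illustrate diagFromText above, a small sketch that fabricates a warning diagnostic from a hand-made span; the file name and message are placeholders, and T.pack is used so as not to assume OverloadedStrings:

    -- Hypothetical: a warning pinned to line 1, column 1 of Foo.hs.
    -- realSrcLocSpan and mkRealSrcLoc come from GHC's SrcLoc module, imported above.
    demoDiag :: FileDiagnostic
    demoDiag =
        diagFromText (T.pack "editor-demo") DsWarning
            (RealSrcSpan (realSrcLocSpan (mkRealSrcLoc (FS.fsLit "Foo.hs") 1 1)))
            (T.pack "an example message")

diagFromErrMsg, described just above, produces the same shape of value starting from a real GHC ErrMsg.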
+diagFromErrMsg :: T.Text -> DynFlags -> ErrMsg -> [FileDiagnostic] +diagFromErrMsg diagSource dflags e = + [ diagFromText diagSource sev (errMsgSpan e) + $ T.pack $ formatErrorWithQual dflags e + | Just sev <- [toDSeverity $ errMsgSeverity e]] + +formatErrorWithQual :: DynFlags -> ErrMsg -> String +formatErrorWithQual dflags e = + Out.showSDoc dflags + $ Out.withPprStyle (Out.mkErrStyle dflags $ errMsgContext e) + $ ErrUtils.formatErrDoc dflags + $ ErrUtils.errMsgDoc e + +diagFromErrMsgs :: T.Text -> DynFlags -> Bag ErrMsg -> [FileDiagnostic] +diagFromErrMsgs diagSource dflags = concatMap (diagFromErrMsg diagSource dflags) . bagToList + +-- | Convert a GHC SrcSpan to a DAML compiler Range +srcSpanToRange :: SrcSpan -> Maybe Range +srcSpanToRange (UnhelpfulSpan _) = Nothing +srcSpanToRange (RealSrcSpan real) = Just $ realSrcSpanToRange real + +realSrcSpanToRange :: RealSrcSpan -> Range +realSrcSpanToRange real = + Range (realSrcLocToPosition $ realSrcSpanStart real) + (realSrcLocToPosition $ realSrcSpanEnd real) + +realSrcLocToPosition :: RealSrcLoc -> Position +realSrcLocToPosition real = + Position (srcLocLine real - 1) (srcLocCol real - 1) + +-- | Extract a file name from a GHC SrcSpan (use message for unhelpful ones) +-- FIXME This may not be an _absolute_ file name, needs fixing. +srcSpanToFilename :: SrcSpan -> Maybe FilePath +srcSpanToFilename (UnhelpfulSpan _) = Nothing +srcSpanToFilename (RealSrcSpan real) = Just $ FS.unpackFS $ srcSpanFile real + +srcSpanToLocation :: SrcSpan -> Maybe Location +srcSpanToLocation src = do + fs <- srcSpanToFilename src + rng <- srcSpanToRange src + -- important that the URI's we produce have been properly normalized, otherwise they point at weird places in VS Code + pure $ Location (fromNormalizedUri $ filePathToUri' $ toNormalizedFilePath' fs) rng + +isInsideSrcSpan :: Position -> SrcSpan -> Bool +p `isInsideSrcSpan` r = case srcSpanToRange r of + Just (Range sp ep) -> sp <= p && p <= ep + _ -> False + +-- | Convert a GHC severity to a DAML compiler Severity. Severities below +-- "Warning" level are dropped (returning Nothing). +toDSeverity :: GHC.Severity -> Maybe D.DiagnosticSeverity +toDSeverity SevOutput = Nothing +toDSeverity SevInteractive = Nothing +toDSeverity SevDump = Nothing +toDSeverity SevInfo = Just DsInfo +toDSeverity SevWarning = Just DsWarning +toDSeverity SevError = Just DsError +toDSeverity SevFatal = Just DsError + + +-- | Produce a bag of GHC-style errors (@ErrorMessages@) from the given +-- (optional) locations and message strings. +diagFromStrings :: T.Text -> D.DiagnosticSeverity -> [(SrcSpan, String)] -> [FileDiagnostic] +diagFromStrings diagSource sev = concatMap (uncurry (diagFromString diagSource sev)) + +-- | Produce a GHC-style error from a source span and a message. +diagFromString :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> String -> [FileDiagnostic] +diagFromString diagSource sev sp x = [diagFromText diagSource sev sp $ T.pack x] + + +-- | Produces an "unhelpful" source span with the given string. +noSpan :: String -> SrcSpan +noSpan = UnhelpfulSpan . 
FS.fsLit + + +-- | creates a span with zero length in the filename of the argument passed +zeroSpan :: FS.FastString -- ^ file path of span + -> RealSrcSpan +zeroSpan file = realSrcLocSpan (mkRealSrcLoc file 1 1) + +realSpan :: SrcSpan + -> Maybe RealSrcSpan +realSpan = \case + RealSrcSpan r -> Just r + UnhelpfulSpan _ -> Nothing + + +-- | Catch the errors thrown by GHC (SourceErrors and +-- compiler-internal exceptions like Panic or InstallationError), and turn them into +-- diagnostics +catchSrcErrors :: DynFlags -> T.Text -> IO a -> IO (Either [FileDiagnostic] a) +catchSrcErrors dflags fromWhere ghcM = do + handleGhcException (ghcExceptionToDiagnostics dflags) $ + handleSourceError (sourceErrorToDiagnostics dflags) $ + Right <$> ghcM + where + ghcExceptionToDiagnostics dflags = return . Left . diagFromGhcException fromWhere dflags + sourceErrorToDiagnostics dflags = return . Left . diagFromErrMsgs fromWhere dflags . srcErrorMessages + + +diagFromGhcException :: T.Text -> DynFlags -> GhcException -> [FileDiagnostic] +diagFromGhcException diagSource dflags exc = diagFromString diagSource DsError (noSpan "") (showGHCE dflags exc) + +showGHCE :: DynFlags -> GhcException -> String +showGHCE dflags exc = case exc of + Signal n + -> "Signal: " <> show n + + Panic s + -> unwords ["Compilation Issue:", s, "\n", requestReport] + PprPanic s sdoc + -> unlines ["Compilation Issue", s,"" + , Out.showSDoc dflags sdoc + , requestReport ] + + Sorry s + -> "Unsupported feature: " <> s + PprSorry s sdoc + -> unlines ["Unsupported feature: ", s,"" + , Out.showSDoc dflags sdoc] + + + ---------- errors below should not happen at all -------- + InstallationError str + -> "Installation error: " <> str + + UsageError str -- should never happen + -> unlines ["Unexpected usage error", str] + + CmdLineError str + -> unlines ["Unexpected usage error", str] + + ProgramError str + -> "Program error: " <> str + PprProgramError str sdoc -> + unlines ["Program error:", str,"" + , Out.showSDoc dflags sdoc] + where + requestReport = "Please report this bug to the compiler authors." diff --git a/ghcide/src/Development/IDE/GHC/Orphans.hs b/ghcide/src/Development/IDE/GHC/Orphans.hs new file mode 100644 index 00000000000..135bbb211f2 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Orphans.hs @@ -0,0 +1,112 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +{-# LANGUAGE FlexibleInstances #-} +{-# OPTIONS_GHC -Wno-orphans #-} +#include "ghc-api-version.h" + +-- | Orphan instances for GHC. +-- Note that the 'NFData' instances may not be law abiding. +module Development.IDE.GHC.Orphans() where + +import Bag +import Control.DeepSeq +import Data.Hashable +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Util +import GHC () +import GhcPlugins +import qualified StringBuffer as SB + + +-- Orphan instances for types from the GHC API. +instance Show CoreModule where show = prettyPrint +instance NFData CoreModule where rnf = rwhnf +instance Show CgGuts where show = prettyPrint . 
cg_module +instance NFData CgGuts where rnf = rwhnf +instance Show ModDetails where show = const "" +instance NFData ModDetails where rnf = rwhnf +instance NFData SafeHaskellMode where rnf = rwhnf +instance Show Linkable where show = prettyPrint +instance NFData Linkable where rnf = rwhnf +instance Show PackageFlag where show = prettyPrint +instance Show InteractiveImport where show = prettyPrint +instance Show ComponentId where show = prettyPrint +instance Show PackageName where show = prettyPrint +instance Show SourcePackageId where show = prettyPrint + +instance Show InstalledUnitId where + show = installedUnitIdString + +instance NFData InstalledUnitId where rnf = rwhnf . installedUnitIdFS + +instance NFData SB.StringBuffer where rnf = rwhnf + +instance Show Module where + show = moduleNameString . moduleName + +instance Outputable a => Show (GenLocated SrcSpan a) where show = prettyPrint + +instance (NFData l, NFData e) => NFData (GenLocated l e) where + rnf (L l e) = rnf l `seq` rnf e + +instance Show ModSummary where + show = show . ms_mod + +instance Show ParsedModule where + show = show . pm_mod_summary + +instance NFData ModSummary where + rnf = rwhnf + +#if !MIN_GHC_API_VERSION(8,10,0) +instance NFData FastString where + rnf = rwhnf +#endif + +instance NFData ParsedModule where + rnf = rwhnf + +instance Hashable InstalledUnitId where + hashWithSalt salt = hashWithSalt salt . installedUnitIdString + +instance Show HieFile where + show = show . hie_module + +instance NFData HieFile where + rnf = rwhnf + +deriving instance Eq SourceModified +deriving instance Show SourceModified +instance NFData SourceModified where + rnf = rwhnf + +instance Show ModuleName where + show = moduleNameString +instance Hashable ModuleName where + hashWithSalt salt = hashWithSalt salt . show + + +instance NFData a => NFData (IdentifierDetails a) where + rnf (IdentifierDetails a b) = rnf a `seq` rnf (length b) + +instance NFData RealSrcSpan where + rnf = rwhnf + +instance NFData Type where + rnf = rwhnf + +instance Show a => Show (Bag a) where + show = show . bagToList + +instance NFData HsDocString where + rnf = rwhnf + +instance Show ModGuts where + show _ = "modguts" +instance NFData ModGuts where + rnf = rwhnf + +instance NFData (ImportDecl GhcPs) where + rnf = rwhnf diff --git a/ghcide/src/Development/IDE/GHC/Util.hs b/ghcide/src/Development/IDE/GHC/Util.hs new file mode 100644 index 00000000000..6213e23a038 --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Util.hs @@ -0,0 +1,336 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +-- | General utility functions, mostly focused around GHC operations. 
+module Development.IDE.GHC.Util( + -- * HcsEnv and environment + HscEnvEq, + hscEnv, newHscEnvEq, + hscEnvWithImportPaths, + envImportPaths, + modifyDynFlags, + evalGhcEnv, + deps, + -- * GHC wrappers + prettyPrint, + printRdrName, + printName, + ParseResult(..), runParser, + lookupPackageConfig, + textToStringBuffer, + bytestringToStringBuffer, + stringBufferToByteString, + moduleImportPath, + cgGutsToCoreModule, + fingerprintToBS, + fingerprintFromStringBuffer, + -- * General utilities + readFileUtf8, + hDuplicateTo', + setHieDir, + dontWriteHieFiles, + disableWarningsAsErrors, + newHscEnvEqPreserveImportPaths, + newHscEnvEqWithImportPaths) where + +import Control.Concurrent +import Data.List.Extra +import Data.ByteString.Internal (ByteString(..)) +import Data.Maybe +import Data.Typeable +import qualified Data.ByteString.Internal as BS +import Fingerprint +import GhcMonad +import Control.Exception +import Data.IORef +import FileCleanup +import Foreign.Ptr +import Foreign.ForeignPtr +import Foreign.Storable +import GHC.IO.BufferedIO (BufferedIO) +import GHC.IO.Device as IODevice +import GHC.IO.Encoding +import GHC.IO.Exception +import GHC.IO.Handle.Types +import GHC.IO.Handle.Internals +import Data.Unique +import Development.Shake.Classes +import qualified Data.Text as T +import qualified Data.Text.Encoding as T +import qualified Data.Text.Encoding.Error as T +import qualified Data.ByteString as BS +import Lexer +import StringBuffer +import System.FilePath +import HscTypes (cg_binds, md_types, cg_module, ModDetails, CgGuts, ic_dflags, hsc_IC, HscEnv(hsc_dflags)) +import PackageConfig (PackageConfig) +import Outputable (showSDocUnsafe, ppr, showSDoc, Outputable) +import Packages (getPackageConfigMap, lookupPackage') +import SrcLoc (mkRealSrcLoc) +import FastString (mkFastString) +import Module (moduleNameSlashes, InstalledUnitId) +import OccName (parenSymOcc) +import RdrName (nameRdrName, rdrNameOcc) + +import Development.IDE.GHC.Compat as GHC +import Development.IDE.Types.Location +import System.Directory (canonicalizePath) + + +---------------------------------------------------------------------- +-- GHC setup + +-- | Used to modify dyn flags in preference to calling 'setSessionDynFlags', +-- since that function also reloads packages (which is very slow). +modifyDynFlags :: GhcMonad m => (DynFlags -> DynFlags) -> m () +modifyDynFlags f = do + newFlags <- f <$> getSessionDynFlags + -- We do not use setSessionDynFlags here since we handle package + -- initialization separately. + modifySession $ \h -> + h { hsc_dflags = newFlags, hsc_IC = (hsc_IC h) {ic_dflags = newFlags} } + +-- | Given a 'UnitId' try and find the associated 'PackageConfig' in the environment. +lookupPackageConfig :: UnitId -> HscEnv -> Maybe PackageConfig +lookupPackageConfig unitId env = + lookupPackage' False pkgConfigMap unitId + where + pkgConfigMap = + -- For some weird reason, the GHC API does not provide a way to get the PackageConfigMap + -- from PackageState so we have to wrap it in DynFlags first. + getPackageConfigMap $ hsc_dflags env + + +-- | Convert from the @text@ package to the @GHC@ 'StringBuffer'. +-- Currently implemented somewhat inefficiently (if it ever comes up in a profile). +textToStringBuffer :: T.Text -> StringBuffer +textToStringBuffer = stringToStringBuffer . 
T.unpack + +runParser :: DynFlags -> String -> P a -> ParseResult a +runParser flags str parser = unP parser parseState + where + filename = "" + location = mkRealSrcLoc (mkFastString filename) 1 1 + buffer = stringToStringBuffer str + parseState = mkPState flags buffer location + +stringBufferToByteString :: StringBuffer -> ByteString +stringBufferToByteString StringBuffer{..} = PS buf cur len + +bytestringToStringBuffer :: ByteString -> StringBuffer +bytestringToStringBuffer (PS buf cur len) = StringBuffer{..} + +-- | Pretty print a GHC value using 'unsafeGlobalDynFlags '. +prettyPrint :: Outputable a => a -> String +prettyPrint = showSDoc unsafeGlobalDynFlags . ppr + +-- | Pretty print a 'RdrName' wrapping operators in parens +printRdrName :: RdrName -> String +printRdrName name = showSDocUnsafe $ parenSymOcc rn (ppr rn) + where + rn = rdrNameOcc name + +-- | Pretty print a 'Name' wrapping operators in parens +printName :: Name -> String +printName = printRdrName . nameRdrName + +-- | Run a 'Ghc' monad value using an existing 'HscEnv'. Sets up and tears down all the required +-- pieces, but designed to be more efficient than a standard 'runGhc'. +evalGhcEnv :: HscEnv -> Ghc b -> IO b +evalGhcEnv env act = snd <$> runGhcEnv env act + +-- | Run a 'Ghc' monad value using an existing 'HscEnv'. Sets up and tears down all the required +-- pieces, but designed to be more efficient than a standard 'runGhc'. +runGhcEnv :: HscEnv -> Ghc a -> IO (HscEnv, a) +runGhcEnv env act = do + filesToClean <- newIORef emptyFilesToClean + dirsToClean <- newIORef mempty + let dflags = (hsc_dflags env){filesToClean=filesToClean, dirsToClean=dirsToClean, useUnicode=True} + ref <- newIORef env{hsc_dflags=dflags} + res <- unGhc act (Session ref) `finally` do + cleanTempFiles dflags + cleanTempDirs dflags + (,res) <$> readIORef ref + +-- | Given a module location, and its parse tree, figure out what is the include directory implied by it. +-- For example, given the file @\/usr\/\Test\/Foo\/Bar.hs@ with the module name @Foo.Bar@ the directory +-- @\/usr\/Test@ should be on the include path to find sibling modules. +moduleImportPath :: NormalizedFilePath -> GHC.ModuleName -> Maybe FilePath +-- The call to takeDirectory is required since DAML does not require that +-- the file name matches the module name in the last component. +-- Once that has changed we can get rid of this. +moduleImportPath (takeDirectory . fromNormalizedFilePath -> pathDir) mn + -- This happens for single-component modules since takeDirectory "A" == "." + | modDir == "." = Just pathDir + | otherwise = dropTrailingPathSeparator <$> stripSuffix modDir pathDir + where + -- A for module A.B + modDir = + takeDirectory $ + fromNormalizedFilePath $ toNormalizedFilePath' $ + moduleNameSlashes mn + +-- | An 'HscEnv' with equality. Two values are considered equal +-- if they are created with the same call to 'newHscEnvEq'. +data HscEnvEq = HscEnvEq + { envUnique :: !Unique + , hscEnv :: !HscEnv + , deps :: [(InstalledUnitId, DynFlags)] + -- ^ In memory components for this HscEnv + -- This is only used at the moment for the import dirs in + -- the DynFlags + , envImportPaths :: Maybe [String] + -- ^ If Just, import dirs originally configured in this env + -- If Nothing, the env import dirs are unaltered + } + +-- | Wrap an 'HscEnv' into an 'HscEnvEq'. 
+newHscEnvEq :: FilePath -> HscEnv -> [(InstalledUnitId, DynFlags)] -> IO HscEnvEq +newHscEnvEq cradlePath hscEnv0 deps = do + envUnique <- newUnique + let relativeToCradle = (takeDirectory cradlePath ) + hscEnv = removeImportPaths hscEnv0 + + -- Canonicalize import paths since we also canonicalize targets + importPathsCanon <- + mapM canonicalizePath $ relativeToCradle <$> importPaths (hsc_dflags hscEnv0) + let envImportPaths = Just importPathsCanon + + return HscEnvEq{..} + +newHscEnvEqWithImportPaths :: Maybe [String] -> HscEnv -> [(InstalledUnitId, DynFlags)] -> IO HscEnvEq +newHscEnvEqWithImportPaths envImportPaths hscEnv deps = do + envUnique <- newUnique + return HscEnvEq{..} + +-- | Wrap an 'HscEnv' into an 'HscEnvEq'. +newHscEnvEqPreserveImportPaths + :: HscEnv -> [(InstalledUnitId, DynFlags)] -> IO HscEnvEq +newHscEnvEqPreserveImportPaths hscEnv deps = do + let envImportPaths = Nothing + envUnique <- newUnique + return HscEnvEq{..} + +-- | Unwrap the 'HscEnv' with the original import paths. +-- Used only for locating imports +hscEnvWithImportPaths :: HscEnvEq -> HscEnv +hscEnvWithImportPaths HscEnvEq{..} + | Just imps <- envImportPaths + = hscEnv{hsc_dflags = (hsc_dflags hscEnv){importPaths = imps}} + | otherwise + = hscEnv + +removeImportPaths :: HscEnv -> HscEnv +removeImportPaths hsc = hsc{hsc_dflags = (hsc_dflags hsc){importPaths = []}} + +instance Show HscEnvEq where + show HscEnvEq{envUnique} = "HscEnvEq " ++ show (hashUnique envUnique) + +instance Eq HscEnvEq where + a == b = envUnique a == envUnique b + +instance NFData HscEnvEq where + rnf (HscEnvEq a b c d) = rnf (hashUnique a) `seq` b `seq` c `seq` rnf d + +instance Hashable HscEnvEq where + hashWithSalt s = hashWithSalt s . envUnique + +-- Fake instance needed to persuade Shake to accept this type as a key. +-- No harm done as ghcide never persists these keys currently +instance Binary HscEnvEq where + put _ = error "not really" + get = error "not really" + +-- | Read a UTF8 file, with lenient decoding, so it will never raise a decoding error. +readFileUtf8 :: FilePath -> IO T.Text +readFileUtf8 f = T.decodeUtf8With T.lenientDecode <$> BS.readFile f + +-- | Convert from a 'CgGuts' to a 'CoreModule'. +cgGutsToCoreModule :: SafeHaskellMode -> CgGuts -> ModDetails -> CoreModule +cgGutsToCoreModule safeMode guts modDetails = CoreModule + (cg_module guts) + (md_types modDetails) + (cg_binds guts) + safeMode + +-- | Convert a 'Fingerprint' to a 'ByteString' by copying the byte across. +-- Will produce an 8 byte unreadable ByteString. +fingerprintToBS :: Fingerprint -> BS.ByteString +fingerprintToBS (Fingerprint a b) = BS.unsafeCreate 8 $ \ptr -> do + ptr <- pure $ castPtr ptr + pokeElemOff ptr 0 a + pokeElemOff ptr 1 b + +-- | Take the 'Fingerprint' of a 'StringBuffer'. +fingerprintFromStringBuffer :: StringBuffer -> IO Fingerprint +fingerprintFromStringBuffer (StringBuffer buf len cur) = + withForeignPtr buf $ \ptr -> fingerprintData (ptr `plusPtr` cur) len + + +-- | A slightly modified version of 'hDuplicateTo' from GHC. +-- Importantly, it avoids the bug listed in https://gitlab.haskell.org/ghc/ghc/merge_requests/2318. +hDuplicateTo' :: Handle -> Handle -> IO () +hDuplicateTo' h1@(FileHandle path m1) h2@(FileHandle _ m2) = do + withHandle__' "hDuplicateTo" h2 m2 $ \h2_ -> do + -- The implementation in base has this call to hClose_help. + -- _ <- hClose_help h2_ + -- hClose_help does two things: + -- 1. It flushes the buffer, we replicate this here + _ <- flushWriteBuffer h2_ `catch` \(_ :: IOException) -> pure () + -- 2. 
It closes the handle. This is redundant since dup2 takes care of that + -- but even worse it is actively harmful! Once the handle has been closed + -- another thread is free to reallocate it. This leads to dup2 failing with EBUSY + -- if it happens just in the right moment. + withHandle_' "hDuplicateTo" h1 m1 $ \h1_ -> do + dupHandleTo path h1 Nothing h2_ h1_ (Just handleFinalizer) +hDuplicateTo' h1@(DuplexHandle path r1 w1) h2@(DuplexHandle _ r2 w2) = do + withHandle__' "hDuplicateTo" h2 w2 $ \w2_ -> do + _ <- hClose_help w2_ + withHandle_' "hDuplicateTo" h1 w1 $ \w1_ -> do + dupHandleTo path h1 Nothing w2_ w1_ (Just handleFinalizer) + withHandle__' "hDuplicateTo" h2 r2 $ \r2_ -> do + _ <- hClose_help r2_ + withHandle_' "hDuplicateTo" h1 r1 $ \r1_ -> do + dupHandleTo path h1 (Just w1) r2_ r1_ Nothing +hDuplicateTo' h1 _ = + ioe_dupHandlesNotCompatible h1 + +-- | This is copied unmodified from GHC since it is not exposed. +dupHandleTo :: FilePath + -> Handle + -> Maybe (MVar Handle__) + -> Handle__ + -> Handle__ + -> Maybe HandleFinalizer + -> IO Handle__ +dupHandleTo filepath h other_side + _hto_@Handle__{haDevice=devTo} + h_@Handle__{haDevice=dev} mb_finalizer = do + flushBuffer h_ + case cast devTo of + Nothing -> ioe_dupHandlesNotCompatible h + Just dev' -> do + _ <- IODevice.dup2 dev dev' + FileHandle _ m <- dupHandle_ dev' filepath other_side h_ mb_finalizer + takeMVar m + +-- | This is copied unmodified from GHC since it is not exposed. +-- Note the beautiful inline comment! +dupHandle_ :: (IODevice dev, BufferedIO dev, Typeable dev) => dev + -> FilePath + -> Maybe (MVar Handle__) + -> Handle__ + -> Maybe HandleFinalizer + -> IO Handle +dupHandle_ new_dev filepath other_side _h_@Handle__{..} mb_finalizer = do + -- XXX wrong! + mb_codec <- if isJust haEncoder then fmap Just getLocaleEncoding else return Nothing + mkHandle new_dev filepath haType True{-buffered-} mb_codec + NewlineMode { inputNL = haInputNL, outputNL = haOutputNL } + mb_finalizer other_side + +-- | This is copied unmodified from GHC since it is not exposed. +ioe_dupHandlesNotCompatible :: Handle -> IO a +ioe_dupHandlesNotCompatible h = + ioException (IOError (Just h) IllegalOperation "hDuplicateTo" + "handles are incompatible" Nothing Nothing) diff --git a/ghcide/src/Development/IDE/GHC/Warnings.hs b/ghcide/src/Development/IDE/GHC/Warnings.hs new file mode 100644 index 00000000000..68c52cf982f --- /dev/null +++ b/ghcide/src/Development/IDE/GHC/Warnings.hs @@ -0,0 +1,34 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.GHC.Warnings(withWarnings) where + +import ErrUtils +import GhcPlugins as GHC hiding (Var) + +import Control.Concurrent.Extra +import qualified Data.Text as T + +import Development.IDE.Types.Diagnostics +import Development.IDE.GHC.Error + + +-- | Take a GHC monadic action (e.g. @typecheckModule pm@ for some +-- parsed module 'pm@') and produce a "decorated" action that will +-- harvest any warnings encountered executing the action. The 'phase' +-- argument classifies the context (e.g. "Parser", "Typechecker"). +-- +-- The ModSummary function is required because of +-- https://github.com/ghc/ghc/blob/5f1d949ab9e09b8d95319633854b7959df06eb58/compiler/main/GHC.hs#L623-L640 +-- which basically says that log_action is taken from the ModSummary when GHC feels like it. 
+-- The given argument lets you refresh a ModSummary log_action +withWarnings :: T.Text -> ((ModSummary -> ModSummary) -> IO a) -> IO ([(WarnReason, FileDiagnostic)], a) +withWarnings diagSource action = do + warnings <- newVar [] + let newAction :: DynFlags -> WarnReason -> Severity -> SrcSpan -> PprStyle -> SDoc -> IO () + newAction dynFlags wr _ loc style msg = do + let wr_d = fmap (wr,) $ diagFromErrMsg diagSource dynFlags $ mkWarnMsg dynFlags loc (queryQual style) msg + modifyVar_ warnings $ return . (wr_d:) + res <- action $ \x -> x{ms_hspp_opts = (ms_hspp_opts x){log_action = newAction}} + warns <- readVar warnings + return (reverse $ concat warns, res) diff --git a/ghcide/src/Development/IDE/Import/DependencyInformation.hs b/ghcide/src/Development/IDE/Import/DependencyInformation.hs new file mode 100644 index 00000000000..3c591abd2c7 --- /dev/null +++ b/ghcide/src/Development/IDE/Import/DependencyInformation.hs @@ -0,0 +1,403 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.Import.DependencyInformation + ( DependencyInformation(..) + , ModuleImports(..) + , RawDependencyInformation(..) + , NodeError(..) + , ModuleParseError(..) + , TransitiveDependencies(..) + , FilePathId(..) + , NamedModuleDep(..) + + , PathIdMap + , emptyPathIdMap + , getPathId + , lookupPathToId + , insertImport + , pathToId + , idToPath + , reachableModules + , processDependencyInformation + , transitiveDeps + , transitiveReverseDependencies + , immediateReverseDependencies + + , BootIdMap + , insertBootId + ) where + +import Control.DeepSeq +import Data.Bifunctor +import Data.Coerce +import Data.List +import Data.Tuple.Extra hiding (first, second) +import Development.IDE.GHC.Orphans() +import Data.Either +import Data.Graph +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HMS +import Data.List.NonEmpty (NonEmpty(..), nonEmpty) +import qualified Data.List.NonEmpty as NonEmpty +import Data.IntMap (IntMap) +import qualified Data.IntMap.Strict as IntMap +import qualified Data.IntMap.Lazy as IntMapLazy +import Data.IntSet (IntSet) +import qualified Data.IntSet as IntSet +import Data.Maybe +import Data.Set (Set) +import qualified Data.Set as Set +import GHC.Generics (Generic) + +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.Import.FindImports (ArtifactsLocation(..)) + +import GHC +import Module + +-- | The imports for a given module. +data ModuleImports = ModuleImports + { moduleImports :: ![(Located ModuleName, Maybe FilePathId)] + -- ^ Imports of a module in the current package and the file path of + -- that module on disk (if we found it) + , packageImports :: !(Set InstalledUnitId) + -- ^ Transitive package dependencies unioned for all imports. + } deriving Show + +-- | For processing dependency information, we need lots of maps and sets of +-- filepaths. Comparing Strings is really slow, so we work with IntMap/IntSet +-- instead and only convert at the edges. 
+newtype FilePathId = FilePathId { getFilePathId :: Int } + deriving (Show, NFData, Eq, Ord) + +-- | Map from 'FilePathId' +type FilePathIdMap = IntMap + +-- | Set of 'FilePathId's +type FilePathIdSet = IntSet + +data PathIdMap = PathIdMap + { idToPathMap :: !(FilePathIdMap ArtifactsLocation) + , pathToIdMap :: !(HashMap NormalizedFilePath FilePathId) + } + deriving (Show, Generic) + +instance NFData PathIdMap + +emptyPathIdMap :: PathIdMap +emptyPathIdMap = PathIdMap IntMap.empty HMS.empty + +getPathId :: ArtifactsLocation -> PathIdMap -> (FilePathId, PathIdMap) +getPathId path m@PathIdMap{..} = + case HMS.lookup (artifactFilePath path) pathToIdMap of + Nothing -> + let !newId = FilePathId $ HMS.size pathToIdMap + in (newId, insertPathId path newId m) + Just id -> (id, m) + +insertPathId :: ArtifactsLocation -> FilePathId -> PathIdMap -> PathIdMap +insertPathId path id PathIdMap{..} = + PathIdMap (IntMap.insert (getFilePathId id) path idToPathMap) (HMS.insert (artifactFilePath path) id pathToIdMap) + +insertImport :: FilePathId -> Either ModuleParseError ModuleImports -> RawDependencyInformation -> RawDependencyInformation +insertImport (FilePathId k) v rawDepInfo = rawDepInfo { rawImports = IntMap.insert k v (rawImports rawDepInfo) } + +pathToId :: PathIdMap -> NormalizedFilePath -> FilePathId +pathToId PathIdMap{pathToIdMap} path = pathToIdMap HMS.! path + +lookupPathToId :: PathIdMap -> NormalizedFilePath -> Maybe FilePathId +lookupPathToId PathIdMap{pathToIdMap} path = HMS.lookup path pathToIdMap + +idToPath :: PathIdMap -> FilePathId -> NormalizedFilePath +idToPath pathIdMap filePathId = artifactFilePath $ idToModLocation pathIdMap filePathId + +idToModLocation :: PathIdMap -> FilePathId -> ArtifactsLocation +idToModLocation PathIdMap{idToPathMap} (FilePathId id) = idToPathMap IntMap.! id + +type BootIdMap = FilePathIdMap FilePathId + +insertBootId :: FilePathId -> FilePathId -> BootIdMap -> BootIdMap +insertBootId k = IntMap.insert (getFilePathId k) + +-- | Unprocessed results that we find by following imports recursively. +data RawDependencyInformation = RawDependencyInformation + { rawImports :: !(FilePathIdMap (Either ModuleParseError ModuleImports)) + , rawPathIdMap :: !PathIdMap + -- The rawBootMap maps the FilePathId of a hs-boot file to its + -- corresponding hs file. It is used when topologically sorting as we + -- need to add edges between .hs-boot and .hs so that the .hs files + -- appear later in the sort. + , rawBootMap :: !BootIdMap + } deriving Show + +pkgDependencies :: RawDependencyInformation -> FilePathIdMap (Set InstalledUnitId) +pkgDependencies RawDependencyInformation{..} = + IntMap.map (either (const Set.empty) packageImports) rawImports + +data DependencyInformation = + DependencyInformation + { depErrorNodes :: !(FilePathIdMap (NonEmpty NodeError)) + -- ^ Nodes that cannot be processed correctly. + , depModuleNames :: !(FilePathIdMap ShowableModuleName) + , depModuleDeps :: !(FilePathIdMap FilePathIdSet) + -- ^ For a non-error node, this contains the set of module immediate dependencies + -- in the same package. + , depReverseModuleDeps :: !(IntMap IntSet) + -- ^ Contains a reverse mapping from a module to all those that immediately depend on it. + , depPkgDeps :: !(FilePathIdMap (Set InstalledUnitId)) + -- ^ For a non-error node, this contains the set of immediate pkg deps. 
+ , depPathIdMap :: !PathIdMap + -- ^ Map from FilePath to FilePathId + , depBootMap :: !BootIdMap + -- ^ Map from hs-boot file to the corresponding hs file + } deriving (Show, Generic) + +newtype ShowableModuleName = + ShowableModuleName {showableModuleName :: ModuleName} + deriving NFData + +instance Show ShowableModuleName where show = moduleNameString . showableModuleName + +reachableModules :: DependencyInformation -> [NormalizedFilePath] +reachableModules DependencyInformation{..} = + map (idToPath depPathIdMap . FilePathId) $ IntMap.keys depErrorNodes <> IntMap.keys depModuleDeps + +instance NFData DependencyInformation + +-- | This does not contain the actual parse error as that is already reported by GetParsedModule. +data ModuleParseError = ModuleParseError + deriving (Show, Generic) + +instance NFData ModuleParseError + +-- | Error when trying to locate a module. +data LocateError = LocateError [Diagnostic] + deriving (Eq, Show, Generic) + +instance NFData LocateError + +-- | An error attached to a node in the dependency graph. +data NodeError + = PartOfCycle (Located ModuleName) [FilePathId] + -- ^ This module is part of an import cycle. The module name corresponds + -- to the import that enters the cycle starting from this module. + -- The list of filepaths represents the elements + -- in the cycle in unspecified order. + | FailedToLocateImport (Located ModuleName) + -- ^ This module has an import that couldn’t be located. + | ParseError ModuleParseError + | ParentOfErrorNode (Located ModuleName) + -- ^ This module is the parent of a module that cannot be + -- processed (either it cannot be parsed, is part of a cycle + -- or the parent of another error node). + deriving (Show, Generic) + +instance NFData NodeError where + rnf (PartOfCycle m fs) = m `seq` rnf fs + rnf (FailedToLocateImport m) = m `seq` () + rnf (ParseError e) = rnf e + rnf (ParentOfErrorNode m) = m `seq` () + +-- | A processed node in the dependency graph. If there was any error +-- during processing the node or any of its dependencies, this is an +-- `ErrorNode`. Otherwise it is a `SuccessNode`. +data NodeResult + = ErrorNode (NonEmpty NodeError) + | SuccessNode [(Located ModuleName, FilePathId)] + deriving Show + +partitionNodeResults + :: [(a, NodeResult)] + -> ([(a, NonEmpty NodeError)], [(a, [(Located ModuleName, FilePathId)])]) +partitionNodeResults = partitionEithers . 
map f + where f (a, ErrorNode errs) = Left (a, errs) + f (a, SuccessNode imps) = Right (a, imps) + +instance Semigroup NodeResult where + ErrorNode errs <> ErrorNode errs' = ErrorNode (errs <> errs') + ErrorNode errs <> SuccessNode _ = ErrorNode errs + SuccessNode _ <> ErrorNode errs = ErrorNode errs + SuccessNode a <> SuccessNode _ = SuccessNode a + +processDependencyInformation :: RawDependencyInformation -> DependencyInformation +processDependencyInformation rawDepInfo@RawDependencyInformation{..} = + DependencyInformation + { depErrorNodes = IntMap.fromList errorNodes + , depModuleDeps = moduleDeps + , depReverseModuleDeps = reverseModuleDeps + , depModuleNames = IntMap.fromList $ coerce moduleNames + , depPkgDeps = pkgDependencies rawDepInfo + , depPathIdMap = rawPathIdMap + , depBootMap = rawBootMap + } + where resultGraph = buildResultGraph rawImports + (errorNodes, successNodes) = partitionNodeResults $ IntMap.toList resultGraph + moduleNames :: [(FilePathId, ModuleName)] + moduleNames = + [ (fId, modName) | (_, imports) <- successNodes, (L _ modName, fId) <- imports] + successEdges :: [(FilePathId, [FilePathId])] + successEdges = + map + (bimap FilePathId (map snd)) + successNodes + moduleDeps = + IntMap.fromList $ + map (\(FilePathId v, vs) -> (v, IntSet.fromList $ coerce vs)) + successEdges + reverseModuleDeps = + foldr (\(p, cs) res -> + let new = IntMap.fromList (map (, IntSet.singleton (coerce p)) (coerce cs)) + in IntMap.unionWith IntSet.union new res ) IntMap.empty successEdges + + +-- | Given a dependency graph, buildResultGraph detects and propagates errors in that graph as follows: +-- 1. Mark each node that is part of an import cycle as an error node. +-- 2. Mark each node that has a parse error as an error node. +-- 3. Mark each node whose immediate children could not be located as an error. +-- 4. Recursively propagate errors to parents if they are not already error nodes. +buildResultGraph :: FilePathIdMap (Either ModuleParseError ModuleImports) -> FilePathIdMap NodeResult +buildResultGraph g = propagatedErrors + where + sccs = stronglyConnComp (graphEdges g) + (_, cycles) = partitionSCC sccs + cycleErrors :: IntMap NodeResult + cycleErrors = IntMap.unionsWith (<>) $ map errorsForCycle cycles + errorsForCycle :: [FilePathId] -> IntMap NodeResult + errorsForCycle files = + IntMap.fromListWith (<>) $ coerce $ concatMap (cycleErrorsForFile files) files + cycleErrorsForFile :: [FilePathId] -> FilePathId -> [(FilePathId,NodeResult)] + cycleErrorsForFile cycle f = + let entryPoints = mapMaybe (findImport f) cycle + in map (\imp -> (f, ErrorNode (PartOfCycle imp cycle :| []))) entryPoints + otherErrors = IntMap.map otherErrorsForFile g + otherErrorsForFile :: Either ModuleParseError ModuleImports -> NodeResult + otherErrorsForFile (Left err) = ErrorNode (ParseError err :| []) + otherErrorsForFile (Right ModuleImports{moduleImports}) = + let toEither (imp, Nothing) = Left imp + toEither (imp, Just path) = Right (imp, path) + (errs, imports') = partitionEithers (map toEither moduleImports) + in case nonEmpty errs of + Nothing -> SuccessNode imports' + Just errs' -> ErrorNode (NonEmpty.map FailedToLocateImport errs') + + unpropagatedErrors = IntMap.unionWith (<>) cycleErrors otherErrors + -- The recursion here is fine since we use a lazy map and + -- we only recurse on SuccessNodes. In particular, we do not recurse + -- on nodes that are part of a cycle as they are already marked as + -- error nodes. 
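-- [Editor's note: standalone sketch, not part of the patch.] The comment above
-- describes a knot-tying construction: 'propagatedErrors' is built with a lazy
-- map whose values look themselves up in the very map being defined. A toy
-- model of the same idea, with 'Bool' standing in for 'NodeResult' and plain
-- 'Int' keys standing in for 'FilePathId' (all names here are illustrative):

import qualified Data.IntMap.Lazy as IM

-- | Given node -> direct dependencies and a predicate for directly broken
-- nodes, mark every node that transitively reaches a broken one. This
-- terminates because the graph is acyclic (cycles were already turned into
-- error nodes before propagation).
propagateBroken :: IM.IntMap [Int] -> (Int -> Bool) -> IM.IntMap Bool
propagateBroken deps broken = results
  where
    results   = IM.mapWithKey node deps
    node k ds = broken k || any (\d -> IM.findWithDefault False d results) ds

-- ghci> propagateBroken (IM.fromList [(0,[1]),(1,[2]),(2,[])]) (== 2)
-- fromList [(0,True),(1,True),(2,True)]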
+ propagatedErrors = + IntMapLazy.map propagate unpropagatedErrors + propagate :: NodeResult -> NodeResult + propagate n@(ErrorNode _) = n + propagate n@(SuccessNode imps) = + let results = map (\(imp, FilePathId dep) -> (imp, propagatedErrors IntMap.! dep)) imps + (errs, _) = partitionNodeResults results + in case nonEmpty errs of + Nothing -> n + Just errs' -> ErrorNode (NonEmpty.map (ParentOfErrorNode . fst) errs') + findImport :: FilePathId -> FilePathId -> Maybe (Located ModuleName) + findImport (FilePathId file) importedFile = + case g IntMap.! file of + Left _ -> error "Tried to call findImport on a module with a parse error" + Right ModuleImports{moduleImports} -> + fmap fst $ find (\(_, resolvedImp) -> resolvedImp == Just importedFile) moduleImports + +graphEdges :: FilePathIdMap (Either ModuleParseError ModuleImports) -> [(FilePathId, FilePathId, [FilePathId])] +graphEdges g = + map (\(k, v) -> (FilePathId k, FilePathId k, deps v)) $ IntMap.toList g + where deps :: Either e ModuleImports -> [FilePathId] + deps (Left _) = [] + deps (Right ModuleImports{moduleImports}) = mapMaybe snd moduleImports + +partitionSCC :: [SCC a] -> ([a], [[a]]) +partitionSCC (CyclicSCC xs:rest) = second (xs:) $ partitionSCC rest +partitionSCC (AcyclicSCC x:rest) = first (x:) $ partitionSCC rest +partitionSCC [] = ([], []) + +-- | Transitive reverse dependencies of a file +transitiveReverseDependencies :: NormalizedFilePath -> DependencyInformation -> Maybe [NormalizedFilePath] +transitiveReverseDependencies file DependencyInformation{..} = do + FilePathId cur_id <- lookupPathToId depPathIdMap file + return $ map (idToPath depPathIdMap . FilePathId) (IntSet.toList (go cur_id IntSet.empty)) + where + go :: Int -> IntSet -> IntSet + go k i = + let outwards = fromMaybe IntSet.empty (IntMap.lookup k depReverseModuleDeps) + res = IntSet.union i outwards + new = IntSet.difference i outwards + in IntSet.foldr go res new + +-- | Immediate reverse dependencies of a file +immediateReverseDependencies :: NormalizedFilePath -> DependencyInformation -> Maybe [NormalizedFilePath] +immediateReverseDependencies file DependencyInformation{..} = do + FilePathId cur_id <- lookupPathToId depPathIdMap file + return $ map (idToPath depPathIdMap . FilePathId) (maybe mempty IntSet.toList (IntMap.lookup cur_id depReverseModuleDeps)) + +transitiveDeps :: DependencyInformation -> NormalizedFilePath -> Maybe TransitiveDependencies +transitiveDeps DependencyInformation{..} file = do + let !fileId = pathToId depPathIdMap file + reachableVs <- + -- Delete the starting node + IntSet.delete (getFilePathId fileId) . + IntSet.fromList . map (fst3 . fromVertex) . + reachable g <$> toVertex (getFilePathId fileId) + let transitiveModuleDepIds = + filter (\v -> v `IntSet.member` reachableVs) $ map (fst3 . fromVertex) vs + let transitivePkgDeps = + Set.toList $ Set.unions $ + map (\f -> IntMap.findWithDefault Set.empty f depPkgDeps) $ + getFilePathId fileId : transitiveModuleDepIds + let transitiveModuleDeps = + map (idToPath depPathIdMap . FilePathId) transitiveModuleDepIds + let transitiveNamedModuleDeps = + [ NamedModuleDep (idToPath depPathIdMap (FilePathId fid)) mn artifactModLocation + | (fid, ShowableModuleName mn) <- IntMap.toList depModuleNames + , let ArtifactsLocation{artifactModLocation} = idToPathMap depPathIdMap IntMap.! 
fid + ] + pure TransitiveDependencies {..} + where + (g, fromVertex, toVertex) = graphFromEdges edges + edges = map (\(f, fs) -> (f, f, IntSet.toList fs ++ boot_edge f)) $ IntMap.toList depModuleDeps + + -- Need to add an edge between the .hs and .hs-boot file if it exists + -- so the .hs file gets loaded after the .hs-boot file and the right + -- stuff ends up in the HPT. If you don't have this check then GHC will + -- fail to work with ghcide. + boot_edge f = [getFilePathId f' | Just f' <- [IntMap.lookup f depBootMap]] + + vs = topSort g + +data TransitiveDependencies = TransitiveDependencies + { transitiveModuleDeps :: [NormalizedFilePath] + -- ^ Transitive module dependencies in topological order. + -- The module itself is not included. + , transitiveNamedModuleDeps :: [NamedModuleDep] + -- ^ Transitive module dependencies in topological order. + -- The module itself is not included. + , transitivePkgDeps :: [InstalledUnitId] + -- ^ Transitive pkg dependencies in unspecified order. + } deriving (Eq, Show, Generic) + +instance NFData TransitiveDependencies + +data NamedModuleDep = NamedModuleDep { + nmdFilePath :: !NormalizedFilePath, + nmdModuleName :: !ModuleName, + nmdModLocation :: !(Maybe ModLocation) + } + deriving Generic + +instance Eq NamedModuleDep where + a == b = nmdFilePath a == nmdFilePath b + +instance NFData NamedModuleDep where + rnf NamedModuleDep{..} = + rnf nmdFilePath `seq` + rnf nmdModuleName `seq` + -- 'ModLocation' lacks an 'NFData' instance + rwhnf nmdModLocation + +instance Show NamedModuleDep where + show NamedModuleDep{..} = show nmdFilePath diff --git a/ghcide/src/Development/IDE/Import/FindImports.hs b/ghcide/src/Development/IDE/Import/FindImports.hs new file mode 100644 index 00000000000..4811745014c --- /dev/null +++ b/ghcide/src/Development/IDE/Import/FindImports.hs @@ -0,0 +1,178 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +#include "ghc-api-version.h" + +module Development.IDE.Import.FindImports + ( locateModule + , Import(..) + , ArtifactsLocation(..) + , modSummaryToArtifactsLocation + , isBootLocation + , mkImportDirs + ) where + +import Development.IDE.GHC.Error as ErrUtils +import Development.IDE.GHC.Orphans() +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Development.IDE.GHC.Compat +-- GHC imports +import FastString +import qualified Module as M +import Packages +import Outputable (showSDoc, ppr, pprPanic) +import Finder +import Control.DeepSeq + +-- standard imports +import Control.Monad.Extra +import Control.Monad.IO.Class +import System.FilePath +import DriverPhases +import Data.Maybe +import Data.List (isSuffixOf) + +data Import + = FileImport !ArtifactsLocation + | PackageImport !M.InstalledUnitId + deriving (Show) + +data ArtifactsLocation = ArtifactsLocation + { artifactFilePath :: !NormalizedFilePath + , artifactModLocation :: !(Maybe ModLocation) + , artifactIsSource :: !Bool -- ^ True if a module is a source input + } + deriving (Show) + +instance NFData ArtifactsLocation where + rnf ArtifactsLocation{..} = rnf artifactFilePath `seq` rwhnf artifactModLocation `seq` rnf artifactIsSource + +isBootLocation :: ArtifactsLocation -> Bool +isBootLocation = not . 
artifactIsSource + +instance NFData Import where + rnf (FileImport x) = rnf x + rnf (PackageImport x) = rnf x + +modSummaryToArtifactsLocation :: NormalizedFilePath -> Maybe ModSummary -> ArtifactsLocation +modSummaryToArtifactsLocation nfp ms = ArtifactsLocation nfp (ms_location <$> ms) source + where + isSource HsSrcFile = True + isSource _ = False + source = case ms of + Nothing -> "-boot" `isSuffixOf` fromNormalizedFilePath nfp + Just ms -> isSource (ms_hsc_src ms) + +-- | locate a module in the file system. Where we go from *daml to Haskell +locateModuleFile :: MonadIO m + => [[FilePath]] + -> [String] + -> (ModuleName -> NormalizedFilePath -> m Bool) + -> Bool + -> ModuleName + -> m (Maybe NormalizedFilePath) +locateModuleFile import_dirss exts doesExist isSource modName = do + let candidates import_dirs = + [ toNormalizedFilePath' (prefix M.moduleNameSlashes modName <.> maybeBoot ext) + | prefix <- import_dirs , ext <- exts] + findM (doesExist modName) (concatMap candidates import_dirss) + where + maybeBoot ext + | isSource = ext ++ "-boot" + | otherwise = ext + +-- | This function is used to map a package name to a set of import paths. +-- It only returns Just for unit-ids which are possible to import into the +-- current module. In particular, it will return Nothing for 'main' components +-- as they can never be imported into another package. +mkImportDirs :: DynFlags -> (M.InstalledUnitId, DynFlags) -> Maybe (PackageName, [FilePath]) +mkImportDirs df (i, DynFlags{importPaths}) = (, importPaths) <$> getPackageName df i + +-- | locate a module in either the file system or the package database. Where we go from *daml to +-- Haskell +locateModule + :: MonadIO m + => DynFlags + -> [(M.InstalledUnitId, DynFlags)] -- ^ Import directories + -> [String] -- ^ File extensions + -> (ModuleName -> NormalizedFilePath -> m Bool) -- ^ does file exist predicate + -> Located ModuleName -- ^ Moudle name + -> Maybe FastString -- ^ Package name + -> Bool -- ^ Is boot module + -> m (Either [FileDiagnostic] Import) +locateModule dflags comp_info exts doesExist modName mbPkgName isSource = do + case mbPkgName of + -- "this" means that we should only look in the current package + Just "this" -> do + lookupLocal [importPaths dflags] + -- if a package name is given we only go look for a package + Just pkgName + | Just dirs <- lookup (PackageName pkgName) import_paths + -> lookupLocal [dirs] + | otherwise -> lookupInPackageDB dflags + Nothing -> do + -- first try to find the module as a file. If we can't find it try to find it in the package + -- database. + -- Here the importPaths for the current modules are added to the front of the import paths from the other components. + -- This is particularly important for Paths_* modules which get generated for every component but unless you use it in + -- each component will end up being found in the wrong place and cause a multi-cradle match failure. 
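-- [Editor's note: standalone sketch, not part of the patch.] The candidate
-- enumeration performed by 'locateModuleFile' above pairs every import
-- directory with every extension, appending "-boot" when an hs-boot file is
-- wanted. A minimal model of that enumeration, taking a pre-rendered
-- slash-separated module name instead of GHC's 'ModuleName' (function and
-- parameter names are illustrative):

import System.FilePath ((<.>), (</>))

candidatePaths :: [FilePath]  -- ^ import directories, current component first
               -> [String]    -- ^ file extensions, e.g. ["hs", "lhs"]
               -> Bool        -- ^ looking for an hs-boot file?
               -> FilePath    -- ^ module name rendered with slashes, e.g. "Data/Foo"
               -> [FilePath]
candidatePaths importDirs exts isSource modSlashes =
  [ dir </> modSlashes <.> maybeBoot ext | dir <- importDirs, ext <- exts ]
  where
    maybeBoot ext
      | isSource  = ext ++ "-boot"
      | otherwise = ext

-- ghci> candidatePaths ["src", "app"] ["hs", "lhs"] False "Data/Foo"
-- ["src/Data/Foo.hs","src/Data/Foo.lhs","app/Data/Foo.hs","app/Data/Foo.lhs"]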
+ mbFile <- locateModuleFile (importPaths dflags : map snd import_paths) exts doesExist isSource $ unLoc modName + case mbFile of + Nothing -> lookupInPackageDB dflags + Just file -> toModLocation file + where + import_paths = mapMaybe (mkImportDirs dflags) comp_info + toModLocation file = liftIO $ do + loc <- mkHomeModLocation dflags (unLoc modName) (fromNormalizedFilePath file) + return $ Right $ FileImport $ ArtifactsLocation file (Just loc) (not isSource) + + lookupLocal dirs = do + mbFile <- locateModuleFile dirs exts doesExist isSource $ unLoc modName + case mbFile of + Nothing -> return $ Left $ notFoundErr dflags modName $ LookupNotFound [] + Just file -> toModLocation file + + lookupInPackageDB dfs = + case lookupModuleWithSuggestions dfs (unLoc modName) mbPkgName of + LookupFound _m pkgConfig -> return $ Right $ PackageImport $ unitId pkgConfig + reason -> return $ Left $ notFoundErr dfs modName reason + +-- | Don't call this on a found module. +notFoundErr :: DynFlags -> Located M.ModuleName -> LookupResult -> [FileDiagnostic] +notFoundErr dfs modName reason = + mkError' $ ppr' $ cannotFindModule dfs modName0 $ lookupToFindResult reason + where + mkError' = diagFromString "not found" DsError (getLoc modName) + modName0 = unLoc modName + ppr' = showSDoc dfs + -- We convert the lookup result to a find result to reuse GHC's cannotFindMoudle pretty printer. + lookupToFindResult = + \case + LookupFound _m _pkgConfig -> + pprPanic "Impossible: called lookupToFind on found module." (ppr modName0) + LookupMultiple rs -> FoundMultiple rs + LookupHidden pkg_hiddens mod_hiddens -> + notFound + { fr_pkgs_hidden = map (moduleUnitId . fst) pkg_hiddens + , fr_mods_hidden = map (moduleUnitId . fst) mod_hiddens + } + LookupUnusable unusable -> + let unusables' = map get_unusable unusable + get_unusable (m, ModUnusable r) = (moduleUnitId m, r) + get_unusable (_, r) = + pprPanic "findLookupResult: unexpected origin" (ppr r) + in notFound {fr_unusables = unusables'} + LookupNotFound suggest -> + notFound {fr_suggestions = suggest} + +notFound :: FindResult +notFound = NotFound + { fr_paths = [] + , fr_pkg = Nothing + , fr_pkgs_hidden = [] + , fr_mods_hidden = [] + , fr_unusables = [] + , fr_suggestions = [] + } diff --git a/ghcide/src/Development/IDE/LSP/HoverDefinition.hs b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs new file mode 100644 index 00000000000..6aa73574f3c --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/HoverDefinition.hs @@ -0,0 +1,72 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +-- | Display information on hover. 
+module Development.IDE.LSP.HoverDefinition + ( setHandlersHover + , setHandlersDefinition + , setHandlersTypeDefinition + , setHandlersDocHighlight + -- * For haskell-language-server + , hover + , gotoDefinition + , gotoTypeDefinition + ) where + +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake +import Development.IDE.LSP.Server +import Development.IDE.Types.Location +import Development.IDE.Types.Logger +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types + +import qualified Data.Text as T + +gotoDefinition :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError LocationResponseParams) +hover :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError (Maybe Hover)) +gotoTypeDefinition :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError LocationResponseParams) +documentHighlight :: IdeState -> TextDocumentPositionParams -> IO (Either ResponseError (List DocumentHighlight)) +gotoDefinition = request "Definition" getDefinition (MultiLoc []) SingleLoc +gotoTypeDefinition = request "TypeDefinition" getTypeDefinition (MultiLoc []) MultiLoc +hover = request "Hover" getAtPoint Nothing foundHover +documentHighlight = request "DocumentHighlight" highlightAtPoint (List []) List + +foundHover :: (Maybe Range, [T.Text]) -> Maybe Hover +foundHover (mbRange, contents) = + Just $ Hover (HoverContents $ MarkupContent MkMarkdown $ T.intercalate sectionSeparator contents) mbRange + +setHandlersDefinition, setHandlersHover, setHandlersTypeDefinition, setHandlersDocHighlight :: PartialHandlers c +setHandlersDefinition = PartialHandlers $ \WithMessage{..} x -> + return x{LSP.definitionHandler = withResponse RspDefinition $ const gotoDefinition} +setHandlersTypeDefinition = PartialHandlers $ \WithMessage{..} x -> + return x{LSP.typeDefinitionHandler = withResponse RspDefinition $ const gotoTypeDefinition} +setHandlersHover = PartialHandlers $ \WithMessage{..} x -> + return x{LSP.hoverHandler = withResponse RspHover $ const hover} +setHandlersDocHighlight = PartialHandlers $ \WithMessage{..} x -> + return x{LSP.documentHighlightHandler = withResponse RspDocumentHighlights $ const documentHighlight} + +-- | Respond to and log a hover or go-to-definition request +request + :: T.Text + -> (NormalizedFilePath -> Position -> IdeAction (Maybe a)) + -> b + -> (a -> b) + -> IdeState + -> TextDocumentPositionParams + -> IO (Either ResponseError b) +request label getResults notFound found ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos _) = do + mbResult <- case uriToFilePath' uri of + Just path -> logAndRunRequest label getResults ide pos path + Nothing -> pure Nothing + pure $ Right $ maybe notFound found mbResult + +logAndRunRequest :: T.Text -> (NormalizedFilePath -> Position -> IdeAction b) -> IdeState -> Position -> String -> IO b +logAndRunRequest label getResults ide pos path = do + let filePath = toNormalizedFilePath' path + logInfo (ideLogger ide) $ + label <> " request at position " <> T.pack (showPosition pos) <> + " in file: " <> T.pack path + runIdeAction (T.unpack label) (shakeExtras ide) (getResults filePath pos) diff --git a/ghcide/src/Development/IDE/LSP/LanguageServer.hs b/ghcide/src/Development/IDE/LSP/LanguageServer.hs new file mode 100644 index 00000000000..9a3c37a166b --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/LanguageServer.hs @@ -0,0 +1,256 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE RankNTypes #-} + +-- WARNING: A copy of DA.Daml.LanguageServer, try to keep them in sync +-- This version removes the daml: handling +module Development.IDE.LSP.LanguageServer + ( runLanguageServer + ) where + +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Capabilities +import Development.IDE.LSP.Server +import qualified Development.IDE.GHC.Util as Ghcide +import qualified Language.Haskell.LSP.Control as LSP +import qualified Language.Haskell.LSP.Core as LSP +import Control.Concurrent.Chan +import Control.Concurrent.Extra +import Control.Concurrent.Async +import Control.Concurrent.STM +import Control.Exception.Safe +import Data.Default +import Data.Maybe +import qualified Data.Set as Set +import qualified Data.Text as T +import GHC.IO.Handle (hDuplicate) +import System.IO +import Control.Monad.Extra + +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Shake +import Development.IDE.LSP.HoverDefinition +import Development.IDE.LSP.Notifications +import Development.IDE.LSP.Outline +import Development.IDE.Types.Logger +import Development.IDE.Core.FileStore +import Development.IDE.Core.Tracing +import Language.Haskell.LSP.Core (LspFuncs(..)) +import Language.Haskell.LSP.Messages + +runLanguageServer + :: forall config. (Show config) + => LSP.Options + -> PartialHandlers config + -> (InitializeRequest -> Either T.Text config) + -> (DidChangeConfigurationNotification -> Either T.Text config) + -> (IO LspId -> (FromServerMessage -> IO ()) -> VFSHandle -> ClientCapabilities + -> WithProgressFunc -> WithIndefiniteProgressFunc -> IO (Maybe config) -> Maybe FilePath -> IO IdeState) + -> IO () +runLanguageServer options userHandlers onInitialConfig onConfigChange getIdeState = do + -- Move stdout to another file descriptor and duplicate stderr + -- to stdout. This guards against stray prints from corrupting the JSON-RPC + -- message stream. + newStdout <- hDuplicate stdout + stderr `Ghcide.hDuplicateTo'` stdout + hSetBuffering stderr NoBuffering + hSetBuffering stdout NoBuffering + + -- Print out a single space to assert that the above redirection works. + -- This is interleaved with the logger, hence we just print a space here in + -- order not to mess up the output too much. Verified that this breaks + -- the language server tests without the redirection. + putStr " " >> hFlush stdout + + -- Send everything over a channel, since you need to wait until after initialise before + -- LspFuncs is available + clientMsgChan :: Chan (Message config) <- newChan + + -- These barriers are signaled when the threads reading from these chans exit. + -- This should not happen but if it does, we will make sure that the whole server + -- dies and can be restarted instead of losing threads silently. 
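-- [Editor's note: standalone sketch, not part of the patch.] The redirection
-- performed at the top of 'runLanguageServer' above follows a small, general
-- pattern: keep a private duplicate of the real stdout for protocol output and
-- point the process-wide stdout at stderr, so stray prints cannot corrupt the
-- JSON-RPC stream. Sketched here with base's 'hDuplicateTo' rather than the
-- patched 'hDuplicateTo'' that the code above uses to work around a GHC bug;
-- the function name is illustrative:

import GHC.IO.Handle (hDuplicate, hDuplicateTo)
import System.IO

guardProtocolStdout :: IO Handle
guardProtocolStdout = do
  realStdout <- hDuplicate stdout     -- protocol messages go through this copy
  stderr `hDuplicateTo` stdout        -- everything else now lands on stderr
  hSetBuffering realStdout NoBuffering
  pure realStdout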
+ clientMsgBarrier <- newBarrier + -- Forcefully exit + let exit = signalBarrier clientMsgBarrier () + + -- The set of requests ids that we have received but not finished processing + pendingRequests <- newTVarIO Set.empty + -- The set of requests that have been cancelled and are also in pendingRequests + cancelledRequests <- newTVarIO Set.empty + + let withResponse wrap f = Just $ \r@RequestMessage{_id, _method} -> do + atomically $ modifyTVar pendingRequests (Set.insert _id) + writeChan clientMsgChan $ Response r wrap f + let withNotification old f = Just $ \r@NotificationMessage{_method} -> + writeChan clientMsgChan $ Notification r (\lsp ide x -> f lsp ide x >> whenJust old ($ r)) + let withResponseAndRequest wrap wrapNewReq f = Just $ \r@RequestMessage{_id, _method} -> do + atomically $ modifyTVar pendingRequests (Set.insert _id) + writeChan clientMsgChan $ ResponseAndRequest r wrap wrapNewReq f + let withInitialize f = Just $ \r -> + writeChan clientMsgChan $ InitialParams r (\lsp ide x -> f lsp ide x) + let cancelRequest reqId = atomically $ do + queued <- readTVar pendingRequests + -- We want to avoid that the list of cancelled requests + -- keeps growing if we receive cancellations for requests + -- that do not exist or have already been processed. + when (reqId `elem` queued) $ + modifyTVar cancelledRequests (Set.insert reqId) + let clearReqId reqId = atomically $ do + modifyTVar pendingRequests (Set.delete reqId) + modifyTVar cancelledRequests (Set.delete reqId) + -- We implement request cancellation by racing waitForCancel against + -- the actual request handler. + let waitForCancel reqId = atomically $ do + cancelled <- readTVar cancelledRequests + unless (reqId `Set.member` cancelled) retry + let PartialHandlers parts = + initializeRequestHandler <> + setHandlersIgnore <> -- least important + setHandlersDefinition <> setHandlersHover <> setHandlersTypeDefinition <> + setHandlersDocHighlight <> + setHandlersOutline <> + userHandlers <> + setHandlersNotifications <> -- absolutely critical, join them with user notifications + cancelHandler cancelRequest <> + exitHandler exit + -- Cancel requests are special since they need to be handled + -- out of order to be useful. Existing handlers are run afterwards. 
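-- [Editor's note: standalone sketch, not part of the patch.] The bookkeeping
-- above keeps the cancelled set from growing without bound: a cancellation is
-- only recorded while the request is still pending, and finishing a request
-- clears it from both sets. The same logic with the LSP types stripped away
-- (names here are illustrative):

import Control.Concurrent.STM
import Control.Monad (when)
import qualified Data.Set as Set

recordCancel :: Ord k => TVar (Set.Set k) -> TVar (Set.Set k) -> k -> STM ()
recordCancel pending cancelled reqId = do
  queued <- readTVar pending
  when (reqId `Set.member` queued) $
    modifyTVar cancelled (Set.insert reqId)

finishRequest :: Ord k => TVar (Set.Set k) -> TVar (Set.Set k) -> k -> STM ()
finishRequest pending cancelled reqId = do
  modifyTVar pending   (Set.delete reqId)
  modifyTVar cancelled (Set.delete reqId)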
+ handlers <- parts WithMessage{withResponse, withNotification, withResponseAndRequest, withInitialize} def + + let initializeCallbacks = LSP.InitializeCallbacks + { LSP.onInitialConfiguration = onInitialConfig + , LSP.onConfigurationChange = onConfigChange + , LSP.onStartup = handleInit exit clearReqId waitForCancel clientMsgChan + } + + void $ waitAnyCancel =<< traverse async + [ void $ LSP.runWithHandles + stdin + newStdout + initializeCallbacks + handlers + (modifyOptions options) + Nothing + , void $ waitBarrier clientMsgBarrier + ] + where + handleInit :: IO () -> (LspId -> IO ()) -> (LspId -> IO ()) -> Chan (Message config) -> LSP.LspFuncs config -> IO (Maybe err) + handleInit exitClientMsg clearReqId waitForCancel clientMsgChan lspFuncs@LSP.LspFuncs{..} = do + + ide <- getIdeState getNextReqId sendFunc (makeLSPVFSHandle lspFuncs) clientCapabilities + withProgress withIndefiniteProgress config rootPath + + _ <- flip forkFinally (const exitClientMsg) $ forever $ do + msg <- readChan clientMsgChan + -- We dispatch notifications synchronously and requests asynchronously + -- This is to ensure that all file edits and config changes are applied before a request is handled + case msg of + Notification x@NotificationMessage{_params, _method} act -> otTracedHandler "Notification" (show _method) $ do + catch (act lspFuncs ide _params) $ \(e :: SomeException) -> + logError (ideLogger ide) $ T.pack $ + "Unexpected exception on notification, please report!\n" ++ + "Message: " ++ show x ++ "\n" ++ + "Exception: " ++ show e + Response x@RequestMessage{_id, _method, _params} wrap act -> void $ async $ + otTracedHandler "Request" (show _method) $ + checkCancelled ide clearReqId waitForCancel lspFuncs wrap act x _id _params $ + \case + Left e -> sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) (Left e) + Right r -> sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) (Right r) + ResponseAndRequest x@RequestMessage{_id, _method, _params} wrap wrapNewReq act -> void $ async $ + otTracedHandler "Request" (show _method) $ + checkCancelled ide clearReqId waitForCancel lspFuncs wrap act x _id _params $ + \(res, newReq) -> do + case res of + Left e -> sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) (Left e) + Right r -> sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) (Right r) + whenJust newReq $ \(rm, newReqParams) -> do + reqId <- getNextReqId + sendFunc $ wrapNewReq $ RequestMessage "2.0" reqId rm newReqParams + InitialParams x@RequestMessage{_id, _method, _params} act -> + otTracedHandler "Initialize" (show _method) $ + catch (act lspFuncs ide _params) $ \(e :: SomeException) -> + logError (ideLogger ide) $ T.pack $ + "Unexpected exception on InitializeRequest handler, please report!\n" ++ + "Message: " ++ show x ++ "\n" ++ + "Exception: " ++ show e + pure Nothing + + checkCancelled ide clearReqId waitForCancel lspFuncs@LSP.LspFuncs{..} wrap act msg _id _params k = + flip finally (clearReqId _id) $ + catch (do + -- We could optimize this by first checking if the id + -- is in the cancelled set. However, this is unlikely to be a + -- bottleneck and the additional check might hide + -- issues with async exceptions that need to be fixed. 
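-- [Editor's note: standalone sketch, not part of the patch.] The race set up
-- just below pits the real handler against 'waitForCancel', an STM transaction
-- that blocks (via 'retry') until the request id appears in the cancelled set,
-- so a 'Left ()' result means the request was cancelled first. In isolation,
-- with illustrative names:

import Control.Concurrent.Async (race)
import Control.Concurrent.STM
import Control.Monad (unless)
import qualified Data.Set as Set

raceAgainstCancel :: Ord k => TVar (Set.Set k) -> k -> IO a -> IO (Either () a)
raceAgainstCancel cancelled reqId handler = race waitForCancel handler
  where
    -- retries the transaction until someone inserts reqId into 'cancelled'
    waitForCancel = atomically $ do
      cancelledNow <- readTVar cancelled
      unless (reqId `Set.member` cancelledNow) retry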
+ cancelOrRes <- race (waitForCancel _id) $ act lspFuncs ide _params + case cancelOrRes of + Left () -> do + logDebug (ideLogger ide) $ T.pack $ + "Cancelled request " <> show _id + sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) $ Left + $ ResponseError RequestCancelled "" Nothing + Right res -> k res + ) $ \(e :: SomeException) -> do + logError (ideLogger ide) $ T.pack $ + "Unexpected exception on request, please report!\n" ++ + "Message: " ++ show msg ++ "\n" ++ + "Exception: " ++ show e + sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) $ Left + $ ResponseError InternalError (T.pack $ show e) Nothing + +initializeRequestHandler :: PartialHandlers config +initializeRequestHandler = PartialHandlers $ \WithMessage{..} x -> return x{ + LSP.initializeRequestHandler = withInitialize initHandler + } + +initHandler + :: LSP.LspFuncs c + -> IdeState + -> InitializeParams + -> IO () +initHandler _ ide params = do + let initConfig = parseConfiguration params + logInfo (ideLogger ide) $ T.pack $ "Registering ide configuration: " <> show initConfig + registerIdeConfiguration (shakeExtras ide) initConfig + +-- | Things that get sent to us, but we don't deal with. +-- Set them to avoid a warning in VS Code output. +setHandlersIgnore :: PartialHandlers config +setHandlersIgnore = PartialHandlers $ \_ x -> return x + {LSP.responseHandler = none + } + where none = Just $ const $ return () + +cancelHandler :: (LspId -> IO ()) -> PartialHandlers config +cancelHandler cancelRequest = PartialHandlers $ \_ x -> return x + {LSP.cancelNotificationHandler = Just $ \msg@NotificationMessage {_params = CancelParams {_id}} -> do + cancelRequest _id + whenJust (LSP.cancelNotificationHandler x) ($ msg) + } + +exitHandler :: IO () -> PartialHandlers c +exitHandler exit = PartialHandlers $ \_ x -> return x + {LSP.exitNotificationHandler = Just $ const exit} + +-- | A message that we need to deal with - the pieces are split up with existentials to gain additional type safety +-- and defer precise processing until later (allows us to keep at a higher level of abstraction slightly longer) +data Message c + = forall m req resp . (Show m, Show req) => Response (RequestMessage m req resp) (ResponseMessage resp -> FromServerMessage) (LSP.LspFuncs c -> IdeState -> req -> IO (Either ResponseError resp)) + -- | Used for cases in which we need to send not only a response, + -- but also an additional request to the client. + -- For example, 'executeCommand' may generate an 'applyWorkspaceEdit' request. + | forall m rm req resp newReqParams newReqBody . (Show m, Show rm, Show req) => ResponseAndRequest (RequestMessage m req resp) (ResponseMessage resp -> FromServerMessage) (RequestMessage rm newReqParams newReqBody -> FromServerMessage) (LSP.LspFuncs c -> IdeState -> req -> IO (Either ResponseError resp, Maybe (rm, newReqParams))) + | forall m req . (Show m, Show req) => Notification (NotificationMessage m req) (LSP.LspFuncs c -> IdeState -> req -> IO ()) + -- | Used for the InitializeRequest only, where the response is generated by the LSP core handler. 
+ | InitialParams InitializeRequest (LSP.LspFuncs c -> IdeState -> InitializeParams -> IO ()) + +modifyOptions :: LSP.Options -> LSP.Options +modifyOptions x = x{ LSP.textDocumentSync = Just $ tweakTDS origTDS + } + where + tweakTDS tds = tds{_openClose=Just True, _change=Just TdSyncIncremental, _save=Just $ SaveOptions Nothing} + origTDS = fromMaybe tdsDefault $ LSP.textDocumentSync x + tdsDefault = TextDocumentSyncOptions Nothing Nothing Nothing Nothing Nothing diff --git a/ghcide/src/Development/IDE/LSP/Notifications.hs b/ghcide/src/Development/IDE/LSP/Notifications.hs new file mode 100644 index 00000000000..a0df325ffc8 --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Notifications.hs @@ -0,0 +1,147 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE RankNTypes #-} + +module Development.IDE.LSP.Notifications + ( setHandlersNotifications + ) where + +import Development.IDE.LSP.Server +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.Types +import qualified Language.Haskell.LSP.Types as LSP +import qualified Language.Haskell.LSP.Messages as LSP +import qualified Language.Haskell.LSP.Types.Capabilities as LSP + +import Development.IDE.Core.IdeConfiguration +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.IDE.Types.Location +import Development.IDE.Types.Logger +import Development.IDE.Types.Options + +import Control.Monad.Extra +import qualified Data.Aeson as A +import Data.Foldable as F +import Data.Maybe +import qualified Data.HashMap.Strict as M +import qualified Data.HashSet as S +import qualified Data.Text as Text + +import Development.IDE.Core.FileStore (setSomethingModified, setFileModified, typecheckParents) +import Development.IDE.Core.FileExists (modifyFileExists, watchedGlobs) +import Development.IDE.Core.OfInterest + + +whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO () +whenUriFile uri act = whenJust (LSP.uriToFilePath uri) $ act . 
toNormalizedFilePath' + +setHandlersNotifications :: PartialHandlers c +setHandlersNotifications = PartialHandlers $ \WithMessage{..} x -> return x + {LSP.didOpenTextDocumentNotificationHandler = withNotification (LSP.didOpenTextDocumentNotificationHandler x) $ + \_ ide (DidOpenTextDocumentParams TextDocumentItem{_uri,_version}) -> do + updatePositionMapping ide (VersionedTextDocumentIdentifier _uri (Just _version)) (List []) + whenUriFile _uri $ \file -> do + -- We don't know if the file actually exists, or if the contents match those on disk + -- For example, vscode restores previously unsaved contents on open + modifyFilesOfInterest ide (M.insert file Modified) + setFileModified ide False file + logInfo (ideLogger ide) $ "Opened text document: " <> getUri _uri + + ,LSP.didChangeTextDocumentNotificationHandler = withNotification (LSP.didChangeTextDocumentNotificationHandler x) $ + \_ ide (DidChangeTextDocumentParams identifier@VersionedTextDocumentIdentifier{_uri} changes) -> do + updatePositionMapping ide identifier changes + whenUriFile _uri $ \file -> do + modifyFilesOfInterest ide (M.insert file Modified) + setFileModified ide False file + logInfo (ideLogger ide) $ "Modified text document: " <> getUri _uri + + ,LSP.didSaveTextDocumentNotificationHandler = withNotification (LSP.didSaveTextDocumentNotificationHandler x) $ + \_ ide (DidSaveTextDocumentParams TextDocumentIdentifier{_uri}) -> do + whenUriFile _uri $ \file -> do + modifyFilesOfInterest ide (M.insert file OnDisk) + setFileModified ide True file + logInfo (ideLogger ide) $ "Saved text document: " <> getUri _uri + + ,LSP.didCloseTextDocumentNotificationHandler = withNotification (LSP.didCloseTextDocumentNotificationHandler x) $ + \_ ide (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> do + whenUriFile _uri $ \file -> do + modifyFilesOfInterest ide (M.delete file) + -- Refresh all the files that depended on this + IdeOptions{optCheckParents} <- getIdeOptionsIO $ shakeExtras ide + when (optCheckParents >= CheckOnClose) $ typecheckParents ide file + logInfo (ideLogger ide) $ "Closed text document: " <> getUri _uri + ,LSP.didChangeWatchedFilesNotificationHandler = withNotification (LSP.didChangeWatchedFilesNotificationHandler x) $ + \_ ide (DidChangeWatchedFilesParams fileEvents) -> do + -- See Note [File existence cache and LSP file watchers] which explains why we get these notifications and + -- what we do with them + let events = + mapMaybe + (\(FileEvent uri ev) -> + (, ev /= FcDeleted) . toNormalizedFilePath' + <$> LSP.uriToFilePath uri + ) + ( F.toList fileEvents ) + let msg = Text.pack $ show events + logDebug (ideLogger ide) $ "Files created or deleted: " <> msg + modifyFileExists ide events + setSomethingModified ide + + ,LSP.didChangeWorkspaceFoldersNotificationHandler = withNotification (LSP.didChangeWorkspaceFoldersNotificationHandler x) $ + \_ ide (DidChangeWorkspaceFoldersParams events) -> do + let add = S.union + substract = flip S.difference + modifyWorkspaceFolders ide + $ add (foldMap (S.singleton . parseWorkspaceFolder) (_added events)) + . substract (foldMap (S.singleton . 
parseWorkspaceFolder) (_removed events)) + + ,LSP.didChangeConfigurationParamsHandler = withNotification (LSP.didChangeConfigurationParamsHandler x) $ + \_ ide (DidChangeConfigurationParams cfg) -> do + let msg = Text.pack $ show cfg + logInfo (ideLogger ide) $ "Configuration changed: " <> msg + modifyClientSettings ide (const $ Just cfg) + setSomethingModified ide + + -- Initialized handler, good time to dynamically register capabilities + ,LSP.initializedHandler = withNotification (LSP.initializedHandler x) $ \lsp@LSP.LspFuncs{..} ide _ -> do + let watchSupported = case () of + _ | LSP.ClientCapabilities{_workspace} <- clientCapabilities + , Just LSP.WorkspaceClientCapabilities{_didChangeWatchedFiles} <- _workspace + , Just LSP.DidChangeWatchedFilesClientCapabilities{_dynamicRegistration} <- _didChangeWatchedFiles + , Just True <- _dynamicRegistration + -> True + | otherwise -> False + + if watchSupported + then registerWatcher lsp ide + else logDebug (ideLogger ide) "Warning: Client does not support watched files. Falling back to OS polling" + + } + where + registerWatcher LSP.LspFuncs{..} ide = do + lspId <- getNextReqId + opts <- getIdeOptionsIO $ shakeExtras ide + let + req = RequestMessage "2.0" lspId ClientRegisterCapability regParams + regParams = RegistrationParams (List [registration]) + -- The registration ID is arbitrary and is only used in case we want to deregister (which we won't). + -- We could also use something like a random UUID, as some other servers do, but this works for + -- our purposes. + registration = Registration "globalFileWatches" + WorkspaceDidChangeWatchedFiles + (Just (A.toJSON regOptions)) + regOptions = + DidChangeWatchedFilesRegistrationOptions { _watchers = List watchers } + -- See Note [File existence cache and LSP file watchers] for why this exists, and the choice of watch kind + watchKind = WatchKind { _watchCreate = True, _watchChange = False, _watchDelete = True} + -- See Note [Which files should we watch?] for an explanation of why the pattern is the way that it is + -- The patterns will be something like "**/.hs", i.e. "any number of directory segments, + -- followed by a file with an extension 'hs'. 
+ watcher glob = FileSystemWatcher { _globPattern = glob, _kind = Just watchKind } + -- We use multiple watchers instead of one using '{}' because lsp-test doesn't + -- support that: https://github.com/bubba/lsp-test/issues/77 + watchers = [ watcher glob | glob <- watchedGlobs opts ] + + sendFunc $ LSP.ReqRegisterCapability req diff --git a/ghcide/src/Development/IDE/LSP/Outline.hs b/ghcide/src/Development/IDE/LSP/Outline.hs new file mode 100644 index 00000000000..579e4e18e39 --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Outline.hs @@ -0,0 +1,230 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DuplicateRecordFields #-} +#include "ghc-api-version.h" + +module Development.IDE.LSP.Outline + ( setHandlersOutline + -- * For haskell-language-server + , moduleOutline + ) +where + +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import Data.Functor +import Data.Generics +import Data.Maybe +import Data.Text ( Text + , pack + ) +import qualified Data.Text as T +import Development.IDE.Core.Rules +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error ( realSrcSpanToRange ) +import Development.IDE.LSP.Server +import Development.IDE.Types.Location +import Outputable ( Outputable + , ppr + , showSDocUnsafe + ) + +setHandlersOutline :: PartialHandlers c +setHandlersOutline = PartialHandlers $ \WithMessage {..} x -> return x + { LSP.documentSymbolHandler = withResponse RspDocumentSymbols moduleOutline + } + +moduleOutline + :: LSP.LspFuncs c -> IdeState -> DocumentSymbolParams -> IO (Either ResponseError DSResult) +moduleOutline _lsp ideState DocumentSymbolParams { _textDocument = TextDocumentIdentifier uri } + = case uriToFilePath uri of + Just (toNormalizedFilePath' -> fp) -> do + mb_decls <- fmap fst <$> runIdeAction "Outline" (shakeExtras ideState) (useWithStaleFast GetParsedModule fp) + pure $ Right $ case mb_decls of + Nothing -> DSDocumentSymbols (List []) + Just ParsedModule { pm_parsed_source = L _ltop HsModule { hsmodName, hsmodDecls, hsmodImports } } + -> let + declSymbols = mapMaybe documentSymbolForDecl hsmodDecls + moduleSymbol = hsmodName >>= \case + (L (RealSrcSpan l) m) -> Just $ + (defDocumentSymbol l :: DocumentSymbol) + { _name = pprText m + , _kind = SkFile + , _range = Range (Position 0 0) (Position maxBound 0) -- _ltop is 0 0 0 0 + } + _ -> Nothing + importSymbols = maybe [] pure $ + documentSymbolForImportSummary + (mapMaybe documentSymbolForImport hsmodImports) + allSymbols = case moduleSymbol of + Nothing -> importSymbols <> declSymbols + Just x -> + [ x { _children = Just (List (importSymbols <> declSymbols)) + } + ] + in + DSDocumentSymbols (List allSymbols) + + + Nothing -> pure $ Right $ DSDocumentSymbols (List []) + +documentSymbolForDecl :: Located (HsDecl GhcPs) -> Maybe DocumentSymbol +documentSymbolForDecl (L (RealSrcSpan l) (TyClD _ FamDecl { tcdFam = FamilyDecl { fdLName = L _ n, fdInfo, fdTyVars } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName n + <> (case pprText fdTyVars of + "" -> "" + t -> " " <> t + ) + , _detail = Just $ pprText fdInfo + , _kind = SkClass + } +documentSymbolForDecl (L (RealSrcSpan l) (TyClD _ ClassDecl { tcdLName = L _ name, tcdSigs, tcdTyVars })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName name + <> (case pprText tcdTyVars of + "" -> "" + t -> " " <> t + ) + , _kind = SkClass + , _detail = Just "class" + , _children = + Just $ List + [ (defDocumentSymbol l :: 
DocumentSymbol) + { _name = showRdrName n + , _kind = SkMethod + , _selectionRange = realSrcSpanToRange l' + } + | L (RealSrcSpan l) (ClassOpSig _ False names _) <- tcdSigs + , L (RealSrcSpan l') n <- names + ] + } +documentSymbolForDecl (L (RealSrcSpan l) (TyClD _ DataDecl { tcdLName = L _ name, tcdDataDefn = HsDataDefn { dd_cons } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName name + , _kind = SkStruct + , _children = + Just $ List + [ (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName n + , _kind = SkConstructor + , _selectionRange = realSrcSpanToRange l' + , _children = conArgRecordFields (getConArgs x) + } + | L (RealSrcSpan l ) x <- dd_cons + , L (RealSrcSpan l') n <- getConNames x + ] + } + where + -- | Extract the record fields of a constructor + conArgRecordFields (RecCon (L _ lcdfs)) = Just $ List + [ (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName n + , _kind = SkField + } + | L _ cdf <- lcdfs + , L (RealSrcSpan l) n <- rdrNameFieldOcc . unLoc <$> cd_fld_names cdf + ] + conArgRecordFields _ = Nothing +documentSymbolForDecl (L (RealSrcSpan l) (TyClD _ SynDecl { tcdLName = L (RealSrcSpan l') n })) = Just + (defDocumentSymbol l :: DocumentSymbol) { _name = showRdrName n + , _kind = SkTypeParameter + , _selectionRange = realSrcSpanToRange l' + } +documentSymbolForDecl (L (RealSrcSpan l) (InstD _ ClsInstD { cid_inst = ClsInstDecl { cid_poly_ty } })) + = Just (defDocumentSymbol l :: DocumentSymbol) { _name = pprText cid_poly_ty + , _kind = SkInterface + } +documentSymbolForDecl (L (RealSrcSpan l) (InstD _ DataFamInstD { dfid_inst = DataFamInstDecl HsIB { hsib_body = FamEqn { feqn_tycon, feqn_pats } } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName (unLoc feqn_tycon) <> " " <> T.unwords + (map pprText feqn_pats) + , _kind = SkInterface + } +documentSymbolForDecl (L (RealSrcSpan l) (InstD _ TyFamInstD { tfid_inst = TyFamInstDecl HsIB { hsib_body = FamEqn { feqn_tycon, feqn_pats } } })) + = Just (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName (unLoc feqn_tycon) <> " " <> T.unwords + (map pprText feqn_pats) + , _kind = SkInterface + } +documentSymbolForDecl (L (RealSrcSpan l) (DerivD _ DerivDecl { deriv_type })) = + gfindtype deriv_type <&> \(L (_ :: SrcSpan) name) -> + (defDocumentSymbol l :: DocumentSymbol) { _name = pprText @(HsType GhcPs) + name + , _kind = SkInterface + } +documentSymbolForDecl (L (RealSrcSpan l) (ValD _ FunBind{fun_id = L _ name})) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = showRdrName name + , _kind = SkFunction + } +documentSymbolForDecl (L (RealSrcSpan l) (ValD _ PatBind{pat_lhs})) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = pprText pat_lhs + , _kind = SkFunction + } + +documentSymbolForDecl (L (RealSrcSpan l) (ForD _ x)) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = case x of + ForeignImport{} -> name + ForeignExport{} -> name + XForeignDecl{} -> "?" + , _kind = SkObject + , _detail = case x of + ForeignImport{} -> Just "import" + ForeignExport{} -> Just "export" + XForeignDecl{} -> Nothing + } + where name = showRdrName $ unLoc $ fd_name x + +documentSymbolForDecl _ = Nothing + +-- | Wrap the Document imports into a hierarchical outline for +-- a better overview of symbols in scope. +-- If there are no imports, then no hierarchy will be created. 
+documentSymbolForImportSummary :: [DocumentSymbol] -> Maybe DocumentSymbol +documentSymbolForImportSummary [] = Nothing +documentSymbolForImportSummary importSymbols = + let + -- safe because if we have no ranges then we don't take this branch + mergeRanges xs = Range (minimum $ map _start xs) (maximum $ map _end xs) + importRange = mergeRanges $ map (_range :: DocumentSymbol -> Range) importSymbols + in + Just (defDocumentSymbol empty :: DocumentSymbol) + { _name = "imports" + , _kind = SkModule + , _children = Just (List importSymbols) + , _range = importRange + , _selectionRange = importRange + } + +documentSymbolForImport :: Located (ImportDecl GhcPs) -> Maybe DocumentSymbol +documentSymbolForImport (L (RealSrcSpan l) ImportDecl { ideclName, ideclQualified }) = Just + (defDocumentSymbol l :: DocumentSymbol) + { _name = "import " <> pprText ideclName + , _kind = SkModule +#if MIN_GHC_API_VERSION(8,10,0) + , _detail = case ideclQualified of { NotQualified -> Nothing; _ -> Just "qualified" } +#else + , _detail = if ideclQualified then Just "qualified" else Nothing +#endif + } +documentSymbolForImport _ = Nothing + +defDocumentSymbol :: RealSrcSpan -> DocumentSymbol +defDocumentSymbol l = DocumentSymbol { .. } where + _detail = Nothing + _deprecated = Nothing + _name = "" + _kind = SkUnknown 0 + _range = realSrcSpanToRange l + _selectionRange = realSrcSpanToRange l + _children = Nothing + +showRdrName :: RdrName -> Text +showRdrName = pprText + +pprText :: Outputable a => a -> Text +pprText = pack . showSDocUnsafe . ppr diff --git a/ghcide/src/Development/IDE/LSP/Protocol.hs b/ghcide/src/Development/IDE/LSP/Protocol.hs new file mode 100644 index 00000000000..1c1870e2c4f --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Protocol.hs @@ -0,0 +1,23 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 +{-# LANGUAGE PatternSynonyms #-} + +module Development.IDE.LSP.Protocol + ( pattern EventFileDiagnostics + ) where + +import Development.IDE.Types.Diagnostics +import Development.IDE.Types.Location +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types + +---------------------------------------------------------------------------------------------------- +-- Pretty printing +---------------------------------------------------------------------------------------------------- + +-- | Pattern synonym to make it a bit more convenient to match on diagnostics +-- in things like damlc test. +pattern EventFileDiagnostics :: FilePath -> [Diagnostic] -> FromServerMessage +pattern EventFileDiagnostics fp diags <- + NotPublishDiagnostics + (NotificationMessage _ _ (PublishDiagnosticsParams (uriToFilePath' -> Just fp) (List diags))) diff --git a/ghcide/src/Development/IDE/LSP/Server.hs b/ghcide/src/Development/IDE/LSP/Server.hs new file mode 100644 index 00000000000..976c25328a6 --- /dev/null +++ b/ghcide/src/Development/IDE/LSP/Server.hs @@ -0,0 +1,47 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE RankNTypes #-} +module Development.IDE.LSP.Server + ( WithMessage(..) + , PartialHandlers(..) + ) where + + +import Data.Default + +import Language.Haskell.LSP.Types +import qualified Language.Haskell.LSP.Core as LSP +import qualified Language.Haskell.LSP.Messages as LSP +import Development.IDE.Core.Service + +data WithMessage c = WithMessage + {withResponse :: forall m req resp . 
(Show m, Show req) => + (ResponseMessage resp -> LSP.FromServerMessage) -> -- how to wrap a response + (LSP.LspFuncs c -> IdeState -> req -> IO (Either ResponseError resp)) -> -- actual work + Maybe (LSP.Handler (RequestMessage m req resp)) + ,withNotification :: forall m req . (Show m, Show req) => + Maybe (LSP.Handler (NotificationMessage m req)) -> -- old notification handler + (LSP.LspFuncs c -> IdeState -> req -> IO ()) -> -- actual work + Maybe (LSP.Handler (NotificationMessage m req)) + ,withResponseAndRequest :: forall m rm req resp newReqParams newReqBody . + (Show m, Show rm, Show req, Show newReqParams, Show newReqBody) => + (ResponseMessage resp -> LSP.FromServerMessage) -> -- how to wrap a response + (RequestMessage rm newReqParams newReqBody -> LSP.FromServerMessage) -> -- how to wrap the additional req + (LSP.LspFuncs c -> IdeState -> req -> IO (Either ResponseError resp, Maybe (rm, newReqParams))) -> -- actual work + Maybe (LSP.Handler (RequestMessage m req resp)) + , withInitialize :: (LSP.LspFuncs c -> IdeState -> InitializeParams -> IO ()) + -> Maybe (LSP.Handler InitializeRequest) + } + +newtype PartialHandlers c = PartialHandlers (WithMessage c -> LSP.Handlers -> IO LSP.Handlers) + +instance Default (PartialHandlers c) where + def = PartialHandlers $ \_ x -> pure x + +instance Semigroup (PartialHandlers c) where + PartialHandlers a <> PartialHandlers b = PartialHandlers $ \w x -> a w x >>= b w + +instance Monoid (PartialHandlers c) where + mempty = def diff --git a/ghcide/src/Development/IDE/Plugin.hs b/ghcide/src/Development/IDE/Plugin.hs new file mode 100644 index 00000000000..e232e3f20c3 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin.hs @@ -0,0 +1,60 @@ + +module Development.IDE.Plugin(Plugin(..), codeActionPlugin, codeActionPluginWithRules,makeLspCommandId,getPid) where + +import Data.Default +import qualified Data.Text as T +import Development.Shake +import Development.IDE.LSP.Server + +import Language.Haskell.LSP.Types +import Development.IDE.Compat +import Development.IDE.Core.Rules +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.Messages + + +data Plugin c = Plugin + {pluginRules :: Rules () + ,pluginHandler :: PartialHandlers c + } + +instance Default (Plugin c) where + def = Plugin mempty def + +instance Semigroup (Plugin c) where + Plugin x1 y1 <> Plugin x2 y2 = Plugin (x1<>x2) (y1<>y2) + +instance Monoid (Plugin c) where + mempty = def + + +codeActionPlugin :: (LSP.LspFuncs c -> IdeState -> TextDocumentIdentifier -> Range -> CodeActionContext -> IO (Either ResponseError [CAResult])) -> Plugin c +codeActionPlugin = codeActionPluginWithRules mempty + +codeActionPluginWithRules :: Rules () -> (LSP.LspFuncs c -> IdeState -> TextDocumentIdentifier -> Range -> CodeActionContext -> IO (Either ResponseError [CAResult])) -> Plugin c +codeActionPluginWithRules rr f = Plugin rr $ PartialHandlers $ \WithMessage{..} x -> return x{ + LSP.codeActionHandler = withResponse RspCodeAction g + } + where + g lsp state (CodeActionParams a b c _) = fmap List <$> f lsp state a b c + +-- | Prefix to uniquely identify commands sent to the client. This +-- has two parts +-- +-- - A representation of the process id to make sure that a client has +-- unique commands if it is running multiple servers, since some +-- clients have a global command table and get confused otherwise. +-- +-- - A string to identify ghcide, to ease integration into +-- haskell-language-server, which routes commands to plugins based +-- on that. 
+makeLspCommandId :: T.Text -> IO T.Text +makeLspCommandId command = do + pid <- getPid + return $ pid <> ":ghcide:" <> command + +-- | Get the operating system process id for the running server +-- instance. This should be the same for the lifetime of the instance, +-- and different from that of any other currently running instance. +getPid :: IO T.Text +getPid = T.pack . show <$> getProcessID diff --git a/ghcide/src/Development/IDE/Plugin/CodeAction.hs b/ghcide/src/Development/IDE/Plugin/CodeAction.hs new file mode 100644 index 00000000000..a1bff637ad3 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/CodeAction.hs @@ -0,0 +1,1177 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE CPP #-} +#include "ghc-api-version.h" + +-- | Go to the definition of a variable. +module Development.IDE.Plugin.CodeAction + ( + plugin + + -- * For haskell-language-server + , codeAction + , codeLens + , rulePackageExports + , commandHandler + + -- * For testing + , blockCommandId + , typeSignatureCommandId + , matchRegExMultipleImports + ) where + +import Control.Monad (join, guard) +import Development.IDE.Plugin +import Development.IDE.GHC.Compat +import Development.IDE.Core.Rules +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.IDE.GHC.Error +import Development.IDE.LSP.Server +import Development.IDE.Plugin.CodeAction.PositionIndexed +import Development.IDE.Plugin.CodeAction.RuleTypes +import Development.IDE.Plugin.CodeAction.Rules +import Development.IDE.Types.Exports +import Development.IDE.Types.Location +import Development.IDE.Types.Options +import Development.Shake (Rules) +import qualified Data.HashMap.Strict as Map +import qualified Language.Haskell.LSP.Core as LSP +import Language.Haskell.LSP.VFS +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import qualified Data.Rope.UTF16 as Rope +import Data.Aeson.Types (toJSON, fromJSON, Value(..), Result(..)) +import Data.Char +import Data.Maybe +import Data.List.Extra +import qualified Data.Text as T +import Text.Regex.TDFA (mrAfter, (=~), (=~~)) +import Outputable (ppr, showSDocUnsafe) +import Data.Function +import Control.Arrow ((>>>)) +import Data.Functor +import Control.Applicative ((<|>)) +import Safe (atMay) +import Bag (isEmptyBag) +import qualified Data.HashSet as Set +import Control.Concurrent.Extra (threadDelay, readVar) + +plugin :: Plugin c +plugin = codeActionPluginWithRules rules codeAction <> Plugin mempty setHandlersCodeLens + +rules :: Rules () +rules = rulePackageExports + +-- | a command that blocks forever. Used for testing +blockCommandId :: T.Text +blockCommandId = "ghcide.command.block" + +typeSignatureCommandId :: T.Text +typeSignatureCommandId = "typesignature.add" + +-- | Generate code actions. +codeAction + :: LSP.LspFuncs c + -> IdeState + -> TextDocumentIdentifier + -> Range + -> CodeActionContext + -> IO (Either ResponseError [CAResult]) +codeAction lsp state (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=List xs} = do + contents <- LSP.getVirtualFileFunc lsp $ toNormalizedUri uri + let text = Rope.toText . (_text :: VirtualFile -> Rope.Rope) <$> contents + mbFile = toNormalizedFilePath' <$> uriToFilePath uri + diag <- fmap (\(_, _, d) -> d) . 
filter (\(p, _, _) -> mbFile == Just p) <$> getDiagnostics state + (ideOptions, join -> parsedModule, join -> env) <- runAction "CodeAction" state $ + (,,) <$> getIdeOptions + <*> getParsedModule `traverse` mbFile + <*> use GhcSession `traverse` mbFile + -- This is quite expensive 0.6-0.7s on GHC + pkgExports <- runAction "CodeAction:PackageExports" state $ (useNoFile_ . PackageExports) `traverse` env + localExports <- readVar (exportsMap $ shakeExtras state) + let exportsMap = localExports <> fromMaybe mempty pkgExports + pure . Right $ + [ CACodeAction $ CodeAction title (Just CodeActionQuickFix) (Just $ List [x]) (Just edit) Nothing + | x <- xs, (title, tedit) <- suggestAction exportsMap ideOptions parsedModule text x + , let edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing + ] <> caRemoveRedundantImports parsedModule text diag xs uri + +-- | Generate code lenses. +codeLens + :: LSP.LspFuncs c + -> IdeState + -> CodeLensParams + -> IO (Either ResponseError (List CodeLens)) +codeLens _lsp ideState CodeLensParams{_textDocument=TextDocumentIdentifier uri} = do + commandId <- makeLspCommandId "typesignature.add" + fmap (Right . List) $ case uriToFilePath' uri of + Just (toNormalizedFilePath' -> filePath) -> do + _ <- runAction "codeLens" ideState (use TypeCheck filePath) + diag <- getDiagnostics ideState + hDiag <- getHiddenDiagnostics ideState + pure + [ CodeLens _range (Just (Command title commandId (Just $ List [toJSON edit]))) Nothing + | (dFile, _, dDiag@Diagnostic{_range=_range}) <- diag ++ hDiag + , dFile == filePath + , (title, tedit) <- suggestSignature False dDiag + , let edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing + ] + Nothing -> pure [] + +-- | Execute the "typesignature.add" command. +commandHandler + :: LSP.LspFuncs c + -> IdeState + -> ExecuteCommandParams + -> IO (Either ResponseError Value, Maybe (ServerMethod, ApplyWorkspaceEditParams)) +commandHandler lsp _ideState ExecuteCommandParams{..} + -- _command is prefixed with a process ID, because certain clients + -- have a global command registry, and all commands must be + -- unique. And there can be more than one ghcide instance running + -- at a time against the same client. 
+ | T.isSuffixOf blockCommandId _command + = do + LSP.sendFunc lsp $ NotCustomServer $ + NotificationMessage "2.0" (CustomServerMethod "ghcide/blocking/command") Null + threadDelay maxBound + return (Right Null, Nothing) + | T.isSuffixOf typeSignatureCommandId _command + , Just (List [edit]) <- _arguments + , Success wedit <- fromJSON edit + = return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams wedit)) + | otherwise + = return (Right Null, Nothing) + +suggestAction + :: ExportsMap + -> IdeOptions + -> Maybe ParsedModule + -> Maybe T.Text + -> Diagnostic + -> [(T.Text, [TextEdit])] +suggestAction packageExports ideOptions parsedModule text diag = concat + -- Order these suggestions by priority + [ suggestSignature True diag + , suggestExtendImport packageExports text diag + , suggestFillTypeWildcard diag + , suggestFixConstructorImport text diag + , suggestModuleTypo diag + , suggestReplaceIdentifier text diag + , removeRedundantConstraints text diag + , suggestAddTypeAnnotationToSatisfyContraints text diag + ] ++ concat + [ suggestConstraint pm text diag + ++ suggestNewDefinition ideOptions pm text diag + ++ suggestNewImport packageExports pm diag + ++ suggestDeleteUnusedBinding pm text diag + ++ suggestExportUnusedTopBinding text pm diag + | Just pm <- [parsedModule] + ] ++ + suggestFillHole diag -- Lowest priority + + +suggestRemoveRedundantImport :: ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestRemoveRedundantImport ParsedModule{pm_parsed_source = L _ HsModule{hsmodImports}} contents Diagnostic{_range=_range,..} +-- The qualified import of ‘many’ from module ‘Control.Applicative’ is redundant + | Just [_, bindings] <- matchRegexUnifySpaces _message "The( qualified)? import of ‘([^’]*)’ from module [^ ]* is redundant" + , Just (L _ impDecl) <- find (\(L l _) -> srcSpanToRange l == Just _range ) hsmodImports + , Just c <- contents + , ranges <- map (rangesForBinding impDecl . T.unpack) (T.splitOn ", " bindings) + , ranges' <- extendAllToIncludeCommaIfPossible (indexedByPosition $ T.unpack c) (concat ranges) + , not (null ranges') + = [( "Remove " <> bindings <> " from import" , [ TextEdit r "" | r <- ranges' ] )] + +-- File.hs:16:1: warning: +-- The import of `Data.List' is redundant +-- except perhaps to import instances from `Data.List' +-- To import instances alone, use: import Data.List() + | _message =~ ("The( qualified)? 
import of [^ ]* is redundant" :: String) + = [("Remove import", [TextEdit (extendToWholeLineIfPossible contents _range) ""])] + | otherwise = [] + +caRemoveRedundantImports :: Maybe ParsedModule -> Maybe T.Text -> [Diagnostic] -> [Diagnostic] -> Uri -> [CAResult] +caRemoveRedundantImports m contents digs ctxDigs uri + | Just pm <- m, + r <- join $ map (\d -> repeat d `zip` suggestRemoveRedundantImport pm contents d) digs, + allEdits <- [ e | (_, (_, edits)) <- r, e <- edits], + caRemoveAll <- removeAll allEdits, + ctxEdits <- [ x | x@(d, _) <- r, d `elem` ctxDigs], + not $ null ctxEdits, + caRemoveCtx <- map (\(d, (title, tedit)) -> removeSingle title tedit d) ctxEdits + = caRemoveCtx ++ [caRemoveAll] + | otherwise = [] + where + removeSingle title tedit diagnostic = CACodeAction CodeAction{..} where + _changes = Just $ Map.singleton uri $ List tedit + _title = title + _kind = Just CodeActionQuickFix + _diagnostics = Just $ List [diagnostic] + _documentChanges = Nothing + _edit = Just WorkspaceEdit{..} + _command = Nothing + removeAll tedit = CACodeAction CodeAction {..} where + _changes = Just $ Map.singleton uri $ List tedit + _title = "Remove all redundant imports" + _kind = Just CodeActionQuickFix + _diagnostics = Nothing + _documentChanges = Nothing + _edit = Just WorkspaceEdit{..} + _command = Nothing + +suggestDeleteUnusedBinding :: ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestDeleteUnusedBinding + ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls}} + contents + Diagnostic{_range=_range,..} +-- Foo.hs:4:1: warning: [-Wunused-binds] Defined but not used: ‘f’ + | Just [name] <- matchRegexUnifySpaces _message ".*Defined but not used: ‘([^ ]+)’" + , Just indexedContent <- indexedByPosition . T.unpack <$> contents + = let edits = flip TextEdit "" <$> relatedRanges indexedContent (T.unpack name) + in ([("Delete ‘" <> name <> "’", edits) | not (null edits)]) + | otherwise = [] + where + relatedRanges indexedContent name = + concatMap (findRelatedSpans indexedContent name) hsmodDecls + toRange = realSrcSpanToRange + extendForSpaces = extendToIncludePreviousNewlineIfPossible + + findRelatedSpans :: PositionIndexedString -> String -> Located (HsDecl GhcPs) -> [Range] + findRelatedSpans + indexedContent + name + (L (RealSrcSpan l) (ValD _ (extractNameAndMatchesFromFunBind -> Just (lname, matches)))) = + case lname of + (L nLoc _name) | isTheBinding nLoc -> + let findSig (L (RealSrcSpan l) (SigD _ sig)) = findRelatedSigSpan indexedContent name l sig + findSig _ = [] + in + [extendForSpaces indexedContent $ toRange l] + ++ concatMap findSig hsmodDecls + _ -> concatMap (findRelatedSpanForMatch indexedContent name) matches + findRelatedSpans _ _ _ = [] + + extractNameAndMatchesFromFunBind + :: HsBind GhcPs + -> Maybe (Located (IdP GhcPs), [LMatch GhcPs (LHsExpr GhcPs)]) + extractNameAndMatchesFromFunBind + FunBind + { fun_id=lname + , fun_matches=MG {mg_alts=L _ matches} + } = Just (lname, matches) + extractNameAndMatchesFromFunBind _ = Nothing + + findRelatedSigSpan :: PositionIndexedString -> String -> RealSrcSpan -> Sig GhcPs -> [Range] + findRelatedSigSpan indexedContent name l sig = + let maybeSpan = findRelatedSigSpan1 name sig + in case maybeSpan of + Just (_span, True) -> pure $ extendForSpaces indexedContent $ toRange l -- a :: Int + Just (RealSrcSpan span, False) -> pure $ toRange span -- a, b :: Int, a is unused + _ -> [] + + -- Second of the tuple means there is only one match + findRelatedSigSpan1 :: String -> Sig GhcPs -> Maybe (SrcSpan, Bool) + 
findRelatedSigSpan1 name (TypeSig _ lnames _) = + let maybeIdx = findIndex (\(L _ id) -> isSameName id name) lnames + in case maybeIdx of + Nothing -> Nothing + Just _ | length lnames == 1 -> Just (getLoc $ head lnames, True) + Just idx -> + let targetLname = getLoc $ lnames !! idx + startLoc = srcSpanStart targetLname + endLoc = srcSpanEnd targetLname + startLoc' = if idx == 0 + then startLoc + else srcSpanEnd . getLoc $ lnames !! (idx - 1) + endLoc' = if idx == 0 && idx < length lnames - 1 + then srcSpanStart . getLoc $ lnames !! (idx + 1) + else endLoc + in Just (mkSrcSpan startLoc' endLoc', False) + findRelatedSigSpan1 _ _ = Nothing + + -- for where clause + findRelatedSpanForMatch + :: PositionIndexedString + -> String + -> LMatch GhcPs (LHsExpr GhcPs) + -> [Range] + findRelatedSpanForMatch + indexedContent + name + (L _ Match{m_grhss=GRHSs{grhssLocalBinds}}) = do + case grhssLocalBinds of + (L _ (HsValBinds _ (ValBinds _ bag lsigs))) -> + if isEmptyBag bag + then [] + else concatMap (findRelatedSpanForHsBind indexedContent name lsigs) bag + _ -> [] + findRelatedSpanForMatch _ _ _ = [] + + findRelatedSpanForHsBind + :: PositionIndexedString + -> String + -> [LSig GhcPs] + -> LHsBind GhcPs + -> [Range] + findRelatedSpanForHsBind + indexedContent + name + lsigs + (L (RealSrcSpan l) (extractNameAndMatchesFromFunBind -> Just (lname, matches))) = + if isTheBinding (getLoc lname) + then + let findSig (L (RealSrcSpan l) sig) = findRelatedSigSpan indexedContent name l sig + findSig _ = [] + in [extendForSpaces indexedContent $ toRange l] ++ concatMap findSig lsigs + else concatMap (findRelatedSpanForMatch indexedContent name) matches + findRelatedSpanForHsBind _ _ _ _ = [] + + isTheBinding :: SrcSpan -> Bool + isTheBinding span = srcSpanToRange span == Just _range + + isSameName :: IdP GhcPs -> String -> Bool + isSameName x name = showSDocUnsafe (ppr x) == name + +data ExportsAs = ExportName | ExportPattern | ExportAll + deriving (Eq) + +suggestExportUnusedTopBinding :: Maybe T.Text -> ParsedModule -> Diagnostic -> [(T.Text, [TextEdit])] +suggestExportUnusedTopBinding srcOpt ParsedModule{pm_parsed_source = L _ HsModule{..}} Diagnostic{..} +-- Foo.hs:4:1: warning: [-Wunused-top-binds] Defined but not used: ‘f’ +-- Foo.hs:5:1: warning: [-Wunused-top-binds] Defined but not used: type constructor or class ‘F’ +-- Foo.hs:6:1: warning: [-Wunused-top-binds] Defined but not used: data constructor ‘Bar’ + | Just source <- srcOpt + , Just [name] <- matchRegexUnifySpaces _message ".*Defined but not used: ‘([^ ]+)’" + <|> matchRegexUnifySpaces _message ".*Defined but not used: type constructor or class ‘([^ ]+)’" + <|> matchRegexUnifySpaces _message ".*Defined but not used: data constructor ‘([^ ]+)’" + , Just (exportType, _) <- find (matchWithDiagnostic _range . snd) + . mapMaybe + (\(L l b) -> if maybe False isTopLevel $ srcSpanToRange l + then exportsAs b else Nothing) + $ hsmodDecls + , Just pos <- fmap _end . 
getLocatedRange =<< hsmodExports + , Just needComma <- needsComma source <$> hsmodExports + , let exportName = (if needComma then "," else "") <> printExport exportType name + insertPos = pos {_character = pred $ _character pos} + = [("Export ‘" <> name <> "’", [TextEdit (Range insertPos insertPos) exportName])] + | otherwise = [] + where + -- we get the last export and the closing bracket and check for comma in that range + needsComma :: T.Text -> Located [LIE GhcPs] -> Bool + needsComma _ (L _ []) = False + needsComma source (L (RealSrcSpan l) exports) = + let closeParan = _end $ realSrcSpanToRange l + lastExport = fmap _end . getLocatedRange $ last exports + in case lastExport of + Just lastExport -> not $ T.isInfixOf "," $ textInRange (Range lastExport closeParan) source + _ -> False + needsComma _ _ = False + + opLetter :: String + opLetter = ":!#$%&*+./<=>?@\\^|-~" + + parenthesizeIfNeeds :: Bool -> T.Text -> T.Text + parenthesizeIfNeeds needsTypeKeyword x + | T.head x `elem` opLetter = (if needsTypeKeyword then "type " else "") <> "(" <> x <>")" + | otherwise = x + + getLocatedRange :: Located a -> Maybe Range + getLocatedRange = srcSpanToRange . getLoc + + matchWithDiagnostic :: Range -> Located (IdP GhcPs) -> Bool + matchWithDiagnostic Range{_start=l,_end=r} x = + let loc = fmap _start . getLocatedRange $ x + in loc >= Just l && loc <= Just r + + printExport :: ExportsAs -> T.Text -> T.Text + printExport ExportName x = parenthesizeIfNeeds False x + printExport ExportPattern x = "pattern " <> x + printExport ExportAll x = parenthesizeIfNeeds True x <> "(..)" + + isTopLevel :: Range -> Bool + isTopLevel l = (_character . _start) l == 0 + + exportsAs :: HsDecl p -> Maybe (ExportsAs, Located (IdP p)) + exportsAs (ValD _ FunBind {fun_id}) = Just (ExportName, fun_id) + exportsAs (ValD _ (PatSynBind _ PSB {psb_id})) = Just (ExportPattern, psb_id) + exportsAs (TyClD _ SynDecl{tcdLName}) = Just (ExportName, tcdLName) + exportsAs (TyClD _ DataDecl{tcdLName}) = Just (ExportAll, tcdLName) + exportsAs (TyClD _ ClassDecl{tcdLName}) = Just (ExportAll, tcdLName) + exportsAs (TyClD _ FamDecl{tcdFam}) = Just (ExportAll, fdLName tcdFam) + exportsAs _ = Nothing + +suggestAddTypeAnnotationToSatisfyContraints :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestAddTypeAnnotationToSatisfyContraints sourceOpt Diagnostic{_range=_range,..} +-- File.hs:52:41: warning: +-- * Defaulting the following constraint to type ‘Integer’ +-- Num p0 arising from the literal ‘1’ +-- * In the expression: 1 +-- In an equation for ‘f’: f = 1 +-- File.hs:52:41: warning: +-- * Defaulting the following constraints to type ‘[Char]’ +-- (Show a0) +-- arising from a use of ‘traceShow’ +-- at A.hs:228:7-25 +-- (IsString a0) +-- arising from the literal ‘"debug"’ +-- at A.hs:228:17-23 +-- * In the expression: traceShow "debug" a +-- In an equation for ‘f’: f a = traceShow "debug" a +-- File.hs:52:41: warning: +-- * Defaulting the following constraints to type ‘[Char]’ +-- (Show a0) +-- arising from a use of ‘traceShow’ +-- at A.hs:255:28-43 +-- (IsString a0) +-- arising from the literal ‘"test"’ +-- at /Users/serhiip/workspace/ghcide/src/Development/IDE/Plugin/CodeAction.hs:255:38-43 +-- * In the fourth argument of ‘seq’, namely ‘(traceShow "test")’ +-- In the expression: seq "test" seq "test" (traceShow "test") +-- In an equation for ‘f’: +-- f = seq "test" seq "test" (traceShow "test") + | Just [ty, lit] <- matchRegexUnifySpaces _message (pat False False True) + <|> matchRegexUnifySpaces _message (pat False False 
False) + = codeEdit ty lit (makeAnnotatedLit ty lit) + | Just source <- sourceOpt + , Just [ty, lit] <- matchRegexUnifySpaces _message (pat True True False) + = let lit' = makeAnnotatedLit ty lit; + tir = textInRange _range source + in codeEdit ty lit (T.replace lit lit' tir) + | otherwise = [] + where + makeAnnotatedLit ty lit = "(" <> lit <> " :: " <> ty <> ")" + pat multiple at inThe = T.concat [ ".*Defaulting the following constraint" + , if multiple then "s" else "" + , " to type ‘([^ ]+)’ " + , ".*arising from the literal ‘(.+)’" + , if inThe then ".+In the.+argument" else "" + , if at then ".+at" else "" + , ".+In the expression" + ] + codeEdit ty lit replacement = + let title = "Add type annotation ‘" <> ty <> "’ to ‘" <> lit <> "’" + edits = [TextEdit _range replacement] + in [( title, edits )] + + +suggestReplaceIdentifier :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestReplaceIdentifier contents Diagnostic{_range=_range,..} +-- File.hs:52:41: error: +-- * Variable not in scope: +-- suggestAcion :: Maybe T.Text -> Range -> Range +-- * Perhaps you meant ‘suggestAction’ (line 83) +-- File.hs:94:37: error: +-- Not in scope: ‘T.isPrfixOf’ +-- Perhaps you meant one of these: +-- ‘T.isPrefixOf’ (imported from Data.Text), +-- ‘T.isInfixOf’ (imported from Data.Text), +-- ‘T.isSuffixOf’ (imported from Data.Text) +-- Module ‘Data.Text’ does not export ‘isPrfixOf’. + | renameSuggestions@(_:_) <- extractRenamableTerms _message + = [ ("Replace with ‘" <> name <> "’", [mkRenameEdit contents _range name]) | name <- renameSuggestions ] + | otherwise = [] + +suggestNewDefinition :: IdeOptions -> ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestNewDefinition ideOptions parsedModule contents Diagnostic{_message, _range} +-- * Variable not in scope: +-- suggestAcion :: Maybe T.Text -> Range -> Range + | Just [name, typ] <- matchRegexUnifySpaces message "Variable not in scope: ([^ ]+) :: ([^*•]+)" + = newDefinitionAction ideOptions parsedModule _range name typ + | Just [name, typ] <- matchRegexUnifySpaces message "Found hole: _([^ ]+) :: ([^*•]+) Or perhaps" + , [(label, newDefinitionEdits)] <- newDefinitionAction ideOptions parsedModule _range name typ + = [(label, mkRenameEdit contents _range name : newDefinitionEdits)] + | otherwise = [] + where + message = unifySpaces _message + +newDefinitionAction :: IdeOptions -> ParsedModule -> Range -> T.Text -> T.Text -> [(T.Text, [TextEdit])] +newDefinitionAction IdeOptions{..} parsedModule Range{_start} name typ + | Range _ lastLineP : _ <- + [ realSrcSpanToRange sp + | (L l@(RealSrcSpan sp) _) <- hsmodDecls + , _start `isInsideSrcSpan` l] + , nextLineP <- Position{ _line = _line lastLineP + 1, _character = 0} + = [ ("Define " <> sig + , [TextEdit (Range nextLineP nextLineP) (T.unlines ["", sig, name <> " = error \"not implemented\""])] + )] + | otherwise = [] + where + colon = if optNewColonConvention then " : " else " :: " + sig = name <> colon <> T.dropWhileEnd isSpace typ + ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls}} = parsedModule + + +suggestFillTypeWildcard :: Diagnostic -> [(T.Text, [TextEdit])] +suggestFillTypeWildcard Diagnostic{_range=_range,..} +-- Foo.hs:3:8: error: +-- * Found type wildcard `_' standing for `p -> p1 -> p' + + | "Found type wildcard" `T.isInfixOf` _message + , " standing for " `T.isInfixOf` _message + , typeSignature <- extractWildCardTypeSignature _message + = [("Use type signature: ‘" <> typeSignature <> "’", [TextEdit _range typeSignature])] + | otherwise = [] + 
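Aside (not part of the patch, added only as an illustration): the string munging that suggestFillTypeWildcard relies on is easier to follow in isolation. Below is a minimal standalone sketch; wildcardSignature is a hypothetical helper that mirrors what extractWildCardTypeSignature (defined further down in this module) does to the GHC message text, and the real code then wraps the extracted signature in a TextEdit over the diagnostic's range rather than printing it.

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text    as T
import qualified Data.Text.IO as T

-- Take the text after the last "standing for ", keep what sits between the
-- ‘…’ quotes, and parenthesize it unconditionally (ugly but always safe).
wildcardSignature :: T.Text -> T.Text
wildcardSignature msg =
  "(" <> T.takeWhile (/= '’') (T.drop 1 (T.dropWhile (/= '‘') rest)) <> ")"
  where rest = snd (T.breakOnEnd "standing for " msg)

main :: IO ()
main = T.putStrLn $ wildcardSignature
  "Found type wildcard ‘_’ standing for ‘p -> p1 -> p’"
  -- prints: (p -> p1 -> p)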
+suggestModuleTypo :: Diagnostic -> [(T.Text, [TextEdit])] +suggestModuleTypo Diagnostic{_range=_range,..} +-- src/Development/IDE/Core/Compile.hs:58:1: error: +-- Could not find module ‘Data.Cha’ +-- Perhaps you meant Data.Char (from base-4.12.0.0) + | "Could not find module" `T.isInfixOf` _message + , "Perhaps you meant" `T.isInfixOf` _message = let + findSuggestedModules = map (head . T.words) . drop 2 . T.lines + proposeModule mod = ("replace with " <> mod, [TextEdit _range mod]) + in map proposeModule $ nubOrd $ findSuggestedModules _message + | otherwise = [] + +suggestFillHole :: Diagnostic -> [(T.Text, [TextEdit])] +suggestFillHole Diagnostic{_range=_range,..} + | Just holeName <- extractHoleName _message + , (holeFits, refFits) <- processHoleSuggestions (T.lines _message) + = map (proposeHoleFit holeName False) holeFits + ++ map (proposeHoleFit holeName True) refFits + | otherwise = [] + where + extractHoleName = fmap head . flip matchRegexUnifySpaces "Found hole: ([^ ]*)" + proposeHoleFit holeName parenthise name = + ( "replace " <> holeName <> " with " <> name + , [TextEdit _range $ if parenthise then parens name else name]) + parens x = "(" <> x <> ")" + +processHoleSuggestions :: [T.Text] -> ([T.Text], [T.Text]) +processHoleSuggestions mm = (holeSuggestions, refSuggestions) +{- + • Found hole: _ :: LSP.Handlers + + Valid hole fits include def + Valid refinement hole fits include + fromMaybe (_ :: LSP.Handlers) (_ :: Maybe LSP.Handlers) + fromJust (_ :: Maybe LSP.Handlers) + haskell-lsp-types-0.22.0.0:Language.Haskell.LSP.Types.Window.$sel:_value:ProgressParams (_ :: ProgressParams + LSP.Handlers) + T.foldl (_ :: LSP.Handlers -> Char -> LSP.Handlers) + (_ :: LSP.Handlers) + (_ :: T.Text) + T.foldl' (_ :: LSP.Handlers -> Char -> LSP.Handlers) + (_ :: LSP.Handlers) + (_ :: T.Text) +-} + where + t = id @T.Text + holeSuggestions = do + -- get the text indented under Valid hole fits + validHolesSection <- + getIndentedGroupsBy (=~ t " *Valid (hole fits|substitutions) include") mm + -- the Valid hole fits line can contain a hole fit + holeFitLine <- + mapHead + (mrAfter . (=~ t " *Valid (hole fits|substitutions) include")) + validHolesSection + let holeFit = T.strip $ T.takeWhile (/= ':') holeFitLine + guard (not $ T.null holeFit) + return holeFit + refSuggestions = do -- @[] + -- get the text indented under Valid refinement hole fits + refinementSection <- + getIndentedGroupsBy (=~ t " *Valid refinement hole fits include") mm + -- get the text for each hole fit + holeFitLines <- getIndentedGroups (tail refinementSection) + let holeFit = T.strip $ T.unwords holeFitLines + guard $ not $ holeFit =~ t "Some refinement hole fits suppressed" + return holeFit + + mapHead f (a:aa) = f a : aa + mapHead _ [] = [] + +-- > getIndentedGroups [" H1", " l1", " l2", " H2", " l3"] = [[" H1,", " l1", " l2"], [" H2", " l3"]] +getIndentedGroups :: [T.Text] -> [[T.Text]] +getIndentedGroups [] = [] +getIndentedGroups ll@(l:_) = getIndentedGroupsBy ((== indentation l) . indentation) ll +-- | +-- > getIndentedGroupsBy (" H" `isPrefixOf`) [" H1", " l1", " l2", " H2", " l3"] = [[" H1", " l1", " l2"], [" H2", " l3"]] +getIndentedGroupsBy :: (T.Text -> Bool) -> [T.Text] -> [[T.Text]] +getIndentedGroupsBy pred inp = case dropWhile (not.pred) inp of + (l:ll) -> case span (\l' -> indentation l < indentation l') ll of + (indented, rest) -> (l:indented) : getIndentedGroupsBy pred rest + _ -> [] + +indentation :: T.Text -> Int +indentation = T.length . 
T.takeWhile isSpace + +suggestExtendImport :: ExportsMap -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestExtendImport exportsMap contents Diagnostic{_range=_range,..} + | Just [binding, mod, srcspan] <- + matchRegexUnifySpaces _message + "Perhaps you want to add ‘([^’]*)’ to the import list in the import of ‘([^’]*)’ *\\((.*)\\).$" + , Just c <- contents + = suggestions c binding mod srcspan + | Just (binding, mod_srcspan) <- + matchRegExMultipleImports _message + , Just c <- contents + = mod_srcspan >>= (\(x, y) -> suggestions c binding x y) + | otherwise = [] + where + suggestions c binding mod srcspan + | range <- case [ x | (x,"") <- readSrcSpan (T.unpack srcspan)] of + [s] -> let x = realSrcSpanToRange s + in x{_end = (_end x){_character = succ (_character (_end x))}} + _ -> error "bug in srcspan parser", + importLine <- textInRange range c, + Just ident <- lookupExportMap binding mod, + Just result <- addBindingToImportList ident importLine + = [("Add " <> renderIdentInfo ident <> " to the import list of " <> mod, [TextEdit range result])] + | otherwise = [] + lookupExportMap binding mod + | Just match <- Map.lookup binding (getExportsMap exportsMap) + , [(ident, _)] <- filter (\(_,m) -> mod == m) (Set.toList match) + = Just ident + | otherwise = Nothing + +suggestFixConstructorImport :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestFixConstructorImport _ Diagnostic{_range=_range,..} + -- ‘Success’ is a data constructor of ‘Result’ + -- To import it use + -- import Data.Aeson.Types( Result( Success ) ) + -- or + -- import Data.Aeson.Types( Result(..) ) (lsp-ui) + | Just [constructor, typ] <- + matchRegexUnifySpaces _message + "‘([^’]*)’ is a data constructor of ‘([^’]*)’ To import it use" + = let fixedImport = typ <> "(" <> constructor <> ")" + in [("Fix import of " <> fixedImport, [TextEdit _range fixedImport])] + | otherwise = [] + +suggestSignature :: Bool -> Diagnostic -> [(T.Text, [TextEdit])] +suggestSignature isQuickFix Diagnostic{_range=_range@Range{..},..} + | _message =~ + ("(Top-level binding|Polymorphic local binding|Pattern synonym) with no type signature" :: T.Text) = let + signature = removeInitialForAll + $ T.takeWhile (\x -> x/='*' && x/='•') + $ T.strip $ unifySpaces $ last $ T.splitOn "type signature: " $ filterNewlines _message + startOfLine = Position (_line _start) startCharacter + beforeLine = Range startOfLine startOfLine + title = if isQuickFix then "add signature: " <> signature else signature + action = TextEdit beforeLine $ signature <> "\n" <> T.replicate startCharacter " " + in [(title, [action])] + where removeInitialForAll :: T.Text -> T.Text + removeInitialForAll (T.breakOnEnd " :: " -> (nm, ty)) + | "forall" `T.isPrefixOf` ty = nm <> T.drop 2 (snd (T.breakOn "." ty)) + | otherwise = nm <> ty + startCharacter + | "Polymorphic local binding" `T.isPrefixOf` _message + = _character _start + | otherwise + = 0 + +suggestSignature _ _ = [] + +-- | Suggests a constraint for a declaration for which a constraint is missing. 
+suggestConstraint :: ParsedModule -> Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +suggestConstraint parsedModule mContents diag@Diagnostic {..} + | Just contents <- mContents + , Just missingConstraint <- findMissingConstraint _message + = let codeAction = if _message =~ ("the type signature for:" :: String) + then suggestFunctionConstraint parsedModule + else suggestInstanceConstraint contents + in codeAction diag missingConstraint + | otherwise = [] + where + findMissingConstraint :: T.Text -> Maybe T.Text + findMissingConstraint t = + let regex = "(No instance for|Could not deduce) \\((.+)\\) arising from a use of" + in matchRegexUnifySpaces t regex <&> last + +normalizeConstraints :: T.Text -> T.Text -> T.Text +normalizeConstraints existingConstraints constraint = + let constraintsInit = if "(" `T.isPrefixOf` existingConstraints + then T.dropEnd 1 existingConstraints + else "(" <> existingConstraints + in constraintsInit <> ", " <> constraint <> ")" + +-- | Suggests a constraint for an instance declaration for which a constraint is missing. +suggestInstanceConstraint :: T.Text -> Diagnostic -> T.Text -> [(T.Text, [TextEdit])] +suggestInstanceConstraint contents Diagnostic {..} missingConstraint +-- Suggests a constraint for an instance declaration with no existing constraints. +-- • No instance for (Eq a) arising from a use of ‘==’ +-- Possible fix: add (Eq a) to the context of the instance declaration +-- • In the expression: x == y +-- In an equation for ‘==’: (Wrap x) == (Wrap y) = x == y +-- In the instance declaration for ‘Eq (Wrap a)’ + | Just [instanceDeclaration] <- matchRegexUnifySpaces _message "In the instance declaration for ‘([^`]*)’" + = let instanceLine = contents + & T.splitOn ("instance " <> instanceDeclaration) + & head & T.lines & length + startOfConstraint = Position instanceLine (length ("instance " :: String)) + range = Range startOfConstraint startOfConstraint + newConstraint = missingConstraint <> " => " + in [(actionTitle missingConstraint, [TextEdit range newConstraint])] + +-- Suggests a constraint for an instance declaration with one or more existing constraints. +-- • Could not deduce (Eq b) arising from a use of ‘==’ +-- from the context: Eq a +-- bound by the instance declaration at /path/to/Main.hs:7:10-32 +-- Possible fix: add (Eq b) to the context of the instance declaration +-- • In the second argument of ‘(&&)’, namely ‘x' == y'’ +-- In the expression: x == y && x' == y' +-- In an equation for ‘==’: +-- (Pair x x') == (Pair y y') = x == y && x' == y' + | Just [instanceLineStr, constraintFirstCharStr] + <- matchRegexUnifySpaces _message "bound by the instance declaration at .+:([0-9]+):([0-9]+)" + = let existingConstraints = findExistingConstraints _message + newConstraints = normalizeConstraints existingConstraints missingConstraint + instanceLine = readPositionNumber instanceLineStr + constraintFirstChar = readPositionNumber constraintFirstCharStr + startOfConstraint = Position instanceLine constraintFirstChar + endOfConstraint = Position instanceLine $ + constraintFirstChar + T.length existingConstraints + range = Range startOfConstraint endOfConstraint + in [(actionTitle missingConstraint, [TextEdit range newConstraints])] + | otherwise = [] + where + findExistingConstraints :: T.Text -> T.Text + findExistingConstraints t = + T.replace "from the context: " "" . T.strip $ T.lines t !! 
1 + + readPositionNumber :: T.Text -> Int + readPositionNumber = T.unpack >>> read >>> pred + + actionTitle :: T.Text -> T.Text + actionTitle constraint = "Add `" <> constraint + <> "` to the context of the instance declaration" + +findTypeSignatureName :: T.Text -> Maybe T.Text +findTypeSignatureName t = matchRegexUnifySpaces t "([^ ]+) :: " <&> head + +findTypeSignatureLine :: T.Text -> T.Text -> Int +findTypeSignatureLine contents typeSignatureName = + T.splitOn (typeSignatureName <> " :: ") contents & head & T.lines & length + +-- | Suggests a constraint for a type signature with any number of existing constraints. +suggestFunctionConstraint :: ParsedModule -> Diagnostic -> T.Text -> [(T.Text, [TextEdit])] +suggestFunctionConstraint ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls}} Diagnostic{..} missingConstraint +-- • No instance for (Eq a) arising from a use of ‘==’ +-- Possible fix: +-- add (Eq a) to the context of +-- the type signature for: +-- eq :: forall a. a -> a -> Bool +-- • In the expression: x == y +-- In an equation for ‘eq’: eq x y = x == y + +-- • Could not deduce (Eq b) arising from a use of ‘==’ +-- from the context: Eq a +-- bound by the type signature for: +-- eq :: forall a b. Eq a => Pair a b -> Pair a b -> Bool +-- at Main.hs:5:1-42 +-- Possible fix: +-- add (Eq b) to the context of +-- the type signature for: +-- eq :: forall a b. Eq a => Pair a b -> Pair a b -> Bool +-- • In the second argument of ‘(&&)’, namely ‘y == y'’ +-- In the expression: x == x' && y == y' +-- In an equation for ‘eq’: +-- eq (Pair x y) (Pair x' y') = x == x' && y == y' + | Just typeSignatureName <- findTypeSignatureName _message + = let mExistingConstraints = findExistingConstraints _message + newConstraint = buildNewConstraints missingConstraint mExistingConstraints + in case findRangeOfContextForFunctionNamed typeSignatureName of + Just range -> [(actionTitle missingConstraint typeSignatureName, [TextEdit range newConstraint])] + Nothing -> [] + | otherwise = [] + where + findRangeOfContextForFunctionNamed :: T.Text -> Maybe Range + findRangeOfContextForFunctionNamed typeSignatureName = do + locatedType <- listToMaybe + [ locatedType + | L _ (SigD _ (TypeSig _ identifiers (HsWC _ (HsIB _ locatedType)))) <- hsmodDecls + , any (`isSameName` T.unpack typeSignatureName) $ fmap unLoc identifiers + ] + srcSpanToRange $ case splitLHsQualTy locatedType of + (L contextSrcSpan _ , _) -> + if isGoodSrcSpan contextSrcSpan + then contextSrcSpan -- The type signature has explicit context + else -- No explicit context, return SrcSpan at the start of type sig where we can write context + let start = srcSpanStart $ getLoc locatedType in mkSrcSpan start start + + isSameName :: IdP GhcPs -> String -> Bool + isSameName x name = showSDocUnsafe (ppr x) == name + + findExistingConstraints :: T.Text -> Maybe T.Text + findExistingConstraints message = + if message =~ ("from the context:" :: String) + then fmap (T.strip . head) $ matchRegexUnifySpaces message "\\. 
([^=]+)" + else Nothing + + buildNewConstraints :: T.Text -> Maybe T.Text -> T.Text + buildNewConstraints constraint mExistingConstraints = + case mExistingConstraints of + Just existingConstraints -> normalizeConstraints existingConstraints constraint + Nothing -> constraint <> " => " + + actionTitle :: T.Text -> T.Text -> T.Text + actionTitle constraint typeSignatureName = "Add `" <> constraint + <> "` to the context of the type signature for `" <> typeSignatureName <> "`" + +-- | Suggests the removal of a redundant constraint for a type signature. +removeRedundantConstraints :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])] +removeRedundantConstraints mContents Diagnostic{..} +-- • Redundant constraint: Eq a +-- • In the type signature for: +-- foo :: forall a. Eq a => a -> a +-- • Redundant constraints: (Monoid a, Show a) +-- • In the type signature for: +-- foo :: forall a. (Num a, Monoid a, Eq a, Show a) => a -> Bool + | Just contents <- mContents + -- Account for both "Redundant constraint" and "Redundant constraints". + , True <- "Redundant constraint" `T.isInfixOf` _message + , Just typeSignatureName <- findTypeSignatureName _message + , Just redundantConstraintList <- findRedundantConstraints _message + , Just constraints <- findConstraints contents typeSignatureName + = let constraintList = parseConstraints constraints + newConstraints = buildNewConstraints constraintList redundantConstraintList + typeSignatureLine = findTypeSignatureLine contents typeSignatureName + typeSignatureFirstChar = T.length $ typeSignatureName <> " :: " + startOfConstraint = Position typeSignatureLine typeSignatureFirstChar + endOfConstraint = Position typeSignatureLine $ + typeSignatureFirstChar + T.length (constraints <> " => ") + range = Range startOfConstraint endOfConstraint + in [(actionTitle redundantConstraintList typeSignatureName, [TextEdit range newConstraints])] + | otherwise = [] + where + parseConstraints :: T.Text -> [T.Text] + parseConstraints t = t + & (T.strip >>> stripConstraintsParens >>> T.splitOn ",") + <&> T.strip + + stripConstraintsParens :: T.Text -> T.Text + stripConstraintsParens constraints = + if "(" `T.isPrefixOf` constraints + then constraints & T.drop 1 & T.dropEnd 1 & T.strip + else constraints + + findRedundantConstraints :: T.Text -> Maybe [T.Text] + findRedundantConstraints t = t + & T.lines + & head + & T.strip + & (`matchRegexUnifySpaces` "Redundant constraints?: (.+)") + <&> (head >>> parseConstraints) + + -- If the type signature is not formatted as expected (arbitrary number of spaces, + -- line feeds...), just fail. 
+ findConstraints :: T.Text -> T.Text -> Maybe T.Text + findConstraints contents typeSignatureName = do + constraints <- contents + & T.splitOn (typeSignatureName <> " :: ") + & (`atMay` 1) + >>= (T.splitOn " => " >>> (`atMay` 0)) + guard $ not $ "\n" `T.isInfixOf` constraints || T.strip constraints /= constraints + return constraints + + formatConstraints :: [T.Text] -> T.Text + formatConstraints [] = "" + formatConstraints [constraint] = constraint + formatConstraints constraintList = constraintList + & T.intercalate ", " + & \cs -> "(" <> cs <> ")" + + formatConstraintsWithArrow :: [T.Text] -> T.Text + formatConstraintsWithArrow [] = "" + formatConstraintsWithArrow cs = cs & formatConstraints & (<> " => ") + + buildNewConstraints :: [T.Text] -> [T.Text] -> T.Text + buildNewConstraints constraintList redundantConstraintList = + formatConstraintsWithArrow $ constraintList \\ redundantConstraintList + + actionTitle :: [T.Text] -> T.Text -> T.Text + actionTitle constraintList typeSignatureName = + "Remove redundant constraint" <> (if length constraintList == 1 then "" else "s") <> " `" + <> formatConstraints constraintList + <> "` from the context of the type signature for `" <> typeSignatureName <> "`" + +------------------------------------------------------------------------------------------------- + +suggestNewImport :: ExportsMap -> ParsedModule -> Diagnostic -> [(T.Text, [TextEdit])] +suggestNewImport packageExportsMap ParsedModule {pm_parsed_source = L _ HsModule {..}} Diagnostic{_message} + | msg <- unifySpaces _message + , Just thingMissing <- extractNotInScopeName msg + , qual <- extractQualifiedModuleName msg + , Just insertLine <- case hsmodImports of + [] -> case srcSpanStart $ getLoc (head hsmodDecls) of + RealSrcLoc s -> Just $ srcLocLine s - 1 + _ -> Nothing + _ -> case srcSpanEnd $ getLoc (last hsmodImports) of + RealSrcLoc s -> Just $ srcLocLine s + _ -> Nothing + , insertPos <- Position insertLine 0 + , extendImportSuggestions <- matchRegexUnifySpaces msg + "Perhaps you want to add ‘[^’]*’ to the import list in the import of ‘([^’]*)’" + = [(imp, [TextEdit (Range insertPos insertPos) (imp <> "\n")]) + | imp <- sort $ constructNewImportSuggestions packageExportsMap (qual, thingMissing) extendImportSuggestions + ] +suggestNewImport _ _ _ = [] + +constructNewImportSuggestions + :: ExportsMap -> (Maybe T.Text, NotInScope) -> Maybe [T.Text] -> [T.Text] +constructNewImportSuggestions exportsMap (qual, thingMissing) notTheseModules = nubOrd + [ suggestion + | Just name <- [T.stripPrefix (maybe "" (<> ".") qual) $ notInScope thingMissing] + , (identInfo, m) <- maybe [] Set.toList $ Map.lookup name (getExportsMap exportsMap) + , canUseIdent thingMissing identInfo + , m `notElem` fromMaybe [] notTheseModules + , suggestion <- renderNewImport identInfo m + ] + where + renderNewImport identInfo m + | Just q <- qual + , asQ <- if q == m then "" else " as " <> q + = ["import qualified " <> m <> asQ] + | otherwise + = ["import " <> m <> " (" <> renderIdentInfo identInfo <> ")" + ,"import " <> m ] + +canUseIdent :: NotInScope -> IdentInfo -> Bool +canUseIdent NotInScopeDataConstructor{} = isDatacon +canUseIdent _ = const True + +data NotInScope + = NotInScopeDataConstructor T.Text + | NotInScopeTypeConstructorOrClass T.Text + | NotInScopeThing T.Text + deriving Show + +notInScope :: NotInScope -> T.Text +notInScope (NotInScopeDataConstructor t) = t +notInScope (NotInScopeTypeConstructorOrClass t) = t +notInScope (NotInScopeThing t) = t + +extractNotInScopeName :: T.Text -> Maybe 
NotInScope +extractNotInScopeName x + | Just [name] <- matchRegexUnifySpaces x "Data constructor not in scope: ([^ ]+)" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "Not in scope: data constructor [^‘]*‘([^’]*)’" + = Just $ NotInScopeDataConstructor name + | Just [name] <- matchRegexUnifySpaces x "ot in scope: type constructor or class [^‘]*‘([^’]*)’" + = Just $ NotInScopeTypeConstructorOrClass name + | Just [name] <- matchRegexUnifySpaces x "ot in scope: \\(([^‘ ]+)\\)" + = Just $ NotInScopeThing name + | Just [name] <- matchRegexUnifySpaces x "ot in scope: ([^‘ ]+)" + = Just $ NotInScopeThing name + | Just [name] <- matchRegexUnifySpaces x "ot in scope:[^‘]*‘([^’]*)’" + = Just $ NotInScopeThing name + | otherwise + = Nothing + +extractQualifiedModuleName :: T.Text -> Maybe T.Text +extractQualifiedModuleName x + | Just [m] <- matchRegexUnifySpaces x "module named [^‘]*‘([^’]*)’" + = Just m + | otherwise + = Nothing + +------------------------------------------------------------------------------------------------- + + +mkRenameEdit :: Maybe T.Text -> Range -> T.Text -> TextEdit +mkRenameEdit contents range name = + if maybeIsInfixFunction == Just True + then TextEdit range ("`" <> name <> "`") + else TextEdit range name + where + maybeIsInfixFunction = do + curr <- textInRange range <$> contents + pure $ "`" `T.isPrefixOf` curr && "`" `T.isSuffixOf` curr + +extractWildCardTypeSignature :: T.Text -> T.Text +extractWildCardTypeSignature = + -- inferring when parens are actually needed around the type signature would + -- require understanding both the precedence of the context of the _ and of + -- the signature itself. Inserting them unconditionally is ugly but safe. + ("(" `T.append`) . (`T.append` ")") . + T.takeWhile (/='’') . T.dropWhile (=='‘') . T.dropWhile (/='‘') . + snd . T.breakOnEnd "standing for " + +extractRenamableTerms :: T.Text -> [T.Text] +extractRenamableTerms msg + -- Account for both "Variable not in scope" and "Not in scope" + | "ot in scope:" `T.isInfixOf` msg = extractSuggestions msg + | otherwise = [] + where + extractSuggestions = map getEnclosed + . concatMap singleSuggestions + . filter isKnownSymbol + . T.lines + singleSuggestions = T.splitOn "), " -- Each suggestion is comma delimited + isKnownSymbol t = " (imported from" `T.isInfixOf` t || " (line " `T.isInfixOf` t + getEnclosed = T.dropWhile (== '‘') + . T.dropWhileEnd (== '’') + . T.dropAround (\c -> c /= '‘' && c /= '’') + +-- | If a range takes up a whole line (it begins at the start of the line and there's only whitespace +-- between the end of the range and the next newline), extend the range to take up the whole line. +extendToWholeLineIfPossible :: Maybe T.Text -> Range -> Range +extendToWholeLineIfPossible contents range@Range{..} = + let newlineAfter = maybe False (T.isPrefixOf "\n" . T.dropWhile (\x -> isSpace x && x /= '\n') . snd . splitTextAtPosition _end) contents + extend = newlineAfter && _character _start == 0 -- takes up an entire line, so remove the whole line + in if extend then Range _start (Position (_line _end + 1) 0) else range + +splitTextAtPosition :: Position -> T.Text -> (T.Text, T.Text) +splitTextAtPosition (Position row col) x + | (preRow, mid:postRow) <- splitAt row $ T.splitOn "\n" x + , (preCol, postCol) <- T.splitAt col mid + = (T.intercalate "\n" $ preRow ++ [preCol], T.intercalate "\n" $ postCol : postRow) + | otherwise = (x, T.empty) + +-- | Returns [start .. 
end[ +textInRange :: Range -> T.Text -> T.Text +textInRange (Range (Position startRow startCol) (Position endRow endCol)) text = + case compare startRow endRow of + LT -> + let (linesInRangeBeforeEndLine, endLineAndFurtherLines) = splitAt (endRow - startRow) linesBeginningWithStartLine + (textInRangeInFirstLine, linesBetween) = case linesInRangeBeforeEndLine of + [] -> ("", []) + firstLine:linesInBetween -> (T.drop startCol firstLine, linesInBetween) + maybeTextInRangeInEndLine = T.take endCol <$> listToMaybe endLineAndFurtherLines + in T.intercalate "\n" (textInRangeInFirstLine : linesBetween ++ maybeToList maybeTextInRangeInEndLine) + EQ -> + let line = fromMaybe "" (listToMaybe linesBeginningWithStartLine) + in T.take (endCol - startCol) (T.drop startCol line) + GT -> "" + where + linesBeginningWithStartLine = drop startRow (T.splitOn "\n" text) + +-- | Returns the ranges for a binding in an import declaration +rangesForBinding :: ImportDecl GhcPs -> String -> [Range] +rangesForBinding ImportDecl{ideclHiding = Just (False, L _ lies)} b = + concatMap (mapMaybe srcSpanToRange . rangesForBinding' b') lies + where + b' = wrapOperatorInParens (unqualify b) + + wrapOperatorInParens x = if isAlpha (head x) then x else "(" <> x <> ")" + + unqualify x = snd $ breakOnEnd "." x + +rangesForBinding _ _ = [] + +rangesForBinding' :: String -> LIE GhcPs -> [SrcSpan] +rangesForBinding' b (L l x@IEVar{}) | showSDocUnsafe (ppr x) == b = [l] +rangesForBinding' b (L l x@IEThingAbs{}) | showSDocUnsafe (ppr x) == b = [l] +rangesForBinding' b (L l (IEThingAll _ x)) | showSDocUnsafe (ppr x) == b = [l] +rangesForBinding' b (L l (IEThingWith _ thing _ inners labels)) + | showSDocUnsafe (ppr thing) == b = [l] + | otherwise = + [ l' | L l' x <- inners, showSDocUnsafe (ppr x) == b] ++ + [ l' | L l' x <- labels, showSDocUnsafe (ppr x) == b] +rangesForBinding' _ _ = [] + +-- | Extends an import list with a new binding. +-- Assumes an import statement of the form: +-- import (qualified) A (..) .. +-- Places the new binding first, preserving whitespace. +-- Copes with multi-line import lists +addBindingToImportList :: IdentInfo -> T.Text -> Maybe T.Text +addBindingToImportList IdentInfo {parent = _parent, ..} importLine = + case T.breakOn "(" importLine of + (pre, T.uncons -> Just (_, rest)) -> + case _parent of + -- the binding is not a constructor, add it to the head of import list + Nothing -> Just $ T.concat [pre, "(", rendered, addCommaIfNeeds rest] + Just parent -> case T.breakOn parent rest of + -- the binding is a constructor, and current import list contains its parent + -- `rest'` could be 1. `,...)` + -- or 2. `(),...)` + -- or 3. `(ConsA),...)` + -- or 4. `)` + (leading, T.stripPrefix parent -> Just rest') -> case T.uncons (T.stripStart rest') of + -- case 1: no children and parentheses, e.g. `import A(Foo,...)` --> `import A(Foo(Cons), ...)` + Just (',', rest'') -> Just $ T.concat [pre, "(", leading, parent, "(", rendered, ")", addCommaIfNeeds rest''] + -- case 2: no children but parentheses, e.g. `import A(Foo(),...)` --> `import A(Foo(Cons), ...)` + Just ('(', T.uncons -> Just (')', rest'')) -> Just $ T.concat [pre, "(", leading, parent, "(", rendered, ")", rest''] + -- case 3: children with parentheses, e.g. `import A(Foo(ConsA),...)` --> `import A(Foo(Cons, ConsA), ...)` + Just ('(', T.breakOn ")" -> (children, rest'')) + | not (T.null children), + -- ignore A(Foo({-...-}), ...) 
+ not $ "{-" `T.isPrefixOf` T.stripStart children + -> Just $ T.concat [pre, "(", leading, parent, "(", rendered, ", ", children, rest''] + -- case 4: no trailing, e.g. `import A(..., Foo)` --> `import A(..., Foo(Cons))` + Just (')', _) -> Just $ T.concat [pre, "(", leading, parent, "(", rendered, ")", rest'] + _ -> Nothing + -- current import list does not contain the parent, e.g. `import A(...)` --> `import A(Foo(Cons), ...)` + _ -> Just $ T.concat [pre, "(", parent, "(", rendered, ")", addCommaIfNeeds rest] + _ -> Nothing + where + addCommaIfNeeds r = case T.uncons (T.stripStart r) of + Just (')', _) -> r + _ -> ", " <> r + +-- | 'matchRegex' combined with 'unifySpaces' +matchRegexUnifySpaces :: T.Text -> T.Text -> Maybe [T.Text] +matchRegexUnifySpaces message = matchRegex (unifySpaces message) + +-- | Returns Just (the submatches) for the first capture, or Nothing. +matchRegex :: T.Text -> T.Text -> Maybe [T.Text] +matchRegex message regex = case message =~~ regex of + Just (_ :: T.Text, _ :: T.Text, _ :: T.Text, bindings) -> Just bindings + Nothing -> Nothing + +setHandlersCodeLens :: PartialHandlers c +setHandlersCodeLens = PartialHandlers $ \WithMessage{..} x -> return x{ + LSP.codeLensHandler = + withResponse RspCodeLens codeLens, + LSP.executeCommandHandler = + withResponseAndRequest + RspExecuteCommand + ReqApplyWorkspaceEdit + commandHandler + } + +filterNewlines :: T.Text -> T.Text +filterNewlines = T.concat . T.lines + +unifySpaces :: T.Text -> T.Text +unifySpaces = T.unwords . T.words + +-- functions to help parse multiple import suggestions + +-- | Returns the first match if found +regexSingleMatch :: T.Text -> T.Text -> Maybe T.Text +regexSingleMatch msg regex = case matchRegexUnifySpaces msg regex of + Just (h:_) -> Just h + _ -> Nothing + +-- | Parses tuples like (‘Data.Map’, (app/ModuleB.hs:2:1-18)) and +-- | return (Data.Map, app/ModuleB.hs:2:1-18) +regExPair :: (T.Text, T.Text) -> Maybe (T.Text, T.Text) +regExPair (modname, srcpair) = do + x <- regexSingleMatch modname "‘([^’]*)’" + y <- regexSingleMatch srcpair "\\((.*)\\)" + return (x, y) + +-- | Process a list of (module_name, filename:src_span) values +-- | Eg. [(Data.Map, app/ModuleB.hs:2:1-18), (Data.HashMap.Strict, app/ModuleB.hs:3:1-29)] +regExImports :: T.Text -> Maybe [(T.Text, T.Text)] +regExImports msg = result + where + parts = T.words msg + isPrefix = not . 
T.isPrefixOf "(" + (mod, srcspan) = partition isPrefix parts + -- check we have matching pairs like (Data.Map, (app/src.hs:1:2-18)) + result = if length mod == length srcspan then + regExPair `traverse` zip mod srcspan + else Nothing + +matchRegExMultipleImports :: T.Text -> Maybe (T.Text, [(T.Text, T.Text)]) +matchRegExMultipleImports message = do + let pat = T.pack "Perhaps you want to add ‘([^’]*)’ to one of these import lists: *(‘.*\\))$" + (binding, imports) <- case matchRegexUnifySpaces message pat of + Just [x, xs] -> Just (x, xs) + _ -> Nothing + imps <- regExImports imports + return (binding, imps) + +renderIdentInfo :: IdentInfo -> T.Text +renderIdentInfo IdentInfo {parent, rendered} + | Just p <- parent = p <> "(" <> rendered <> ")" + | otherwise = rendered diff --git a/ghcide/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs b/ghcide/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs new file mode 100644 index 00000000000..7711eef5e98 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/CodeAction/PositionIndexed.hs @@ -0,0 +1,131 @@ +-- | Position indexed streams of characters +module Development.IDE.Plugin.CodeAction.PositionIndexed + ( PositionIndexed + , PositionIndexedString + , indexedByPosition + , indexedByPositionStartingFrom + , extendAllToIncludeCommaIfPossible + , extendToIncludePreviousNewlineIfPossible + , mergeRanges + ) +where + +import Data.Char +import Data.List +import Language.Haskell.LSP.Types + +type PositionIndexed a = [(Position, a)] + +type PositionIndexedString = PositionIndexed Char + +-- | Add position indexing to a String. +-- +-- > indexedByPositionStartingFrom (0,0) "hey\n ho" ≡ +-- > [ ((0,0),'h') +-- > , ((0,1),'e') +-- > , ((0,2),'y') +-- > , ((0,3),'\n') +-- > , ((1,0),' ') +-- > , ((1,1),'h') +-- > , ((1,2),'o') +-- > ] +indexedByPositionStartingFrom :: Position -> String -> PositionIndexedString +indexedByPositionStartingFrom initialPos = unfoldr f . (initialPos, ) where + f (_, []) = Nothing + f (p@(Position l _), '\n' : rest) = + Just ((p, '\n'), (Position (l + 1) 0, rest)) + f (p@(Position l c), x : rest) = Just ((p, x), (Position l (c + 1), rest)) + +-- | Add position indexing to a String. +-- +-- > indexedByPosition = indexedByPositionStartingFrom (Position 0 0) +indexedByPosition :: String -> PositionIndexedString +indexedByPosition = indexedByPositionStartingFrom (Position 0 0) + +-- | Returns a tuple (before, contents, after) if the range is present. +-- The range is present only if both its start and end positions are present +unconsRange + :: Range + -> PositionIndexed a + -> Maybe (PositionIndexed a, PositionIndexed a, PositionIndexed a) +unconsRange Range {..} indexedString + | (before, rest@(_ : _)) <- span ((/= _start) . fst) indexedString + , (mid, after@(_ : _)) <- span ((/= _end) . fst) rest + = Just (before, mid, after) + | otherwise + = Nothing + +-- | Strips out all the positions included in the range. +-- Returns 'Nothing' if the start or end of the range are not included in the input. +stripRange :: Range -> PositionIndexed a -> Maybe (PositionIndexed a) +stripRange r s = case unconsRange r s of + Just (b, _, a) -> Just (b ++ a) + Nothing -> Nothing + +-- | Returns the smallest possible set of disjoint ranges that is equivalent to the input. +-- Assumes input ranges are sorted on the start positions. 
+mergeRanges :: [Range] -> [Range] +mergeRanges (r : r' : rest) + | + -- r' is contained in r + _end r > _end r' = mergeRanges (r : rest) + | + -- r and r' are overlapping + _end r > _start r' = mergeRanges (r { _end = _end r' } : rest) + + | otherwise = r : mergeRanges (r' : rest) +mergeRanges other = other + +-- | Returns a sorted list of ranges with extended selections including preceding or trailing commas +-- +-- @ +-- a, |b|, c ===> a|, b|, c +-- a, b, |c| ===> a, b|, c| +-- a, |b|, |c| ===> a|, b||, c| +-- @ +extendAllToIncludeCommaIfPossible :: PositionIndexedString -> [Range] -> [Range] +extendAllToIncludeCommaIfPossible indexedString = + mergeRanges . go indexedString . sortOn _start + where + go _ [] = [] + go input (r : rr) + | r' : _ <- extendToIncludeCommaIfPossible input r + , Just input' <- stripRange r' input + = r' : go input' rr + | otherwise + = go input rr + +extendToIncludeCommaIfPossible :: PositionIndexedString -> Range -> [Range] +extendToIncludeCommaIfPossible indexedString range + | Just (before, _, after) <- unconsRange range indexedString + , after' <- dropWhile (isSpace . snd) after + , before' <- dropWhile (isSpace . snd) (reverse before) + = + -- a, |b|, c ===> a|, b|, c + [ range { _start = start' } | (start', ',') : _ <- [before'] ] + ++ + -- a, |b|, c ===> a, |b, |c + [ range { _end = end' } + | (_, ',') : rest <- [after'] + , (end', _) : _ <- pure $ dropWhile (isSpace . snd) rest + ] + | otherwise + = [range] + +extendToIncludePreviousNewlineIfPossible :: PositionIndexedString -> Range -> Range +extendToIncludePreviousNewlineIfPossible indexedString range + | Just (before, _, _) <- unconsRange range indexedString + , maybeFirstSpacePos <- lastSpacePos $ reverse before + = case maybeFirstSpacePos of + Nothing -> range + Just pos -> range { _start = pos } + | otherwise = range + where + lastSpacePos :: PositionIndexedString -> Maybe Position + lastSpacePos [] = Nothing + lastSpacePos ((pos, c):xs) = + if not $ isSpace c + then Nothing -- didn't find any space + else case xs of + (y:ys) | isSpace $ snd y -> lastSpacePos (y:ys) + _ -> Just pos \ No newline at end of file diff --git a/ghcide/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs b/ghcide/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs new file mode 100644 index 00000000000..fc154c87a6e --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/CodeAction/RuleTypes.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE TypeFamilies #-} +module Development.IDE.Plugin.CodeAction.RuleTypes + (PackageExports(..) + ,IdentInfo(..) 
+ ) where + +import Data.Hashable (Hashable) +import Control.DeepSeq (NFData) +import Data.Binary (Binary) +import Development.IDE.GHC.Util +import Development.IDE.Types.Exports +import Development.Shake (RuleResult) +import Data.Typeable (Typeable) +import GHC.Generics (Generic) + +-- Rule type for caching Package Exports +type instance RuleResult PackageExports = ExportsMap + +newtype PackageExports = PackageExports HscEnvEq + deriving (Eq, Show, Typeable, Generic) + +instance Hashable PackageExports +instance NFData PackageExports +instance Binary PackageExports diff --git a/ghcide/src/Development/IDE/Plugin/CodeAction/Rules.hs b/ghcide/src/Development/IDE/Plugin/CodeAction/Rules.hs new file mode 100644 index 00000000000..ea69db60ce3 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/CodeAction/Rules.hs @@ -0,0 +1,45 @@ +module Development.IDE.Plugin.CodeAction.Rules + ( rulePackageExports + ) +where + +import Data.Traversable ( forM ) +import Development.IDE.Core.Rules +import Development.IDE.GHC.Util +import Development.IDE.Plugin.CodeAction.RuleTypes +import Development.IDE.Types.Exports +import Development.Shake +import GHC ( DynFlags(pkgState) ) +import HscTypes ( hsc_dflags) +import LoadIface +import Maybes +import Module ( Module(..) ) +import Packages ( explicitPackages + , exposedModules + , packageConfigId + ) +import TcRnMonad ( WhereFrom(ImportByUser) + , initIfaceLoad + ) + +rulePackageExports :: Rules () +rulePackageExports = defineNoFile $ \(PackageExports session) -> do + let env = hscEnv session + pkgst = pkgState (hsc_dflags env) + depends = explicitPackages pkgst + targets = + [ (pkg, mn) + | d <- depends + , Just pkg <- [lookupPackageConfig d env] + , (mn, _) <- exposedModules pkg + ] + + modIfaces <- forM targets $ \(pkg, mn) -> do + modIface <- liftIO $ initIfaceLoad env $ loadInterface + "" + (Module (packageConfigId pkg) mn) + (ImportByUser False) + return $ case modIface of + Failed _err -> Nothing + Succeeded mi -> Just mi + return $ createExportsMap (catMaybes modIfaces) diff --git a/ghcide/src/Development/IDE/Plugin/Completions.hs b/ghcide/src/Development/IDE/Plugin/Completions.hs new file mode 100644 index 00000000000..4c3ad93f412 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions.hs @@ -0,0 +1,153 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE TypeFamilies #-} +#include "ghc-api-version.h" + +module Development.IDE.Plugin.Completions + ( + plugin + , getCompletionsLSP + ) where + +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import qualified Language.Haskell.LSP.Core as LSP +import qualified Language.Haskell.LSP.VFS as VFS + +import Development.Shake.Classes +import Development.Shake +import GHC.Generics + +import Development.IDE.Plugin +import Development.IDE.Core.Service +import Development.IDE.Core.PositionMapping +import Development.IDE.Plugin.Completions.Logic +import Development.IDE.Types.Location +import Development.IDE.Core.RuleTypes +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat + +import Development.IDE.GHC.Util +import Development.IDE.LSP.Server +import TcRnDriver (tcRnImportDecls) +import Data.Maybe + +#if defined(GHC_LIB) +import Development.IDE.Import.DependencyInformation +#endif + +plugin :: Plugin c +plugin = Plugin produceCompletions setHandlersCompletion + +produceCompletions :: Rules () +produceCompletions = do + define $ \ProduceCompletions file -> do + local <- useWithStale LocalCompletions file + nonLocal <- useWithStale NonLocalCompletions file + let extract = fmap fst + 
return ([], extract local <> extract nonLocal) + define $ \LocalCompletions file -> do + pm <- useWithStale GetParsedModule file + case pm of + Just (pm, _) -> do + let cdata = localCompletionsForParsedModule pm + return ([], Just cdata) + _ -> return ([], Nothing) + define $ \NonLocalCompletions file -> do + -- For non local completions we avoid depending on the parsed module, + -- synthetizing a fake module with an empty body from the buffer + -- in the ModSummary, which preserves all the imports + ms <- fmap fst <$> useWithStale GetModSummaryWithoutTimestamps file + sess <- fmap fst <$> useWithStale GhcSessionDeps file + +-- When possible, rely on the haddocks embedded in our interface files +-- This creates problems on ghc-lib, see comment on 'getDocumentationTryGhc' +#if !defined(GHC_LIB) + let parsedDeps = [] +#else + deps <- maybe (TransitiveDependencies [] [] []) fst <$> useWithStale GetDependencies file + parsedDeps <- mapMaybe (fmap fst) <$> usesWithStale GetParsedModule (transitiveModuleDeps deps) +#endif + + case (ms, sess) of + (Just (ms,imps), Just sess) -> do + let env = hscEnv sess + -- We do this to be able to provide completions of items that are not restricted to the explicit list + res <- liftIO $ tcRnImportDecls env (dropListFromImportDecl <$> imps) + case res of + (_, Just rdrEnv) -> do + cdata <- liftIO $ cacheDataProducer env (ms_mod ms) rdrEnv imps parsedDeps + return ([], Just cdata) + (_diag, _) -> + return ([], Nothing) + _ -> return ([], Nothing) + +-- Drop any explicit imports in ImportDecl if not hidden +dropListFromImportDecl :: GenLocated SrcSpan (ImportDecl GhcPs) -> GenLocated SrcSpan (ImportDecl GhcPs) +dropListFromImportDecl iDecl = let + f d@ImportDecl {ideclHiding} = case ideclHiding of + Just (False, _) -> d {ideclHiding=Nothing} + -- if hiding or Nothing just return d + _ -> d + f x = x + in f <$> iDecl + +-- | Produce completions info for a file +type instance RuleResult ProduceCompletions = CachedCompletions +type instance RuleResult LocalCompletions = CachedCompletions +type instance RuleResult NonLocalCompletions = CachedCompletions + +data ProduceCompletions = ProduceCompletions + deriving (Eq, Show, Typeable, Generic) +instance Hashable ProduceCompletions +instance NFData ProduceCompletions +instance Binary ProduceCompletions + +data LocalCompletions = LocalCompletions + deriving (Eq, Show, Typeable, Generic) +instance Hashable LocalCompletions +instance NFData LocalCompletions +instance Binary LocalCompletions + +data NonLocalCompletions = NonLocalCompletions + deriving (Eq, Show, Typeable, Generic) +instance Hashable NonLocalCompletions +instance NFData NonLocalCompletions +instance Binary NonLocalCompletions + +-- | Generate code actions. 
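+--
+-- This is the handler backing @textDocument/completion@ requests: it looks up
+-- the cached 'ProduceCompletions' result for the file behind the request URI
+-- and renders completion items for the prefix under the cursor via
+-- 'getCompletions'.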
+getCompletionsLSP + :: LSP.LspFuncs cofd + -> IdeState + -> CompletionParams + -> IO (Either ResponseError CompletionResponseResult) +getCompletionsLSP lsp ide + CompletionParams{_textDocument=TextDocumentIdentifier uri + ,_position=position + ,_context=completionContext} = do + contents <- LSP.getVirtualFileFunc lsp $ toNormalizedUri uri + fmap Right $ case (contents, uriToFilePath' uri) of + (Just cnts, Just path) -> do + let npath = toNormalizedFilePath' path + (ideOpts, compls) <- runIdeAction "Completion" (shakeExtras ide) $ do + opts <- liftIO $ getIdeOptionsIO $ shakeExtras ide + compls <- useWithStaleFast ProduceCompletions npath + pm <- useWithStaleFast GetParsedModule npath + binds <- fromMaybe (mempty, zeroMapping) <$> useWithStaleFast GetBindings npath + pure (opts, fmap (,pm,binds) compls ) + case compls of + Just ((cci', _), parsedMod, bindMap) -> do + pfix <- VFS.getCompletionPrefix position cnts + case (pfix, completionContext) of + (Just (VFS.PosPrefixInfo _ "" _ _), Just CompletionContext { _triggerCharacter = Just "."}) + -> return (Completions $ List []) + (Just pfix', _) -> do + let clientCaps = clientCapabilities $ shakeExtras ide + Completions . List <$> getCompletions ideOpts cci' parsedMod bindMap pfix' clientCaps (WithSnippets True) + _ -> return (Completions $ List []) + _ -> return (Completions $ List []) + _ -> return (Completions $ List []) + +setHandlersCompletion :: PartialHandlers c +setHandlersCompletion = PartialHandlers $ \WithMessage{..} x -> return x{ + LSP.completionHandler = withResponse RspCompletion getCompletionsLSP + } diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs new file mode 100644 index 00000000000..e6adbb310aa --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions/Logic.hs @@ -0,0 +1,725 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE GADTs#-} + +#include "ghc-api-version.h" + +-- Mostly taken from "haskell-ide-engine" +module Development.IDE.Plugin.Completions.Logic ( + CachedCompletions +, cacheDataProducer +, localCompletionsForParsedModule +, WithSnippets(..) +, getCompletions +) where + +import Control.Applicative +import Data.Char (isAlphaNum, isUpper) +import Data.Generics +import Data.List.Extra as List hiding (stripPrefix) +import qualified Data.Map as Map + +import Data.Maybe (listToMaybe, fromMaybe, mapMaybe) +import qualified Data.Text as T +import qualified Text.Fuzzy as Fuzzy + +import HscTypes +import Name +import RdrName +import Type +import Packages +#if MIN_GHC_API_VERSION(8,10,0) +import Predicate (isDictTy) +import Pair +import Coercion +#endif + +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Capabilities +import qualified Language.Haskell.LSP.VFS as VFS +import Development.IDE.Core.Compile +import Development.IDE.Core.PositionMapping +import Development.IDE.Plugin.Completions.Types +import Development.IDE.Spans.Documentation +import Development.IDE.Spans.LocalBindings +import Development.IDE.GHC.Compat as GHC +import Development.IDE.GHC.Error +import Development.IDE.Types.Options +import Development.IDE.Spans.Common +import Development.IDE.GHC.Util +import Outputable (Outputable) +import qualified Data.Set as Set +import ConLike + +import GhcPlugins ( + flLabel, + unpackFS) + +-- From haskell-ide-engine/hie-plugin-api/Haskell/Ide/Engine/Context.hs + +-- | A context of a declaration in the program +-- e.g. 
is the declaration a type declaration or a value declaration +-- Used for determining which code completions to show +-- TODO: expand this with more contexts like classes or instances for +-- smarter code completion +data Context = TypeContext + | ValueContext + | ModuleContext String -- ^ module context with module name + | ImportContext String -- ^ import context with module name + | ImportListContext String -- ^ import list context with module name + | ImportHidingContext String -- ^ import hiding context with module name + | ExportContext -- ^ List of exported identifiers from the current module + deriving (Show, Eq) + +-- | Generates a map of where the context is a type and where the context is a value +-- i.e. where are the value decls and the type decls +getCContext :: Position -> ParsedModule -> Maybe Context +getCContext pos pm + | Just (L r modName) <- moduleHeader + , pos `isInsideSrcSpan` r + = Just (ModuleContext (moduleNameString modName)) + + | Just (L r _) <- exportList + , pos `isInsideSrcSpan` r + = Just ExportContext + + | Just ctx <- something (Nothing `mkQ` go `extQ` goInline) decl + = Just ctx + + | Just ctx <- something (Nothing `mkQ` importGo) imports + = Just ctx + + | otherwise + = Nothing + + where decl = hsmodDecls $ unLoc $ pm_parsed_source pm + moduleHeader = hsmodName $ unLoc $ pm_parsed_source pm + exportList = hsmodExports $ unLoc $ pm_parsed_source pm + imports = hsmodImports $ unLoc $ pm_parsed_source pm + + go :: LHsDecl GhcPs -> Maybe Context + go (L r SigD {}) + | pos `isInsideSrcSpan` r = Just TypeContext + | otherwise = Nothing + go (L r GHC.ValD {}) + | pos `isInsideSrcSpan` r = Just ValueContext + | otherwise = Nothing + go _ = Nothing + + goInline :: GHC.LHsType GhcPs -> Maybe Context + goInline (GHC.L r _) + | pos `isInsideSrcSpan` r = Just TypeContext + goInline _ = Nothing + + importGo :: GHC.LImportDecl GhcPs -> Maybe Context + importGo (L r impDecl) + | pos `isInsideSrcSpan` r + = importInline importModuleName (ideclHiding impDecl) + <|> Just (ImportContext importModuleName) + + | otherwise = Nothing + where importModuleName = moduleNameString $ unLoc $ ideclName impDecl + + importInline :: String -> Maybe (Bool, GHC.Located [LIE GhcPs]) -> Maybe Context + importInline modName (Just (True, L r _)) + | pos `isInsideSrcSpan` r = Just $ ImportHidingContext modName + | otherwise = Nothing + importInline modName (Just (False, L r _)) + | pos `isInsideSrcSpan` r = Just $ ImportListContext modName + | otherwise = Nothing + importInline _ _ = Nothing + +occNameToComKind :: Maybe T.Text -> OccName -> CompletionItemKind +occNameToComKind ty oc + | isVarOcc oc = case occNameString oc of + i:_ | isUpper i -> CiConstructor + _ -> CiFunction + | isTcOcc oc = case ty of + Just t + | "Constraint" `T.isSuffixOf` t + -> CiClass + _ -> CiStruct + | isDataOcc oc = CiConstructor + | otherwise = CiVariable + + +showModName :: ModuleName -> T.Text +showModName = T.pack . 
moduleNameString + +-- mkCompl :: IdeOptions -> CompItem -> CompletionItem +-- mkCompl IdeOptions{..} CI{compKind,insertText, importedFrom,typeText,label,docs} = +-- CompletionItem label kind (List []) ((colon <>) <$> typeText) +-- (Just $ CompletionDocMarkup $ MarkupContent MkMarkdown $ T.intercalate sectionSeparator docs') +-- Nothing Nothing Nothing Nothing (Just insertText) (Just Snippet) +-- Nothing Nothing Nothing Nothing Nothing + +mkCompl :: IdeOptions -> CompItem -> CompletionItem +mkCompl IdeOptions{..} CI{compKind,insertText, importedFrom,typeText,label,docs, additionalTextEdits} = + CompletionItem {_label = label, + _kind = kind, + _tags = List [], + _detail = (colon <>) <$> typeText, + _documentation = documentation, + _deprecated = Nothing, + _preselect = Nothing, + _sortText = Nothing, + _filterText = Nothing, + _insertText = Just insertText, + _insertTextFormat = Just Snippet, + _textEdit = Nothing, + _additionalTextEdits = List <$> additionalTextEdits, + _commitCharacters = Nothing, + _command = Nothing, + _xdata = Nothing} + + where kind = Just compKind + docs' = imported : spanDocToMarkdown docs + imported = case importedFrom of + Left pos -> "*Defined at '" <> ppr pos <> "'*\n'" + Right mod -> "*Defined in '" <> mod <> "'*\n" + colon = if optNewColonConvention then ": " else ":: " + documentation = Just $ CompletionDocMarkup $ + MarkupContent MkMarkdown $ + T.intercalate sectionSeparator docs' + +mkNameCompItem :: Name -> ModuleName -> Maybe Type -> Maybe Backtick -> SpanDoc -> Maybe (LImportDecl GhcPs) -> CompItem +mkNameCompItem origName origMod thingType isInfix docs !imp = CI{..} + where + compKind = occNameToComKind typeText $ occName origName + importedFrom = Right $ showModName origMod + isTypeCompl = isTcOcc $ occName origName + label = T.pack $ showGhc origName + insertText = case isInfix of + Nothing -> case getArgText <$> thingType of + Nothing -> label + Just argText -> label <> " " <> argText + Just LeftSide -> label <> "`" + + Just Surrounded -> label + typeText + | Just t <- thingType = Just . stripForall $ T.pack (showGhc t) + | otherwise = Nothing + additionalTextEdits = imp >>= extendImportList (showGhc origName) + + stripForall :: T.Text -> T.Text + stripForall t + | T.isPrefixOf "forall" t = + -- We drop 2 to remove the '.' and the space after it + T.drop 2 (T.dropWhile (/= '.') t) + | otherwise = t + + getArgText :: Type -> T.Text + getArgText typ = argText + where + argTypes = getArgs typ + argText :: T.Text + argText = mconcat $ List.intersperse " " $ zipWithFrom snippet 1 argTypes + snippet :: Int -> Type -> T.Text + snippet i t = T.pack $ "${" <> show i <> ":" <> showGhc t <> "}" + getArgs :: Type -> [Type] + getArgs t + | isPredTy t = [] + | isDictTy t = [] + | isForAllTy t = getArgs $ snd (splitForAllTys t) + | isFunTy t = + let (args, ret) = splitFunTys t + in if isForAllTy ret + then getArgs ret + else Prelude.filter (not . isDictTy) args + | isPiTy t = getArgs $ snd (splitPiTys t) +#if MIN_GHC_API_VERSION(8,10,0) + | Just (Pair _ t) <- coercionKind <$> isCoercionTy_maybe t + = getArgs t +#else + | isCoercionTy t = maybe [] (getArgs . 
snd) (splitCoercionType_maybe t) +#endif + | otherwise = [] + +mkModCompl :: T.Text -> CompletionItem +mkModCompl label = + CompletionItem label (Just CiModule) (List []) Nothing + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing Nothing + +mkImportCompl :: T.Text -> T.Text -> CompletionItem +mkImportCompl enteredQual label = + CompletionItem m (Just CiModule) (List []) (Just label) + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing Nothing + where + m = fromMaybe "" (T.stripPrefix enteredQual label) + +mkExtCompl :: T.Text -> CompletionItem +mkExtCompl label = + CompletionItem label (Just CiKeyword) (List []) Nothing + Nothing Nothing Nothing Nothing Nothing Nothing Nothing + Nothing Nothing Nothing Nothing Nothing + +mkPragmaCompl :: T.Text -> T.Text -> CompletionItem +mkPragmaCompl label insertText = + CompletionItem label (Just CiKeyword) (List []) Nothing + Nothing Nothing Nothing Nothing Nothing (Just insertText) (Just Snippet) + Nothing Nothing Nothing Nothing Nothing + +extendImportList :: String -> LImportDecl GhcPs -> Maybe [TextEdit] +extendImportList name lDecl = let + f (Just range) ImportDecl {ideclHiding} = case ideclHiding of + Just (False, x) + | Set.notMember name (Set.fromList [show y| y <- unLoc x]) + -> let + start_pos = _end range + new_start_pos = start_pos {_character = _character start_pos - 1} + -- use to same start_pos to handle situation where we do not have latest edits due to caching of Rules + new_range = Range new_start_pos new_start_pos + -- we cannot wrap mapM_ inside (mapM_) but we need to wrap (<$) + alpha = all isAlphaNum $ filter (\c -> c /= '_') name + result = if alpha then name ++ ", " + else "(" ++ name ++ "), " + in Just [TextEdit new_range (T.pack result)] + | otherwise -> Nothing + _ -> Nothing -- hiding import list and no list + f _ _ = Nothing + src_span = srcSpanToRange . getLoc $ lDecl + in f src_span . unLoc $ lDecl + + +cacheDataProducer :: HscEnv -> Module -> GlobalRdrEnv -> [LImportDecl GhcPs] -> [ParsedModule] -> IO CachedCompletions +cacheDataProducer packageState curMod rdrEnv limports deps = do + let dflags = hsc_dflags packageState + curModName = moduleName curMod + + iDeclToModName :: ImportDecl name -> ModuleName + iDeclToModName = unLoc . ideclName + + asNamespace :: ImportDecl name -> ModuleName + asNamespace imp = maybe (iDeclToModName imp) GHC.unLoc (ideclAs imp) + -- Full canonical names of imported modules + importDeclerations = map unLoc limports + + -- The list of all importable Modules from all packages + moduleNames = map showModName (listVisibleModuleNames dflags) + + -- The given namespaces for the imported modules (ie. full name, or alias if used) + allModNamesAsNS = map (showModName . asNamespace) importDeclerations + + rdrElts = globalRdrEnvElts rdrEnv + + foldMapM :: (Foldable f, Monad m, Monoid b) => (a -> m b) -> f a -> m b + foldMapM f xs = foldr step return xs mempty where + step x r z = f x >>= \y -> r $! 
z `mappend` y + + getCompls :: [GlobalRdrElt] -> IO ([CompItem],QualCompls) + getCompls = foldMapM getComplsForOne + + getComplsForOne :: GlobalRdrElt -> IO ([CompItem],QualCompls) + getComplsForOne (GRE n _ True _) = + (, mempty) <$> toCompItem curMod curModName n Nothing + getComplsForOne (GRE n _ False prov) = + flip foldMapM (map is_decl prov) $ \spec -> do + compItem <- toCompItem curMod (is_mod spec) n Nothing + let unqual + | is_qual spec = [] + | otherwise = compItem + qual + | is_qual spec = Map.singleton asMod compItem + | otherwise = Map.fromList [(asMod,compItem),(origMod,compItem)] + asMod = showModName (is_as spec) + origMod = showModName (is_mod spec) + return (unqual,QualCompls qual) + + toCompItem :: Module -> ModuleName -> Name -> Maybe (LImportDecl GhcPs) -> IO [CompItem] + toCompItem m mn n imp' = do + docs <- getDocumentationTryGhc packageState curMod deps n + ty <- catchSrcErrors (hsc_dflags packageState) "completion" $ do + name' <- lookupName packageState m n + return $ name' >>= safeTyThingType + -- use the same pass to also capture any Record snippets that we can collect + record_ty <- catchSrcErrors (hsc_dflags packageState) "record-completion" $ do + name' <- lookupName packageState m n + return $ name' >>= safeTyThingForRecord + + let recordCompls = case either (const Nothing) id record_ty of + Just (ctxStr, flds) -> case flds of + [] -> [] + _ -> [mkRecordSnippetCompItem ctxStr flds (ppr mn) docs imp'] + Nothing -> [] + + return $ [mkNameCompItem n mn (either (const Nothing) id ty) Nothing docs imp'] ++ + recordCompls + + (unquals,quals) <- getCompls rdrElts + + return $ CC + { allModNamesAsNS = allModNamesAsNS + , unqualCompls = unquals + , qualCompls = quals + , importableModules = moduleNames + } + + +-- | Produces completions from the top level declarations of a module. +localCompletionsForParsedModule :: ParsedModule -> CachedCompletions +localCompletionsForParsedModule pm@ParsedModule{pm_parsed_source = L _ HsModule{hsmodDecls, hsmodName}} = + CC { allModNamesAsNS = mempty + , unqualCompls = compls + , qualCompls = mempty + , importableModules = mempty + } + where + typeSigIds = Set.fromList + [ id + | L _ (SigD _ (TypeSig _ ids _)) <- hsmodDecls + , L _ id <- ids + ] + hasTypeSig = (`Set.member` typeSigIds) . unLoc + + compls = concat + [ case decl of + SigD _ (TypeSig _ ids typ) -> + [mkComp id CiFunction (Just $ ppr typ) | id <- ids] + ValD _ FunBind{fun_id} -> + [ mkComp fun_id CiFunction Nothing + | not (hasTypeSig fun_id) + ] + ValD _ PatBind{pat_lhs} -> + [mkComp id CiVariable Nothing + | VarPat _ id <- listify (\(_ :: Pat GhcPs) -> True) pat_lhs] + TyClD _ ClassDecl{tcdLName, tcdSigs} -> + mkComp tcdLName CiClass Nothing : + [ mkComp id CiFunction (Just $ ppr typ) + | L _ (TypeSig _ ids typ) <- tcdSigs + , id <- ids] + TyClD _ x -> + let generalCompls = [mkComp id cl Nothing + | id <- listify (\(_ :: Located(IdP GhcPs)) -> True) x + , let cl = occNameToComKind Nothing (rdrNameOcc $ unLoc id)] + -- here we only have to look at the outermost type + recordCompls = findRecordCompl pm thisModName x + in + -- the constructors and snippets will be duplicated here giving the user 2 choices. 
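+                     -- For instance, a record @data Foo = Foo { bar :: Int }@ contributes both
+                     -- the constructor completion @Foo@ and the record snippet
+                     -- @Foo {bar=${1:_bar}}@ built by 'mkRecordSnippetCompItem'.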
+ generalCompls ++ recordCompls + ForD _ ForeignImport{fd_name,fd_sig_ty} -> + [mkComp fd_name CiVariable (Just $ ppr fd_sig_ty)] + ForD _ ForeignExport{fd_name,fd_sig_ty} -> + [mkComp fd_name CiVariable (Just $ ppr fd_sig_ty)] + _ -> [] + | L _ decl <- hsmodDecls + ] + + mkComp n ctyp ty = + CI ctyp pn (Right thisModName) ty pn Nothing doc (ctyp `elem` [CiStruct, CiClass]) Nothing + where + pn = ppr n + doc = SpanDocText (getDocumentation [pm] n) (SpanDocUris Nothing Nothing) + + thisModName = ppr hsmodName + +findRecordCompl :: ParsedModule -> T.Text -> TyClDecl GhcPs -> [CompItem] +findRecordCompl pmod mn DataDecl {tcdLName, tcdDataDefn} = result + where + result = [mkRecordSnippetCompItem (T.pack . showGhc . unLoc $ con_name) field_labels mn doc Nothing + | ConDeclH98{..} <- unLoc <$> dd_cons tcdDataDefn + , Just con_details <- [getFlds con_args] + , let field_names = mapMaybe extract con_details + , let field_labels = T.pack . showGhc . unLoc <$> field_names + , (not . List.null) field_labels + ] + doc = SpanDocText (getDocumentation [pmod] tcdLName) (SpanDocUris Nothing Nothing) + + getFlds :: HsConDetails arg (Located [LConDeclField GhcPs]) -> Maybe [ConDeclField GhcPs] + getFlds conArg = case conArg of + RecCon rec -> Just $ unLoc <$> unLoc rec + PrefixCon _ -> Just [] + _ -> Nothing + + extract ConDeclField{..} + -- TODO: Why is cd_fld_names a list? + | Just fld_name <- rdrNameFieldOcc . unLoc <$> listToMaybe cd_fld_names = Just fld_name + | otherwise = Nothing + -- XConDeclField + extract _ = Nothing +findRecordCompl _ _ _ = [] + +ppr :: Outputable a => a -> T.Text +ppr = T.pack . prettyPrint + +newtype WithSnippets = WithSnippets Bool + +toggleSnippets :: ClientCapabilities -> WithSnippets -> CompletionItem -> CompletionItem +toggleSnippets ClientCapabilities { _textDocument } (WithSnippets with) x + | with && supported = x + | otherwise = x { _insertTextFormat = Just PlainText + , _insertText = Nothing + } + where + supported = + Just True == (_textDocument >>= _completion >>= _completionItem >>= _snippetSupport) + +-- | Returns the cached completions for the given module and position. +getCompletions + :: IdeOptions + -> CachedCompletions + -> Maybe (ParsedModule, PositionMapping) + -> (Bindings, PositionMapping) + -> VFS.PosPrefixInfo + -> ClientCapabilities + -> WithSnippets + -> IO [CompletionItem] +getCompletions ideOpts CC { allModNamesAsNS, unqualCompls, qualCompls, importableModules} + maybe_parsed (localBindings, bmapping) prefixInfo caps withSnippets = do + let VFS.PosPrefixInfo { fullLine, prefixModule, prefixText } = prefixInfo + enteredQual = if T.null prefixModule then "" else prefixModule <> "." + fullPrefix = enteredQual <> prefixText + + {- correct the position by moving 'foo :: Int -> String -> ' + ^ + to 'foo :: Int -> String -> ' + ^ + -} + pos = VFS.cursorPos prefixInfo + + filtModNameCompls = + map mkModCompl + $ mapMaybe (T.stripPrefix enteredQual) + $ Fuzzy.simpleFilter fullPrefix allModNamesAsNS + + filtCompls = map Fuzzy.original $ Fuzzy.filter prefixText ctxCompls "" "" label False + where + + mcc = case maybe_parsed of + Nothing -> Nothing + Just (pm, pmapping) -> + let PositionMapping pDelta = pmapping + position' = fromDelta pDelta pos + lpos = lowerRange position' + hpos = upperRange position' + in getCContext lpos pm <|> getCContext hpos pm + + -- completions specific to the current context + ctxCompls' = case mcc of + Nothing -> compls + Just TypeContext -> filter isTypeCompl compls + Just ValueContext -> filter (not . 
isTypeCompl) compls + Just _ -> filter (not . isTypeCompl) compls + -- Add whether the text to insert has backticks + ctxCompls = map (\comp -> comp { isInfix = infixCompls }) ctxCompls' + + infixCompls :: Maybe Backtick + infixCompls = isUsedAsInfix fullLine prefixModule prefixText pos + + PositionMapping bDelta = bmapping + oldPos = fromDelta bDelta $ VFS.cursorPos prefixInfo + startLoc = lowerRange oldPos + endLoc = upperRange oldPos + localCompls = map (uncurry localBindsToCompItem) $ getFuzzyScope localBindings startLoc endLoc + localBindsToCompItem :: Name -> Maybe Type -> CompItem + localBindsToCompItem name typ = CI ctyp pn thisModName ty pn Nothing emptySpanDoc (not $ isValOcc occ) Nothing + where + occ = nameOccName name + ctyp = occNameToComKind Nothing occ + pn = ppr name + ty = ppr <$> typ + thisModName = case nameModule_maybe name of + Nothing -> Left $ nameSrcSpan name + Just m -> Right $ ppr m + + compls = if T.null prefixModule + then localCompls ++ unqualCompls + else Map.findWithDefault [] prefixModule $ getQualCompls qualCompls + + filtListWith f list = + [ f label + | label <- Fuzzy.simpleFilter fullPrefix list + , enteredQual `T.isPrefixOf` label + ] + + filtListWithSnippet f list suffix = + [ toggleSnippets caps withSnippets (f label (snippet <> suffix)) + | (snippet, label) <- list + , Fuzzy.test fullPrefix label + ] + + filtImportCompls = filtListWith (mkImportCompl enteredQual) importableModules + filtPragmaCompls = filtListWithSnippet mkPragmaCompl validPragmas + filtOptsCompls = filtListWith mkExtCompl + filtKeywordCompls + | T.null prefixModule = filtListWith mkExtCompl (optKeywords ideOpts) + | otherwise = [] + + stripLeading :: Char -> String -> String + stripLeading _ [] = [] + stripLeading c (s:ss) + | s == c = ss + | otherwise = s:ss + + result + | "import " `T.isPrefixOf` fullLine + = filtImportCompls + -- we leave this condition here to avoid duplications and return empty list + -- since HLS implements this completion (#haskell-language-server/pull/662) + | "{-# language" `T.isPrefixOf` T.toLower fullLine + = [] + | "{-# options_ghc" `T.isPrefixOf` T.toLower fullLine + = filtOptsCompls (map (T.pack . stripLeading '-') $ flagsForCompletion False) + | "{-# " `T.isPrefixOf` fullLine + = filtPragmaCompls (pragmaSuffix fullLine) + | otherwise + = let uniqueFiltCompls = nubOrdOn insertText filtCompls + in filtModNameCompls ++ map (toggleSnippets caps withSnippets + . mkCompl ideOpts . 
stripAutoGenerated) uniqueFiltCompls + ++ filtKeywordCompls + return result + + +-- --------------------------------------------------------------------- +-- helper functions for pragmas +-- --------------------------------------------------------------------- + +validPragmas :: [(T.Text, T.Text)] +validPragmas = + [ ("LANGUAGE ${1:extension}" , "LANGUAGE") + , ("OPTIONS_GHC -${1:option}" , "OPTIONS_GHC") + , ("INLINE ${1:function}" , "INLINE") + , ("NOINLINE ${1:function}" , "NOINLINE") + , ("INLINABLE ${1:function}" , "INLINABLE") + , ("WARNING ${1:message}" , "WARNING") + , ("DEPRECATED ${1:message}" , "DEPRECATED") + , ("ANN ${1:annotation}" , "ANN") + , ("RULES" , "RULES") + , ("SPECIALIZE ${1:function}" , "SPECIALIZE") + , ("SPECIALIZE INLINE ${1:function}", "SPECIALIZE INLINE") + ] + +pragmaSuffix :: T.Text -> T.Text +pragmaSuffix fullLine + | "}" `T.isSuffixOf` fullLine = mempty + | otherwise = " #-}" + +-- --------------------------------------------------------------------- +-- helper functions for infix backticks +-- --------------------------------------------------------------------- + +hasTrailingBacktick :: T.Text -> Position -> Bool +hasTrailingBacktick line Position { _character } + | T.length line > _character = (line `T.index` _character) == '`' + | otherwise = False + +isUsedAsInfix :: T.Text -> T.Text -> T.Text -> Position -> Maybe Backtick +isUsedAsInfix line prefixMod prefixText pos + | hasClosingBacktick && hasOpeningBacktick = Just Surrounded + | hasOpeningBacktick = Just LeftSide + | otherwise = Nothing + where + hasOpeningBacktick = openingBacktick line prefixMod prefixText pos + hasClosingBacktick = hasTrailingBacktick line pos + +openingBacktick :: T.Text -> T.Text -> T.Text -> Position -> Bool +openingBacktick line prefixModule prefixText Position { _character } + | backtickIndex < 0 = False + | otherwise = (line `T.index` backtickIndex) == '`' + where + backtickIndex :: Int + backtickIndex = + let + prefixLength = T.length prefixText + moduleLength = if prefixModule == "" + then 0 + else T.length prefixModule + 1 {- Because of "." -} + in + -- Points to the first letter of either the module or prefix text + _character - (prefixLength + moduleLength) - 1 + + +-- --------------------------------------------------------------------- + +-- | Under certain circumstance GHC generates some extra stuff that we +-- don't want in the autocompleted symbols +stripAutoGenerated :: CompItem -> CompItem +stripAutoGenerated ci = + ci {label = stripPrefix (label ci)} + {- When e.g. DuplicateRecordFields is enabled, compiler generates + names like "$sel:accessor:One" and "$sel:accessor:Two" to disambiguate record selectors + https://ghc.haskell.org/trac/ghc/wiki/Records/OverloadedRecordFields/DuplicateRecordFields#Implementation + -} + +-- TODO: Turn this into an alex lexer that discards prefixes as if they were whitespace. 
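+--
+-- Illustrative examples of the stripping performed by 'stripPrefix' below:
+--
+-- > stripPrefix "$sel:accessor:One" ≡ "accessor"
+-- > stripPrefix "$WMkFoo"           ≡ "MkFoo"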
+ +stripPrefix :: T.Text -> T.Text +stripPrefix name = T.takeWhile (/=':') $ go prefixes + where + go [] = name + go (p:ps) + | T.isPrefixOf p name = T.drop (T.length p) name + | otherwise = go ps + +-- | Prefixes that can occur in a GHC OccName +prefixes :: [T.Text] +prefixes = + [ + -- long ones + "$con2tag_" + , "$tag2con_" + , "$maxtag_" + + -- four chars + , "$sel:" + , "$tc'" + + -- three chars + , "$dm" + , "$co" + , "$tc" + , "$cp" + , "$fx" + + -- two chars + , "$W" + , "$w" + , "$m" + , "$b" + , "$c" + , "$d" + , "$i" + , "$s" + , "$f" + , "$r" + , "C:" + , "N:" + , "D:" + , "$p" + , "$L" + , "$f" + , "$t" + , "$c" + , "$m" + ] + + +safeTyThingForRecord :: TyThing -> Maybe (T.Text, [T.Text]) +safeTyThingForRecord (AnId _) = Nothing +safeTyThingForRecord (AConLike dc) = + let ctxStr = T.pack . showGhc . occName . conLikeName $ dc + field_names = T.pack . unpackFS . flLabel <$> conLikeFieldLabels dc + in + Just (ctxStr, field_names) +safeTyThingForRecord _ = Nothing + +mkRecordSnippetCompItem :: T.Text -> [T.Text] -> T.Text -> SpanDoc -> Maybe (LImportDecl GhcPs) -> CompItem +mkRecordSnippetCompItem ctxStr compl mn docs imp = r + where + r = CI { + compKind = CiSnippet + , insertText = buildSnippet + , importedFrom = importedFrom + , typeText = Nothing + , label = ctxStr + , isInfix = Nothing + , docs = docs + , isTypeCompl = False + , additionalTextEdits = imp >>= extendImportList (T.unpack ctxStr) + } + + placeholder_pairs = zip compl ([1..]::[Int]) + snippet_parts = map (\(x, i) -> x <> "=${" <> T.pack (show i) <> ":_" <> x <> "}") placeholder_pairs + snippet = T.intercalate (T.pack ", ") snippet_parts + buildSnippet = ctxStr <> " {" <> snippet <> "}" + importedFrom = Right mn diff --git a/ghcide/src/Development/IDE/Plugin/Completions/Types.hs b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs new file mode 100644 index 00000000000..c928b543380 --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Completions/Types.hs @@ -0,0 +1,60 @@ +module Development.IDE.Plugin.Completions.Types ( + module Development.IDE.Plugin.Completions.Types +) where + +import Control.DeepSeq +import qualified Data.Map as Map +import qualified Data.Text as T +import SrcLoc + +import Development.IDE.Spans.Common +import Language.Haskell.LSP.Types (TextEdit, CompletionItemKind) + +-- From haskell-ide-engine/src/Haskell/Ide/Engine/LSP/Completions.hs + +data Backtick = Surrounded | LeftSide + deriving (Eq, Ord, Show) + +data CompItem = CI + { compKind :: CompletionItemKind + , insertText :: T.Text -- ^ Snippet for the completion + , importedFrom :: Either SrcSpan T.Text -- ^ From where this item is imported from. + , typeText :: Maybe T.Text -- ^ Available type information. + , label :: T.Text -- ^ Label to display to the user. + , isInfix :: Maybe Backtick -- ^ Did the completion happen + -- in the context of an infix notation. + , docs :: SpanDoc -- ^ Available documentation. + , isTypeCompl :: Bool + , additionalTextEdits :: Maybe [TextEdit] + } + deriving (Eq, Show) + +-- Associates a module's qualifier with its members +newtype QualCompls + = QualCompls { getQualCompls :: Map.Map T.Text [CompItem] } + deriving Show +instance Semigroup QualCompls where + (QualCompls a) <> (QualCompls b) = QualCompls $ Map.unionWith (++) a b +instance Monoid QualCompls where + mempty = QualCompls Map.empty + mappend = (Prelude.<>) + +-- | End result of the completions +data CachedCompletions = CC + { allModNamesAsNS :: [T.Text] -- ^ All module names in scope. 
+ -- Prelude is a single module + , unqualCompls :: [CompItem] -- ^ All Possible completion items + , qualCompls :: QualCompls -- ^ Completion items associated to + -- to a specific module name. + , importableModules :: [T.Text] -- ^ All modules that may be imported. + } deriving Show + +instance NFData CachedCompletions where + rnf = rwhnf + +instance Monoid CachedCompletions where + mempty = CC mempty mempty mempty mempty + +instance Semigroup CachedCompletions where + CC a b c d <> CC a' b' c' d' = + CC (a<>a') (b<>b') (c<>c') (d<>d') diff --git a/ghcide/src/Development/IDE/Plugin/Test.hs b/ghcide/src/Development/IDE/Plugin/Test.hs new file mode 100644 index 00000000000..a33fccea49c --- /dev/null +++ b/ghcide/src/Development/IDE/Plugin/Test.hs @@ -0,0 +1,106 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +-- | A plugin that adds custom messages for use in tests +module Development.IDE.Plugin.Test + ( TestRequest(..) + , WaitForIdeRuleResult(..) + , plugin + ) where + +import Control.Monad.STM +import Data.Aeson +import Data.Aeson.Types +import Data.CaseInsensitive (CI, original) +import Development.IDE.Core.Service +import Development.IDE.Core.Shake +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Util (HscEnvEq(hscEnv)) +import Development.IDE.LSP.Server +import Development.IDE.Plugin +import Development.IDE.Types.Action +import GHC.Generics (Generic) +import GhcPlugins (HscEnv(hsc_dflags)) +import Language.Haskell.LSP.Core +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import System.Time.Extra +import Development.IDE.Core.RuleTypes +import Control.Monad +import Development.Shake (Action) +import Data.Maybe (isJust) +import Data.Bifunctor +import Data.Text (pack, Text) +import Data.String +import Development.IDE.Types.Location (fromUri) + +data TestRequest + = BlockSeconds Seconds -- ^ :: Null + | GetInterfaceFilesDir FilePath -- ^ :: String + | GetShakeSessionQueueCount -- ^ :: Number + | WaitForShakeQueue -- ^ Block until the Shake queue is empty. 
Returns Null + | WaitForIdeRule String Uri -- ^ :: WaitForIdeRuleResult + deriving Generic + deriving anyclass (FromJSON, ToJSON) + +newtype WaitForIdeRuleResult = WaitForIdeRuleResult { ideResultSuccess::Bool} + deriving newtype (FromJSON, ToJSON) + +plugin :: Plugin c +plugin = Plugin { + pluginRules = return (), + pluginHandler = PartialHandlers $ \WithMessage{..} x -> return x { + customRequestHandler = withResponse RspCustomServer requestHandler' + } +} + where + requestHandler' lsp ide req + | Just customReq <- parseMaybe parseJSON req + = requestHandler lsp ide customReq + | otherwise + = return $ Left + $ ResponseError InvalidRequest "Cannot parse request" Nothing + +requestHandler :: LspFuncs c + -> IdeState + -> TestRequest + -> IO (Either ResponseError Value) +requestHandler lsp _ (BlockSeconds secs) = do + sendFunc lsp $ NotCustomServer $ + NotificationMessage "2.0" (CustomServerMethod "ghcide/blocking/request") $ + toJSON secs + sleep secs + return (Right Null) +requestHandler _ s (GetInterfaceFilesDir fp) = do + let nfp = toNormalizedFilePath fp + sess <- runAction "Test - GhcSession" s $ use_ GhcSession nfp + let hiPath = hiDir $ hsc_dflags $ hscEnv sess + return $ Right (toJSON hiPath) +requestHandler _ s GetShakeSessionQueueCount = do + n <- atomically $ countQueue $ actionQueue $ shakeExtras s + return $ Right (toJSON n) +requestHandler _ s WaitForShakeQueue = do + atomically $ do + n <- countQueue $ actionQueue $ shakeExtras s + when (n>0) retry + return $ Right Null +requestHandler _ s (WaitForIdeRule k file) = do + let nfp = fromUri $ toNormalizedUri file + success <- runAction ("WaitForIdeRule " <> k <> " " <> show file) s $ parseAction (fromString k) nfp + let res = WaitForIdeRuleResult <$> success + return $ bimap mkResponseError toJSON res + +mkResponseError :: Text -> ResponseError +mkResponseError msg = ResponseError InvalidRequest msg Nothing + +parseAction :: CI String -> NormalizedFilePath -> Action (Either Text Bool) +parseAction "typecheck" fp = Right . isJust <$> use TypeCheck fp +parseAction "getLocatedImports" fp = Right . isJust <$> use GetLocatedImports fp +parseAction "getmodsummary" fp = Right . isJust <$> use GetModSummary fp +parseAction "getmodsummarywithouttimestamps" fp = Right . isJust <$> use GetModSummaryWithoutTimestamps fp +parseAction "getparsedmodule" fp = Right . isJust <$> use GetParsedModule fp +parseAction "ghcsession" fp = Right . isJust <$> use GhcSession fp +parseAction "ghcsessiondeps" fp = Right . isJust <$> use GhcSessionDeps fp +parseAction "gethieast" fp = Right . isJust <$> use GetHieAst fp +parseAction "getDependencies" fp = Right . isJust <$> use GetDependencies fp +parseAction "getFileContents" fp = Right . isJust <$> use GetFileContents fp +parseAction other _ = return $ Left $ "Cannot parse ide rule: " <> pack (original other) diff --git a/ghcide/src/Development/IDE/Spans/AtPoint.hs b/ghcide/src/Development/IDE/Spans/AtPoint.hs new file mode 100644 index 00000000000..bb33a3f856f --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/AtPoint.hs @@ -0,0 +1,203 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +-- | Gives information about symbols at a given point in DAML files. +-- These are all pure functions that should execute quickly. 
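+--
+-- The main entry points are 'atPoint' (hover contents), 'gotoDefinition' and
+-- 'gotoTypeDefinition' (definition locations, consulting .hie files for names
+-- defined in external packages) and 'documentHighlight'; all of them use
+-- 'pointCommand' to select the smallest HIE AST node containing the requested
+-- position.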
+module Development.IDE.Spans.AtPoint ( + atPoint + , gotoDefinition + , gotoTypeDefinition + , documentHighlight + , pointCommand + ) where + +import Development.IDE.GHC.Error +import Development.IDE.GHC.Orphans() +import Development.IDE.Types.Location +import Language.Haskell.LSP.Types + +-- DAML compiler and infrastructure +import Development.IDE.GHC.Compat +import Development.IDE.Types.Options +import Development.IDE.Spans.Common +import Development.IDE.Core.RuleTypes + +-- GHC API imports +import FastString +import Name +import Outputable hiding ((<>)) +import SrcLoc +import TyCoRep +import TyCon +import qualified Var +import NameEnv + +import Control.Applicative +import Control.Monad.Extra +import Control.Monad.Trans.Maybe +import Control.Monad.Trans.Class +import Control.Monad.IO.Class +import Data.Maybe +import Data.List +import qualified Data.Text as T +import qualified Data.Map as M + +import Data.Either +import Data.List.Extra (dropEnd1) + +documentHighlight + :: Monad m + => HieASTs Type + -> RefMap + -> Position + -> MaybeT m [DocumentHighlight] +documentHighlight hf rf pos = MaybeT $ pure (Just highlights) + where + ns = concat $ pointCommand hf pos (rights . M.keys . nodeIdentifiers . nodeInfo) + highlights = do + n <- ns + ref <- maybe [] id (M.lookup (Right n) rf) + pure $ makeHighlight ref + makeHighlight (sp,dets) = + DocumentHighlight (realSrcSpanToRange sp) (Just $ highlightType $ identInfo dets) + highlightType s = + if any (isJust . getScopeFromContext) s + then HkWrite + else HkRead + +gotoTypeDefinition + :: MonadIO m + => (Module -> MaybeT m (HieFile, FilePath)) + -> IdeOptions + -> HieASTs Type + -> Position + -> MaybeT m [Location] +gotoTypeDefinition getHieFile ideOpts srcSpans pos + = lift $ typeLocationsAtPoint getHieFile ideOpts pos srcSpans + +-- | Locate the definition of the name at a given position. +gotoDefinition + :: MonadIO m + => (Module -> MaybeT m (HieFile, FilePath)) + -> IdeOptions + -> M.Map ModuleName NormalizedFilePath + -> HieASTs Type + -> Position + -> MaybeT m Location +gotoDefinition getHieFile ideOpts imports srcSpans pos + = MaybeT $ fmap listToMaybe $ locationsAtPoint getHieFile ideOpts imports pos srcSpans + +-- | Synopsis for the name at a given position. +atPoint + :: IdeOptions + -> HieASTs Type + -> DocAndKindMap + -> Position + -> Maybe (Maybe Range, [T.Text]) +atPoint IdeOptions{} hf (DKMap dm km) pos = listToMaybe $ pointCommand hf pos hoverInfo + where + -- Hover info for values/data + hoverInfo ast = + (Just range, prettyNames ++ pTypes) + where + pTypes + | length names == 1 = dropEnd1 $ map wrapHaskell prettyTypes + | otherwise = map wrapHaskell prettyTypes + + range = realSrcSpanToRange $ nodeSpan ast + + wrapHaskell x = "\n```haskell\n"<>x<>"\n```\n" + info = nodeInfo ast + names = M.assocs $ nodeIdentifiers info + types = nodeType info + + prettyNames :: [T.Text] + prettyNames = map prettyName names + prettyName (Right n, dets) = T.unlines $ + wrapHaskell (showNameWithoutUniques n <> maybe "" ((" :: " <>) . prettyType) (identType dets <|> maybeKind)) + : definedAt n + ++ catMaybes [ T.unlines . spanDocToMarkdown <$> lookupNameEnv dm n + ] + where maybeKind = safeTyThingType =<< lookupNameEnv km n + prettyName (Left m,_) = showName m + + prettyTypes = map (("_ :: "<>) . 
prettyType) types + prettyType t = showName t + + definedAt name = + -- do not show "at " and similar messages + -- see the code of 'pprNameDefnLoc' for more information + case nameSrcLoc name of + UnhelpfulLoc {} | isInternalName name || isSystemName name -> [] + _ -> ["*Defined " <> T.pack (showSDocUnsafe $ pprNameDefnLoc name) <> "*"] + +typeLocationsAtPoint + :: forall m + . MonadIO m + => (Module -> MaybeT m (HieFile, FilePath)) + -> IdeOptions + -> Position + -> HieASTs Type + -> m [Location] +typeLocationsAtPoint getHieFile _ideOptions pos ast = + let ts = concat $ pointCommand ast pos (nodeType . nodeInfo) + ns = flip mapMaybe ts $ \case + TyConApp tc _ -> Just $ tyConName tc + TyVarTy n -> Just $ Var.varName n + _ -> Nothing + in mapMaybeM (nameToLocation getHieFile) ns + +locationsAtPoint + :: forall m + . MonadIO m + => (Module -> MaybeT m (HieFile, FilePath)) + -> IdeOptions + -> M.Map ModuleName NormalizedFilePath + -> Position + -> HieASTs Type + -> m [Location] +locationsAtPoint getHieFile _ideOptions imports pos ast = + let ns = concat $ pointCommand ast pos (M.keys . nodeIdentifiers . nodeInfo) + zeroPos = Position 0 0 + zeroRange = Range zeroPos zeroPos + modToLocation m = fmap (\fs -> Location (fromNormalizedUri $ filePathToUri' fs) zeroRange) $ M.lookup m imports + in mapMaybeM (either (pure . modToLocation) $ nameToLocation getHieFile) ns + +-- | Given a 'Name' attempt to find the location where it is defined. +nameToLocation :: Monad f => (Module -> MaybeT f (HieFile, String)) -> Name -> f (Maybe Location) +nameToLocation getHieFile name = fmap (srcSpanToLocation =<<) $ + case nameSrcSpan name of + sp@(RealSrcSpan _) -> pure $ Just sp + sp@(UnhelpfulSpan _) -> runMaybeT $ do + guard (sp /= wiredInSrcSpan) + -- This case usually arises when the definition is in an external package. + -- In this case the interface files contain garbage source spans + -- so we instead read the .hie files to get useful source spans. + mod <- MaybeT $ return $ nameModule_maybe name + (hieFile, srcPath) <- getHieFile mod + avail <- MaybeT $ pure $ find (eqName name . snd) $ hieExportNames hieFile + -- The location will point to the source file used during compilation. + -- This file might no longer exists and even if it does the path will be relative + -- to the compilation directory which we don’t know. + let span = setFileName srcPath $ fst avail + pure span + where + -- We ignore uniques and source spans and only compare the name and the module. 
+ eqName :: Name -> Name -> Bool + eqName n n' = nameOccName n == nameOccName n' && nameModule_maybe n == nameModule_maybe n' + setFileName f (RealSrcSpan span) = RealSrcSpan (span { srcSpanFile = mkFastString f }) + setFileName _ span@(UnhelpfulSpan _) = span + +pointCommand :: HieASTs Type -> Position -> (HieAST Type -> a) -> [a] +pointCommand hf pos k = + catMaybes $ M.elems $ flip M.mapWithKey (getAsts hf) $ \fs ast -> + case selectSmallestContaining (sp fs) ast of + Nothing -> Nothing + Just ast' -> Just $ k ast' + where + sloc fs = mkRealSrcLoc fs (line+1) (cha+1) + sp fs = mkRealSrcSpan (sloc fs) (sloc fs) + line = _line pos + cha = _character pos + + diff --git a/ghcide/src/Development/IDE/Spans/Common.hs b/ghcide/src/Development/IDE/Spans/Common.hs new file mode 100644 index 00000000000..1f47ed8b4c7 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/Common.hs @@ -0,0 +1,189 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DeriveAnyClass #-} +#include "ghc-api-version.h" + +module Development.IDE.Spans.Common ( + showGhc +, showName +, showNameWithoutUniques +, safeTyThingId +, safeTyThingType +, SpanDoc(..) +, SpanDocUris(..) +, emptySpanDoc +, spanDocToMarkdown +, spanDocToMarkdownForTest +, DocMap +, KindMap +) where + +import Data.Maybe +import qualified Data.Text as T +import Data.List.Extra +import Control.DeepSeq +import GHC.Generics + +import GHC +import Outputable hiding ((<>)) +import ConLike +import DataCon +import Var +import NameEnv + +import qualified Documentation.Haddock.Parser as H +import qualified Documentation.Haddock.Types as H +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Orphans () + +type DocMap = NameEnv SpanDoc +type KindMap = NameEnv TyThing + +showGhc :: Outputable a => a -> String +showGhc = showPpr unsafeGlobalDynFlags + +showName :: Outputable a => a -> T.Text +showName = T.pack . prettyprint + where + prettyprint x = renderWithStyle unsafeGlobalDynFlags (ppr x) style + style = mkUserStyle unsafeGlobalDynFlags neverQualify AllTheWay + +showNameWithoutUniques :: Outputable a => a -> T.Text +showNameWithoutUniques = T.pack . 
prettyprint + where + dyn = unsafeGlobalDynFlags `gopt_set` Opt_SuppressUniques + prettyprint x = renderWithStyle dyn (ppr x) style + style = mkUserStyle dyn neverQualify AllTheWay + +-- From haskell-ide-engine/src/Haskell/Ide/Engine/Support/HieExtras.hs +safeTyThingType :: TyThing -> Maybe Type +safeTyThingType thing + | Just i <- safeTyThingId thing = Just (varType i) +safeTyThingType (ATyCon tycon) = Just (tyConKind tycon) +safeTyThingType _ = Nothing + +safeTyThingId :: TyThing -> Maybe Id +safeTyThingId (AnId i) = Just i +safeTyThingId (AConLike (RealDataCon dc)) = Just $ dataConWrapId dc +safeTyThingId _ = Nothing + +-- Possible documentation for an element in the code +data SpanDoc + = SpanDocString HsDocString SpanDocUris + | SpanDocText [T.Text] SpanDocUris + deriving stock (Eq, Show, Generic) + deriving anyclass NFData + +data SpanDocUris = + SpanDocUris + { spanDocUriDoc :: Maybe T.Text -- ^ The haddock html page + , spanDocUriSrc :: Maybe T.Text -- ^ The hyperlinked source html page + } deriving stock (Eq, Show, Generic) + deriving anyclass NFData + +emptySpanDoc :: SpanDoc +emptySpanDoc = SpanDocText [] (SpanDocUris Nothing Nothing) + +spanDocToMarkdown :: SpanDoc -> [T.Text] +spanDocToMarkdown (SpanDocString docs uris) + = [T.pack $ haddockToMarkdown $ H.toRegular $ H._doc $ H.parseParas Nothing $ unpackHDS docs] + <> ["\n"] <> spanDocUrisToMarkdown uris + -- Append the extra newlines since this is markdown --- to get a visible newline, + -- you need to have two newlines +spanDocToMarkdown (SpanDocText txt uris) = txt <> ["\n"] <> spanDocUrisToMarkdown uris + +spanDocUrisToMarkdown :: SpanDocUris -> [T.Text] +spanDocUrisToMarkdown (SpanDocUris mdoc msrc) = catMaybes + [ linkify "Documentation" <$> mdoc + , linkify "Source" <$> msrc + ] + where linkify title uri = "[" <> title <> "](" <> uri <> ")" + +spanDocToMarkdownForTest :: String -> String +spanDocToMarkdownForTest + = haddockToMarkdown . H.toRegular . H._doc . 
H.parseParas Nothing + +-- Simple (and a bit hacky) conversion from Haddock markup to Markdown +haddockToMarkdown + :: H.DocH String String -> String + +haddockToMarkdown H.DocEmpty + = "" +haddockToMarkdown (H.DocAppend d1 d2) + = haddockToMarkdown d1 ++ " " ++ haddockToMarkdown d2 +haddockToMarkdown (H.DocString s) + = escapeBackticks s +haddockToMarkdown (H.DocParagraph p) + = "\n\n" ++ haddockToMarkdown p +haddockToMarkdown (H.DocIdentifier i) + = "`" ++ i ++ "`" +haddockToMarkdown (H.DocIdentifierUnchecked i) + = "`" ++ i ++ "`" +haddockToMarkdown (H.DocModule i) + = "`" ++ escapeBackticks i ++ "`" +haddockToMarkdown (H.DocWarning w) + = haddockToMarkdown w +haddockToMarkdown (H.DocEmphasis d) + = "*" ++ haddockToMarkdown d ++ "*" +haddockToMarkdown (H.DocBold d) + = "**" ++ haddockToMarkdown d ++ "**" +haddockToMarkdown (H.DocMonospaced d) + = "`" ++ removeUnescapedBackticks (haddockToMarkdown d) ++ "`" +haddockToMarkdown (H.DocCodeBlock d) + = "\n```haskell\n" ++ haddockToMarkdown d ++ "\n```\n" +haddockToMarkdown (H.DocExamples es) + = "\n```haskell\n" ++ unlines (map exampleToMarkdown es) ++ "\n```\n" + where + exampleToMarkdown (H.Example expr result) + = ">>> " ++ expr ++ "\n" ++ unlines result +haddockToMarkdown (H.DocHyperlink (H.Hyperlink url Nothing)) + = "<" ++ url ++ ">" +haddockToMarkdown (H.DocHyperlink (H.Hyperlink url (Just label))) + = "[" ++ haddockToMarkdown label ++ "](" ++ url ++ ")" +haddockToMarkdown (H.DocPic (H.Picture url Nothing)) + = "![](" ++ url ++ ")" +haddockToMarkdown (H.DocPic (H.Picture url (Just label))) + = "![" ++ label ++ "](" ++ url ++ ")" +haddockToMarkdown (H.DocAName aname) + = "[" ++ escapeBackticks aname ++ "]:" +haddockToMarkdown (H.DocHeader (H.Header level title)) + = replicate level '#' ++ " " ++ haddockToMarkdown title + +haddockToMarkdown (H.DocUnorderedList things) + = '\n' : (unlines $ map (("+ " ++) . trimStart . splitForList . haddockToMarkdown) things) +haddockToMarkdown (H.DocOrderedList things) + = '\n' : (unlines $ map (("1. " ++) . trimStart . splitForList . haddockToMarkdown) things) +haddockToMarkdown (H.DocDefList things) + = '\n' : (unlines $ map (\(term, defn) -> "+ **" ++ haddockToMarkdown term ++ "**: " ++ haddockToMarkdown defn) things) + +-- we cannot render math by default +haddockToMarkdown (H.DocMathInline _) + = "*cannot render inline math formula*" +haddockToMarkdown (H.DocMathDisplay _) + = "\n\n*cannot render display math formula*\n\n" + +-- TODO: render tables +haddockToMarkdown (H.DocTable _t) + = "\n\n*tables are not yet supported*\n\n" + +-- things I don't really know how to handle +haddockToMarkdown (H.DocProperty _) + = "" -- don't really know what to do + +escapeBackticks :: String -> String +escapeBackticks "" = "" +escapeBackticks ('`':ss) = '\\':'`':escapeBackticks ss +escapeBackticks (s :ss) = s:escapeBackticks ss + +removeUnescapedBackticks :: String -> String +removeUnescapedBackticks = \case + '\\' : '`' : ss -> '\\' : '`' : removeUnescapedBackticks ss + '`' : ss -> removeUnescapedBackticks ss + "" -> "" + s : ss -> s : removeUnescapedBackticks ss + +splitForList :: String -> String +splitForList s + = case lines s of + [] -> "" + (first:rest) -> unlines $ first : map ((" " ++) . 
trimStart) rest diff --git a/ghcide/src/Development/IDE/Spans/Documentation.hs b/ghcide/src/Development/IDE/Spans/Documentation.hs new file mode 100644 index 00000000000..b6a8327a406 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/Documentation.hs @@ -0,0 +1,226 @@ +{-# LANGUAGE RankNTypes #-} +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE CPP #-} +#include "ghc-api-version.h" + +module Development.IDE.Spans.Documentation ( + getDocumentation + , getDocumentationTryGhc + , getDocumentationsTryGhc + , DocMap + , mkDocMap + ) where + +import Control.Monad +import Control.Monad.Extra (findM) +import Data.Either +import Data.Foldable +import Data.List.Extra +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Maybe +import qualified Data.Text as T +import Development.IDE.Core.Compile +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Error +import Development.IDE.Spans.Common +import Development.IDE.Core.RuleTypes +import System.Directory +import System.FilePath + +import FastString +import SrcLoc (RealLocated) +import GhcMonad +import Packages +import Name +import Language.Haskell.LSP.Types (getUri, filePathToUri) +import TcRnTypes +import ExtractDocs +import NameEnv +import HscTypes (HscEnv(hsc_dflags)) + +mkDocMap + :: HscEnv + -> [ParsedModule] + -> RefMap + -> TcGblEnv + -> IO DocAndKindMap +mkDocMap env sources rm this_mod = + do let (_ , DeclDocMap this_docs, _) = extractDocs this_mod + d <- foldrM getDocs (mkNameEnv $ M.toList $ fmap (`SpanDocString` SpanDocUris Nothing Nothing) this_docs) names + k <- foldrM getType (tcg_type_env this_mod) names + pure $ DKMap d k + where + getDocs n map + | maybe True (mod ==) $ nameModule_maybe n = pure map -- we already have the docs in this_docs, or they do not exist + | otherwise = do + doc <- getDocumentationTryGhc env mod sources n + pure $ extendNameEnv map n doc + getType n map + | isTcOcc $ occName n = do + kind <- lookupKind env mod n + pure $ maybe map (extendNameEnv map n) kind + | otherwise = pure map + names = rights $ S.toList idents + idents = M.keysSet rm + mod = tcg_mod this_mod + +lookupKind :: HscEnv -> Module -> Name -> IO (Maybe TyThing) +lookupKind env mod = + fmap (either (const Nothing) id) . catchSrcErrors (hsc_dflags env) "span" . lookupName env mod + +getDocumentationTryGhc :: HscEnv -> Module -> [ParsedModule] -> Name -> IO SpanDoc +getDocumentationTryGhc env mod deps n = head <$> getDocumentationsTryGhc env mod deps [n] + +getDocumentationsTryGhc :: HscEnv -> Module -> [ParsedModule] -> [Name] -> IO [SpanDoc] +-- Interfaces are only generated for GHC >= 8.6. 
+-- In older versions, interface files do not embed Haddocks anyway +getDocumentationsTryGhc env mod sources names = do + res <- catchSrcErrors (hsc_dflags env) "docs" $ getDocsBatch env mod names + case res of + Left _ -> mapM mkSpanDocText names + Right res -> zipWithM unwrap res names + where + unwrap (Right (Just docs, _)) n = SpanDocString docs <$> getUris n + unwrap _ n = mkSpanDocText n + + mkSpanDocText name = + SpanDocText (getDocumentation sources name) <$> getUris name + + -- Get the uris to the documentation and source html pages if they exist + getUris name = do + let df = hsc_dflags env + (docFu, srcFu) <- + case nameModule_maybe name of + Just mod -> liftIO $ do + doc <- toFileUriText $ lookupDocHtmlForModule df mod + src <- toFileUriText $ lookupSrcHtmlForModule df mod + return (doc, src) + Nothing -> pure (Nothing, Nothing) + let docUri = (<> "#" <> selector <> showName name) <$> docFu + srcUri = (<> "#" <> showName name) <$> srcFu + selector + | isValName name = "v:" + | otherwise = "t:" + return $ SpanDocUris docUri srcUri + + toFileUriText = (fmap . fmap) (getUri . filePathToUri) + +getDocumentation + :: HasSrcSpan name + => [ParsedModule] -- ^ All of the possible modules it could be defined in. + -> name -- ^ The name you want documentation for. + -> [T.Text] +-- This finds any documentation between the name you want +-- documentation for and the one before it. This is only an +-- approximately correct algorithm and there are easily constructed +-- cases where it will be wrong (if so then usually slightly but there +-- may be edge cases where it is very wrong). +-- TODO : Build a version of GHC exactprint to extract this information +-- more accurately. +getDocumentation sources targetName = fromMaybe [] $ do + -- Find the module the target is defined in. + targetNameSpan <- realSpan $ getLoc targetName + tc <- + find ((==) (Just $ srcSpanFile targetNameSpan) . annotationFileName) + $ reverse sources -- TODO : Is reversing the list here really neccessary? + + -- Top level names bound by the module + let bs = [ n | let L _ HsModule{hsmodDecls} = pm_parsed_source tc + , L _ (ValD _ hsbind) <- hsmodDecls + , Just n <- [name_of_bind hsbind] + ] + -- Sort the names' source spans. + let sortedSpans = sortedNameSpans bs + -- Now go ahead and extract the docs. + let docs = ann tc + nameInd <- elemIndex targetNameSpan sortedSpans + let prevNameSpan = + if nameInd >= 1 + then sortedSpans !! (nameInd - 1) + else zeroSpan $ srcSpanFile targetNameSpan + -- Annoyingly "-- |" documentation isn't annotated with a location, + -- so you have to pull it out from the elements. + pure + $ docHeaders + $ filter (\(L target _) -> isBetween target prevNameSpan targetNameSpan) + $ mapMaybe (\(L l v) -> L <$> realSpan l <*> pure v) + $ join + $ M.elems + docs + where + -- Get the name bound by a binding. We only concern ourselves with + -- @FunBind@ (which covers functions and variables). + name_of_bind :: HsBind GhcPs -> Maybe (Located RdrName) + name_of_bind FunBind {fun_id} = Just fun_id + name_of_bind _ = Nothing + -- Get source spans from names, discard unhelpful spans, remove + -- duplicates and sort. + sortedNameSpans :: [Located RdrName] -> [RealSrcSpan] + sortedNameSpans ls = nubSort (mapMaybe (realSpan . getLoc) ls) + isBetween target before after = before <= target && target <= after + ann = snd . pm_annotations + annotationFileName :: ParsedModule -> Maybe FastString + annotationFileName = fmap srcSpanFile . listToMaybe . realSpans . 
ann + realSpans :: M.Map SrcSpan [Located a] -> [RealSrcSpan] + realSpans = + mapMaybe (realSpan . getLoc) + . join + . M.elems + +-- | Shows this part of the documentation +docHeaders :: [RealLocated AnnotationComment] + -> [T.Text] +docHeaders = mapMaybe (\(L _ x) -> wrk x) + where + wrk = \case + -- When `Opt_Haddock` is enabled. + AnnDocCommentNext s -> Just $ T.pack s + -- When `Opt_KeepRawTokenStream` enabled. + AnnLineComment s -> if "-- |" `isPrefixOf` s + then Just $ T.pack s + else Nothing + _ -> Nothing + +-- These are taken from haskell-ide-engine's Haddock plugin + +-- | Given a module finds the local @doc/html/Foo-Bar-Baz.html@ page. +-- An example for a cabal installed module: +-- @~/.cabal/store/ghc-8.10.1/vctr-0.12.1.2-98e2e861/share/doc/html/Data-Vector-Primitive.html@ +lookupDocHtmlForModule :: DynFlags -> Module -> IO (Maybe FilePath) +lookupDocHtmlForModule = + lookupHtmlForModule (\pkgDocDir modDocName -> pkgDocDir modDocName <.> "html") + +-- | Given a module finds the hyperlinked source @doc/html/src/Foo.Bar.Baz.html@ page. +-- An example for a cabal installed module: +-- @~/.cabal/store/ghc-8.10.1/vctr-0.12.1.2-98e2e861/share/doc/html/src/Data.Vector.Primitive.html@ +lookupSrcHtmlForModule :: DynFlags -> Module -> IO (Maybe FilePath) +lookupSrcHtmlForModule = + lookupHtmlForModule (\pkgDocDir modDocName -> pkgDocDir "src" modDocName <.> "html") + +lookupHtmlForModule :: (FilePath -> FilePath -> FilePath) -> DynFlags -> Module -> IO (Maybe FilePath) +lookupHtmlForModule mkDocPath df m = do + -- try all directories + let mfs = fmap (concatMap go) (lookupHtmls df ui) + html <- findM doesFileExist (concat . maybeToList $ mfs) + -- canonicalize located html to remove /../ indirection which can break some clients + -- (vscode on Windows at least) + traverse canonicalizePath html + where + go pkgDocDir = map (mkDocPath pkgDocDir) mns + ui = moduleUnitId m + -- try to locate html file from most to least specific name e.g. + -- first Language.Haskell.LSP.Types.Uri.html and Language-Haskell-LSP-Types-Uri.html + -- then Language.Haskell.LSP.Types.html and Language-Haskell-LSP-Types.html etc. + mns = do + chunks <- (reverse . drop1 . inits . splitOn ".") $ (moduleNameString . moduleName) m + -- The file might use "." or "-" as separator + map (`intercalate` chunks) [".", "-"] + +lookupHtmls :: DynFlags -> UnitId -> Maybe [FilePath] +lookupHtmls df ui = + -- use haddockInterfaces instead of haddockHTMLs: GHC treats haddockHTMLs as URL not path + -- and therefore doesn't expand $topdir on Windows + map takeDirectory . 
haddockInterfaces <$> lookupPackage df ui diff --git a/ghcide/src/Development/IDE/Spans/LocalBindings.hs b/ghcide/src/Development/IDE/Spans/LocalBindings.hs new file mode 100644 index 00000000000..67ed1315569 --- /dev/null +++ b/ghcide/src/Development/IDE/Spans/LocalBindings.hs @@ -0,0 +1,134 @@ +{-# LANGUAGE DerivingStrategies #-} + +module Development.IDE.Spans.LocalBindings + ( Bindings + , getLocalScope + , getFuzzyScope + , getDefiningBindings + , getFuzzyDefiningBindings + , bindings + ) where + +import Control.DeepSeq +import Control.Monad +import Data.Bifunctor +import Data.IntervalMap.FingerTree (IntervalMap, Interval (..)) +import qualified Data.IntervalMap.FingerTree as IM +import qualified Data.List as L +import qualified Data.Map as M +import qualified Data.Set as S +import Development.IDE.GHC.Compat (RefMap, identType, identInfo, getScopeFromContext, getBindSiteFromContext, Scope(..), Name, Type) +import Development.IDE.GHC.Error +import Development.IDE.Types.Location +import NameEnv +import SrcLoc + +------------------------------------------------------------------------------ +-- | Turn a 'RealSrcSpan' into an 'Interval'. +realSrcSpanToInterval :: RealSrcSpan -> Interval Position +realSrcSpanToInterval rss = + Interval + (realSrcLocToPosition $ realSrcSpanStart rss) + (realSrcLocToPosition $ realSrcSpanEnd rss) + +bindings :: RefMap -> Bindings +bindings = uncurry Bindings . localBindings + +------------------------------------------------------------------------------ +-- | Compute which identifiers are in scope at every point in the AST. Use +-- 'getLocalScope' to find the results. +localBindings + :: RefMap + -> ( IntervalMap Position (NameEnv (Name, Maybe Type)) + , IntervalMap Position (NameEnv (Name, Maybe Type)) + ) +localBindings refmap = bimap mk mk $ unzip $ do + (ident, refs) <- M.toList refmap + Right name <- pure ident + (_, ident_details) <- refs + let ty = identType ident_details + info <- S.toList $ identInfo ident_details + pure + ( do + Just scopes <- pure $ getScopeFromContext info + scope <- scopes >>= \case + LocalScope scope -> pure $ realSrcSpanToInterval scope + _ -> [] + pure ( scope + , unitNameEnv name (name,ty) + ) + , do + Just scope <- pure $ getBindSiteFromContext info + pure ( realSrcSpanToInterval scope + , unitNameEnv name (name,ty) + ) + ) + where + mk = L.foldl' (flip (uncurry IM.insert)) mempty . join + +------------------------------------------------------------------------------ +-- | The available bindings at every point in a Haskell tree. 
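-- Editor sketch (not part of the patch): the scope queries in this module
-- differ only in the interval-map primitive they use.  'getLocalScope' and
-- 'getDefiningBindings' want every binding whose span contains the query span
-- (IM.dominators), while the fuzzy variants accept any binding whose span
-- merely overlaps it (IM.intersections).  A toy map over Int positions,
-- assuming only the fingertree package, makes the difference visible without
-- building a RefMap.
module IntervalSketch (main) where

import Data.IntervalMap.FingerTree (Interval (..), IntervalMap)
import qualified Data.IntervalMap.FingerTree as IM

toyScopes :: IntervalMap Int String
toyScopes =
  IM.insert (Interval 1 100) "top-level where clause" $
  IM.insert (Interval 10 20) "lambda at line 10" $
  IM.insert (Interval 30 40) "case alternative" IM.empty

main :: IO ()
main = do
  -- spans that fully contain 12..13: the lambda and the where clause
  print (map snd (IM.dominators (Interval 12 13) toyScopes))
  -- spans that merely overlap 15..35: additionally the case alternative
  print (map snd (IM.intersections (Interval 15 35) toyScopes))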
+data Bindings = Bindings + { getLocalBindings + :: IntervalMap Position (NameEnv (Name, Maybe Type)) + , getBindingSites + :: IntervalMap Position (NameEnv (Name, Maybe Type)) + } + +instance Semigroup Bindings where + Bindings a1 b1 <> Bindings a2 b2 + = Bindings (a1 <> a2) (b1 <> b2) + +instance Monoid Bindings where + mempty = Bindings mempty mempty + +instance NFData Bindings where + rnf = rwhnf + +instance Show Bindings where + show _ = "" + + +------------------------------------------------------------------------------ +-- | Given a 'Bindings' get every identifier in scope at the given +-- 'RealSrcSpan', +getLocalScope :: Bindings -> RealSrcSpan -> [(Name, Maybe Type)] +getLocalScope bs rss + = nameEnvElts + $ foldMap snd + $ IM.dominators (realSrcSpanToInterval rss) + $ getLocalBindings bs + +------------------------------------------------------------------------------ +-- | Given a 'Bindings', get every binding currently active at a given +-- 'RealSrcSpan', +getDefiningBindings :: Bindings -> RealSrcSpan -> [(Name, Maybe Type)] +getDefiningBindings bs rss + = nameEnvElts + $ foldMap snd + $ IM.dominators (realSrcSpanToInterval rss) + $ getBindingSites bs + + +-- | Lookup all names in scope in any span that intersects the interval +-- defined by the two positions. +-- This is meant for use with the fuzzy `PositionRange` returned by `PositionMapping` +getFuzzyScope :: Bindings -> Position -> Position -> [(Name, Maybe Type)] +getFuzzyScope bs a b + = nameEnvElts + $ foldMap snd + $ IM.intersections (Interval a b) + $ getLocalBindings bs + +------------------------------------------------------------------------------ +-- | Given a 'Bindings', get every binding that intersects the interval defined +-- by the two positions. +-- This is meant for use with the fuzzy `PositionRange` returned by +-- `PositionMapping` +getFuzzyDefiningBindings :: Bindings -> Position -> Position -> [(Name, Maybe Type)] +getFuzzyDefiningBindings bs a b + = nameEnvElts + $ foldMap snd + $ IM.intersections (Interval a b) + $ getBindingSites bs + diff --git a/ghcide/src/Development/IDE/Types/Action.hs b/ghcide/src/Development/IDE/Types/Action.hs new file mode 100644 index 00000000000..4a3c7e6a8bb --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Action.hs @@ -0,0 +1,88 @@ +module Development.IDE.Types.Action + ( DelayedAction (..), + DelayedActionInternal, + ActionQueue, + newQueue, + pushQueue, + popQueue, + doneQueue, + peekInProgress, + abortQueue,countQueue) +where + +import Control.Concurrent.STM +import Data.Hashable (Hashable (..)) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import Data.Unique (Unique) +import Development.IDE.Types.Logger +import Development.Shake (Action) +import Numeric.Natural + +data DelayedAction a = DelayedAction + { uniqueID :: Maybe Unique, + -- | Name we use for debugging + actionName :: String, + -- | Priority with which to log the action + actionPriority :: Priority, + -- | The payload + getAction :: Action a + } + deriving (Functor) + +type DelayedActionInternal = DelayedAction () + +instance Eq (DelayedAction a) where + a == b = uniqueID a == uniqueID b + +instance Hashable (DelayedAction a) where + hashWithSalt s = hashWithSalt s . 
uniqueID + +instance Show (DelayedAction a) where + show d = "DelayedAction: " ++ actionName d + +------------------------------------------------------------------------------ + +data ActionQueue = ActionQueue + { newActions :: TQueue DelayedActionInternal, + inProgress :: TVar (HashSet DelayedActionInternal) + } + +newQueue :: IO ActionQueue +newQueue = atomically $ do + newActions <- newTQueue + inProgress <- newTVar mempty + return ActionQueue {..} + +pushQueue :: DelayedActionInternal -> ActionQueue -> STM () +pushQueue act ActionQueue {..} = writeTQueue newActions act + +-- | You must call 'doneQueue' to signal completion +popQueue :: ActionQueue -> STM DelayedActionInternal +popQueue ActionQueue {..} = do + x <- readTQueue newActions + modifyTVar inProgress (Set.insert x) + return x + +-- | Completely remove an action from the queue +abortQueue :: DelayedActionInternal -> ActionQueue -> STM () +abortQueue x ActionQueue {..} = do + qq <- flushTQueue newActions + mapM_ (writeTQueue newActions) (filter (/= x) qq) + modifyTVar inProgress (Set.delete x) + +-- | Mark an action as complete when called after 'popQueue'. +-- Has no effect otherwise +doneQueue :: DelayedActionInternal -> ActionQueue -> STM () +doneQueue x ActionQueue {..} = do + modifyTVar inProgress (Set.delete x) + +countQueue :: ActionQueue -> STM Natural +countQueue ActionQueue{..} = do + backlog <- flushTQueue newActions + mapM_ (writeTQueue newActions) backlog + m <- Set.size <$> readTVar inProgress + return $ fromIntegral $ length backlog + m + +peekInProgress :: ActionQueue -> STM [DelayedActionInternal] +peekInProgress ActionQueue {..} = Set.toList <$> readTVar inProgress diff --git a/ghcide/src/Development/IDE/Types/Diagnostics.hs b/ghcide/src/Development/IDE/Types/Diagnostics.hs new file mode 100644 index 00000000000..1c196568d43 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Diagnostics.hs @@ -0,0 +1,151 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +module Development.IDE.Types.Diagnostics ( + LSP.Diagnostic(..), + ShowDiagnostic(..), + FileDiagnostic, + IdeResult, + LSP.DiagnosticSeverity(..), + DiagnosticStore, + List(..), + ideErrorText, + ideErrorWithSource, + showDiagnostics, + showDiagnosticsColored, + ) where + +import Control.DeepSeq +import Data.Maybe as Maybe +import qualified Data.Text as T +import Data.Text.Prettyprint.Doc +import Language.Haskell.LSP.Types as LSP (DiagnosticSource, + DiagnosticSeverity(..) + , Diagnostic(..) + , List(..) + ) +import Language.Haskell.LSP.Diagnostics +import Data.Text.Prettyprint.Doc.Render.Text +import qualified Data.Text.Prettyprint.Doc.Render.Terminal as Terminal +import Data.Text.Prettyprint.Doc.Render.Terminal (Color(..), color) + +import Development.IDE.Types.Location + + +-- | The result of an IDE operation. Warnings and errors are in the Diagnostic, +-- and a value is in the Maybe. For operations that throw an error you +-- expect a non-empty list of diagnostics, at least one of which is an error, +-- and a Nothing. For operations that succeed you expect perhaps some warnings +-- and a Just. For operations that depend on other failing operations you may +-- get empty diagnostics and a Nothing, to indicate this phase throws no fresh +-- errors but still failed. +-- +-- A rule on a file should only return diagnostics for that given file. It should +-- not propagate diagnostic errors through multiple phases. 
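-- Editor sketch (not part of the patch), referring back to the ActionQueue in
-- Development.IDE.Types.Action above.  Its contract is easy to miss:
-- 'popQueue' moves an action into the in-progress set, and the caller must
-- follow up with 'doneQueue' (or 'abortQueue'), otherwise 'countQueue' keeps
-- counting the action.  A minimal round trip, assuming the ghcide package is
-- in scope:
module QueueSketch (main) where

import Control.Concurrent.STM (atomically)
import Development.IDE.Types.Action
import Development.IDE.Types.Logger (Priority (Debug))

main :: IO ()
main = do
  q <- newQueue
  let act = DelayedAction Nothing "demo" Debug (pure ())
  atomically (pushQueue act q)
  atomically (countQueue q) >>= print   -- 1: queued, not yet started
  picked <- atomically (popQueue q)
  atomically (countQueue q) >>= print   -- 1: popped, but still in progress
  atomically (doneQueue picked q)
  atomically (countQueue q) >>= print   -- 0: completed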
+type IdeResult v = ([FileDiagnostic], Maybe v) + +ideErrorText :: NormalizedFilePath -> T.Text -> FileDiagnostic +ideErrorText = ideErrorWithSource (Just "compiler") (Just DsError) + +ideErrorWithSource + :: Maybe DiagnosticSource + -> Maybe DiagnosticSeverity + -> a + -> T.Text + -> (a, ShowDiagnostic, Diagnostic) +ideErrorWithSource source sev fp msg = (fp, ShowDiag, LSP.Diagnostic { + _range = noRange, + _severity = sev, + _code = Nothing, + _source = source, + _message = msg, + _relatedInformation = Nothing, + _tags = Nothing + }) + +-- | Defines whether a particular diagnostic should be reported +-- back to the user. +-- +-- One important use case is "missing signature" code lenses, +-- for which we need to enable the corresponding warning during +-- type checking. However, we do not want to show the warning +-- unless the programmer asks for it (#261). +data ShowDiagnostic + = ShowDiag -- ^ Report back to the user + | HideDiag -- ^ Hide from user + deriving (Eq, Ord, Show) + +instance NFData ShowDiagnostic where + rnf = rwhnf + +-- | Human readable diagnostics for a specific file. +-- +-- This type packages a pretty printed, human readable error message +-- along with the related source location so that we can display the error +-- on either the console or in the IDE at the right source location. +-- +type FileDiagnostic = (NormalizedFilePath, ShowDiagnostic, Diagnostic) + +prettyRange :: Range -> Doc Terminal.AnsiStyle +prettyRange Range{..} = f _start <> "-" <> f _end + where f Position{..} = pretty (_line+1) <> colon <> pretty (_character+1) + +stringParagraphs :: T.Text -> Doc a +stringParagraphs = vcat . map (fillSep . map pretty . T.words) . T.lines + +showDiagnostics :: [FileDiagnostic] -> T.Text +showDiagnostics = srenderPlain . prettyDiagnostics + +showDiagnosticsColored :: [FileDiagnostic] -> T.Text +showDiagnosticsColored = srenderColored . prettyDiagnostics + + +prettyDiagnostics :: [FileDiagnostic] -> Doc Terminal.AnsiStyle +prettyDiagnostics = vcat . map prettyDiagnostic + +prettyDiagnostic :: FileDiagnostic -> Doc Terminal.AnsiStyle +prettyDiagnostic (fp, sh, LSP.Diagnostic{..}) = + vcat + [ slabel_ "File: " $ pretty (fromNormalizedFilePath fp) + , slabel_ "Hidden: " $ if sh == ShowDiag then "no" else "yes" + , slabel_ "Range: " $ prettyRange _range + , slabel_ "Source: " $ pretty _source + , slabel_ "Severity:" $ pretty $ show sev + , slabel_ "Message: " + $ case sev of + LSP.DsError -> annotate $ color Red + LSP.DsWarning -> annotate $ color Yellow + LSP.DsInfo -> annotate $ color Blue + LSP.DsHint -> annotate $ color Magenta + $ stringParagraphs _message + ] + where + sev = fromMaybe LSP.DsError _severity + + +-- | Label a document. +slabel_ :: String -> Doc a -> Doc a +slabel_ t d = nest 2 $ sep [pretty t, d] + +-- | The layout options used for the SDK assistant. +cliLayout :: + Int + -- ^ Rendering width of the pretty printer. + -> LayoutOptions +cliLayout renderWidth = LayoutOptions + { layoutPageWidth = AvailablePerLine renderWidth 0.9 + } + +-- | Render without any syntax annotations +srenderPlain :: Doc ann -> T.Text +srenderPlain = renderStrict . layoutSmart (cliLayout defaultTermWidth) + +-- | Render a 'Document' as an ANSII colored string. +srenderColored :: Doc Terminal.AnsiStyle -> T.Text +srenderColored = + Terminal.renderStrict . 
+ layoutSmart defaultLayoutOptions { layoutPageWidth = AvailablePerLine 100 1.0 } + +defaultTermWidth :: Int +defaultTermWidth = 80 diff --git a/ghcide/src/Development/IDE/Types/Exports.hs b/ghcide/src/Development/IDE/Types/Exports.hs new file mode 100644 index 00000000000..8a42bc950ef --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Exports.hs @@ -0,0 +1,92 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +module Development.IDE.Types.Exports +( + IdentInfo(..), + ExportsMap(..), + createExportsMap, + createExportsMapMg, + createExportsMapTc +) where + +import Avail (AvailInfo(..)) +import Control.DeepSeq (NFData) +import Data.Text (pack, Text) +import Development.IDE.GHC.Compat +import Development.IDE.GHC.Util +import Data.HashMap.Strict (HashMap) +import GHC.Generics (Generic) +import Name +import FieldLabel (flSelector) +import qualified Data.HashMap.Strict as Map +import GhcPlugins (IfaceExport, ModGuts(..)) +import Data.HashSet (HashSet) +import qualified Data.HashSet as Set +import Data.Bifunctor (Bifunctor(second)) +import Data.Hashable (Hashable) +import TcRnTypes(TcGblEnv(..)) + +newtype ExportsMap = ExportsMap + {getExportsMap :: HashMap IdentifierText (HashSet (IdentInfo,ModuleNameText))} + deriving newtype (Monoid, NFData, Show) + +instance Semigroup ExportsMap where + ExportsMap a <> ExportsMap b = ExportsMap $ Map.unionWith (<>) a b + +type IdentifierText = Text +type ModuleNameText = Text + +data IdentInfo = IdentInfo + { name :: !Text + , rendered :: Text + , parent :: !(Maybe Text) + , isDatacon :: !Bool + } + deriving (Eq, Generic, Show) + deriving anyclass Hashable + +instance NFData IdentInfo + +mkIdentInfos :: AvailInfo -> [IdentInfo] +mkIdentInfos (Avail n) = + [IdentInfo (pack (prettyPrint n)) (pack (printName n)) Nothing (isDataConName n)] +mkIdentInfos (AvailTC parent (n:nn) flds) + -- Following the GHC convention that parent == n if parent is exported + | n == parent + = [ IdentInfo (pack (prettyPrint n)) (pack (printName n)) (Just $! parentP) True + | n <- nn ++ map flSelector flds + ] ++ + [ IdentInfo (pack (prettyPrint n)) (pack (printName n)) Nothing False] + where + parentP = pack $ prettyPrint parent + +mkIdentInfos (AvailTC _ nn flds) + = [ IdentInfo (pack (prettyPrint n)) (pack (printName n)) Nothing True + | n <- nn ++ map flSelector flds + ] + +createExportsMap :: [ModIface] -> ExportsMap +createExportsMap = ExportsMap . Map.fromListWith (<>) . concatMap doOne + where + doOne mi = concatMap (fmap (second Set.fromList) . unpackAvail mn) (mi_exports mi) + where + mn = moduleName $ mi_module mi + +createExportsMapMg :: [ModGuts] -> ExportsMap +createExportsMapMg = ExportsMap . Map.fromListWith (<>) . concatMap doOne + where + doOne mi = concatMap (fmap (second Set.fromList) . unpackAvail mn) (mg_exports mi) + where + mn = moduleName $ mg_module mi + +createExportsMapTc :: [TcGblEnv] -> ExportsMap +createExportsMapTc = ExportsMap . Map.fromListWith (<>) . concatMap doOne + where + doOne mi = concatMap (fmap (second Set.fromList) . unpackAvail mn) (tcg_exports mi) + where + mn = moduleName $ tcg_mod mi + +unpackAvail :: ModuleName -> IfaceExport -> [(Text, [(IdentInfo, Text)])] +unpackAvail mod = + map (\id@IdentInfo {..} -> (name, [(id, pack $ moduleNameString mod)])) + . 
mkIdentInfos diff --git a/ghcide/src/Development/IDE/Types/KnownTargets.hs b/ghcide/src/Development/IDE/Types/KnownTargets.hs new file mode 100644 index 00000000000..529edc21fc2 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/KnownTargets.hs @@ -0,0 +1,24 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +module Development.IDE.Types.KnownTargets (KnownTargets, Target(..), toKnownFiles) where + +import Data.HashMap.Strict +import Development.IDE.Types.Location +import Development.IDE.GHC.Compat (ModuleName) +import Development.IDE.GHC.Orphans () +import Data.Hashable +import GHC.Generics +import Control.DeepSeq +import Data.HashSet +import qualified Data.HashSet as HSet +import qualified Data.HashMap.Strict as HMap + +-- | A mapping of module name to known files +type KnownTargets = HashMap Target [NormalizedFilePath] + +data Target = TargetModule ModuleName | TargetFile NormalizedFilePath + deriving ( Eq, Generic, Show ) + deriving anyclass (Hashable, NFData) + +toKnownFiles :: KnownTargets -> HashSet NormalizedFilePath +toKnownFiles = HSet.fromList . concat . HMap.elems diff --git a/ghcide/src/Development/IDE/Types/Location.hs b/ghcide/src/Development/IDE/Types/Location.hs new file mode 100644 index 00000000000..9c1c12ad497 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Location.hs @@ -0,0 +1,112 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +-- | Types and functions for working with source code locations. +module Development.IDE.Types.Location + ( Location(..) + , noFilePath + , noRange + , Position(..) + , showPosition + , Range(..) + , LSP.Uri(..) + , LSP.NormalizedUri + , LSP.toNormalizedUri + , LSP.fromNormalizedUri + , LSP.NormalizedFilePath + , fromUri + , emptyFilePath + , emptyPathUri + , toNormalizedFilePath' + , LSP.fromNormalizedFilePath + , filePathToUri' + , uriToFilePath' + , readSrcSpan + ) where + +import Control.Applicative +import Language.Haskell.LSP.Types (Location(..), Range(..), Position(..)) +import Control.Monad +import Data.Hashable (Hashable(hash)) +import Data.String +import FastString +import qualified Language.Haskell.LSP.Types as LSP +import SrcLoc as GHC +import Text.ParserCombinators.ReadP as ReadP +import Data.Maybe (fromMaybe) + +toNormalizedFilePath' :: FilePath -> LSP.NormalizedFilePath +-- We want to keep empty paths instead of normalising them to "." +toNormalizedFilePath' "" = emptyFilePath +toNormalizedFilePath' fp = LSP.toNormalizedFilePath fp + +emptyFilePath :: LSP.NormalizedFilePath +emptyFilePath = LSP.NormalizedFilePath emptyPathUri "" + +-- | We use an empty string as a filepath when we don’t have a file. +-- However, haskell-lsp doesn’t support that in uriToFilePath and given +-- that it is not a valid filepath it does not make sense to upstream a fix. +-- So we have our own wrapper here that supports empty filepaths. +uriToFilePath' :: LSP.Uri -> Maybe FilePath +uriToFilePath' uri + | uri == LSP.fromNormalizedUri emptyPathUri = Just "" + | otherwise = LSP.uriToFilePath uri + +emptyPathUri :: LSP.NormalizedUri +emptyPathUri = + let s = "file://" + in LSP.NormalizedUri (hash s) s + +filePathToUri' :: LSP.NormalizedFilePath -> LSP.NormalizedUri +filePathToUri' = LSP.normalizedFilePathToUri + +fromUri :: LSP.NormalizedUri -> LSP.NormalizedFilePath +fromUri = fromMaybe (toNormalizedFilePath' noFilePath) . 
LSP.uriToNormalizedFilePath + +noFilePath :: FilePath +noFilePath = "" + +-- A dummy range to use when range is unknown +noRange :: Range +noRange = Range (Position 0 0) (Position 1 0) + +showPosition :: Position -> String +showPosition Position{..} = show (_line + 1) ++ ":" ++ show (_character + 1) + +-- | Parser for the GHC output format +readSrcSpan :: ReadS RealSrcSpan +readSrcSpan = readP_to_S (singleLineSrcSpanP <|> multiLineSrcSpanP) + where + singleLineSrcSpanP, multiLineSrcSpanP :: ReadP RealSrcSpan + singleLineSrcSpanP = do + fp <- filePathP + l <- readS_to_P reads <* char ':' + c0 <- readS_to_P reads + c1 <- (char '-' *> readS_to_P reads) <|> pure c0 + let from = mkRealSrcLoc fp l c0 + to = mkRealSrcLoc fp l c1 + return $ mkRealSrcSpan from to + + multiLineSrcSpanP = do + fp <- filePathP + s <- parensP (srcLocP fp) + void $ char '-' + e <- parensP (srcLocP fp) + return $ mkRealSrcSpan s e + + parensP :: ReadP a -> ReadP a + parensP = between (char '(') (char ')') + + filePathP :: ReadP FastString + filePathP = fromString <$> (readFilePath <* char ':') <|> pure "" + + srcLocP :: FastString -> ReadP RealSrcLoc + srcLocP fp = do + l <- readS_to_P reads + void $ char ',' + c <- readS_to_P reads + return $ mkRealSrcLoc fp l c + + readFilePath :: ReadP FilePath + readFilePath = some ReadP.get diff --git a/ghcide/src/Development/IDE/Types/Logger.hs b/ghcide/src/Development/IDE/Types/Logger.hs new file mode 100644 index 00000000000..1213067ffe5 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Logger.hs @@ -0,0 +1,54 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE RankNTypes #-} +-- | This is a compatibility module that abstracts over the +-- concrete choice of logging framework so users can plug in whatever +-- framework they want to. +module Development.IDE.Types.Logger + ( Priority(..) + , Logger(..) + , logError, logWarning, logInfo, logDebug, logTelemetry + , noLogging + ) where + +import qualified Data.Text as T + + +data Priority +-- Don't change the ordering of this type or you will mess up the Ord +-- instance + = Telemetry -- ^ Events that are useful for gathering user metrics. + | Debug -- ^ Verbose debug logging. + | Info -- ^ Useful information in case an error has to be understood. + | Warning + -- ^ These error messages should not occur in a expected usage, and + -- should be investigated. + | Error -- ^ Such log messages must never occur in expected usage. + deriving (Eq, Show, Ord, Enum, Bounded) + + +-- | Note that this is logging actions _of the program_, not of the user. +-- You shouldn't call warning/error if the user has caused an error, only +-- if our code has gone wrong and is itself erroneous (e.g. we threw an exception). 
+data Logger = Logger {logPriority :: Priority -> T.Text -> IO ()} + + +logError :: Logger -> T.Text -> IO () +logError x = logPriority x Error + +logWarning :: Logger -> T.Text -> IO () +logWarning x = logPriority x Warning + +logInfo :: Logger -> T.Text -> IO () +logInfo x = logPriority x Info + +logDebug :: Logger -> T.Text -> IO () +logDebug x = logPriority x Debug + +logTelemetry :: Logger -> T.Text -> IO () +logTelemetry x = logPriority x Telemetry + + +noLogging :: Logger +noLogging = Logger $ \_ _ -> return () diff --git a/ghcide/src/Development/IDE/Types/Options.hs b/ghcide/src/Development/IDE/Types/Options.hs new file mode 100644 index 00000000000..7bc38e7e8e9 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Options.hs @@ -0,0 +1,206 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} + +{- HLINT ignore "Avoid restricted extensions" -} + +-- | Options +module Development.IDE.Types.Options + ( IdeOptions(..) + , IdePreprocessedSource(..) + , IdeReportProgress(..) + , IdeDefer(..) + , IdeTesting(..) + , IdeOTMemoryProfiling(..) + , clientSupportsProgress + , IdePkgLocationOptions(..) + , defaultIdeOptions + , IdeResult + , IdeGhcSession(..) + , LspConfig(..) + , defaultLspConfig + , CheckProject(..) + , CheckParents(..) + , OptHaddockParse(..) + ) where + +import Development.Shake +import Development.IDE.GHC.Util +import GHC hiding (parseModule, typecheckModule) +import GhcPlugins as GHC hiding (fst3, (<>)) +import qualified Language.Haskell.LSP.Types.Capabilities as LSP +import qualified Data.Text as T +import Development.IDE.Types.Diagnostics +import Control.DeepSeq (NFData(..)) +import Data.Aeson +import GHC.Generics + +data IdeGhcSession = IdeGhcSession + { loadSessionFun :: FilePath -> IO (IdeResult HscEnvEq, [FilePath]) + -- ^ Returns the Ghc session and the cradle dependencies + , sessionVersion :: !Int + -- ^ Used as Shake key, versions must be unique and not reused + } + +instance Show IdeGhcSession where show _ = "IdeGhcSession" +instance NFData IdeGhcSession where rnf !_ = () + +data IdeOptions = IdeOptions + { optPreprocessor :: GHC.ParsedSource -> IdePreprocessedSource + -- ^ Preprocessor to run over all parsed source trees, generating a list of warnings + -- and a list of errors, along with a new parse tree. + , optGhcSession :: Action IdeGhcSession + -- ^ Setup a GHC session for a given file, e.g. @Foo.hs@. + -- For the same 'ComponentOptions' from hie-bios, the resulting function will be applied once per file. + -- It is desirable that many files get the same 'HscEnvEq', so that more IDE features work. + , optPkgLocationOpts :: IdePkgLocationOptions + -- ^ How to locate source and @.hie@ files given a module name. + , optExtensions :: [String] + -- ^ File extensions to search for code, defaults to Haskell sources (including @.hs@) + + , optThreads :: Int + -- ^ Number of threads to use. Use 0 for number of threads on the machine. + , optShakeFiles :: Maybe FilePath + -- ^ Directory where the shake database should be stored. For ghcide this is always set to `Nothing` for now + -- meaning we keep everything in memory but the daml CLI compiler uses this for incremental builds. + , optShakeProfiling :: Maybe FilePath + -- ^ Set to 'Just' to create a directory of profiling reports. 
+ , optOTMemoryProfiling :: IdeOTMemoryProfiling + -- ^ Whether to record profiling information with OpenTelemetry. You must + -- also enable the -l RTS flag for this to have any effect + , optTesting :: IdeTesting + -- ^ Whether to enable additional lsp messages used by the test suite for checking invariants + , optReportProgress :: IdeReportProgress + -- ^ Whether to report progress during long operations. + , optLanguageSyntax :: String + -- ^ the ```language to use + , optNewColonConvention :: Bool + -- ^ whether to use new colon convention + , optKeywords :: [T.Text] + -- ^ keywords used for completions. These are customizable + -- since DAML has a different set of keywords than Haskell. + , optDefer :: IdeDefer + -- ^ Whether to defer type errors, typed holes and out of scope + -- variables. Deferral allows the IDE to continue to provide + -- features such as diagnostics and go-to-definition, in + -- situations in which they would become unavailable because of + -- the presence of type errors, holes or unbound variables. + , optCheckProject :: CheckProject + -- ^ Whether to typecheck the entire project on load + , optCheckParents :: CheckParents + -- ^ When to typecheck reverse dependencies of a file + , optHaddockParse :: OptHaddockParse + -- ^ Whether to return result of parsing module with Opt_Haddock. + -- Otherwise, return the result of parsing without Opt_Haddock, so + -- that the parsed module contains the result of Opt_KeepRawTokenStream, + -- which might be necessary for hlint. + , optCustomDynFlags :: DynFlags -> DynFlags + -- ^ Will be called right after setting up a new cradle, + -- allowing to customize the Ghc options used + } + +data OptHaddockParse = HaddockParse | NoHaddockParse + deriving (Eq,Ord,Show,Enum) + +newtype CheckProject = CheckProject { shouldCheckProject :: Bool } + deriving stock (Eq, Ord, Show) + deriving newtype (FromJSON,ToJSON) +data CheckParents + -- Note that ordering of constructors is meaningful and must be monotonically + -- increasing in the scenarios where parents are checked + = NeverCheck + | CheckOnClose + | CheckOnSaveAndClose + | AlwaysCheck + deriving stock (Eq, Ord, Show, Generic) + deriving anyclass (FromJSON, ToJSON) + +data LspConfig + = LspConfig + { checkParents :: CheckParents + , checkProject :: CheckProject + } deriving stock (Eq, Ord, Show, Generic) + deriving anyclass (FromJSON, ToJSON) + +defaultLspConfig :: LspConfig +defaultLspConfig = LspConfig CheckOnSaveAndClose (CheckProject True) + +data IdePreprocessedSource = IdePreprocessedSource + { preprocWarnings :: [(GHC.SrcSpan, String)] + -- ^ Warnings emitted by the preprocessor. + , preprocErrors :: [(GHC.SrcSpan, String)] + -- ^ Errors emitted by the preprocessor. + , preprocSource :: GHC.ParsedSource + -- ^ New parse tree emitted by the preprocessor. 
+ } + +newtype IdeReportProgress = IdeReportProgress Bool +newtype IdeDefer = IdeDefer Bool +newtype IdeTesting = IdeTesting Bool +newtype IdeOTMemoryProfiling = IdeOTMemoryProfiling Bool + +clientSupportsProgress :: LSP.ClientCapabilities -> IdeReportProgress +clientSupportsProgress caps = IdeReportProgress $ Just True == + (LSP._workDoneProgress =<< LSP._window (caps :: LSP.ClientCapabilities)) + +defaultIdeOptions :: Action IdeGhcSession -> IdeOptions +defaultIdeOptions session = IdeOptions + {optPreprocessor = IdePreprocessedSource [] [] + ,optGhcSession = session + ,optExtensions = ["hs", "lhs"] + ,optPkgLocationOpts = defaultIdePkgLocationOptions + ,optThreads = 0 + ,optShakeFiles = Nothing + ,optShakeProfiling = Nothing + ,optOTMemoryProfiling = IdeOTMemoryProfiling False + ,optReportProgress = IdeReportProgress False + ,optLanguageSyntax = "haskell" + ,optNewColonConvention = False + ,optKeywords = haskellKeywords + ,optDefer = IdeDefer True + ,optTesting = IdeTesting False + ,optCheckProject = checkProject defaultLspConfig + ,optCheckParents = checkParents defaultLspConfig + ,optHaddockParse = HaddockParse + ,optCustomDynFlags = id + } + + +-- | The set of options used to locate files belonging to external packages. +data IdePkgLocationOptions = IdePkgLocationOptions + { optLocateHieFile :: PackageConfig -> Module -> IO (Maybe FilePath) + -- ^ Locate the HIE file for the given module. The PackageConfig can be + -- used to lookup settings like importDirs. + , optLocateSrcFile :: PackageConfig -> Module -> IO (Maybe FilePath) + -- ^ Locate the source file for the given module. The PackageConfig can be + -- used to lookup settings like importDirs. For DAML, we place them in the package DB. + -- For cabal this could point somewhere in ~/.cabal/packages. + } + +defaultIdePkgLocationOptions :: IdePkgLocationOptions +defaultIdePkgLocationOptions = IdePkgLocationOptions f f + where f _ _ = return Nothing + +-- | From https://wiki.haskell.org/Keywords +haskellKeywords :: [T.Text] +haskellKeywords = + [ "as" + , "case", "of" + , "class", "instance", "type" + , "data", "family", "newtype" + , "default" + , "deriving" + , "do", "mdo", "proc", "rec" + , "forall" + , "foreign" + , "hiding" + , "if", "then", "else" + , "import", "qualified", "hiding" + , "infix", "infixl", "infixr" + , "let", "in", "where" + , "module" + ] diff --git a/ghcide/src/Development/IDE/Types/Shake.hs b/ghcide/src/Development/IDE/Types/Shake.hs new file mode 100644 index 00000000000..b2af70c74c7 --- /dev/null +++ b/ghcide/src/Development/IDE/Types/Shake.hs @@ -0,0 +1,41 @@ +{-# LANGUAGE ExistentialQuantification #-} +module Development.IDE.Types.Shake (Value(..), Values, Key(..), currentValue) where + +import Control.DeepSeq +import Data.Dynamic +import Data.Hashable +import Data.HashMap.Strict +import Data.Typeable +import GHC.Generics +import Language.Haskell.LSP.Types + +data Value v + = Succeeded TextDocumentVersion v + | Stale TextDocumentVersion v + | Failed + deriving (Functor, Generic, Show) + +instance NFData v => NFData (Value v) + +-- | Convert a Value to a Maybe. This will only return `Just` for +-- up2date results not for stale values. +currentValue :: Value v -> Maybe v +currentValue (Succeeded _ v) = Just v +currentValue (Stale _ _) = Nothing +currentValue Failed = Nothing + +-- | The state of the all values. +type Values = HashMap (NormalizedFilePath, Key) (Value Dynamic) + +-- | Key type +data Key = forall k . 
(Typeable k, Hashable k, Eq k, Show k) => Key k + +instance Show Key where + show (Key k) = show k + +instance Eq Key where + Key k1 == Key k2 | Just k2' <- cast k2 = k1 == k2' + | otherwise = False + +instance Hashable Key where + hashWithSalt salt (Key key) = hashWithSalt salt (typeOf key, key) diff --git a/ghcide/test/cabal/Development/IDE/Test/Runfiles.hs b/ghcide/test/cabal/Development/IDE/Test/Runfiles.hs new file mode 100644 index 00000000000..83b7e8c3680 --- /dev/null +++ b/ghcide/test/cabal/Development/IDE/Test/Runfiles.hs @@ -0,0 +1,9 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Development.IDE.Test.Runfiles + ( locateGhcideExecutable + ) where + +locateGhcideExecutable :: IO FilePath +locateGhcideExecutable = pure "ghcide" diff --git a/ghcide/test/data/TH/THA.hs b/ghcide/test/data/TH/THA.hs new file mode 100644 index 00000000000..ec6cf8ef393 --- /dev/null +++ b/ghcide/test/data/TH/THA.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE TemplateHaskell #-} +module THA where +import Language.Haskell.TH + +th_a :: DecsQ +th_a = [d| a = () |] diff --git a/ghcide/test/data/TH/THB.hs b/ghcide/test/data/TH/THB.hs new file mode 100644 index 00000000000..8d50b01eacc --- /dev/null +++ b/ghcide/test/data/TH/THB.hs @@ -0,0 +1,5 @@ +{-# LANGUAGE TemplateHaskell #-} +module THB where +import THA + +$th_a diff --git a/ghcide/test/data/TH/THC.hs b/ghcide/test/data/TH/THC.hs new file mode 100644 index 00000000000..79a02ef6013 --- /dev/null +++ b/ghcide/test/data/TH/THC.hs @@ -0,0 +1,5 @@ +module THC where +import THB + +c ::() +c = a diff --git a/ghcide/test/data/TH/hie.yaml b/ghcide/test/data/TH/hie.yaml new file mode 100644 index 00000000000..a65c7b79c4a --- /dev/null +++ b/ghcide/test/data/TH/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-Wmissing-signatures", "-package template-haskell", "THA", "THB", "THC"]}} diff --git a/ghcide/test/data/THNewName/A.hs b/ghcide/test/data/THNewName/A.hs new file mode 100644 index 00000000000..81984d2dff0 --- /dev/null +++ b/ghcide/test/data/THNewName/A.hs @@ -0,0 +1,6 @@ +module A (template) where + +import Language.Haskell.TH + +template :: DecsQ +template = (\consA -> [DataD [] (mkName "A") [] Nothing [NormalC consA []] []]) <$> newName "A" diff --git a/ghcide/test/data/THNewName/B.hs b/ghcide/test/data/THNewName/B.hs new file mode 100644 index 00000000000..8f65997d60c --- /dev/null +++ b/ghcide/test/data/THNewName/B.hs @@ -0,0 +1,5 @@ +module B(A(A)) where + +import A + +template diff --git a/ghcide/test/data/THNewName/C.hs b/ghcide/test/data/THNewName/C.hs new file mode 100644 index 00000000000..89a7e1eac94 --- /dev/null +++ b/ghcide/test/data/THNewName/C.hs @@ -0,0 +1,4 @@ +module C where +import B + +a = A diff --git a/ghcide/test/data/THNewName/hie.yaml b/ghcide/test/data/THNewName/hie.yaml new file mode 100644 index 00000000000..8853fd51eab --- /dev/null +++ b/ghcide/test/data/THNewName/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-XTemplateHaskell","-Wmissing-signatures","A", "B", "C"]}} diff --git a/ghcide/test/data/boot/A.hs b/ghcide/test/data/boot/A.hs new file mode 100644 index 00000000000..7f0bcca74c1 --- /dev/null +++ b/ghcide/test/data/boot/A.hs @@ -0,0 +1,8 @@ +module A where + +import B( TB(..) 
) + +newtype TA = MkTA Int + +f :: TB -> TA +f (MkTB x) = MkTA x diff --git a/ghcide/test/data/boot/A.hs-boot b/ghcide/test/data/boot/A.hs-boot new file mode 100644 index 00000000000..04f7eece405 --- /dev/null +++ b/ghcide/test/data/boot/A.hs-boot @@ -0,0 +1,2 @@ +module A where +newtype TA = MkTA Int diff --git a/ghcide/test/data/boot/B.hs b/ghcide/test/data/boot/B.hs new file mode 100644 index 00000000000..8bf96dcbde8 --- /dev/null +++ b/ghcide/test/data/boot/B.hs @@ -0,0 +1,7 @@ +module B(TA(..), TB(..)) where +import {-# SOURCE #-} A( TA(..) ) + +data TB = MkTB !Int + +g :: TA -> TB +g (MkTA x) = MkTB x diff --git a/ghcide/test/data/boot/C.hs b/ghcide/test/data/boot/C.hs new file mode 100644 index 00000000000..f90e9604322 --- /dev/null +++ b/ghcide/test/data/boot/C.hs @@ -0,0 +1,8 @@ +module C where + +import B +import A hiding (MkTA(..)) + +x = MkTA +y = MkTB +z = f diff --git a/ghcide/test/data/boot/hie.yaml b/ghcide/test/data/boot/hie.yaml new file mode 100644 index 00000000000..166c61ef841 --- /dev/null +++ b/ghcide/test/data/boot/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["A.hs", "A.hs-boot", "B.hs", "C.hs"]}} diff --git a/ghcide/test/data/cabal-exe/a/a.cabal b/ghcide/test/data/cabal-exe/a/a.cabal new file mode 100644 index 00000000000..093890733bf --- /dev/null +++ b/ghcide/test/data/cabal-exe/a/a.cabal @@ -0,0 +1,14 @@ +cabal-version: 2.2 + +name: a +version: 0.1.0.0 +author: Fendor +maintainer: power.walross@gmail.com +build-type: Simple + +executable a + main-is: Main.hs + hs-source-dirs: src + ghc-options: -Wall + build-depends: base + default-language: Haskell2010 diff --git a/ghcide/test/data/cabal-exe/a/src/Main.hs b/ghcide/test/data/cabal-exe/a/src/Main.hs new file mode 100644 index 00000000000..81d0cfb17a6 --- /dev/null +++ b/ghcide/test/data/cabal-exe/a/src/Main.hs @@ -0,0 +1,3 @@ +module Main where + +main = putStrLn "Hello, Haskell!" 
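-- Editor sketch (not part of the patch), referring back to the existential
-- 'Key' in Development.IDE.Types.Shake above.  Any rule key that is
-- Typeable, Hashable, Eq and Show can be wrapped, and keys of different types
-- compare unequal (the Eq instance casts and falls back to False), which is
-- what lets 'Values' mix every rule in a single HashMap.  The two toy key
-- types here are hypothetical, not real ghcide rules.
{-# LANGUAGE DeriveAnyClass, DeriveGeneric, DerivingStrategies #-}
module KeySketch (main) where

import Data.Hashable (Hashable)
import qualified Data.HashMap.Strict as HMap
import Development.IDE.Types.Shake (Key (..))
import GHC.Generics (Generic)

data ToyTypeCheck = ToyTypeCheck
  deriving stock (Eq, Show, Generic)
  deriving anyclass Hashable

data ToyGetHieAst = ToyGetHieAst
  deriving stock (Eq, Show, Generic)
  deriving anyclass Hashable

main :: IO ()
main = do
  print (Key ToyTypeCheck == Key ToyTypeCheck)   -- True: same key type, cast succeeds
  print (Key ToyTypeCheck == Key ToyGetHieAst)   -- False: different key types
  let m = HMap.fromList [(Key ToyTypeCheck, "tc result"), (Key ToyGetHieAst, "hie ast")]
  print (HMap.lookup (Key ToyGetHieAst) m)       -- Just "hie ast"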
diff --git a/ghcide/test/data/cabal-exe/cabal.project b/ghcide/test/data/cabal-exe/cabal.project new file mode 100644 index 00000000000..edcac420d94 --- /dev/null +++ b/ghcide/test/data/cabal-exe/cabal.project @@ -0,0 +1 @@ +packages: ./a \ No newline at end of file diff --git a/ghcide/test/data/cabal-exe/hie.yaml b/ghcide/test/data/cabal-exe/hie.yaml new file mode 100644 index 00000000000..5c7ab116419 --- /dev/null +++ b/ghcide/test/data/cabal-exe/hie.yaml @@ -0,0 +1,3 @@ +cradle: + cabal: + component: "exe:a" \ No newline at end of file diff --git a/ghcide/test/data/hover/Bar.hs b/ghcide/test/data/hover/Bar.hs new file mode 100644 index 00000000000..f9fde2a7ccb --- /dev/null +++ b/ghcide/test/data/hover/Bar.hs @@ -0,0 +1,4 @@ +module Bar (Bar(..)) where + +-- | Bar Haddock +data Bar = Bar diff --git a/ghcide/test/data/hover/Foo.hs b/ghcide/test/data/hover/Foo.hs new file mode 100644 index 00000000000..489a6ccd6b2 --- /dev/null +++ b/ghcide/test/data/hover/Foo.hs @@ -0,0 +1,6 @@ +module Foo (Bar, foo) where + +import Bar + +-- | foo Haddock +foo = Bar diff --git a/ghcide/test/data/hover/GotoHover.hs b/ghcide/test/data/hover/GotoHover.hs new file mode 100644 index 00000000000..80931a613a2 --- /dev/null +++ b/ghcide/test/data/hover/GotoHover.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE OverloadedStrings #-} +{- HLINT ignore -} +module GotoHover ( module GotoHover) where +import Data.Text (Text, pack) +import Foo (Bar, foo) + + +data TypeConstructor = DataConstructor + { fff :: Text + , ggg :: Int } +aaa :: TypeConstructor +aaa = DataConstructor + { fff = "dfgy" + , ggg = 832 + } +bbb :: TypeConstructor +bbb = DataConstructor "mjgp" 2994 +ccc :: (Text, Int) +ccc = (fff bbb, ggg aaa) +ddd :: Num a => a -> a -> a +ddd vv ww = vv +! ww +a +! b = a - b +hhh (Just a) (><) = a >< a +iii a b = a `b` a +jjj s = pack $ s <> s +class MyClass a where + method :: a -> Int +instance MyClass Int where + method = succ +kkk :: MyClass a => Int -> a -> Int +kkk n c = n + method c + +doBind :: Maybe () +doBind = do unwrapped <- Just () + return unwrapped + +listCompBind :: [Char] +listCompBind = [ succ c | c <- "ptfx" ] + +multipleClause :: Bool -> Char +multipleClause True = 't' +multipleClause False = 'f' + +-- | Recognizable docs: kpqz +documented :: Monad m => Either Int (m a) +documented = Left 7518 + +listOfInt = [ 8391 :: Int, 6268 ] + +outer :: Bool +outer = undefined inner where + + inner :: Char + inner = undefined + +imported :: Bar +imported = foo + +hole :: Int +hole = _ diff --git a/ghcide/test/data/hover/hie.yaml b/ghcide/test/data/hover/hie.yaml new file mode 100644 index 00000000000..f076eb000e2 --- /dev/null +++ b/ghcide/test/data/hover/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["Foo", "Bar", "GotoHover"]}} diff --git a/ghcide/test/data/ignore-fatal/IgnoreFatal.hs b/ghcide/test/data/ignore-fatal/IgnoreFatal.hs new file mode 100644 index 00000000000..77b11c5bb33 --- /dev/null +++ b/ghcide/test/data/ignore-fatal/IgnoreFatal.hs @@ -0,0 +1,8 @@ +-- "missing signature" is declared a fatal warning in the cabal file, +-- but is ignored in this module. 
+ +{-# OPTIONS_GHC -fno-warn-missing-signatures #-} + +module IgnoreFatal where + +a = 'a' diff --git a/ghcide/test/data/ignore-fatal/cabal.project b/ghcide/test/data/ignore-fatal/cabal.project new file mode 100644 index 00000000000..c6bb6fb152f --- /dev/null +++ b/ghcide/test/data/ignore-fatal/cabal.project @@ -0,0 +1 @@ +packages: ignore-fatal.cabal diff --git a/ghcide/test/data/ignore-fatal/hie.yaml b/ghcide/test/data/ignore-fatal/hie.yaml new file mode 100644 index 00000000000..6ea3cebd0d4 --- /dev/null +++ b/ghcide/test/data/ignore-fatal/hie.yaml @@ -0,0 +1,4 @@ +cradle: + cabal: + - path: "." + component: "lib:ignore-fatal" diff --git a/ghcide/test/data/ignore-fatal/ignore-fatal.cabal b/ghcide/test/data/ignore-fatal/ignore-fatal.cabal new file mode 100644 index 00000000000..6e831e03955 --- /dev/null +++ b/ghcide/test/data/ignore-fatal/ignore-fatal.cabal @@ -0,0 +1,10 @@ +name: ignore-fatal +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: IgnoreFatal + hs-source-dirs: . + ghc-options: -Werror=missing-signatures diff --git a/ghcide/test/data/multi/a/A.hs b/ghcide/test/data/multi/a/A.hs new file mode 100644 index 00000000000..1a3672013a5 --- /dev/null +++ b/ghcide/test/data/multi/a/A.hs @@ -0,0 +1,3 @@ +module A(foo) where + +foo = () diff --git a/ghcide/test/data/multi/a/a.cabal b/ghcide/test/data/multi/a/a.cabal new file mode 100644 index 00000000000..d66fc0300ca --- /dev/null +++ b/ghcide/test/data/multi/a/a.cabal @@ -0,0 +1,9 @@ +name: a +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: A + hs-source-dirs: . diff --git a/ghcide/test/data/multi/b/B.hs b/ghcide/test/data/multi/b/B.hs new file mode 100644 index 00000000000..2c6d4b28a22 --- /dev/null +++ b/ghcide/test/data/multi/b/B.hs @@ -0,0 +1,3 @@ +module B(module B) where +import A +qux = foo diff --git a/ghcide/test/data/multi/b/b.cabal b/ghcide/test/data/multi/b/b.cabal new file mode 100644 index 00000000000..e23f5177d8c --- /dev/null +++ b/ghcide/test/data/multi/b/b.cabal @@ -0,0 +1,9 @@ +name: b +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base, a + exposed-modules: B + hs-source-dirs: . diff --git a/ghcide/test/data/multi/cabal.project b/ghcide/test/data/multi/cabal.project new file mode 100644 index 00000000000..6ad9e72e04c --- /dev/null +++ b/ghcide/test/data/multi/cabal.project @@ -0,0 +1 @@ +packages: a b diff --git a/ghcide/test/data/multi/hie.yaml b/ghcide/test/data/multi/hie.yaml new file mode 100644 index 00000000000..357e8b68eaa --- /dev/null +++ b/ghcide/test/data/multi/hie.yaml @@ -0,0 +1,6 @@ +cradle: + cabal: + - path: "./a" + component: "lib:a" + - path: "./b" + component: "lib:b" diff --git a/ghcide/test/data/plugin/KnownNat.hs b/ghcide/test/data/plugin/KnownNat.hs new file mode 100644 index 00000000000..6c91f0c0a55 --- /dev/null +++ b/ghcide/test/data/plugin/KnownNat.hs @@ -0,0 +1,10 @@ +{-# OPTIONS_GHC -fplugin GHC.TypeLits.KnownNat.Solver #-} +{-# LANGUAGE DataKinds, ScopedTypeVariables, TypeOperators #-} +module KnownNat where +import Data.Proxy +import GHC.TypeLits + +f :: forall n. 
KnownNat n => Proxy n -> Integer +f _ = natVal (Proxy :: Proxy n) + natVal (Proxy :: Proxy (n+2)) +foo :: Int -> Int -> Int +foo a _b = a + c diff --git a/ghcide/test/data/plugin/RecordDot.hs b/ghcide/test/data/plugin/RecordDot.hs new file mode 100644 index 00000000000..a0e30599e9b --- /dev/null +++ b/ghcide/test/data/plugin/RecordDot.hs @@ -0,0 +1,6 @@ +{-# LANGUAGE DuplicateRecordFields, TypeApplications, TypeFamilies, UndecidableInstances, FlexibleContexts, DataKinds, MultiParamTypeClasses, TypeSynonymInstances, FlexibleInstances #-} +{-# OPTIONS_GHC -fplugin=RecordDotPreprocessor #-} +module RecordDot (Company(..), display) where +data Company = Company {name :: String} +display :: Company -> String +display c = c.name diff --git a/ghcide/test/data/plugin/cabal.project b/ghcide/test/data/plugin/cabal.project new file mode 100644 index 00000000000..e6fdbadb439 --- /dev/null +++ b/ghcide/test/data/plugin/cabal.project @@ -0,0 +1 @@ +packages: . diff --git a/ghcide/test/data/plugin/plugin.cabal b/ghcide/test/data/plugin/plugin.cabal new file mode 100644 index 00000000000..11bd0e1513b --- /dev/null +++ b/ghcide/test/data/plugin/plugin.cabal @@ -0,0 +1,10 @@ +cabal-version: 1.18 +name: plugin +version: 1.0.0 +build-type: Simple + +library + build-depends: base, ghc-typelits-knownnat, record-dot-preprocessor, + record-hasfield + exposed-modules: KnownNat, RecordDot + hs-source-dirs: . diff --git a/ghcide/test/data/recomp/A.hs b/ghcide/test/data/recomp/A.hs new file mode 100644 index 00000000000..cc80fe9eddf --- /dev/null +++ b/ghcide/test/data/recomp/A.hs @@ -0,0 +1,6 @@ +module A(x) where + +import B + +x :: Int +x = y diff --git a/ghcide/test/data/recomp/B.hs b/ghcide/test/data/recomp/B.hs new file mode 100644 index 00000000000..e8f35da9e98 --- /dev/null +++ b/ghcide/test/data/recomp/B.hs @@ -0,0 +1,4 @@ +module B(y) where + +y :: Int +y = undefined diff --git a/ghcide/test/data/recomp/P.hs b/ghcide/test/data/recomp/P.hs new file mode 100644 index 00000000000..0622632eead --- /dev/null +++ b/ghcide/test/data/recomp/P.hs @@ -0,0 +1,5 @@ +module P() where +import A +import B + +bar = x :: Int diff --git a/ghcide/test/data/recomp/hie.yaml b/ghcide/test/data/recomp/hie.yaml new file mode 100644 index 00000000000..bf98055e958 --- /dev/null +++ b/ghcide/test/data/recomp/hie.yaml @@ -0,0 +1 @@ +cradle: {direct: {arguments: ["-Wmissing-signatures","B", "A", "P"]}} diff --git a/ghcide/test/data/rootUri/dirA/Foo.hs b/ghcide/test/data/rootUri/dirA/Foo.hs new file mode 100644 index 00000000000..ea4238dcbb0 --- /dev/null +++ b/ghcide/test/data/rootUri/dirA/Foo.hs @@ -0,0 +1,3 @@ +module Foo () where + +foo = () diff --git a/ghcide/test/data/rootUri/dirA/foo.cabal b/ghcide/test/data/rootUri/dirA/foo.cabal new file mode 100644 index 00000000000..3cdd320ad99 --- /dev/null +++ b/ghcide/test/data/rootUri/dirA/foo.cabal @@ -0,0 +1,9 @@ +name: foo +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: Foo + hs-source-dirs: . 
diff --git a/ghcide/test/data/rootUri/dirB/Foo.hs b/ghcide/test/data/rootUri/dirB/Foo.hs new file mode 100644 index 00000000000..ea4238dcbb0 --- /dev/null +++ b/ghcide/test/data/rootUri/dirB/Foo.hs @@ -0,0 +1,3 @@ +module Foo () where + +foo = () diff --git a/ghcide/test/data/rootUri/dirB/foo.cabal b/ghcide/test/data/rootUri/dirB/foo.cabal new file mode 100644 index 00000000000..3cdd320ad99 --- /dev/null +++ b/ghcide/test/data/rootUri/dirB/foo.cabal @@ -0,0 +1,9 @@ +name: foo +version: 1.0.0 +build-type: Simple +cabal-version: >= 1.2 + +library + build-depends: base + exposed-modules: Foo + hs-source-dirs: . diff --git a/ghcide/test/exe/Main.hs b/ghcide/test/exe/Main.hs new file mode 100644 index 00000000000..d7bf6b26187 --- /dev/null +++ b/ghcide/test/exe/Main.hs @@ -0,0 +1,4144 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE ImplicitParams #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE CPP #-} +#include "ghc-api-version.h" + +module Main (main) where + +import Control.Applicative.Combinators +import Control.Exception (catch) +import qualified Control.Lens as Lens +import Control.Monad +import Control.Monad.IO.Class (liftIO) +import Data.Aeson (FromJSON, Value, toJSON) +import qualified Data.Binary as Binary +import Data.Foldable +import Data.List.Extra +import Data.Maybe +import Data.Rope.UTF16 (Rope) +import qualified Data.Rope.UTF16 as Rope +import Development.IDE.Core.PositionMapping (fromCurrent, toCurrent, PositionResult(..), positionResultToMaybe) +import Development.IDE.Core.Shake (Q(..)) +import Development.IDE.GHC.Util +import qualified Data.Text as T +import Data.Typeable +import Development.IDE.Spans.Common +import Development.IDE.Test +import Development.IDE.Test.Runfiles +import qualified Development.IDE.Types.Diagnostics as Diagnostics +import Development.IDE.Types.Location +import Development.Shake (getDirectoryFilesIO) +import qualified Experiments as Bench +import Language.Haskell.LSP.Test +import Language.Haskell.LSP.Messages +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Capabilities +import qualified Language.Haskell.LSP.Types.Lens as Lsp (diagnostics, params, message) +import Language.Haskell.LSP.VFS (applyChange) +import Network.URI +import System.Environment.Blank (getEnv, setEnv) +import System.FilePath +import System.IO.Extra hiding (withTempDir) +import qualified System.IO.Extra +import System.Directory +import System.Exit (ExitCode(ExitSuccess)) +import System.Process.Extra (readCreateProcessWithExitCode, CreateProcess(cwd), proc) +import System.Info.Extra (isWindows) +import Test.QuickCheck +import Test.QuickCheck.Instances () +import Test.Tasty +import Test.Tasty.ExpectedFailure +import Test.Tasty.Ingredients.Rerun +import Test.Tasty.HUnit +import Test.Tasty.QuickCheck +import System.Time.Extra +import Development.IDE.Plugin.CodeAction (typeSignatureCommandId, blockCommandId, matchRegExMultipleImports) +import Development.IDE.Plugin.Test (WaitForIdeRuleResult(..), TestRequest(WaitForIdeRule, BlockSeconds,GetInterfaceFilesDir)) +import Control.Monad.Extra (whenJust) + +main :: IO () +main = do + -- We mess with env vars so run single-threaded. 
+ defaultMainWithRerun $ testGroup "ghcide" + [ testSession "open close" $ do + doc <- createDoc "Testing.hs" "haskell" "" + void (skipManyTill anyMessage message :: Session WorkDoneProgressCreateRequest) + void (skipManyTill anyMessage message :: Session WorkDoneProgressBeginNotification) + closeDoc doc + void (skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification) + , initializeResponseTests + , completionTests + , cppTests + , diagnosticTests + , codeActionTests + , codeLensesTests + , outlineTests + , highlightTests + , findDefinitionAndHoverTests + , pluginSimpleTests + , pluginParsedResultTests + , preprocessorTests + , thTests + , safeTests + , unitTests + , haddockTests + , positionMappingTests + , watchedFilesTests + , cradleTests + , dependentFileTest + , nonLspCommandLine + , benchmarkTests + , ifaceTests + , bootTests + , rootUriTests + , asyncTests + , clientSettingsTest + , codeActionHelperFunctionTests + ] + +initializeResponseTests :: TestTree +initializeResponseTests = withResource acquire release tests where + + -- these tests document and monitor the evolution of the + -- capabilities announced by the server in the initialize + -- response. Currently the server advertises almost no capabilities + -- at all, in some cases failing to announce capabilities that it + -- actually does provide! Hopefully this will change ... + tests :: IO InitializeResponse -> TestTree + tests getInitializeResponse = + testGroup "initialize response capabilities" + [ chk " text doc sync" _textDocumentSync tds + , chk " hover" _hoverProvider (Just True) + , chk " completion" _completionProvider (Just $ CompletionOptions (Just False) (Just ["."]) Nothing) + , chk "NO signature help" _signatureHelpProvider Nothing + , chk " goto definition" _definitionProvider (Just True) + , chk " goto type definition" _typeDefinitionProvider (Just $ GotoOptionsStatic True) + -- BUG in lsp-test, this test fails, just change the accepted response + -- for now + , chk "NO goto implementation" _implementationProvider (Just $ GotoOptionsStatic True) + , chk "NO find references" _referencesProvider Nothing + , chk " doc highlight" _documentHighlightProvider (Just True) + , chk " doc symbol" _documentSymbolProvider (Just True) + , chk "NO workspace symbol" _workspaceSymbolProvider Nothing + , chk " code action" _codeActionProvider $ Just $ CodeActionOptionsStatic True + , chk " code lens" _codeLensProvider $ Just $ CodeLensOptions Nothing + , chk "NO doc formatting" _documentFormattingProvider Nothing + , chk "NO doc range formatting" + _documentRangeFormattingProvider Nothing + , chk "NO doc formatting on typing" + _documentOnTypeFormattingProvider Nothing + , chk "NO renaming" _renameProvider (Just $ RenameOptionsStatic False) + , chk "NO doc link" _documentLinkProvider Nothing + , chk "NO color" _colorProvider (Just $ ColorOptionsStatic False) + , chk "NO folding range" _foldingRangeProvider (Just $ FoldingRangeOptionsStatic False) + , che " execute command" _executeCommandProvider (Just $ ExecuteCommandOptions $ List [typeSignatureCommandId, blockCommandId]) + , chk " workspace" _workspace (Just $ WorkspaceOptions (Just WorkspaceFolderOptions{_supported = Just True, _changeNotifications = Just ( WorkspaceFolderChangeNotificationsBool True )})) + , chk "NO experimental" _experimental Nothing + ] where + + tds = Just (TDSOptions (TextDocumentSyncOptions + { _openClose = Just True + , _change = Just TdSyncIncremental + , _willSave = Nothing + , _willSaveWaitUntil = Nothing + , _save = Just 
(SaveOptions {_includeText = Nothing})})) + + chk :: (Eq a, Show a) => TestName -> (InitializeResponseCapabilitiesInner -> a) -> a -> TestTree + chk title getActual expected = + testCase title $ getInitializeResponse >>= \ir -> expected @=? (getActual . innerCaps) ir + + che :: TestName -> (InitializeResponseCapabilitiesInner -> Maybe ExecuteCommandOptions) -> Maybe ExecuteCommandOptions -> TestTree + che title getActual _expected = testCase title doTest + where + doTest = do + ir <- getInitializeResponse + let Just ExecuteCommandOptions {_commands = List [command]} = getActual $ innerCaps ir + True @=? T.isSuffixOf "typesignature.add" command + + + innerCaps :: InitializeResponse -> InitializeResponseCapabilitiesInner + innerCaps (ResponseMessage _ _ (Right (InitializeResponseCapabilities c))) = c + innerCaps _ = error "this test only expects inner capabilities" + + acquire :: IO InitializeResponse + acquire = run initializeResponse + + release :: InitializeResponse -> IO () + release = const $ pure () + + +diagnosticTests :: TestTree +diagnosticTests = testGroup "diagnostics" + [ testSessionWait "fix syntax error" $ do + let content = T.unlines [ "module Testing wher" ] + doc <- createDoc "Testing.hs" "haskell" content + expectDiagnostics [("Testing.hs", [(DsError, (0, 15), "parse error")])] + let change = TextDocumentContentChangeEvent + { _range = Just (Range (Position 0 15) (Position 0 19)) + , _rangeLength = Nothing + , _text = "where" + } + changeDoc doc [change] + expectDiagnostics [("Testing.hs", [])] + , testSessionWait "introduce syntax error" $ do + let content = T.unlines [ "module Testing where" ] + doc <- createDoc "Testing.hs" "haskell" content + void $ skipManyTill anyMessage (message :: Session WorkDoneProgressCreateRequest) + void $ skipManyTill anyMessage (message :: Session WorkDoneProgressBeginNotification) + let change = TextDocumentContentChangeEvent + { _range = Just (Range (Position 0 15) (Position 0 18)) + , _rangeLength = Nothing + , _text = "wher" + } + changeDoc doc [change] + expectDiagnostics [("Testing.hs", [(DsError, (0, 15), "parse error")])] + , testSessionWait "variable not in scope" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> Int -> Int" + , "foo a _b = a + ab" + , "bar :: Int -> Int -> Int" + , "bar _a b = cd + b" + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs" + , [ (DsError, (2, 15), "Variable not in scope: ab") + , (DsError, (4, 11), "Variable not in scope: cd") + ] + ) + ] + , testSessionWait "type error" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> String -> Int" + , "foo a b = a + b" + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs" + , [(DsError, (2, 14), "Couldn't match type '[Char]' with 'Int'")] + ) + ] + , testSessionWait "typed hole" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> String" + , "foo a = _ a" + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs" + , [(DsError, (2, 8), "Found hole: _ :: Int -> String")] + ) + ] + + , testGroup "deferral" $ + let sourceA a = T.unlines + [ "module A where" + , "a :: Int" + , "a = " <> a] + sourceB = T.unlines + [ "module B where" + , "import A ()" + , "b :: Float" + , "b = True"] + bMessage = "Couldn't match expected type 'Float' with actual type 'Bool'" + expectedDs aMessage = + [ ("A.hs", [(DsError, (2,4), aMessage)]) + , ("B.hs", [(DsError, (3,4), bMessage)])] + deferralTest title 
binding msg = testSessionWait title $ do + _ <- createDoc "A.hs" "haskell" $ sourceA binding + _ <- createDoc "B.hs" "haskell" sourceB + expectDiagnostics $ expectedDs msg + in + [ deferralTest "type error" "True" "Couldn't match expected type" + , deferralTest "typed hole" "_" "Found hole" + , deferralTest "out of scope var" "unbound" "Variable not in scope" + ] + + , testSessionWait "remove required module" $ do + let contentA = T.unlines [ "module ModuleA where" ] + docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + let change = TextDocumentContentChangeEvent + { _range = Just (Range (Position 0 0) (Position 0 20)) + , _rangeLength = Nothing + , _text = "" + } + changeDoc docA [change] + expectDiagnostics [("ModuleB.hs", [(DsError, (1, 0), "Could not find module")])] + , testSessionWait "add missing module" $ do + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA ()" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnostics [("ModuleB.hs", [(DsError, (1, 7), "Could not find module")])] + let contentA = T.unlines [ "module ModuleA where" ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + expectDiagnostics [("ModuleB.hs", [])] + , ignoreInWindowsBecause "Broken in windows" $ testSessionWait "add missing module (non workspace)" $ do + -- need to canonicalize in Mac Os + tmpDir <- liftIO $ canonicalizePath =<< getTemporaryDirectory + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA ()" + ] + _ <- createDoc (tmpDir "ModuleB.hs") "haskell" contentB + expectDiagnostics [(tmpDir "ModuleB.hs", [(DsError, (1, 7), "Could not find module")])] + let contentA = T.unlines [ "module ModuleA where" ] + _ <- createDoc (tmpDir "ModuleA.hs") "haskell" contentA + expectDiagnostics [(tmpDir "ModuleB.hs", [])] + , testSessionWait "cyclic module dependency" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "import ModuleB" + ] + let contentB = T.unlines + [ "module ModuleB where" + , "import ModuleA" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnostics + [ ( "ModuleA.hs" + , [(DsError, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")] + ) + , ( "ModuleB.hs" + , [(DsError, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")] + ) + ] + , testSessionWait "cyclic module dependency with hs-boot" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "import {-# SOURCE #-} ModuleB" + ] + let contentB = T.unlines + [ "{-# OPTIONS -Wmissing-signatures#-}" + , "module ModuleB where" + , "import ModuleA" + -- introduce an artificial diagnostic + , "foo = ()" + ] + let contentBboot = T.unlines + [ "module ModuleB where" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + _ <- createDoc "ModuleB.hs-boot" "haskell" contentBboot + expectDiagnostics [("ModuleB.hs", [(DsWarning, (3,0), "Top-level binding")])] + , testSessionWait "correct reference used with hs-boot" $ do + let contentB = T.unlines + [ "module ModuleB where" + , "import {-# SOURCE #-} ModuleA()" + ] + let contentA = T.unlines + [ "module ModuleA where" + , "import ModuleB()" + , "x = 5" + ] + let contentAboot = T.unlines + [ "module ModuleA where" + ] + let contentC = T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "module ModuleC where" + , "import ModuleA" + -- this reference will fail if it gets 
incorrectly + -- resolved to the hs-boot file + , "y = x" + ] + _ <- createDoc "ModuleB.hs" "haskell" contentB + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleA.hs-boot" "haskell" contentAboot + _ <- createDoc "ModuleC.hs" "haskell" contentC + expectDiagnostics [("ModuleC.hs", [(DsWarning, (3,0), "Top-level binding")])] + , testSessionWait "redundant import" $ do + let contentA = T.unlines ["module ModuleA where"] + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnosticsWithTags + [ ( "ModuleB.hs" + , [(DsWarning, (2, 0), "The import of 'ModuleA' is redundant", Just DtUnnecessary)] + ) + ] + , testSessionWait "redundant import even without warning" $ do + let contentA = T.unlines ["module ModuleA where"] + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wno-unused-imports -Wmissing-signatures #-}" + , "module ModuleB where" + , "import ModuleA" + -- introduce an artificial warning for testing purposes + , "foo = ()" + ] + _ <- createDoc "ModuleA.hs" "haskell" contentA + _ <- createDoc "ModuleB.hs" "haskell" contentB + expectDiagnostics [("ModuleB.hs", [(DsWarning, (3,0), "Top-level binding")])] + , testSessionWait "package imports" $ do + let thisDataListContent = T.unlines + [ "module Data.List where" + , "x :: Integer" + , "x = 123" + ] + let mainContent = T.unlines + [ "{-# LANGUAGE PackageImports #-}" + , "module Main where" + , "import qualified \"this\" Data.List as ThisList" + , "import qualified \"base\" Data.List as BaseList" + , "useThis = ThisList.x" + , "useBase = BaseList.map" + , "wrong1 = ThisList.map" + , "wrong2 = BaseList.x" + ] + _ <- createDoc "Data/List.hs" "haskell" thisDataListContent + _ <- createDoc "Main.hs" "haskell" mainContent + expectDiagnostics + [ ( "Main.hs" + , [(DsError, (6, 9), "Not in scope: \8216ThisList.map\8217") + ,(DsError, (7, 9), "Not in scope: \8216BaseList.x\8217") + ] + ) + ] + , testSessionWait "unqualified warnings" $ do + let fooContent = T.unlines + [ "{-# OPTIONS_GHC -Wredundant-constraints #-}" + , "module Foo where" + , "foo :: Ord a => a -> Int" + , "foo _a = 1" + ] + _ <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [ ( "Foo.hs" + -- The test is to make sure that warnings contain unqualified names + -- where appropriate. The warning should use an unqualified name 'Ord', not + -- sometihng like 'GHC.Classes.Ord'. The choice of redundant-constraints to + -- test this is fairly arbitrary. 
+ , [(DsWarning, (2, 0), "Redundant constraint: Ord a") + ] + ) + ] + , testSessionWait "lower-case drive" $ do + let aContent = T.unlines + [ "module A.A where" + , "import A.B ()" + ] + bContent = T.unlines + [ "{-# OPTIONS_GHC -Wall #-}" + , "module A.B where" + , "import Data.List" + ] + uriB <- getDocUri "A/B.hs" + Just pathB <- pure $ uriToFilePath uriB + uriB <- pure $ + let (drive, suffix) = splitDrive pathB + in filePathToUri (joinDrive (lower drive) suffix) + liftIO $ createDirectoryIfMissing True (takeDirectory pathB) + liftIO $ writeFileUTF8 pathB $ T.unpack bContent + uriA <- getDocUri "A/A.hs" + Just pathA <- pure $ uriToFilePath uriA + uriA <- pure $ + let (drive, suffix) = splitDrive pathA + in filePathToUri (joinDrive (lower drive) suffix) + let itemA = TextDocumentItem uriA "haskell" 0 aContent + let a = TextDocumentIdentifier uriA + sendNotification TextDocumentDidOpen (DidOpenTextDocumentParams itemA) + diagsNot <- skipManyTill anyMessage diagnostic + let PublishDiagnosticsParams fileUri diags = _params (diagsNot :: PublishDiagnosticsNotification) + -- Check that if we put a lower-case drive in for A.A + -- the diagnostics for A.B will also be lower-case. + liftIO $ fileUri @?= uriB + let msg = _message (head (toList diags) :: Diagnostic) + liftIO $ unless ("redundant" `T.isInfixOf` msg) $ + assertFailure ("Expected redundant import but got " <> T.unpack msg) + closeDoc a + , testSessionWait "haddock parse error" $ do + let fooContent = T.unlines + [ "module Foo where" + , "foo :: Int" + , "foo = 1 {-|-}" + ] + _ <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [ ( "Foo.hs" + , [(DsWarning, (2, 8), "Haddock parse error on input") + ] + ) + ] + , testSessionWait "strip file path" $ do + let + name = "Testing" + content = T.unlines + [ "module " <> name <> " where" + , "value :: Maybe ()" + , "value = [()]" + ] + _ <- createDoc (T.unpack name <> ".hs") "haskell" content + notification <- skipManyTill anyMessage diagnostic + let + offenders = + Lsp.params . + Lsp.diagnostics . + Lens.folded . + Lsp.message . 
+ Lens.filtered (T.isInfixOf ("/" <> name <> ".hs:")) + failure msg = liftIO $ assertFailure $ "Expected file path to be stripped but got " <> T.unpack msg + Lens.mapMOf_ offenders failure notification + , testSession' "-Werror in cradle is ignored" $ \sessionDir -> do + liftIO $ writeFile (sessionDir "hie.yaml") + "cradle: {direct: {arguments: [\"-Wall\", \"-Werror\"]}}" + let fooContent = T.unlines + [ "module Foo where" + , "foo = ()" + ] + _ <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [ ( "Foo.hs" + , [(DsWarning, (1, 0), "Top-level binding with no type signature:") + ] + ) + ] + , testSessionWait "-Werror in pragma is ignored" $ do + let fooContent = T.unlines + [ "{-# OPTIONS_GHC -Wall -Werror #-}" + , "module Foo() where" + , "foo :: Int" + , "foo = 1" + ] + _ <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [ ( "Foo.hs" + , [(DsWarning, (3, 0), "Defined but not used:") + ] + ) + ] + , testCase "typecheck-all-parents-of-interest" $ runWithExtraFiles "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + _pdoc <- createDoc pPath "haskell" pSource + expectDiagnostics + [("P.hs", [(DsWarning,(4,0), "Top-level binding")])] -- So that we know P has been loaded + + -- Change y from Int to B which introduces a type error in A (imported from P) + changeDoc bdoc [TextDocumentContentChangeEvent Nothing Nothing $ + T.unlines ["module B where", "y :: Bool", "y = undefined"]] + expectDiagnostics + [("A.hs", [(DsError, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) + ] + expectNoMoreDiagnostics 2 + + , testSessionWait "deduplicate missing module diagnostics" $ do + let fooContent = T.unlines [ "module Foo() where" , "import MissingModule" ] + doc <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics [("Foo.hs", [(DsError, (1,7), "Could not find module 'MissingModule'")])] + + changeDoc doc [TextDocumentContentChangeEvent Nothing Nothing "module Foo() where" ] + expectDiagnostics [] + + changeDoc doc [TextDocumentContentChangeEvent Nothing Nothing $ T.unlines + [ "module Foo() where" , "import MissingModule" ] ] + expectDiagnostics [("Foo.hs", [(DsError, (1,7), "Could not find module 'MissingModule'")])] + + , testGroup "Cancellation" + [ cancellationTestGroup "edit header" editHeader yesDepends yesSession noParse noTc + , cancellationTestGroup "edit import" editImport noDepends noSession yesParse noTc + , cancellationTestGroup "edit body" editBody yesDepends yesSession yesParse yesTc + ] + ] + where + editPair x y = let p = Position x y ; p' = Position x (y+2) in + (TextDocumentContentChangeEvent {_range=Just (Range p p), _rangeLength=Nothing, _text="fd"} + ,TextDocumentContentChangeEvent {_range=Just (Range p p'), _rangeLength=Nothing, _text=""}) + editHeader = editPair 0 0 + editImport = editPair 2 10 + editBody = editPair 3 10 + + noParse = False + yesParse = True + + noDepends = False + yesDepends = True + + noSession = False + yesSession = True + + noTc = False + yesTc = True + +cancellationTestGroup :: TestName -> (TextDocumentContentChangeEvent, TextDocumentContentChangeEvent) -> Bool -> Bool -> Bool -> Bool -> TestTree +cancellationTestGroup name edits dependsOutcome sessionDepsOutcome parseOutcome tcOutcome = testGroup name + [ cancellationTemplate edits Nothing + , cancellationTemplate edits $ Just ("GetFileContents", True) + , 
cancellationTemplate edits $ Just ("GhcSession", True) + -- the outcome for GetModSummary is always True because parseModuleHeader never fails (!) + , cancellationTemplate edits $ Just ("GetModSummary", True) + , cancellationTemplate edits $ Just ("GetModSummaryWithoutTimestamps", True) + -- getLocatedImports never fails + , cancellationTemplate edits $ Just ("GetLocatedImports", True) + , cancellationTemplate edits $ Just ("GetDependencies", dependsOutcome) + , cancellationTemplate edits $ Just ("GhcSessionDeps", sessionDepsOutcome) + , cancellationTemplate edits $ Just ("GetParsedModule", parseOutcome) + , cancellationTemplate edits $ Just ("TypeCheck", tcOutcome) + , cancellationTemplate edits $ Just ("GetHieAst", tcOutcome) + ] + +cancellationTemplate :: (TextDocumentContentChangeEvent, TextDocumentContentChangeEvent) -> Maybe (String, Bool) -> TestTree +cancellationTemplate (edit, undoEdit) mbKey = testCase (maybe "-" fst mbKey) $ runTestNoKick $ do + doc <- createDoc "Foo.hs" "haskell" $ T.unlines + [ "{-# OPTIONS_GHC -Wall #-}" + , "module Foo where" + , "import Data.List()" + , "f0 x = (x,x)" + ] + + -- for the example above we expect one warning + let missingSigDiags = [(DsWarning, (3, 0), "Top-level binding") ] + typeCheck doc >> expectCurrentDiagnostics doc missingSigDiags + + -- Now we edit the document and wait for the given key (if any) + changeDoc doc [edit] + whenJust mbKey $ \(key, expectedResult) -> do + Right WaitForIdeRuleResult{ideResultSuccess} <- waitForAction key doc + liftIO $ ideResultSuccess @?= expectedResult + + -- The 2nd edit cancels the active session and unbreaks the file + -- wait for typecheck and check that the current diagnostics are accurate + changeDoc doc [undoEdit] + typeCheck doc >> expectCurrentDiagnostics doc missingSigDiags + + expectNoMoreDiagnostics 0.5 + where + -- similar to run except it disables kick + runTestNoKick s = withTempDir $ \dir -> runInDir' dir "." "." 
["--test-no-kick"] s + + waitForAction key TextDocumentIdentifier{_uri} = do + waitId <- sendRequest (CustomClientMethod "test") (WaitForIdeRule key _uri) + ResponseMessage{_result} <- skipManyTill anyMessage $ responseForId waitId + return _result + + typeCheck doc = do + Right WaitForIdeRuleResult {..} <- waitForAction "TypeCheck" doc + liftIO $ assertBool "The file should typecheck" ideResultSuccess + -- wait for the debouncer to publish diagnostics if the rule runs + liftIO $ sleep 0.2 + -- flush messages to ensure current diagnostics state is updated + flushMessages + +codeActionTests :: TestTree +codeActionTests = testGroup "code actions" + [ renameActionTests + , typeWildCardActionTests + , removeImportTests + , extendImportTests + , suggestImportTests + , fixConstructorImportTests + , importRenameActionTests + , fillTypedHoleTests + , addSigActionTests + , insertNewDefinitionTests + , deleteUnusedDefinitionTests + , addInstanceConstraintTests + , addFunctionConstraintTests + , removeRedundantConstraintsTests + , addTypeAnnotationsToLiteralsTest + , exportUnusedTests + ] + +codeActionHelperFunctionTests :: TestTree +codeActionHelperFunctionTests = testGroup "code action helpers" + [ + extendImportTestsRegEx + ] + + +codeLensesTests :: TestTree +codeLensesTests = testGroup "code lenses" + [ addSigLensesTests + ] + +watchedFilesTests :: TestTree +watchedFilesTests = testGroup "watched files" + [ testSession' "workspace files" $ \sessionDir -> do + liftIO $ writeFile (sessionDir "hie.yaml") "cradle: {direct: {arguments: [\"-isrc\", \"A\", \"WatchedFilesMissingModule\"]}}" + _doc <- createDoc "A.hs" "haskell" "{-#LANGUAGE NoImplicitPrelude #-}\nmodule A where\nimport WatchedFilesMissingModule" + watchedFileRegs <- getWatchedFilesSubscriptionsUntil @PublishDiagnosticsNotification + + -- Expect 1 subscription: we only ever send one + liftIO $ length watchedFileRegs @?= 1 + + , testSession' "non workspace file" $ \sessionDir -> do + tmpDir <- liftIO getTemporaryDirectory + liftIO $ writeFile (sessionDir "hie.yaml") ("cradle: {direct: {arguments: [\"-i" <> tmpDir <> "\", \"A\", \"WatchedFilesMissingModule\"]}}") + _doc <- createDoc "A.hs" "haskell" "{-# LANGUAGE NoImplicitPrelude#-}\nmodule A where\nimport WatchedFilesMissingModule" + watchedFileRegs <- getWatchedFilesSubscriptionsUntil @PublishDiagnosticsNotification + + -- Expect 1 subscription: we only ever send one + liftIO $ length watchedFileRegs @?= 1 + + -- TODO add a test for didChangeWorkspaceFolder + ] + +renameActionTests :: TestTree +renameActionTests = testGroup "rename actions" + [ testSession "change to local variable name" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Int -> Int" + , "foo argName = argNme" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + action <- findCodeAction doc (Range (Position 2 14) (Position 2 20)) "Replace with ‘argName’" + executeCodeAction action + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "foo :: Int -> Int" + , "foo argName = argName" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "change to name of imported function" $ do + let content = T.unlines + [ "module Testing where" + , "import Data.Maybe (maybeToList)" + , "foo :: Maybe a -> [a]" + , "foo = maybToList" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + action <- findCodeAction doc (Range (Position 3 6) (Position 3 16)) "Replace with ‘maybeToList’" + executeCodeAction action + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "import Data.Maybe (maybeToList)" + , "foo :: Maybe a -> [a]" + , "foo = maybeToList" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "suggest multiple local variable names" $ do + let content = T.unlines + [ "module Testing where" + , "foo :: Char -> Char -> Char -> Char" + , "foo argument1 argument2 argument3 = argumentX" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + _ <- findCodeActions doc (Range (Position 2 36) (Position 2 45)) + ["Replace with ‘argument1’", "Replace with ‘argument2’", "Replace with ‘argument3’"] + return() + , testSession "change infix function" $ do + let content = T.unlines + [ "module Testing where" + , "monus :: Int -> Int" + , "monus x y = max 0 (x - y)" + , "foo x y = x `monnus` y" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 3 12) (Position 3 20)) + [fixTypo] <- pure [action | CACodeAction action@CodeAction{ _title = actionTitle } <- actionsOrCommands, "monus" `T.isInfixOf` actionTitle ] + executeCodeAction fixTypo + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "monus :: Int -> Int" + , "monus x y = max 0 (x - y)" + , "foo x y = x `monus` y" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + ] + +typeWildCardActionTests :: TestTree +typeWildCardActionTests = testGroup "type wildcard actions" + [ testSession "global signature" $ do + let content = T.unlines + [ "module Testing where" + , "func :: _" + , "func x = x" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 2 1) (Position 2 10)) + let [addSignature] = [action | CACodeAction action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isInfixOf` actionTitle + ] + executeCodeAction addSignature + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "func :: (p -> p)" + , "func x = x" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "multi-line message" $ do + let content = T.unlines + [ "module Testing where" + , "func :: _" + , "func x y = x + y" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 2 1) (Position 2 10)) + let [addSignature] = [action | CACodeAction action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isInfixOf` actionTitle + ] + executeCodeAction addSignature + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "func :: (Integer -> Integer -> Integer)" + , "func x y = x + y" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "local signature" $ do + let content = T.unlines + [ "module Testing where" + , "func :: Int -> Int" + , "func x =" + , " let y :: _" + , " y = x * 2" + , " in y" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 4 1) (Position 4 10)) + let [addSignature] = [action | CACodeAction action@CodeAction { _title = actionTitle } <- actionsOrCommands + , "Use type signature" `T.isInfixOf` actionTitle + ] + executeCodeAction addSignature + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "func :: Int -> Int" + , "func x =" + , " let y :: (Int)" + , " y = x * 2" + , " in y" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + ] + +removeImportTests :: TestTree +removeImportTests = testGroup "remove import actions" + [ testSession "redundant" $ do + let contentA = T.unlines + [ "module ModuleA where" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA" + , "stuffB :: Integer" + , "stuffB = 123" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "stuffB :: Integer" + , "stuffB = 123" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "qualified redundant" $ do + let contentA = T.unlines + [ "module ModuleA where" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA" + , "stuffB :: Integer" + , "stuffB = 123" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "stuffB :: Integer" + , "stuffB = 123" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant binding" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "stuffA = False" + , "stuffB :: Integer" + , "stuffB = 123" + , "stuffC = ()" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffA, stuffB, stuffC, stuffA)" + , "main = print stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove stuffA, stuffC from import" @=? 
actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffB)" + , "main = print stuffB" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant operator" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "a !! _b = a" + , "a _b = a" + , "stuffB :: Integer" + , "stuffB = 123" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA as A ((), stuffB, (!!))" + , "main = print A.stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove !!, from import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import qualified ModuleA as A (stuffB)" + , "main = print A.stuffB" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant all import" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data A = A" + , "stuffB :: Integer" + , "stuffB = 123" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (A(..), stuffB)" + , "main = print stuffB" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove A from import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (stuffB)" + , "main = print stuffB" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "redundant constructor import" $ do + let contentA = T.unlines + [ "module ModuleA where" + , "data D = A | B" + , "data E = F" + ] + _docA <- createDoc "ModuleA.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (D(A,B), E(F))" + , "main = B" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove A, E, F from import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import ModuleA (D(B))" + , "main = B" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + , testSession "import containing the identifier Strict" $ do + let contentA = T.unlines + [ "module Strict where" + ] + _docA <- createDoc "Strict.hs" "haskell" contentA + let contentB = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + , "import Strict" + ] + docB <- createDoc "ModuleB.hs" "haskell" contentB + _ <- waitForDiagnostics + [CACodeAction action@CodeAction { _title = actionTitle }, _] + <- getCodeActions docB (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove import" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleB where" + ] + liftIO $ expectedContentAfterAction @=? contentAfterAction + , testSession "remove all" $ do + let content = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleA where" + , "import Data.Function (fix, (&))" + , "import qualified Data.Functor.Const" + , "import Data.Functor.Identity" + , "import Data.Functor.Sum (Sum (InL, InR))" + , "import qualified Data.Kind as K (Constraint, Type)" + , "x = InL (Identity 123)" + , "y = fix id" + , "type T = K.Type" + ] + doc <- createDoc "ModuleC.hs" "haskell" content + _ <- waitForDiagnostics + [_, _, _, _, CACodeAction action@CodeAction { _title = actionTitle }] + <- getCodeActions doc (Range (Position 2 0) (Position 2 5)) + liftIO $ "Remove all redundant imports" @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "{-# OPTIONS_GHC -Wunused-imports #-}" + , "module ModuleA where" + , "import Data.Function (fix)" + , "import Data.Functor.Identity" + , "import Data.Functor.Sum (Sum (InL))" + , "import qualified Data.Kind as K (Type)" + , "x = InL (Identity 123)" + , "y = fix id" + , "type T = K.Type" + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + ] + +extendImportTests :: TestTree +extendImportTests = testGroup "extend import actions" + [ testSession "extend single line import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB)" + , "main = print (stuffA, stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffA, stuffB)" + , "main = print (stuffA, stuffB)" + ]) + , testSession "extend single line import with operator" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "(.*) :: Integer -> Integer -> Integer" + , "x .* y = x * y" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA as A (stuffB)" + , "main = print (stuffB .* stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add (.*) to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA as A ((.*), stuffB)" + , "main = print (stuffB .* stuffB)" + ]) + , testSession "extend single line import with type" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "type A = Double" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA ()" + , "b :: A" + , "b = 0" + ]) + (Range (Position 2 5) (Position 2 5)) + ["Add A to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A)" + , "b :: A" + , "b = 0" + ]) + , testSession "extend single line import with constructor" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = Constructor" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A)" + , "b :: A" + , "b = Constructor" + ]) + (Range (Position 2 5) (Position 2 5)) + ["Add A(Constructor) to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A(Constructor))" + , "b :: A" + , "b = Constructor" + ]) + , testSession "extend single line import with mixed constructors" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "data A = ConstructorFoo | ConstructorBar" + , "a = 1" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (A(ConstructorBar), a)" + , "b :: A" + , "b = ConstructorFoo" + ]) + (Range (Position 2 5) (Position 2 5)) + ["Add A(ConstructorFoo) to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (A(ConstructorFoo, ConstructorBar), a)" + , "b :: A" + , "b = ConstructorFoo" + ]) + , testSession "extend single line qualified import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , "stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import qualified ModuleA as A (stuffB)" + , "main = print (A.stuffA, A.stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import qualified ModuleA as A (stuffA, stuffB)" + , "main = print (A.stuffA, A.stuffB)" + ]) + , testSession "extend multi line import with value" $ template + [("ModuleA.hs", T.unlines + [ "module ModuleA where" + , "stuffA :: Double" + , "stuffA = 0.00750" + , "stuffB :: Integer" + , 
"stuffB = 123" + ])] + ("ModuleB.hs", T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffB" + , " )" + , "main = print (stuffA, stuffB)" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add stuffA to the import list of ModuleA"] + (T.unlines + [ "module ModuleB where" + , "import ModuleA (stuffA, stuffB" + , " )" + , "main = print (stuffA, stuffB)" + ]) + , testSession "extend import list with multiple choices" $ template + [("ModuleA.hs", T.unlines + -- this is just a dummy module to help the arguments needed for this test + [ "module ModuleA (bar) where" + , "bar = 10" + ]), + ("ModuleB.hs", T.unlines + -- this is just a dummy module to help the arguments needed for this test + [ "module ModuleB (bar) where" + , "bar = 10" + ])] + ("ModuleC.hs", T.unlines + [ "module ModuleC where" + , "import ModuleB ()" + , "import ModuleA ()" + , "foo = bar" + ]) + (Range (Position 3 17) (Position 3 18)) + ["Add bar to the import list of ModuleA", + "Add bar to the import list of ModuleB"] + (T.unlines + [ "module ModuleC where" + , "import ModuleB ()" + , "import ModuleA (bar)" + , "foo = bar" + ]) + ] + where + template setUpModules moduleUnderTest range expectedActions expectedContentB = do + mapM_ (\x -> createDoc (fst x) "haskell" (snd x)) setUpModules + docB <- createDoc (fst moduleUnderTest) "haskell" (snd moduleUnderTest) + _ <- waitForDiagnostics + void (skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification) + codeActions <- filter (\(CACodeAction CodeAction{_title=x}) -> T.isPrefixOf "Add" x) + <$> getCodeActions docB range + let expectedTitles = (\(CACodeAction CodeAction{_title=x}) ->x) <$> codeActions + liftIO $ expectedActions @=? expectedTitles + + -- Get the first action and execute the first action + let CACodeAction action : _ + = sortOn (\(CACodeAction CodeAction{_title=x}) -> x) codeActions + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ expectedContentB @=? contentAfterAction + +extendImportTestsRegEx :: TestTree +extendImportTestsRegEx = testGroup "regex parsing" + [ + testCase "parse invalid multiple imports" $ template "foo bar foo" Nothing + , testCase "parse malformed import list" $ template + "\n\8226 Perhaps you want to add \8216fromList\8217 to one of these import lists:\n \8216Data.Map\8217)" + Nothing + , testCase "parse multiple imports" $ template + "\n\8226 Perhaps you want to add \8216fromList\8217 to one of these import lists:\n \8216Data.Map\8217 (app/testlsp.hs:7:1-18)\n \8216Data.HashMap.Strict\8217 (app/testlsp.hs:8:1-29)" + $ Just ("fromList",[("Data.Map","app/testlsp.hs:7:1-18"),("Data.HashMap.Strict","app/testlsp.hs:8:1-29")]) + ] + where + template message expected = do + liftIO $ matchRegExMultipleImports message @=? 
expected + + + +suggestImportTests :: TestTree +suggestImportTests = testGroup "suggest import actions" + [ testGroup "Dont want suggestion" + [ -- extend import + test False ["Data.List.NonEmpty ()"] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + -- data constructor + , test False [] "f = First" [] "import Data.Monoid (First)" + -- internal module + , test False [] "f :: Typeable a => a" ["f = undefined"] "import Data.Typeable.Internal (Typeable)" + -- package not in scope + , test False [] "f = quickCheck" [] "import Test.QuickCheck (quickCheck)" + ] + , testGroup "want suggestion" + [ wantWait [] "f = foo" [] "import Foo (foo)" + , wantWait [] "f = Bar" [] "import Bar (Bar(Bar))" + , wantWait [] "f :: Bar" [] "import Bar (Bar)" + , test True [] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + , test True [] "f = (:|)" [] "import Data.List.NonEmpty (NonEmpty((:|)))" + , test True [] "f :: Natural" ["f = undefined"] "import Numeric.Natural (Natural)" + , test True [] "f :: Natural" ["f = undefined"] "import Numeric.Natural" + , test True [] "f :: NonEmpty ()" ["f = () :| []"] "import Data.List.NonEmpty (NonEmpty)" + , test True [] "f :: NonEmpty ()" ["f = () :| []"] "import Data.List.NonEmpty" + , test True [] "f = First" [] "import Data.Monoid (First(First))" + , test True [] "f = Endo" [] "import Data.Monoid (Endo(Endo))" + , test True [] "f = Version" [] "import Data.Version (Version(Version))" + , test True [] "f ExitSuccess = ()" [] "import System.Exit (ExitCode(ExitSuccess))" + , test True [] "f = AssertionFailed" [] "import Control.Exception (AssertionFailed(AssertionFailed))" + , test True ["Prelude"] "f = nonEmpty" [] "import Data.List.NonEmpty (nonEmpty)" + , test True [] "f :: Alternative f => f ()" ["f = undefined"] "import Control.Applicative (Alternative)" + , test True [] "f :: Alternative f => f ()" ["f = undefined"] "import Control.Applicative" + , test True [] "f = empty" [] "import Control.Applicative (Alternative(empty))" + , test True [] "f = empty" [] "import Control.Applicative" + , test True [] "f = (&)" [] "import Data.Function ((&))" + , test True [] "f = NE.nonEmpty" [] "import qualified Data.List.NonEmpty as NE" + , test True [] "f = Data.List.NonEmpty.nonEmpty" [] "import qualified Data.List.NonEmpty" + , test True [] "f :: Typeable a => a" ["f = undefined"] "import Data.Typeable (Typeable)" + , test True [] "f = pack" [] "import Data.Text (pack)" + , test True [] "f :: Text" ["f = undefined"] "import Data.Text (Text)" + , test True [] "f = [] & id" [] "import Data.Function ((&))" + , test True [] "f = (&) [] id" [] "import Data.Function ((&))" + , test True [] "f = (.|.)" [] "import Data.Bits (Bits((.|.)))" + ] + ] + where + test = test' False + wantWait = test' True True + test' waitForCheckProject wanted imps def other newImp = testSessionWithExtraFiles "hover" (T.unpack def) $ \dir -> do + let before = T.unlines $ "module A where" : ["import " <> x | x <- imps] ++ def : other + after = T.unlines $ "module A where" : ["import " <> x | x <- imps] ++ [newImp] ++ def : other + cradle = "cradle: {direct: {arguments: [-hide-all-packages, -package, base, -package, text, -package-env, -, A, Bar, Foo]}}" + liftIO $ writeFileUTF8 (dir "hie.yaml") cradle + doc <- createDoc "Test.hs" "haskell" before + void (skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification) + _diags <- waitForDiagnostics + -- there isn't a good way to wait until the whole project is checked atm + when waitForCheckProject $ liftIO $ sleep 0.5 + let 
defLine = length imps + 1 + range = Range (Position defLine 0) (Position defLine maxBound) + actions <- getCodeActions doc range + if wanted + then do + action <- liftIO $ pickActionWithTitle newImp actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ after @=? contentAfterAction + else + liftIO $ [_title | CACodeAction CodeAction{_title} <- actions, _title == newImp ] @?= [] + +insertNewDefinitionTests :: TestTree +insertNewDefinitionTests = testGroup "insert new definition actions" + [ testSession "insert new function definition" $ do + let txtB = + ["foo True = select [True]" + , "" + ,"foo False = False" + ] + txtB' = + ["" + ,"someOtherCode = ()" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines $ txtB ++ txtB') + _ <- waitForDiagnostics + CACodeAction action@CodeAction { _title = actionTitle } : _ + <- sortOn (\(CACodeAction CodeAction{_title=x}) -> x) <$> + getCodeActions docB (R 1 0 1 50) + liftIO $ actionTitle @?= "Define select :: [Bool] -> Bool" + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines (txtB ++ + [ "" + , "select :: [Bool] -> Bool" + , "select = error \"not implemented\"" + ] + ++ txtB') + , testSession "define a hole" $ do + let txtB = + ["foo True = _select [True]" + , "" + ,"foo False = False" + ] + txtB' = + ["" + ,"someOtherCode = ()" + ] + docB <- createDoc "ModuleB.hs" "haskell" (T.unlines $ txtB ++ txtB') + _ <- waitForDiagnostics + CACodeAction action@CodeAction { _title = actionTitle } : _ + <- sortOn (\(CACodeAction CodeAction{_title=x}) -> x) <$> + getCodeActions docB (R 1 0 1 50) + liftIO $ actionTitle @?= "Define select :: [Bool] -> Bool" + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ contentAfterAction @?= T.unlines ( + ["foo True = select [True]" + , "" + ,"foo False = False" + , "" + , "select :: [Bool] -> Bool" + , "select = error \"not implemented\"" + ] + ++ txtB') + ] + + +deleteUnusedDefinitionTests :: TestTree +deleteUnusedDefinitionTests = testGroup "delete unused definition action" + [ testSession "delete unused top level binding" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "f :: Int -> Int" + , "f 1 = let a = 1" + , " in a" + , "f 2 = 2" + , "" + , "some = ()" + ]) + (4, 0) + "Delete ‘f’" + (T.unlines [ + "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "some = ()" + ]) + + , testSession "delete unused top level binding defined in infix form" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "myPlus :: Int -> Int -> Int" + , "a `myPlus` b = a + b" + , "" + , "some = ()" + ]) + (4, 2) + "Delete ‘myPlus’" + (T.unlines [ + "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (some) where" + , "" + , "some = ()" + ]) + , testSession "delete unused binding in where clause" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (h, g) where" + , "" + , "h :: Int" + , "h = 3" + , "" + , "g :: Int" + , "g = 6" + , " where" + , " h :: Int" + , " h = 4" + , "" + ]) + (10, 4) + "Delete ‘h’" + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (h, g) where" + , "" + , "h :: Int" + , "h = 3" + , "" + , "g :: Int" + , "g = 6" + , " where" + , "" + ]) + , testSession "delete unused binding with multi-oneline signatures front" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (b, c) where" + , "" + , 
"a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ]) + (4, 0) + "Delete ‘a’" + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (b, c) where" + , "" + , "b, c :: Int" + , "b = 4" + , "c = 5" + ]) + , testSession "delete unused binding with multi-oneline signatures mid" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, c) where" + , "" + , "a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ]) + (5, 0) + "Delete ‘b’" + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, c) where" + , "" + , "a, c :: Int" + , "a = 3" + , "c = 5" + ]) + , testSession "delete unused binding with multi-oneline signatures end" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, b) where" + , "" + , "a, b, c :: Int" + , "a = 3" + , "b = 4" + , "c = 5" + ]) + (6, 0) + "Delete ‘c’" + (T.unlines [ "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (a, b) where" + , "" + , "a, b :: Int" + , "a = 3" + , "b = 4" + ]) + ] + where + testFor source pos expectedTitle expectedResult = do + docId <- createDoc "A.hs" "haskell" source + expectDiagnostics [ ("A.hs", [(DsWarning, pos, "not used")]) ] + + (action, title) <- extractCodeAction docId "Delete" + + liftIO $ title @?= expectedTitle + executeCodeAction action + contentAfterAction <- documentContents docId + liftIO $ contentAfterAction @?= expectedResult + + extractCodeAction docId actionPrefix = do + [action@CodeAction { _title = actionTitle }] <- findCodeActionsByPrefix docId (R 0 0 0 0) [actionPrefix] + return (action, actionTitle) + +addTypeAnnotationsToLiteralsTest :: TestTree +addTypeAnnotationsToLiteralsTest = testGroup "add type annotations to literals to satisfy contraints" + [ + testSession "add default type to satisfy one contraint" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A (f) where" + , "" + , "f = 1" + ]) + [ (DsWarning, (3, 4), "Defaulting the following constraint") ] + "Add type annotation ‘Integer’ to ‘1’" + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "module A (f) where" + , "" + , "f = (1 :: Integer)" + ]) + + , testSession "add default type to satisfy one contraint with duplicate literals" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq \"debug\" traceShow \"debug\"" + ]) + [ (DsWarning, (6, 8), "Defaulting the following constraint") + , (DsWarning, (6, 16), "Defaulting the following constraint") + ] + "Add type annotation ‘[Char]’ to ‘\"debug\"’" + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: [Char]) traceShow \"debug\"" + ]) + , testSession "add default type to satisfy two contraints" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f a = traceShow \"debug\" a" + ]) + [ (DsWarning, (6, 6), "Defaulting the following constraint") ] + "Add type annotation ‘[Char]’ to ‘\"debug\"’" + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f a = traceShow (\"debug\" :: [Char]) a" + ]) + , testSession "add default type to satisfy two contraints with duplicate literals" $ + testFor + (T.unlines [ "{-# OPTIONS_GHC 
-Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow \"debug\"))" + ]) + [ (DsWarning, (6, 54), "Defaulting the following constraint") ] + "Add type annotation ‘[Char]’ to ‘\"debug\"’" + (T.unlines [ "{-# OPTIONS_GHC -Wtype-defaults #-}" + , "{-# LANGUAGE OverloadedStrings #-}" + , "module A (f) where" + , "" + , "import Debug.Trace" + , "" + , "f = seq (\"debug\" :: [Char]) (seq (\"debug\" :: [Char]) (traceShow (\"debug\" :: [Char])))" + ]) + ] + where + testFor source diag expectedTitle expectedResult = do + docId <- createDoc "A.hs" "haskell" source + expectDiagnostics [ ("A.hs", diag) ] + + (action, title) <- extractCodeAction docId "Add type annotation" + + liftIO $ title @?= expectedTitle + executeCodeAction action + contentAfterAction <- documentContents docId + liftIO $ contentAfterAction @?= expectedResult + + extractCodeAction docId actionPrefix = do + [action@CodeAction { _title = actionTitle }] <- findCodeActionsByPrefix docId (R 0 0 0 0) [actionPrefix] + return (action, actionTitle) + + +fixConstructorImportTests :: TestTree +fixConstructorImportTests = testGroup "fix import actions" + [ testSession "fix constructor import" $ template + (T.unlines + [ "module ModuleA where" + , "data A = Constructor" + ]) + (T.unlines + [ "module ModuleB where" + , "import ModuleA(Constructor)" + ]) + (Range (Position 1 10) (Position 1 11)) + "Fix import of A(Constructor)" + (T.unlines + [ "module ModuleB where" + , "import ModuleA(A(Constructor))" + ]) + ] + where + template contentA contentB range expectedAction expectedContentB = do + _docA <- createDoc "ModuleA.hs" "haskell" contentA + docB <- createDoc "ModuleB.hs" "haskell" contentB + _diags <- waitForDiagnostics + CACodeAction action@CodeAction { _title = actionTitle } : _ + <- sortOn (\(CACodeAction CodeAction{_title=x}) -> x) <$> + getCodeActions docB range + liftIO $ expectedAction @=? actionTitle + executeCodeAction action + contentAfterAction <- documentContents docB + liftIO $ expectedContentB @=? contentAfterAction + +importRenameActionTests :: TestTree +importRenameActionTests = testGroup "import rename actions" + [ testSession "Data.Mape -> Data.Map" $ check "Map" + , testSession "Data.Mape -> Data.Maybe" $ check "Maybe" ] where + check modname = do + let content = T.unlines + [ "module Testing where" + , "import Data.Mape" + ] + doc <- createDoc "Testing.hs" "haskell" content + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 2 8) (Position 2 16)) + let [changeToMap] = [action | CACodeAction action@CodeAction{ _title = actionTitle } <- actionsOrCommands, ("Data." <> modname) `T.isInfixOf` actionTitle ] + executeCodeAction changeToMap + contentAfterAction <- documentContents doc + let expectedContentAfterAction = T.unlines + [ "module Testing where" + , "import Data." <> modname + ] + liftIO $ expectedContentAfterAction @=? 
contentAfterAction + +fillTypedHoleTests :: TestTree +fillTypedHoleTests = let + + sourceCode :: T.Text -> T.Text -> T.Text -> T.Text + sourceCode a b c = T.unlines + [ "module Testing where" + , "" + , "globalConvert :: Int -> String" + , "globalConvert = undefined" + , "" + , "globalInt :: Int" + , "globalInt = 3" + , "" + , "bar :: Int -> Int -> String" + , "bar n parameterInt = " <> a <> " (n + " <> b <> " + " <> c <> ") where" + , " localConvert = (flip replicate) 'x'" + , "" + , "foo :: () -> Int -> String" + , "foo = undefined" + + ] + + check :: T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> T.Text -> TestTree + check actionTitle + oldA oldB oldC + newA newB newC = testSession (T.unpack actionTitle) $ do + let originalCode = sourceCode oldA oldB oldC + let expectedCode = sourceCode newA newB newC + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 9 0) (Position 9 maxBound)) + chosenAction <- liftIO $ pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + in + testGroup "fill typed holes" + [ check "replace _ with show" + "_" "n" "n" + "show" "n" "n" + + , check "replace _ with globalConvert" + "_" "n" "n" + "globalConvert" "n" "n" + + , check "replace _convertme with localConvert" + "_convertme" "n" "n" + "localConvert" "n" "n" + + , check "replace _b with globalInt" + "_a" "_b" "_c" + "_a" "globalInt" "_c" + + , check "replace _c with globalInt" + "_a" "_b" "_c" + "_a" "_b" "globalInt" + + , check "replace _c with parameterInt" + "_a" "_b" "_c" + "_a" "_b" "parameterInt" + , check "replace _ with foo _" + "_" "n" "n" + "(foo _)" "n" "n" + , testSession "replace _toException with E.toException" $ do + let mkDoc x = T.unlines + [ "module Testing where" + , "import qualified Control.Exception as E" + , "ioToSome :: E.IOException -> E.SomeException" + , "ioToSome = " <> x ] + doc <- createDoc "Test.hs" "haskell" $ mkDoc "_toException" + _ <- waitForDiagnostics + actions <- getCodeActions doc (Range (Position 3 0) (Position 3 maxBound)) + chosen <- liftIO $ pickActionWithTitle "replace _toException with E.toException" actions + executeCodeAction chosen + modifiedCode <- documentContents doc + liftIO $ mkDoc "E.toException" @=? 
modifiedCode + ] + +addInstanceConstraintTests :: TestTree +addInstanceConstraintTests = let + missingConstraintSourceCode :: Maybe T.Text -> T.Text + missingConstraintSourceCode mConstraint = + let constraint = maybe "" (<> " => ") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Wrap a = Wrap a" + , "" + , "instance " <> constraint <> "Eq (Wrap a) where" + , " (Wrap x) == (Wrap y) = x == y" + ] + + incompleteConstraintSourceCode :: Maybe T.Text -> T.Text + incompleteConstraintSourceCode mConstraint = + let constraint = maybe "Eq a" (\c -> "(Eq a, " <> c <> ")") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "instance " <> constraint <> " => Eq (Pair a b) where" + , " (Pair x y) == (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCode2 :: Maybe T.Text -> T.Text + incompleteConstraintSourceCode2 mConstraint = + let constraint = maybe "(Eq a, Eq b)" (\c -> "(Eq a, Eq b, " <> c <> ")") mConstraint + in T.unlines + [ "module Testing where" + , "" + , "data Three a b c = Three a b c" + , "" + , "instance " <> constraint <> " => Eq (Three a b c) where" + , " (Three x y z) == (Three x' y' z') = x == x' && y == y' && z == z'" + ] + + check :: T.Text -> T.Text -> T.Text -> TestTree + check actionTitle originalCode expectedCode = testSession (T.unpack actionTitle) $ do + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 6 0) (Position 6 68)) + chosenAction <- liftIO $ pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + + in testGroup "add instance constraint" + [ check + "Add `Eq a` to the context of the instance declaration" + (missingConstraintSourceCode Nothing) + (missingConstraintSourceCode $ Just "Eq a") + , check + "Add `Eq b` to the context of the instance declaration" + (incompleteConstraintSourceCode Nothing) + (incompleteConstraintSourceCode $ Just "Eq b") + , check + "Add `Eq c` to the context of the instance declaration" + (incompleteConstraintSourceCode2 Nothing) + (incompleteConstraintSourceCode2 $ Just "Eq c") + ] + +addFunctionConstraintTests :: TestTree +addFunctionConstraintTests = let + missingConstraintSourceCode :: T.Text -> T.Text + missingConstraintSourceCode constraint = + T.unlines + [ "module Testing where" + , "" + , "eq :: " <> constraint <> "a -> a -> Bool" + , "eq x y = x == y" + ] + + incompleteConstraintSourceCode :: T.Text -> T.Text + incompleteConstraintSourceCode constraint = + T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "eq :: " <> constraint <> " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + incompleteConstraintSourceCode2 :: T.Text -> T.Text + incompleteConstraintSourceCode2 constraint = + T.unlines + [ "module Testing where" + , "" + , "data Three a b c = Three a b c" + , "" + , "eq :: " <> constraint <> " => Three a b c -> Three a b c -> Bool" + , "eq (Three x y z) (Three x' y' z') = x == x' && y == y' && z == z'" + ] + + incompleteConstraintSourceCodeWithExtraCharsInContext :: T.Text -> T.Text + incompleteConstraintSourceCodeWithExtraCharsInContext constraint = + T.unlines + [ "module Testing where" + , "" + , "data Pair a b = Pair a b" + , "" + , "eq :: " <> constraint <> " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + 
incompleteConstraintSourceCodeWithNewlinesInTypeSignature :: T.Text -> T.Text + incompleteConstraintSourceCodeWithNewlinesInTypeSignature constraint = + T.unlines + [ "module Testing where" + , "data Pair a b = Pair a b" + , "eq " + , " :: " <> constraint + , " => Pair a b -> Pair a b -> Bool" + , "eq (Pair x y) (Pair x' y') = x == x' && y == y'" + ] + + check :: T.Text -> T.Text -> T.Text -> TestTree + check actionTitle originalCode expectedCode = testSession (T.unpack actionTitle) $ do + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 6 0) (Position 6 maxBound)) + chosenAction <- liftIO $ pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + + in testGroup "add function constraint" + [ check + "Add `Eq a` to the context of the type signature for `eq`" + (missingConstraintSourceCode "") + (missingConstraintSourceCode "Eq a => ") + , check + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCode "Eq a") + (incompleteConstraintSourceCode "(Eq a, Eq b)") + , check + "Add `Eq c` to the context of the type signature for `eq`" + (incompleteConstraintSourceCode2 "(Eq a, Eq b)") + (incompleteConstraintSourceCode2 "(Eq a, Eq b, Eq c)") + , check + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCodeWithExtraCharsInContext "( Eq a )") + (incompleteConstraintSourceCodeWithExtraCharsInContext "(Eq a, Eq b)") + , check + "Add `Eq b` to the context of the type signature for `eq`" + (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "(Eq a)") + (incompleteConstraintSourceCodeWithNewlinesInTypeSignature "(Eq a, Eq b)") + ] + +removeRedundantConstraintsTests :: TestTree +removeRedundantConstraintsTests = let + header = + [ "{-# OPTIONS_GHC -Wredundant-constraints #-}" + , "module Testing where" + , "" + ] + + redundantConstraintsCode :: Maybe T.Text -> T.Text + redundantConstraintsCode mConstraint = + let constraint = maybe "" (\c -> "" <> c <> " => ") mConstraint + in T.unlines $ header <> + [ "foo :: " <> constraint <> "a -> a" + , "foo = id" + ] + + redundantMixedConstraintsCode :: Maybe T.Text -> T.Text + redundantMixedConstraintsCode mConstraint = + let constraint = maybe "(Num a, Eq a)" (\c -> "(Num a, Eq a, " <> c <> ")") mConstraint + in T.unlines $ header <> + [ "foo :: " <> constraint <> " => a -> Bool" + , "foo x = x == 1" + ] + + typeSignatureSpaces :: T.Text + typeSignatureSpaces = T.unlines $ header <> + [ "foo :: (Num a, Eq a, Monoid a) => a -> Bool" + , "foo x = x == 1" + ] + + typeSignatureMultipleLines :: T.Text + typeSignatureMultipleLines = T.unlines $ header <> + [ "foo :: (Num a, Eq a, Monoid a)" + , "=> a -> Bool" + , "foo x = x == 1" + ] + + check :: T.Text -> T.Text -> T.Text -> TestTree + check actionTitle originalCode expectedCode = testSession (T.unpack actionTitle) $ do + doc <- createDoc "Testing.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 4 0) (Position 4 maxBound)) + chosenAction <- liftIO $ pickActionWithTitle actionTitle actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? 
modifiedCode + + checkPeculiarFormatting :: String -> T.Text -> TestTree + checkPeculiarFormatting title code = testSession title $ do + doc <- createDoc "Testing.hs" "haskell" code + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 4 0) (Position 4 maxBound)) + liftIO $ assertBool "Found some actions" (null actionsOrCommands) + + in testGroup "remove redundant function constraints" + [ check + "Remove redundant constraint `Eq a` from the context of the type signature for `foo`" + (redundantConstraintsCode $ Just "Eq a") + (redundantConstraintsCode Nothing) + , check + "Remove redundant constraints `(Eq a, Monoid a)` from the context of the type signature for `foo`" + (redundantConstraintsCode $ Just "(Eq a, Monoid a)") + (redundantConstraintsCode Nothing) + , check + "Remove redundant constraints `(Monoid a, Show a)` from the context of the type signature for `foo`" + (redundantMixedConstraintsCode $ Just "Monoid a, Show a") + (redundantMixedConstraintsCode Nothing) + , checkPeculiarFormatting + "should do nothing when constraints contain an arbitrary number of spaces" + typeSignatureSpaces + , checkPeculiarFormatting + "should do nothing when constraints contain line feeds" + typeSignatureMultipleLines + ] + +addSigActionTests :: TestTree +addSigActionTests = let + header = "{-# OPTIONS_GHC -Wmissing-signatures -Wmissing-pattern-synonym-signatures #-}" + moduleH = "{-# LANGUAGE PatternSynonyms #-}\nmodule Sigs where" + before def = T.unlines [header, moduleH, def] + after' def sig = T.unlines [header, moduleH, sig, def] + + def >:: sig = testSession (T.unpack def) $ do + let originalCode = before def + let expectedCode = after' def sig + doc <- createDoc "Sigs.hs" "haskell" originalCode + _ <- waitForDiagnostics + actionsOrCommands <- getCodeActions doc (Range (Position 3 1) (Position 3 maxBound)) + chosenAction <- liftIO $ pickActionWithTitle ("add signature: " <> sig) actionsOrCommands + executeCodeAction chosenAction + modifiedCode <- documentContents doc + liftIO $ expectedCode @=? modifiedCode + in + testGroup "add signature" + [ "abc = True" >:: "abc :: Bool" + , "foo a b = a + b" >:: "foo :: Num a => a -> a -> a" + , "bar a b = show $ a + b" >:: "bar :: (Show a, Num a) => a -> a -> String" + , "(!!!) a b = a > b" >:: "(!!!) 
:: Ord a => a -> a -> Bool" + , "a >>>> b = a + b" >:: "(>>>>) :: Num a => a -> a -> a" + , "a `haha` b = a b" >:: "haha :: (t1 -> t2) -> t1 -> t2" + , "pattern Some a = Just a" >:: "pattern Some :: a -> Maybe a" + ] + +exportUnusedTests :: TestTree +exportUnusedTests = testGroup "export unused actions" + [ testGroup "don't want suggestion" + [ testSession "implicit exports" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# OPTIONS_GHC -Wmissing-signatures #-}" + , "module A where" + , "foo = id"]) + (R 3 0 3 3) + "Export ‘foo’" + Nothing -- codeaction should not be available + , testSession "not top-level" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# OPTIONS_GHC -Wunused-binds #-}" + , "module A (foo,bar) where" + , "foo = ()" + , " where bar = ()" + , "bar = ()"]) + (R 2 0 2 11) + "Export ‘bar’" + Nothing + , testSession "type is exported but not the constructor of same name" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo) where" + , "data Foo = Foo"]) + (R 2 0 2 8) + "Export ‘Foo’" + Nothing -- codeaction should not be available + , testSession "unused data field" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(Foo)) where" + , "data Foo = Foo {foo :: ()}"]) + (R 2 0 2 20) + "Export ‘foo’" + Nothing -- codeaction should not be available + ] + , testGroup "want suggestion" + [ testSession "empty exports" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (" + , ") where" + , "foo = id"]) + (R 3 0 3 3) + "Export ‘foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (" + , "foo) where" + , "foo = id"]) + , testSession "single line explicit exports" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (foo) where" + , "foo = id" + , "bar = foo"]) + (R 3 0 3 3) + "Export ‘bar’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (foo,bar) where" + , "foo = id" + , "bar = foo"]) + , testSession "multi line explicit exports" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (" + , " foo) where" + , "foo = id" + , "bar = foo"]) + (R 5 0 5 3) + "Export ‘bar’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (" + , " foo,bar) where" + , "foo = id" + , "bar = foo"]) + , testSession "export list ends in comma" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (foo," + , " ) where" + , "foo = id" + , "bar = foo"]) + (R 4 0 4 3) + "Export ‘bar’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A" + , " (foo," + , " bar) where" + , "foo = id" + , "bar = foo"]) + , testSession "unused pattern synonym" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A () where" + , "pattern Foo a <- (a, _)"]) + (R 3 0 3 10) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE PatternSynonyms #-}" + , "module A (pattern Foo) where" + , "pattern Foo a <- (a, _)"]) + , testSession "unused data type" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "data Foo = Foo"]) + (R 2 0 2 7) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "data Foo = Foo"]) + , testSession "unused newtype" $ template + 
(T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "newtype Foo = Foo ()"]) + (R 2 0 2 10) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "newtype Foo = Foo ()"]) + , testSession "unused type synonym" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "type Foo = ()"]) + (R 2 0 2 7) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo) where" + , "type Foo = ()"]) + , testSession "unused type family" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "module A () where" + , "type family Foo p"]) + (R 3 0 3 15) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "module A (Foo(..)) where" + , "type family Foo p"]) + , testSession "unused typeclass" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "class Foo a"]) + (R 2 0 2 8) + "Export ‘Foo’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (Foo(..)) where" + , "class Foo a"]) + , testSession "infix" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "a `f` b = ()"]) + (R 2 0 2 11) + "Export ‘f’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A (f) where" + , "a `f` b = ()"]) + , testSession "function operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A () where" + , "(<|) = ($)"]) + (R 2 0 2 9) + "Export ‘<|’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "module A ((<|)) where" + , "(<|) = ($)"]) + , testSession "type synonym operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "type (:<) = ()"]) + (R 3 0 3 13) + "Export ‘:<’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A ((:<)) where" + , "type (:<) = ()"]) + , testSession "type family operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "type family (:<)"]) + (R 4 0 4 15) + "Export ‘:<’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeFamilies #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "type family (:<)"]) + , testSession "typeclass operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "class (:<) a"]) + (R 3 0 3 11) + "Export ‘:<’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "class (:<) a"]) + , testSession "newtype operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A () where" + , "newtype (:<) = Foo ()"]) + (R 3 0 3 20) + "Export ‘:<’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "newtype (:<) = Foo ()"]) + , testSession "data type operator" $ template + (T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE 
TypeOperators #-}" + , "module A () where" + , "data (:<) = Foo ()"]) + (R 3 0 3 17) + "Export ‘:<’" + (Just $ T.unlines + [ "{-# OPTIONS_GHC -Wunused-top-binds #-}" + , "{-# LANGUAGE TypeOperators #-}" + , "module A (type (:<)(..)) where" + , "data (:<) = Foo ()"]) + ] + ] + where + template initialContent range expectedAction expectedContents = do + doc <- createDoc "A.hs" "haskell" initialContent + _ <- waitForDiagnostics + actions <- getCodeActions doc range + case expectedContents of + Just content -> do + action <- liftIO $ pickActionWithTitle expectedAction actions + executeCodeAction action + contentAfterAction <- documentContents doc + liftIO $ content @=? contentAfterAction + Nothing -> + liftIO $ [_title | CACodeAction CodeAction{_title} <- actions, _title == expectedAction ] @?= [] + +addSigLensesTests :: TestTree +addSigLensesTests = let + missing = "{-# OPTIONS_GHC -Wmissing-signatures -Wmissing-pattern-synonym-signatures -Wunused-matches #-}" + notMissing = "{-# OPTIONS_GHC -Wunused-matches #-}" + moduleH = "{-# LANGUAGE PatternSynonyms #-}\nmodule Sigs where\nimport qualified Data.Complex as C" + other = T.unlines ["f :: Integer -> Integer", "f x = 3"] + before withMissing def + = T.unlines $ (if withMissing then (missing :) else (notMissing :)) [moduleH, def, other] + after' withMissing def sig + = T.unlines $ (if withMissing then (missing :) else (notMissing :)) [moduleH, sig, def, other] + + sigSession withMissing def sig = testSession (T.unpack def) $ do + let originalCode = before withMissing def + let expectedCode = after' withMissing def sig + doc <- createDoc "Sigs.hs" "haskell" originalCode + [CodeLens {_command = Just c}] <- getCodeLenses doc + executeCommand c + modifiedCode <- getDocumentEdit doc + liftIO $ expectedCode @=? modifiedCode + in + testGroup "add signature" + [ testGroup title + [ sigSession enableWarnings "abc = True" "abc :: Bool" + , sigSession enableWarnings "foo a b = a + b" "foo :: Num a => a -> a -> a" + , sigSession enableWarnings "bar a b = show $ a + b" "bar :: (Show a, Num a) => a -> a -> String" + , sigSession enableWarnings "(!!!) a b = a > b" "(!!!) 
:: Ord a => a -> a -> Bool" + , sigSession enableWarnings "a >>>> b = a + b" "(>>>>) :: Num a => a -> a -> a" + , sigSession enableWarnings "a `haha` b = a b" "haha :: (t1 -> t2) -> t1 -> t2" + , sigSession enableWarnings "pattern Some a = Just a" "pattern Some :: a -> Maybe a" + , sigSession enableWarnings "qualifiedSigTest= C.realPart" "qualifiedSigTest :: C.Complex a -> a" + ] + | (title, enableWarnings) <- + [("with warnings enabled", True) + ,("with warnings disabled", False) + ] + ] + +checkDefs :: [Location] -> Session [Expect] -> Session () +checkDefs defs mkExpectations = traverse_ check =<< mkExpectations where + + check (ExpectRange expectedRange) = do + assertNDefinitionsFound 1 defs + assertRangeCorrect (head defs) expectedRange + check (ExpectLocation expectedLocation) = do + assertNDefinitionsFound 1 defs + liftIO $ do + canonActualLoc <- canonicalizeLocation (head defs) + canonExpectedLoc <- canonicalizeLocation expectedLocation + canonActualLoc @?= canonExpectedLoc + check ExpectNoDefinitions = do + assertNDefinitionsFound 0 defs + check ExpectExternFail = liftIO $ assertFailure "Expecting to fail to find in external file" + check _ = pure () -- all other expectations not relevant to getDefinition + + assertNDefinitionsFound :: Int -> [a] -> Session () + assertNDefinitionsFound n defs = liftIO $ assertEqual "number of definitions" n (length defs) + + assertRangeCorrect Location{_range = foundRange} expectedRange = + liftIO $ expectedRange @=? foundRange + +canonicalizeLocation :: Location -> IO Location +canonicalizeLocation (Location uri range) = Location <$> canonicalizeUri uri <*> pure range + +findDefinitionAndHoverTests :: TestTree +findDefinitionAndHoverTests = let + + tst (get, check) pos targetRange title = testSessionWithExtraFiles "hover" title $ \dir -> do + + -- Dirty the cache to check that definitions work even in the presence of iface files + liftIO $ runInDir dir $ do + let fooPath = dir "Foo.hs" + fooSource <- liftIO $ readFileUtf8 fooPath + fooDoc <- createDoc fooPath "haskell" fooSource + _ <- getHover fooDoc $ Position 4 3 + closeDoc fooDoc + + doc <- openTestDataDoc (dir sourceFilePath) + void (skipManyTill anyMessage message :: Session WorkDoneProgressEndNotification) + found <- get doc pos + check found targetRange + + + + checkHover :: Maybe Hover -> Session [Expect] -> Session () + checkHover hover expectations = traverse_ check =<< expectations where + + check expected = + case hover of + Nothing -> unless (expected == ExpectNoHover) $ liftIO $ assertFailure "no hover found" + Just Hover{_contents = (HoverContents MarkupContent{_value = standardizeQuotes -> msg}) + ,_range = rangeInHover } -> + case expected of + ExpectRange expectedRange -> checkHoverRange expectedRange rangeInHover msg + ExpectHoverRange expectedRange -> checkHoverRange expectedRange rangeInHover msg + ExpectHoverText snippets -> liftIO $ traverse_ (`assertFoundIn` msg) snippets + ExpectNoHover -> liftIO $ assertFailure $ "Expected no hover but got " <> show hover + _ -> pure () -- all other expectations not relevant to hover + _ -> liftIO $ assertFailure $ "test not expecting this kind of hover info" <> show hover + + extractLineColFromHoverMsg :: T.Text -> [T.Text] + extractLineColFromHoverMsg = T.splitOn ":" . head . T.splitOn "*" . last . 
T.splitOn (sourceFileName <> ":") + + checkHoverRange :: Range -> Maybe Range -> T.Text -> Session () + checkHoverRange expectedRange rangeInHover msg = + let + lineCol = extractLineColFromHoverMsg msg + -- looks like hovers use 1-based numbering while definitions use 0-based + -- turns out that they are stored 1-based in RealSrcLoc by GHC itself. + adjust Position{_line = l, _character = c} = + Position{_line = l + 1, _character = c + 1} + in + case map (read . T.unpack) lineCol of + [l,c] -> liftIO $ (adjust $ _start expectedRange) @=? Position l c + _ -> liftIO $ assertFailure $ + "expected: " <> show ("[...]" <> sourceFileName <> "::**[...]", Just expectedRange) <> + "\n but got: " <> show (msg, rangeInHover) + + assertFoundIn :: T.Text -> T.Text -> Assertion + assertFoundIn part whole = assertBool + (T.unpack $ "failed to find: `" <> part <> "` in hover message:\n" <> whole) + (part `T.isInfixOf` whole) + + sourceFilePath = T.unpack sourceFileName + sourceFileName = "GotoHover.hs" + + mkFindTests tests = testGroup "get" + [ testGroup "definition" $ mapMaybe fst tests + , testGroup "hover" $ mapMaybe snd tests + , checkFileCompiles sourceFilePath $ + expectDiagnostics + [ ( "GotoHover.hs", [(DsError, (59, 7), "Found hole: _")]) ] + , testGroup "type-definition" typeDefinitionTests ] + + typeDefinitionTests = [ tst (getTypeDefinitions, checkDefs) aaaL14 (pure tcData) "Saturated data con" + , tst (getTypeDefinitions, checkDefs) opL16 (pure [ExpectNoDefinitions]) "Polymorphic variable"] + + test runDef runHover look expect = testM runDef runHover look (return expect) + + testM runDef runHover look expect title = + ( runDef $ tst def look expect title + , runHover $ tst hover look expect title ) where + def = (getDefinitions, checkDefs) + hover = (getHover , checkHover) + + -- search locations expectations on results + fffL4 = _start fffR ; fffR = mkRange 8 4 8 7 ; fff = [ExpectRange fffR] + fffL8 = Position 12 4 ; + fffL14 = Position 18 7 ; + aaaL14 = Position 18 20 ; aaa = [mkR 11 0 11 3] + dcL7 = Position 11 11 ; tcDC = [mkR 7 23 9 16] + dcL12 = Position 16 11 ; + xtcL5 = Position 9 11 ; xtc = [ExpectExternFail, ExpectHoverText ["Int", "Defined in ", "GHC.Types"]] + tcL6 = Position 10 11 ; tcData = [mkR 7 0 9 16, ExpectHoverText ["TypeConstructor", "GotoHover.hs:8:1"]] + vvL16 = Position 20 12 ; vv = [mkR 20 4 20 6] + opL16 = Position 20 15 ; op = [mkR 21 2 21 4] + opL18 = Position 22 22 ; opp = [mkR 22 13 22 17] + aL18 = Position 22 20 ; apmp = [mkR 22 10 22 11] + b'L19 = Position 23 13 ; bp = [mkR 23 6 23 7] + xvL20 = Position 24 8 ; xvMsg = [ExpectExternFail, ExpectHoverText ["pack", ":: String -> Text", "Data.Text"]] + clL23 = Position 27 11 ; cls = [mkR 25 0 26 20, ExpectHoverText ["MyClass", "GotoHover.hs:26:1"]] + clL25 = Position 29 9 + eclL15 = Position 19 8 ; ecls = [ExpectExternFail, ExpectHoverText ["Num", "Defined in ", "GHC.Num"]] + dnbL29 = Position 33 18 ; dnb = [ExpectHoverText [":: ()"], mkR 33 12 33 21] + dnbL30 = Position 34 23 + lcbL33 = Position 37 26 ; lcb = [ExpectHoverText [":: Char"], mkR 37 26 37 27] + lclL33 = Position 37 22 + mclL36 = Position 40 1 ; mcl = [mkR 40 0 40 14] + mclL37 = Position 41 1 + spaceL37 = Position 41 24 ; space = [ExpectNoDefinitions, ExpectHoverText [":: Char"]] + docL41 = Position 45 1 ; doc = [ExpectHoverText ["Recognizable docs: kpqz"]] + ; constr = [ExpectHoverText ["Monad m"]] + eitL40 = Position 44 28 ; kindE = [ExpectHoverText [":: * -> * -> *\n"]] + intL40 = Position 44 34 ; kindI = [ExpectHoverText [":: *\n"]] + tvrL40 = 
Position 44 37 ; kindV = [ExpectHoverText [":: * -> *\n"]] + intL41 = Position 45 20 ; litI = [ExpectHoverText ["7518"]] + chrL36 = Position 41 24 ; litC = [ExpectHoverText ["'f'"]] + txtL8 = Position 12 14 ; litT = [ExpectHoverText ["\"dfgy\""]] + lstL43 = Position 47 12 ; litL = [ExpectHoverText ["[8391 :: Int, 6268]"]] + outL45 = Position 49 3 ; outSig = [ExpectHoverText ["outer", "Bool"], mkR 46 0 46 5] + innL48 = Position 52 5 ; innSig = [ExpectHoverText ["inner", "Char"], mkR 49 2 49 7] + holeL60 = Position 59 7 ; hleInfo = [ExpectHoverText ["_ ::"]] + cccL17 = Position 17 16 ; docLink = [ExpectHoverText ["[Documentation](file:///"]] + imported = Position 56 13 ; importedSig = getDocUri "Foo.hs" >>= \foo -> return [ExpectHoverText ["foo", "Foo", "Haddock"], mkL foo 5 0 5 3] + reexported = Position 55 14 ; reexportedSig = getDocUri "Bar.hs" >>= \bar -> return [ExpectHoverText ["Bar", "Bar", "Haddock"], mkL bar 3 0 3 14] + in + mkFindTests + -- def hover look expect + [ test yes yes fffL4 fff "field in record definition" + , test yes yes fffL8 fff "field in record construction #71" + , test yes yes fffL14 fff "field name used as accessor" -- 120 in Calculate.hs + , test yes yes aaaL14 aaa "top-level name" -- 120 + , test yes yes dcL7 tcDC "data constructor record #247" + , test yes yes dcL12 tcDC "data constructor plain" -- 121 + , test yes yes tcL6 tcData "type constructor #248" -- 147 + , test broken yes xtcL5 xtc "type constructor external #248,249" + , test broken yes xvL20 xvMsg "value external package #249" -- 120 + , test yes yes vvL16 vv "plain parameter" -- 120 + , test yes yes aL18 apmp "pattern match name" -- 120 + , test yes yes opL16 op "top-level operator" -- 120, 123 + , test yes yes opL18 opp "parameter operator" -- 120 + , test yes yes b'L19 bp "name in backticks" -- 120 + , test yes yes clL23 cls "class in instance declaration #250" + , test yes yes clL25 cls "class in signature #250" -- 147 + , test broken yes eclL15 ecls "external class in signature #249,250" + , test yes yes dnbL29 dnb "do-notation bind" -- 137 + , test yes yes dnbL30 dnb "do-notation lookup" + , test yes yes lcbL33 lcb "listcomp bind" -- 137 + , test yes yes lclL33 lcb "listcomp lookup" + , test yes yes mclL36 mcl "top-level fn 1st clause" + , test yes yes mclL37 mcl "top-level fn 2nd clause #246" +#if MIN_GHC_API_VERSION(8,10,0) + , test yes yes spaceL37 space "top-level fn on space #315" +#else + , test yes broken spaceL37 space "top-level fn on space #315" +#endif + , test no yes docL41 doc "documentation #7" + , test no yes eitL40 kindE "kind of Either #273" + , test no yes intL40 kindI "kind of Int #273" + , test no broken tvrL40 kindV "kind of (* -> *) type variable #273" + , test no broken intL41 litI "literal Int in hover info #274" + , test no broken chrL36 litC "literal Char in hover info #274" + , test no broken txtL8 litT "literal Text in hover info #274" + , test no broken lstL43 litL "literal List in hover info #274" + , test no broken docL41 constr "type constraint in hover info #283" + , test broken broken outL45 outSig "top-level signature #310" + , test broken broken innL48 innSig "inner signature #310" + , test no yes holeL60 hleInfo "hole without internal name #847" + , test no skip cccL17 docLink "Haddock html links" + , testM yes yes imported importedSig "Imported symbol" + , testM yes yes reexported reexportedSig "Imported symbol (reexported)" + ] + where yes, broken :: (TestTree -> Maybe TestTree) + yes = Just -- test should run and pass + broken = Just . 
(`xfail` "known broken") + no = const Nothing -- don't run this test at all + skip = const Nothing -- unreliable, don't run + +checkFileCompiles :: FilePath -> Session () -> TestTree +checkFileCompiles fp diag = + testSessionWithExtraFiles "hover" ("Does " ++ fp ++ " compile") $ \dir -> do + void (openTestDataDoc (dir fp)) + diag + +pluginSimpleTests :: TestTree +pluginSimpleTests = + ignoreTest8101 "GHC #18070" $ + ignoreInWindowsForGHC88And810 $ + testSessionWithExtraFiles "plugin" "simple plugin" $ \dir -> do + _ <- openDoc (dir "KnownNat.hs") "haskell" + liftIO $ writeFile (dir"hie.yaml") + "cradle: {cabal: [{path: '.', component: 'lib:plugin'}]}" + + expectDiagnostics + [ ( "KnownNat.hs", + [(DsError, (9, 15), "Variable not in scope: c")] + ) + ] + +pluginParsedResultTests :: TestTree +pluginParsedResultTests = + ignoreTest8101 "GHC #18070" $ + ignoreInWindowsForGHC88And810 $ + testSessionWithExtraFiles "plugin" "parsedResultAction plugin" $ \dir -> do + _ <- openDoc (dir "RecordDot.hs") "haskell" + expectNoMoreDiagnostics 2 + +cppTests :: TestTree +cppTests = + testGroup "cpp" + [ ignoreInWindowsBecause "Throw a lsp session time out in windows for ghc-8.8 and is broken for other versions" $ testCase "cpp-error" $ do + let content = + T.unlines + [ "{-# LANGUAGE CPP #-}", + "module Testing where", + "#ifdef FOO", + "foo = 42" + ] + -- The error locations differ depending on which C-preprocessor is used. + -- Some give the column number and others don't (hence -1). Assert either + -- of them. + (run $ expectError content (2, -1)) + `catch` ( \e -> do + let _ = e :: HUnitFailure + run $ expectError content (2, 1) + ) + , testSessionWait "cpp-ghcide" $ do + _ <- createDoc "A.hs" "haskell" $ T.unlines + ["{-# LANGUAGE CPP #-}" + ,"main =" + ,"#ifdef __GHCIDE__" + ," worked" + ,"#else" + ," failed" + ,"#endif" + ] + expectDiagnostics [("A.hs", [(DsError, (3, 2), "Variable not in scope: worked")])] + ] + where + expectError :: T.Text -> Cursor -> Session () + expectError content cursor = do + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs", + [(DsError, cursor, "error: unterminated")] + ) + ] + expectNoMoreDiagnostics 0.5 + +preprocessorTests :: TestTree +preprocessorTests = testSessionWait "preprocessor" $ do + let content = + T.unlines + [ "{-# OPTIONS_GHC -F -pgmF=ghcide-test-preprocessor #-}" + , "module Testing where" + , "y = x + z" -- plugin replaces x with y, making this have only one diagnostic + ] + _ <- createDoc "Testing.hs" "haskell" content + expectDiagnostics + [ ( "Testing.hs", + [(DsError, (2, 8), "Variable not in scope: z")] + ) + ] + + +safeTests :: TestTree +safeTests = + testGroup + "SafeHaskell" + [ -- Test for https://github.com/haskell/ghcide/issues/424 + testSessionWait "load" $ do + let sourceA = + T.unlines + ["{-# LANGUAGE Trustworthy #-}" + ,"module A where" + ,"import System.IO.Unsafe" + ,"import System.IO ()" + ,"trustWorthyId :: a -> a" + ,"trustWorthyId i = unsafePerformIO $ do" + ," putStrLn \"I'm safe\"" + ," return i"] + sourceB = + T.unlines + ["{-# LANGUAGE Safe #-}" + ,"module B where" + ,"import A" + ,"safeId :: a -> a" + ,"safeId = trustWorthyId" + ] + + _ <- createDoc "A.hs" "haskell" sourceA + _ <- createDoc "B.hs" "haskell" sourceB + expectNoMoreDiagnostics 1 ] + +thTests :: TestTree +thTests = + testGroup + "TemplateHaskell" + [ -- Test for https://github.com/haskell/ghcide/pull/212 + testSessionWait "load" $ do + let sourceA = + T.unlines + [ "{-# LANGUAGE PackageImports #-}", + "{-# LANGUAGE TemplateHaskell 
#-}", + "module A where", + "import \"template-haskell\" Language.Haskell.TH", + "a :: Integer", + "a = $(litE $ IntegerL 3)" + ] + sourceB = + T.unlines + [ "{-# LANGUAGE PackageImports #-}", + "{-# LANGUAGE TemplateHaskell #-}", + "module B where", + "import A", + "import \"template-haskell\" Language.Haskell.TH", + "b :: Integer", + "b = $(litE $ IntegerL $ a) + n" + ] + _ <- createDoc "A.hs" "haskell" sourceA + _ <- createDoc "B.hs" "haskell" sourceB + expectDiagnostics [ ( "B.hs", [(DsError, (6, 29), "Variable not in scope: n")] ) ] + , testSessionWait "newtype-closure" $ do + let sourceA = + T.unlines + [ "{-# LANGUAGE DeriveDataTypeable #-}" + ,"{-# LANGUAGE TemplateHaskell #-}" + ,"module A (a) where" + ,"import Data.Data" + ,"import Language.Haskell.TH" + ,"newtype A = A () deriving (Data)" + ,"a :: ExpQ" + ,"a = [| 0 |]"] + let sourceB = + T.unlines + [ "{-# LANGUAGE TemplateHaskell #-}" + ,"module B where" + ,"import A" + ,"b :: Int" + ,"b = $( a )" ] + _ <- createDoc "A.hs" "haskell" sourceA + _ <- createDoc "B.hs" "haskell" sourceB + return () + , thReloadingTest + -- Regression test for https://github.com/haskell/ghcide/issues/614 + , thLinkingTest + , testSessionWait "findsTHIdentifiers" $ do + let sourceA = + T.unlines + [ "{-# LANGUAGE TemplateHaskell #-}" + , "module A (a) where" + , "a = [| glorifiedID |]" + , "glorifiedID :: a -> a" + , "glorifiedID = id" ] + let sourceB = + T.unlines + [ "{-# OPTIONS_GHC -Wall #-}" + , "{-# LANGUAGE TemplateHaskell #-}" + , "module B where" + , "import A" + , "main = $a (putStrLn \"success!\")"] + _ <- createDoc "A.hs" "haskell" sourceA + _ <- createDoc "B.hs" "haskell" sourceB + expectDiagnostics [ ( "B.hs", [(DsWarning, (4, 0), "Top-level binding with no type signature: main :: IO ()")] ) ] + , ignoreInWindowsForGHC88 $ testCase "findsTHnewNameConstructor" $ runWithExtraFiles "THNewName" $ \dir -> do + + -- This test defines a TH value with the meaning "data A = A" in A.hs + -- Loads and export the template in B.hs + -- And checks wether the constructor A can be loaded in C.hs + -- This test does not fail when either A and B get manually loaded before C.hs + -- or when we remove the seemingly unnecessary TH pragma from C.hs + + let cPath = dir "C.hs" + _ <- openDoc cPath "haskell" + expectDiagnostics [ ( cPath, [(DsWarning, (3, 0), "Top-level binding with no type signature: a :: A")] ) ] + ] + +-- | test that TH is reevaluated on typecheck +thReloadingTest :: TestTree +thReloadingTest = testCase "reloading-th-test" $ runWithExtraFiles "TH" $ \dir -> do + + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + cPath = dir "THC.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- th = [d|a = ()|] + bSource <- liftIO $ readFileUtf8 bPath -- $th + cSource <- liftIO $ readFileUtf8 cPath -- c = a :: () + + adoc <- createDoc aPath "haskell" aSource + bdoc <- createDoc bPath "haskell" bSource + cdoc <- createDoc cPath "haskell" cSource + + expectDiagnostics [("THB.hs", [(DsWarning, (4,0), "Top-level binding")])] + + -- Change th from () to Bool + let aSource' = T.unlines $ init (T.lines aSource) ++ ["th_a = [d| a = False|]"] + changeDoc adoc [TextDocumentContentChangeEvent Nothing Nothing aSource'] + -- generate an artificial warning to avoid timing out if the TH change does not propagate + changeDoc cdoc [TextDocumentContentChangeEvent Nothing Nothing $ cSource <> "\nfoo=()"] + + -- Check that the change propagates to C + expectDiagnostics + [("THC.hs", [(DsError, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'")]) + ,("THC.hs", 
[(DsWarning, (6,0), "Top-level binding")]) + ,("THB.hs", [(DsWarning, (4,0), "Top-level binding")]) + ] + + closeDoc adoc + closeDoc bdoc + closeDoc cdoc + +thLinkingTest :: TestTree +thLinkingTest = testCase "th-linking-test" $ runWithExtraFiles "TH" $ \dir -> do + + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- th_a = [d|a :: ()|] + bSource <- liftIO $ readFileUtf8 bPath -- $th_a + + adoc <- createDoc aPath "haskell" aSource + bdoc <- createDoc bPath "haskell" bSource + + expectDiagnostics [("THB.hs", [(DsWarning, (4,0), "Top-level binding")])] + + let aSource' = T.unlines $ init (init (T.lines aSource)) ++ ["th :: DecsQ", "th = [d| a = False|]"] + changeDoc adoc [TextDocumentContentChangeEvent Nothing Nothing aSource'] + + -- modify b too + let bSource' = T.unlines $ init (T.lines bSource) ++ ["$th"] + changeDoc bdoc [TextDocumentContentChangeEvent Nothing Nothing bSource'] + + expectDiagnostics [("THB.hs", [(DsWarning, (4,0), "Top-level binding")])] + + closeDoc adoc + closeDoc bdoc + + +completionTests :: TestTree +completionTests + = testGroup "completion" + [ testGroup "non local" nonLocalCompletionTests + , testGroup "topLevel" topLevelCompletionTests + , testGroup "local" localCompletionTests + , testGroup "other" otherCompletionTests + ] + +completionTest :: String -> [T.Text] -> Position -> [(T.Text, CompletionItemKind, T.Text, Bool, Bool, Maybe (List TextEdit))] -> TestTree +completionTest name src pos expected = testSessionWait name $ do + docId <- createDoc "A.hs" "haskell" (T.unlines src) + _ <- waitForDiagnostics + compls <- getCompletions docId pos + let compls' = [ (_label, _kind, _insertText, _additionalTextEdits) | CompletionItem{..} <- compls] + liftIO $ do + let emptyToMaybe x = if T.null x then Nothing else Just x + compls' @?= [ (l, Just k, emptyToMaybe t, at) | (l,k,t,_,_,at) <- expected] + forM_ (zip compls expected) $ \(CompletionItem{..}, (_,_,_,expectedSig, expectedDocs, _)) -> do + when expectedSig $ + assertBool ("Missing type signature: " <> T.unpack _label) (isJust _detail) + when expectedDocs $ + assertBool ("Missing docs: " <> T.unpack _label) (isJust _documentation) + +topLevelCompletionTests :: [TestTree] +topLevelCompletionTests = [ + completionTest + "variable" + ["bar = xx", "-- | haddock", "xxx :: ()", "xxx = ()", "-- | haddock", "data Xxx = XxxCon"] + (Position 0 8) + [("xxx", CiFunction, "xxx", True, True, Nothing), + ("XxxCon", CiConstructor, "XxxCon", False, True, Nothing) + ], + completionTest + "constructor" + ["bar = xx", "-- | haddock", "xxx :: ()", "xxx = ()", "-- | haddock", "data Xxx = XxxCon"] + (Position 0 8) + [("xxx", CiFunction, "xxx", True, True, Nothing), + ("XxxCon", CiConstructor, "XxxCon", False, True, Nothing) + ], + completionTest + "class method" + ["bar = xx", "class Xxx a where", "-- | haddock", "xxx :: ()", "xxx = ()"] + (Position 0 8) + [("xxx", CiFunction, "xxx", True, True, Nothing)], + completionTest + "type" + ["bar :: Xx", "xxx = ()", "-- | haddock", "data Xxx = XxxCon"] + (Position 0 9) + [("Xxx", CiStruct, "Xxx", False, True, Nothing)], + completionTest + "class" + ["bar :: Xx", "xxx = ()", "-- | haddock", "class Xxx a"] + (Position 0 9) + [("Xxx", CiClass, "Xxx", False, True, Nothing)], + completionTest + "records" + ["data Person = Person { _personName:: String, _personAge:: Int}", "bar = Person { _pers }" ] + (Position 1 19) + [("_personName", CiFunction, "_personName", False, True, Nothing), + ("_personAge", CiFunction, "_personAge", False, True, Nothing)], + 
completionTest + "recordsConstructor" + ["data XxRecord = XyRecord { x:: String, y:: Int}", "bar = Xy" ] + (Position 1 19) + [("XyRecord", CiConstructor, "XyRecord", False, True, Nothing), + ("XyRecord", CiSnippet, "XyRecord {x=${1:_x}, y=${2:_y}}", False, True, Nothing)] + ] + +localCompletionTests :: [TestTree] +localCompletionTests = [ + completionTest + "argument" + ["bar (Just abcdef) abcdefg = abcd"] + (Position 0 32) + [("abcdef", CiFunction, "abcdef", True, False, Nothing), + ("abcdefg", CiFunction , "abcdefg", True, False, Nothing) + ], + completionTest + "let" + ["bar = let (Just abcdef) = undefined" + ," abcdefg = let abcd = undefined in undefined" + ," in abcd" + ] + (Position 2 15) + [("abcdef", CiFunction, "abcdef", True, False, Nothing), + ("abcdefg", CiFunction , "abcdefg", True, False, Nothing) + ], + completionTest + "where" + ["bar = abcd" + ," where (Just abcdef) = undefined" + ," abcdefg = let abcd = undefined in undefined" + ] + (Position 0 10) + [("abcdef", CiFunction, "abcdef", True, False, Nothing), + ("abcdefg", CiFunction , "abcdefg", True, False, Nothing) + ], + completionTest + "do/1" + ["bar = do" + ," Just abcdef <- undefined" + ," abcd" + ," abcdefg <- undefined" + ," pure ()" + ] + (Position 2 6) + [("abcdef", CiFunction, "abcdef", True, False, Nothing) + ], + completionTest + "do/2" + ["bar abcde = do" + ," Just [(abcdef,_)] <- undefined" + ," abcdefg <- undefined" + ," let abcdefgh = undefined" + ," (Just [abcdefghi]) = undefined" + ," abcd" + ," where" + ," abcdefghij = undefined" + ] + (Position 5 8) + [("abcde", CiFunction, "abcde", True, False, Nothing) + ,("abcdefghij", CiFunction, "abcdefghij", True, False, Nothing) + ,("abcdef", CiFunction, "abcdef", True, False, Nothing) + ,("abcdefg", CiFunction, "abcdefg", True, False, Nothing) + ,("abcdefgh", CiFunction, "abcdefgh", True, False, Nothing) + ,("abcdefghi", CiFunction, "abcdefghi", True, False, Nothing) + ] + ] + +nonLocalCompletionTests :: [TestTree] +nonLocalCompletionTests = + [ completionTest + "variable" + ["module A where", "f = hea"] + (Position 1 7) + [("head", CiFunction, "head ${1:[a]}", True, True, Nothing)], + completionTest + "constructor" + ["module A where", "f = Tru"] + (Position 1 7) + [ ("True", CiConstructor, "True ", True, True, Nothing), + ("truncate", CiFunction, "truncate ${1:a}", True, True, Nothing) + ], + completionTest + "type" + ["{-# OPTIONS_GHC -Wall #-}", "module A () where", "f :: Bo", "f = True"] + (Position 2 7) + [ ("Bounded", CiClass, "Bounded ${1:*}", True, True, Nothing), + ("Bool", CiStruct, "Bool ", True, True, Nothing) + ], + completionTest + "qualified" + ["{-# OPTIONS_GHC -Wunused-binds #-}", "module A () where", "f = Prelude.hea"] + (Position 2 15) + [ ("head", CiFunction, "head ${1:[a]}", True, True, Nothing) + ], + completionTest + "duplicate import" + ["module A where", "import Data.List", "import Data.List", "f = perm"] + (Position 3 8) + [ ("permutations", CiFunction, "permutations ${1:[a]}", False, False, Nothing) + ], + completionTest + "dont show hidden items" + [ "{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", + "import Control.Monad hiding (join)", + "f = joi" + ] + (Position 3 6) + [], + expectFailBecause "Auto import completion snippets were disabled in v0.6.0.2" $ + testGroup "auto import snippets" + [ completionTest + "show imports not in list - simple" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (msum)", "f = joi"] + (Position 3 6) + [("join", CiFunction, "join ${1:m (m a)}", False, False, + 
Just (List [TextEdit {_range = Range {_start = Position {_line = 2, _character = 26}, _end = Position {_line = 2, _character = 26}}, _newText = "join, "}]))] + , completionTest + "show imports not in list - multi-line" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import Control.Monad (\n msum)", "f = joi"] + (Position 4 6) + [("join", CiFunction, "join ${1:m (m a)}", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 3, _character = 8}, _end = Position {_line = 3, _character = 8}}, _newText = "join, "}]))] + , completionTest + "show imports not in list - names with _" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import qualified Control.Monad as M (msum)", "f = M.mapM_"] + (Position 3 11) + [("mapM_", CiFunction, "mapM_ ${1:a -> m b} ${2:t a}", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 2, _character = 41}, _end = Position {_line = 2, _character = 41}}, _newText = "mapM_, "}]))] + , completionTest + "show imports not in list - initial empty list" + ["{-# LANGUAGE NoImplicitPrelude #-}", + "module A where", "import qualified Control.Monad as M ()", "f = M.joi"] + (Position 3 10) + [("join", CiFunction, "join ${1:m (m a)}", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 2, _character = 37}, _end = Position {_line = 2, _character = 37}}, _newText = "join, "}]))] + , completionTest + "record snippet on import" + ["module A where", "import Text.Printf (FormatParse(FormatParse))", "FormatParse"] + (Position 2 10) + [("FormatParse", CiStruct, "FormatParse ", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 1, _character = 44}, _end = Position {_line = 1, _character = 44}}, _newText = "FormatParse, "}])), + ("FormatParse", CiConstructor, "FormatParse ${1:String} ${2:Char} ${3:String}", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 1, _character = 44}, _end = Position {_line = 1, _character = 44}}, _newText = "FormatParse, "}])), + ("FormatParse", CiSnippet, "FormatParse {fpModifiers=${1:_fpModifiers}, fpChar=${2:_fpChar}, fpRest=${3:_fpRest}}", False, False, + Just (List [TextEdit {_range = Range {_start = Position {_line = 1, _character = 44}, _end = Position {_line = 1, _character = 44}}, _newText = "FormatParse, "}])) + ] + ], + -- we need this test to make sure the ghcide completions module does not return completions for language pragmas. this functionality is turned on in hls + completionTest + "do not show pragma completions" + [ "{-# LANGUAGE ", + "{module A where}", + "main = return ()" + ] + (Position 0 13) + [] + ] + +otherCompletionTests :: [TestTree] +otherCompletionTests = [ + completionTest + "keyword" + ["module A where", "f = newty"] + (Position 1 9) + [("newtype", CiKeyword, "", False, False, Nothing)], + completionTest + "type context" + [ "{-# OPTIONS_GHC -Wunused-binds #-}", + "module A () where", + "f = f", + "g :: Intege" + ] + -- At this point the module parses but does not typecheck. + -- This should be sufficient to detect that we are in a + -- type context and only show the completion to the type. 
+ (Position 3 11) + [("Integer", CiStruct, "Integer ", True, True, Nothing)] + ] + +highlightTests :: TestTree +highlightTests = testGroup "highlight" + [ testSessionWait "value" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 3 2) + liftIO $ highlights @?= + [ DocumentHighlight (R 2 0 2 3) (Just HkRead) + , DocumentHighlight (R 3 0 3 3) (Just HkWrite) + , DocumentHighlight (R 4 6 4 9) (Just HkRead) + , DocumentHighlight (R 5 22 5 25) (Just HkRead) + ] + , testSessionWait "type" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 2 8) + liftIO $ highlights @?= + [ DocumentHighlight (R 2 7 2 10) (Just HkRead) + , DocumentHighlight (R 3 11 3 14) (Just HkRead) + ] + , testSessionWait "local" $ do + doc <- createDoc "A.hs" "haskell" source + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 6 5) + liftIO $ highlights @?= + [ DocumentHighlight (R 6 4 6 7) (Just HkWrite) + , DocumentHighlight (R 6 10 6 13) (Just HkRead) + , DocumentHighlight (R 7 12 7 15) (Just HkRead) + ] + , testSessionWait "record" $ do + doc <- createDoc "A.hs" "haskell" recsource + _ <- waitForDiagnostics + highlights <- getHighlights doc (Position 4 15) + liftIO $ highlights @?= + -- Span is just the .. on 8.10, but Rec{..} before +#if MIN_GHC_API_VERSION(8,10,0) + [ DocumentHighlight (R 4 8 4 10) (Just HkWrite) +#else + [ DocumentHighlight (R 4 4 4 11) (Just HkWrite) +#endif + , DocumentHighlight (R 4 14 4 20) (Just HkRead) + ] + highlights <- getHighlights doc (Position 3 17) + liftIO $ highlights @?= + [ DocumentHighlight (R 3 17 3 23) (Just HkWrite) + -- Span is just the .. on 8.10, but Rec{..} before +#if MIN_GHC_API_VERSION(8,10,0) + , DocumentHighlight (R 4 8 4 10) (Just HkRead) +#else + , DocumentHighlight (R 4 4 4 11) (Just HkRead) +#endif + ] + ] + where + source = T.unlines + ["{-# OPTIONS_GHC -Wunused-binds #-}" + ,"module Highlight () where" + ,"foo :: Int" + ,"foo = 3 :: Int" + ,"bar = foo" + ," where baz = let x = foo in x" + ,"baz arg = arg + x" + ," where x = arg" + ] + recsource = T.unlines + ["{-# LANGUAGE RecordWildCards #-}" + ,"{-# OPTIONS_GHC -Wunused-binds #-}" + ,"module Highlight () where" + ,"data Rec = Rec { field1 :: Int, field2 :: Char }" + ,"foo Rec{..} = field2 + field1" + ] + +outlineTests :: TestTree +outlineTests = testGroup + "outline" + [ testSessionWait "type class" $ do + let source = T.unlines ["module A where", "class A a where a :: a -> Bool"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [ moduleSymbol + "A" + (R 0 7 0 8) + [ classSymbol "A a" + (R 1 0 1 30) + [docSymbol' "a" SkMethod (R 1 16 1 30) (R 1 16 1 17)] + ] + ] + , testSessionWait "type class instance " $ do + let source = T.unlines ["class A a where", "instance A () where"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [ classSymbol "A a" (R 0 0 0 15) [] + , docSymbol "A ()" SkInterface (R 1 0 1 19) + ] + , testSessionWait "type family" $ do + let source = T.unlines ["{-# language TypeFamilies #-}", "type family A"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left [docSymbolD "A" "type family" SkClass (R 1 0 1 13)] + , testSessionWait "type family instance " $ do + let source = T.unlines + [ "{-# language TypeFamilies #-}" + , "type family A a" + , "type instance A () = ()" + 
] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [ docSymbolD "A a" "type family" SkClass (R 1 0 1 15) + , docSymbol "A ()" SkInterface (R 2 0 2 23) + ] + , testSessionWait "data family" $ do + let source = T.unlines ["{-# language TypeFamilies #-}", "data family A"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left [docSymbolD "A" "data family" SkClass (R 1 0 1 11)] + , testSessionWait "data family instance " $ do + let source = T.unlines + [ "{-# language TypeFamilies #-}" + , "data family A a" + , "data instance A () = A ()" + ] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [ docSymbolD "A a" "data family" SkClass (R 1 0 1 11) + , docSymbol "A ()" SkInterface (R 2 0 2 25) + ] + , testSessionWait "constant" $ do + let source = T.unlines ["a = ()"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbol "a" SkFunction (R 0 0 0 6)] + , testSessionWait "pattern" $ do + let source = T.unlines ["Just foo = Just 21"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbol "Just foo" SkFunction (R 0 0 0 18)] + , testSessionWait "pattern with type signature" $ do + let source = T.unlines ["{-# language ScopedTypeVariables #-}", "a :: () = ()"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbol "a :: ()" SkFunction (R 1 0 1 12)] + , testSessionWait "function" $ do + let source = T.unlines ["a _x = ()"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left [docSymbol "a" SkFunction (R 0 0 0 9)] + , testSessionWait "type synonym" $ do + let source = T.unlines ["type A = Bool"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbol' "A" SkTypeParameter (R 0 0 0 13) (R 0 5 0 6)] + , testSessionWait "datatype" $ do + let source = T.unlines ["data A = C"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [ docSymbolWithChildren "A" + SkStruct + (R 0 0 0 10) + [docSymbol "C" SkConstructor (R 0 9 0 10)] + ] + , testSessionWait "record fields" $ do + let source = T.unlines ["data A = B {", " x :: Int", " , y :: Int}"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @=? 
Left + [ docSymbolWithChildren "A" SkStruct (R 0 0 2 13) + [ docSymbolWithChildren' "B" SkConstructor (R 0 9 2 13) (R 0 9 0 10) + [ docSymbol "x" SkField (R 1 2 1 3) + , docSymbol "y" SkField (R 2 4 2 5) + ] + ] + ] + , testSessionWait "import" $ do + let source = T.unlines ["import Data.Maybe ()"] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbolWithChildren "imports" + SkModule + (R 0 0 0 20) + [ docSymbol "import Data.Maybe" SkModule (R 0 0 0 20) + ] + ] + , testSessionWait "multiple import" $ do + let source = T.unlines ["", "import Data.Maybe ()", "", "import Control.Exception ()", ""] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left + [docSymbolWithChildren "imports" + SkModule + (R 1 0 3 27) + [ docSymbol "import Data.Maybe" SkModule (R 1 0 1 20) + , docSymbol "import Control.Exception" SkModule (R 3 0 3 27) + ] + ] + , testSessionWait "foreign import" $ do + let source = T.unlines + [ "{-# language ForeignFunctionInterface #-}" + , "foreign import ccall \"a\" a :: Int" + ] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left [docSymbolD "a" "import" SkObject (R 1 0 1 33)] + , testSessionWait "foreign export" $ do + let source = T.unlines + [ "{-# language ForeignFunctionInterface #-}" + , "foreign export ccall odd :: Int -> Bool" + ] + docId <- createDoc "A.hs" "haskell" source + symbols <- getDocumentSymbols docId + liftIO $ symbols @?= Left [docSymbolD "odd" "export" SkObject (R 1 0 1 39)] + ] + where + docSymbol name kind loc = + DocumentSymbol name Nothing kind Nothing loc loc Nothing + docSymbol' name kind loc selectionLoc = + DocumentSymbol name Nothing kind Nothing loc selectionLoc Nothing + docSymbolD name detail kind loc = + DocumentSymbol name (Just detail) kind Nothing loc loc Nothing + docSymbolWithChildren name kind loc cc = + DocumentSymbol name Nothing kind Nothing loc loc (Just $ List cc) + docSymbolWithChildren' name kind loc selectionLoc cc = + DocumentSymbol name Nothing kind Nothing loc selectionLoc (Just $ List cc) + moduleSymbol name loc cc = DocumentSymbol name + Nothing + SkFile + Nothing + (R 0 0 maxBound 0) + loc + (Just $ List cc) + classSymbol name loc cc = DocumentSymbol name + (Just "class") + SkClass + Nothing + loc + loc + (Just $ List cc) + +pattern R :: Int -> Int -> Int -> Int -> Range +pattern R x y x' y' = Range (Position x y) (Position x' y') + +xfail :: TestTree -> String -> TestTree +xfail = flip expectFailBecause + +ignoreTest8101 :: String -> TestTree -> TestTree +ignoreTest8101 + | GHC_API_VERSION == ("8.10.1" :: String) = ignoreTestBecause + | otherwise = const id + +ignoreInWindowsBecause :: String -> TestTree -> TestTree +ignoreInWindowsBecause = if isWindows then ignoreTestBecause else (\_ x -> x) + +ignoreInWindowsForGHC88And810 :: TestTree -> TestTree +#if MIN_GHC_API_VERSION(8,8,1) && !MIN_GHC_API_VERSION(9,0,0) +ignoreInWindowsForGHC88And810 = + ignoreInWindowsBecause "tests are unreliable in windows for ghc 8.8 and 8.10" +#else +ignoreInWindowsForGHC88And810 = id +#endif + +ignoreInWindowsForGHC88 :: TestTree -> TestTree +#if MIN_GHC_API_VERSION(8,8,1) && !MIN_GHC_API_VERSION(8,10,1) +ignoreInWindowsForGHC88 = + ignoreInWindowsBecause "tests are unreliable in windows for ghc 8.8" +#else +ignoreInWindowsForGHC88 = id +#endif + +data Expect + = ExpectRange Range -- Both gotoDef and hover should report this range + | ExpectLocation Location 
+-- | ExpectDefRange Range -- Only gotoDef should report this range + | ExpectHoverRange Range -- Only hover should report this range + | ExpectHoverText [T.Text] -- the hover message must contain these snippets + | ExpectExternFail -- definition lookup in other file expected to fail + | ExpectNoDefinitions + | ExpectNoHover +-- | ExpectExtern -- TODO: as above, but expected to succeed: need some more info in here, once we have some working examples + deriving Eq + +mkR :: Int -> Int -> Int -> Int -> Expect +mkR startLine startColumn endLine endColumn = ExpectRange $ mkRange startLine startColumn endLine endColumn + +mkL :: Uri -> Int -> Int -> Int -> Int -> Expect +mkL uri startLine startColumn endLine endColumn = ExpectLocation $ Location uri $ mkRange startLine startColumn endLine endColumn + +haddockTests :: TestTree +haddockTests + = testGroup "haddock" + [ testCase "Num" $ checkHaddock + (unlines + [ "However, '(+)' and '(*)' are" + , "customarily expected to define a ring and have the following properties:" + , "" + , "[__Associativity of (+)__]: @(x + y) + z@ = @x + (y + z)@" + , "[__Commutativity of (+)__]: @x + y@ = @y + x@" + , "[__@fromInteger 0@ is the additive identity__]: @x + fromInteger 0@ = @x@" + ] + ) + (unlines + [ "" + , "" + , "However, `(+)` and `(*)` are" + , "customarily expected to define a ring and have the following properties: " + , "+ ****Associativity of (+)****: `(x + y) + z` = `x + (y + z)`" + , "+ ****Commutativity of (+)****: `x + y` = `y + x`" + , "+ ****`fromInteger 0` is the additive identity****: `x + fromInteger 0` = `x`" + ] + ) + , testCase "unsafePerformIO" $ checkHaddock + (unlines + [ "may require" + , "different precautions:" + , "" + , " * Use @{\\-\\# NOINLINE foo \\#-\\}@ as a pragma on any function @foo@" + , " that calls 'unsafePerformIO'. If the call is inlined," + , " the I\\/O may be performed more than once." + , "" + , " * Use the compiler flag @-fno-cse@ to prevent common sub-expression" + , " elimination being performed on the module." + , "" + ] + ) + (unlines + [ "" + , "" + , "may require" + , "different precautions: " + , "+ Use `{-# NOINLINE foo #-}` as a pragma on any function `foo` " + , " that calls `unsafePerformIO` . If the call is inlined," + , " the I/O may be performed more than once." + , "" + , "+ Use the compiler flag `-fno-cse` to prevent common sub-expression" + , " elimination being performed on the module." 
+ , "" + ] + ) + ] + where + checkHaddock s txt = spanDocToMarkdownForTest s @?= txt + +cradleTests :: TestTree +cradleTests = testGroup "cradle" + [testGroup "dependencies" [sessionDepsArePickedUp] + ,testGroup "ignore-fatal" [ignoreFatalWarning] + ,testGroup "loading" [loadCradleOnlyonce] + ,testGroup "multi" [simpleMultiTest, simpleMultiTest2] + ,testGroup "sub-directory" [simpleSubDirectoryTest] + ] + +loadCradleOnlyonce :: TestTree +loadCradleOnlyonce = testGroup "load cradle only once" + [ testSession' "implicit" implicit + , testSession' "direct" direct + ] + where + direct dir = do + liftIO $ writeFileUTF8 (dir "hie.yaml") + "cradle: {direct: {arguments: []}}" + test dir + implicit dir = test dir + test _dir = do + doc <- createDoc "B.hs" "haskell" "module B where\nimport Data.Foo" + msgs <- someTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message @PublishDiagnosticsNotification)) + liftIO $ length msgs @?= 1 + changeDoc doc [TextDocumentContentChangeEvent Nothing Nothing "module B where\nimport Data.Maybe"] + msgs <- manyTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message @PublishDiagnosticsNotification)) + liftIO $ length msgs @?= 0 + _ <- createDoc "A.hs" "haskell" "module A where\nimport LoadCradleBar" + msgs <- manyTill (skipManyTill anyMessage cradleLoadedMessage) (skipManyTill anyMessage (message @PublishDiagnosticsNotification)) + liftIO $ length msgs @?= 0 + + +dependentFileTest :: TestTree +dependentFileTest = testGroup "addDependentFile" + [testGroup "file-changed" [ignoreInWindowsForGHC88 $ testSession' "test" test] + ] + where + test dir = do + -- If the file contains B then no type error + -- otherwise type error + liftIO $ writeFile (dir "dep-file.txt") "A" + let fooContent = T.unlines + [ "{-# LANGUAGE TemplateHaskell #-}" + , "module Foo where" + , "import Language.Haskell.TH.Syntax" + , "foo :: Int" + , "foo = 1 + $(do" + , " qAddDependentFile \"dep-file.txt\"" + , " f <- qRunIO (readFile \"dep-file.txt\")" + , " if f == \"B\" then [| 1 |] else lift f)" + ] + let bazContent = T.unlines ["module Baz where", "import Foo ()"] + _ <-createDoc "Foo.hs" "haskell" fooContent + doc <- createDoc "Baz.hs" "haskell" bazContent + expectDiagnostics + [("Foo.hs", [(DsError, (4, 6), "Couldn't match expected type")])] + -- Now modify the dependent file + liftIO $ writeFile (dir "dep-file.txt") "B" + let change = TextDocumentContentChangeEvent + { _range = Just (Range (Position 2 0) (Position 2 6)) + , _rangeLength = Nothing + , _text = "f = ()" + } + -- Modifying Baz will now trigger Foo to be rebuilt as well + changeDoc doc [change] + expectDiagnostics [("Foo.hs", [])] + + +cradleLoadedMessage :: Session FromServerMessage +cradleLoadedMessage = satisfy $ \case + NotCustomServer (NotificationMessage _ (CustomServerMethod m) _) -> m == cradleLoadedMethod + _ -> False + +cradleLoadedMethod :: T.Text +cradleLoadedMethod = "ghcide/cradle/loaded" + +ignoreFatalWarning :: TestTree +ignoreFatalWarning = testCase "ignore-fatal-warning" $ runWithExtraFiles "ignore-fatal" $ \dir -> do + let srcPath = dir "IgnoreFatal.hs" + src <- liftIO $ readFileUtf8 srcPath + _ <- createDoc srcPath "haskell" src + expectNoMoreDiagnostics 5 + +simpleSubDirectoryTest :: TestTree +simpleSubDirectoryTest = + testCase "simple-subdirectory" $ runWithExtraFiles "cabal-exe" $ \dir -> do + let mainPath = dir "a/src/Main.hs" + mainSource <- liftIO $ readFileUtf8 mainPath + _mdoc <- createDoc mainPath "haskell" mainSource + expectDiagnosticsWithTags + 
[("a/src/Main.hs", [(DsWarning,(2,0), "Top-level binding", Nothing)]) -- So that we know P has been loaded + ] + expectNoMoreDiagnostics 0.5 + +simpleMultiTest :: TestTree +simpleMultiTest = testCase "simple-multi-test" $ runWithExtraFiles "multi" $ \dir -> do + let aPath = dir "a/A.hs" + bPath = dir "b/B.hs" + aSource <- liftIO $ readFileUtf8 aPath + (TextDocumentIdentifier adoc) <- createDoc aPath "haskell" aSource + expectNoMoreDiagnostics 0.5 + bSource <- liftIO $ readFileUtf8 bPath + bdoc <- createDoc bPath "haskell" bSource + expectNoMoreDiagnostics 0.5 + locs <- getDefinitions bdoc (Position 2 7) + let fooL = mkL adoc 2 0 2 3 + checkDefs locs (pure [fooL]) + expectNoMoreDiagnostics 0.5 + +-- Like simpleMultiTest but open the files in the other order +simpleMultiTest2 :: TestTree +simpleMultiTest2 = testCase "simple-multi-test2" $ runWithExtraFiles "multi" $ \dir -> do + let aPath = dir "a/A.hs" + bPath = dir "b/B.hs" + bSource <- liftIO $ readFileUtf8 bPath + bdoc <- createDoc bPath "haskell" bSource + expectNoMoreDiagnostics 10 + aSource <- liftIO $ readFileUtf8 aPath + (TextDocumentIdentifier adoc) <- createDoc aPath "haskell" aSource + -- Need to have some delay here or the test fails + expectNoMoreDiagnostics 10 + locs <- getDefinitions bdoc (Position 2 7) + let fooL = mkL adoc 2 0 2 3 + checkDefs locs (pure [fooL]) + expectNoMoreDiagnostics 0.5 + +ifaceTests :: TestTree +ifaceTests = testGroup "Interface loading tests" + [ -- https://github.com/haskell/ghcide/pull/645/ + ifaceErrorTest + , ifaceErrorTest2 + , ifaceErrorTest3 + , ifaceTHTest + ] + +bootTests :: TestTree +bootTests = testCase "boot-def-test" $ runWithExtraFiles "boot" $ \dir -> do + let cPath = dir "C.hs" + cSource <- liftIO $ readFileUtf8 cPath + + -- Dirty the cache + liftIO $ runInDir dir $ do + cDoc <- createDoc cPath "haskell" cSource + _ <- getHover cDoc $ Position 4 3 + closeDoc cDoc + + cdoc <- createDoc cPath "haskell" cSource + locs <- getDefinitions cdoc (Position 7 4) + let floc = mkR 7 0 7 1 + checkDefs locs (pure [floc]) + +-- | test that TH reevaluates across interfaces +ifaceTHTest :: TestTree +ifaceTHTest = testCase "iface-th-test" $ runWithExtraFiles "TH" $ \dir -> do + let aPath = dir "THA.hs" + bPath = dir "THB.hs" + cPath = dir "THC.hs" + + aSource <- liftIO $ readFileUtf8 aPath -- [TH] a :: () + _bSource <- liftIO $ readFileUtf8 bPath -- a :: () + cSource <- liftIO $ readFileUtf8 cPath -- c = a :: () + + cdoc <- createDoc cPath "haskell" cSource + + -- Change [TH]a from () to Bool + liftIO $ writeFileUTF8 aPath (unlines $ init (lines $ T.unpack aSource) ++ ["th_a = [d| a = False|]"]) + + -- Check that the change propogates to C + changeDoc cdoc [TextDocumentContentChangeEvent Nothing Nothing cSource] + expectDiagnostics + [("THC.hs", [(DsError, (4, 4), "Couldn't match expected type '()' with actual type 'Bool'")]) + ,("THB.hs", [(DsWarning, (4,0), "Top-level binding")])] + closeDoc cdoc + +ifaceErrorTest :: TestTree +ifaceErrorTest = testCase "iface-error-test-1" $ runWithExtraFiles "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + expectDiagnostics + [("P.hs", [(DsWarning,(4,0), "Top-level binding")])] -- So what we know P has been loaded + + -- Change y from Int to B + changeDoc bdoc [TextDocumentContentChangeEvent Nothing Nothing $ T.unlines ["module B where", "y :: Bool", "y = undefined"]] + -- save so that we 
can that the error propogates to A + sendNotification TextDocumentDidSave (DidSaveTextDocumentParams bdoc) + + -- Check that the error propogates to A + expectDiagnostics + [("A.hs", [(DsError, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")])] + + + -- Check that we wrote the interfaces for B when we saved + lid <- sendRequest (CustomClientMethod "hidir") $ GetInterfaceFilesDir bPath + res <- skipManyTill anyMessage $ responseForId lid + liftIO $ case res of + ResponseMessage{_result=Right hidir} -> do + hi_exists <- doesFileExist $ hidir "B.hi" + assertBool ("Couldn't find B.hi in " ++ hidir) hi_exists + _ -> assertFailure $ "Got malformed response for CustomMessage hidir: " ++ show res + + pdoc <- createDoc pPath "haskell" pSource + changeDoc pdoc [TextDocumentContentChangeEvent Nothing Nothing $ pSource <> "\nfoo = y :: Bool" ] + -- Now in P we have + -- bar = x :: Int + -- foo = y :: Bool + -- HOWEVER, in A... + -- x = y :: Int + -- This is clearly inconsistent, and the expected outcome a bit surprising: + -- - The diagnostic for A has already been received. Ghcide does not repeat diagnostics + -- - P is being typechecked with the last successful artifacts for A. + expectDiagnostics + [("P.hs", [(DsWarning,(4,0), "Top-level binding")]) + ,("P.hs", [(DsWarning,(6,0), "Top-level binding")]) + ] + expectNoMoreDiagnostics 2 + +ifaceErrorTest2 :: TestTree +ifaceErrorTest2 = testCase "iface-error-test-2" $ runWithExtraFiles "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + pdoc <- createDoc pPath "haskell" pSource + expectDiagnostics + [("P.hs", [(DsWarning,(4,0), "Top-level binding")])] -- So that we know P has been loaded + + -- Change y from Int to B + changeDoc bdoc [TextDocumentContentChangeEvent Nothing Nothing $ T.unlines ["module B where", "y :: Bool", "y = undefined"]] + + -- Add a new definition to P + changeDoc pdoc [TextDocumentContentChangeEvent Nothing Nothing $ pSource <> "\nfoo = y :: Bool" ] + -- Now in P we have + -- bar = x :: Int + -- foo = y :: Bool + -- HOWEVER, in A... 
+ -- x = y :: Int + expectDiagnostics + -- As in the other test, P is being typechecked with the last successful artifacts for A + -- (ot thanks to -fdeferred-type-errors) + [("A.hs", [(DsError, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) + ,("P.hs", [(DsWarning, (4, 0), "Top-level binding")]) + ,("P.hs", [(DsWarning, (6, 0), "Top-level binding")]) + ] + + expectNoMoreDiagnostics 2 + +ifaceErrorTest3 :: TestTree +ifaceErrorTest3 = testCase "iface-error-test-3" $ runWithExtraFiles "recomp" $ \dir -> do + let bPath = dir "B.hs" + pPath = dir "P.hs" + + bSource <- liftIO $ readFileUtf8 bPath -- y :: Int + pSource <- liftIO $ readFileUtf8 pPath -- bar = x :: Int + + bdoc <- createDoc bPath "haskell" bSource + + -- Change y from Int to B + changeDoc bdoc [TextDocumentContentChangeEvent Nothing Nothing $ T.unlines ["module B where", "y :: Bool", "y = undefined"]] + + -- P should not typecheck, as there are no last valid artifacts for A + _pdoc <- createDoc pPath "haskell" pSource + + -- In this example the interface file for A should not exist (modulo the cache folder) + -- Despite that P still type checks, as we can generate an interface file for A thanks to -fdeferred-type-errors + expectDiagnostics + [("A.hs", [(DsError, (5, 4), "Couldn't match expected type 'Int' with actual type 'Bool'")]) + ,("P.hs", [(DsWarning,(4,0), "Top-level binding")]) + ] + expectNoMoreDiagnostics 2 + +sessionDepsArePickedUp :: TestTree +sessionDepsArePickedUp = testSession' + "session-deps-are-picked-up" + $ \dir -> do + liftIO $ + writeFileUTF8 + (dir "hie.yaml") + "cradle: {direct: {arguments: []}}" + -- Open without OverloadedStrings and expect an error. + doc <- createDoc "Foo.hs" "haskell" fooContent + expectDiagnostics + [("Foo.hs", [(DsError, (3, 6), "Couldn't match expected type")])] + -- Update hie.yaml to enable OverloadedStrings. + liftIO $ + writeFileUTF8 + (dir "hie.yaml") + "cradle: {direct: {arguments: [-XOverloadedStrings]}}" + -- Send change event. + let change = + TextDocumentContentChangeEvent + { _range = Just (Range (Position 4 0) (Position 4 0)), + _rangeLength = Nothing, + _text = "\n" + } + changeDoc doc [change] + -- Now no errors. + expectDiagnostics [("Foo.hs", [])] + where + fooContent = + T.unlines + [ "module Foo where", + "import Data.Text", + "foo :: Text", + "foo = \"hello\"" + ] + +-- A test to ensure that the command line ghcide workflow stays working +nonLspCommandLine :: TestTree +nonLspCommandLine = testGroup "ghcide command line" + [ testCase "works" $ withTempDir $ \dir -> do + ghcide <- locateGhcideExecutable + copyTestDataFiles dir "multi" + let cmd = (proc ghcide ["a/A.hs"]){cwd = Just dir} + + setEnv "HOME" "/homeless-shelter" False + + (ec, _, _) <- readCreateProcessWithExitCode cmd "" + + ec @=? ExitSuccess + ] + +benchmarkTests :: TestTree +benchmarkTests = + let ?config = Bench.defConfig + { Bench.verbosity = Bench.Quiet + , Bench.repetitions = Just 3 + , Bench.buildTool = Bench.Cabal + } in + withResource Bench.setup Bench.cleanUp $ \getResource -> testGroup "benchmark experiments" + [ testCase (Bench.name e) $ do + Bench.SetupResult{Bench.benchDir} <- getResource + res <- Bench.runBench (runInDir benchDir) e + assertBool "did not successfully complete 5 repetitions" $ Bench.success res + | e <- Bench.experiments + , Bench.name e /= "edit" -- the edit experiment does not ever fail + ] + +-- | checks if we use InitializeParams.rootUri for loading session +rootUriTests :: TestTree +rootUriTests = testCase "use rootUri" . 
runTest "dirA" "dirB" $ \dir -> do + let bPath = dir "dirB/Foo.hs" + liftIO $ copyTestDataFiles dir "rootUri" + bSource <- liftIO $ readFileUtf8 bPath + _ <- createDoc "Foo.hs" "haskell" bSource + expectNoMoreDiagnostics 0.5 + where + -- similar to run' except we can configure where to start ghcide and session + runTest :: FilePath -> FilePath -> (FilePath -> Session ()) -> IO () + runTest dir1 dir2 s = withTempDir $ \dir -> runInDir' dir dir1 dir2 [] (s dir) + +-- | Test if ghcide asynchronously handles Commands and user Requests +asyncTests :: TestTree +asyncTests = testGroup "async" + [ + testSession "command" $ do + -- Execute a command that will block forever + let req = ExecuteCommandParams blockCommandId Nothing Nothing + void $ sendRequest WorkspaceExecuteCommand req + -- Load a file and check for code actions. Will only work if the command is run asynchronously + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "foo = id" + ] + void waitForDiagnostics + actions <- getCodeActions doc (Range (Position 1 0) (Position 1 0)) + liftIO $ [ _title | CACodeAction CodeAction{_title} <- actions] @=? ["add signature: foo :: a -> a"] + , testSession "request" $ do + -- Execute a custom request that will block for 1000 seconds + void $ sendRequest (CustomClientMethod "test") $ BlockSeconds 1000 + -- Load a file and check for code actions. Will only work if the request is run asynchronously + doc <- createDoc "A.hs" "haskell" $ T.unlines + [ "{-# OPTIONS -Wmissing-signatures #-}" + , "foo = id" + ] + void waitForDiagnostics + actions <- getCodeActions doc (Range (Position 0 0) (Position 0 0)) + liftIO $ [ _title | CACodeAction CodeAction{_title} <- actions] @=? ["add signature: foo :: a -> a"] + ] + + +clientSettingsTest :: TestTree +clientSettingsTest = testGroup "client settings handling" + [ + testSession "ghcide does not support update config" $ do + sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON ("" :: String))) + logNot <- skipManyTill anyMessage loggingNotification + isMessagePresent "Updating Not supported" [getLogMessage logNot] + , testSession "ghcide restarts shake session on config changes" $ do + void $ skipManyTill anyMessage $ message @RegisterCapabilityRequest + sendNotification WorkspaceDidChangeConfiguration (DidChangeConfigurationParams (toJSON ("" :: String))) + nots <- skipManyTill anyMessage $ count 3 loggingNotification + isMessagePresent "Restarting build session" (map getLogMessage nots) + + ] + where getLogMessage (NotLogMessage (NotificationMessage _ _ (LogMessageParams _ msg))) = msg + getLogMessage _ = "" + + isMessagePresent expectedMsg actualMsgs = liftIO $ + assertBool ("\"" ++ expectedMsg ++ "\" is not present in: " ++ show actualMsgs) + (any ((expectedMsg `isSubsequenceOf`) . show) actualMsgs) +---------------------------------------------------------------------- +-- Utils +---------------------------------------------------------------------- + +testSession :: String -> Session () -> TestTree +testSession name = testCase name . run + +testSessionWithExtraFiles :: FilePath -> String -> (FilePath -> Session ()) -> TestTree +testSessionWithExtraFiles prefix name = testCase name . runWithExtraFiles prefix + +testSession' :: String -> (FilePath -> Session ()) -> TestTree +testSession' name = testCase name . run' + +testSessionWait :: String -> Session () -> TestTree +testSessionWait name = testSession name . 
+ -- Check that any diagnostics produced were already consumed by the test case. + -- + -- If in future we add test cases where we don't care about checking the diagnostics, + -- this could move elsewhere. + -- + -- Experimentally, 0.5s seems to be long enough to wait for any final diagnostics to appear. + ( >> expectNoMoreDiagnostics 0.5) + +pickActionWithTitle :: T.Text -> [CAResult] -> IO CodeAction +pickActionWithTitle title actions = do + assertBool ("Found no matching actions for " <> show title <> " in " <> show titles) (not $ null matches) + return $ head matches + where + titles = + [ actionTitle + | CACodeAction CodeAction { _title = actionTitle } <- actions + ] + matches = + [ action + | CACodeAction action@CodeAction { _title = actionTitle } <- actions + , title == actionTitle + ] + +mkRange :: Int -> Int -> Int -> Int -> Range +mkRange a b c d = Range (Position a b) (Position c d) + +run :: Session a -> IO a +run s = run' (const s) + +runWithExtraFiles :: FilePath -> (FilePath -> Session a) -> IO a +runWithExtraFiles prefix s = withTempDir $ \dir -> do + copyTestDataFiles dir prefix + runInDir dir (s dir) + +copyTestDataFiles :: FilePath -> FilePath -> IO () +copyTestDataFiles dir prefix = do + -- Copy all the test data files to the temporary workspace + testDataFiles <- getDirectoryFilesIO ("test/data" prefix) ["//*"] + for_ testDataFiles $ \f -> do + createDirectoryIfMissing True $ dir takeDirectory f + copyFile ("test/data" prefix f) (dir f) + +run' :: (FilePath -> Session a) -> IO a +run' s = withTempDir $ \dir -> runInDir dir (s dir) + +runInDir :: FilePath -> Session a -> IO a +runInDir dir = runInDir' dir "." "." [] + +-- | Takes a directory as well as relative paths to where we should launch the executable as well as the session root. +runInDir' :: FilePath -> FilePath -> FilePath -> [String] -> Session a -> IO a +runInDir' dir startExeIn startSessionIn extraOptions s = do + ghcideExe <- locateGhcideExecutable + let startDir = dir startExeIn + let projDir = dir startSessionIn + + createDirectoryIfMissing True startDir + createDirectoryIfMissing True projDir + -- Temporarily hack around https://github.com/mpickering/hie-bios/pull/56 + -- since the package import test creates "Data/List.hs", which otherwise has no physical home + createDirectoryIfMissing True $ projDir ++ "/Data" + + let cmd = unwords $ + [ghcideExe, "--lsp", "--test", "--verbose", "-j2", "--cwd", startDir] ++ extraOptions + -- HIE calls getXgdDirectory which assumes that HOME is set. + -- Only sets HOME if it wasn't already set. 
+ setEnv "HOME" "/homeless-shelter" False + let lspTestCaps = fullCaps { _window = Just $ WindowClientCapabilities $ Just True } + logColor <- fromMaybe True <$> checkEnv "LSP_TEST_LOG_COLOR" + runSessionWithConfig conf{logColor} cmd lspTestCaps projDir s + where + checkEnv :: String -> IO (Maybe Bool) + checkEnv s = fmap convertVal <$> getEnv s + convertVal "0" = False + convertVal _ = True + + conf = defaultConfig + -- uncomment this or set LSP_TEST_LOG_STDERR=1 to see all logging + -- { logStdErr = True } + -- uncomment this or set LSP_TEST_LOG_MESSAGES=1 to see all messages + -- { logMessages = True } + +openTestDataDoc :: FilePath -> Session TextDocumentIdentifier +openTestDataDoc path = do + source <- liftIO $ readFileUtf8 $ "test/data" path + createDoc path "haskell" source + +findCodeActions :: TextDocumentIdentifier -> Range -> [T.Text] -> Session [CodeAction] +findCodeActions = findCodeActions' (==) "is not a superset of" + +findCodeActionsByPrefix :: TextDocumentIdentifier -> Range -> [T.Text] -> Session [CodeAction] +findCodeActionsByPrefix = findCodeActions' T.isPrefixOf "is not prefix of" + +findCodeActions' :: (T.Text -> T.Text -> Bool) -> String -> TextDocumentIdentifier -> Range -> [T.Text] -> Session [CodeAction] +findCodeActions' op errMsg doc range expectedTitles = do + actions <- getCodeActions doc range + let matches = sequence + [ listToMaybe + [ action + | CACodeAction action@CodeAction { _title = actionTitle } <- actions + , expectedTitle `op` actionTitle] + | expectedTitle <- expectedTitles] + let msg = show + [ actionTitle + | CACodeAction CodeAction { _title = actionTitle } <- actions + ] + ++ " " <> errMsg <> " " + ++ show expectedTitles + liftIO $ case matches of + Nothing -> assertFailure msg + Just _ -> pure () + return (fromJust matches) + +findCodeAction :: TextDocumentIdentifier -> Range -> T.Text -> Session CodeAction +findCodeAction doc range t = head <$> findCodeActions doc range [t] + +unitTests :: TestTree +unitTests = do + testGroup "Unit" + [ testCase "empty file path does NOT work with the empty String literal" $ + uriToFilePath' (fromNormalizedUri $ filePathToUri' "") @?= Just "." 
+ , testCase "empty file path works using toNormalizedFilePath'" $ + uriToFilePath' (fromNormalizedUri $ filePathToUri' (toNormalizedFilePath' "")) @?= Just "" + , testCase "empty path URI" $ do + Just URI{..} <- pure $ parseURI (T.unpack $ getUri $ fromNormalizedUri emptyPathUri) + uriScheme @?= "file:" + uriPath @?= "" + , testCase "from empty path URI" $ do + let uri = Uri "file://" + uriToFilePath' uri @?= Just "" + , testCase "Key with empty file path roundtrips via Binary" $ + Binary.decode (Binary.encode (Q ((), emptyFilePath))) @?= Q ((), emptyFilePath) + , testCase "showDiagnostics prints ranges 1-based (like vscode)" $ do + let diag = ("", Diagnostics.ShowDiag, Diagnostic + { _range = Range + { _start = Position{_line = 0, _character = 1} + , _end = Position{_line = 2, _character = 3} + } + , _severity = Nothing + , _code = Nothing + , _source = Nothing + , _message = "" + , _relatedInformation = Nothing + , _tags = Nothing + }) + let shown = T.unpack (Diagnostics.showDiagnostics [diag]) + let expected = "1:2-3:4" + assertBool (unwords ["expected to find range", expected, "in diagnostic", shown]) $ + expected `isInfixOf` shown + ] + +positionMappingTests :: TestTree +positionMappingTests = + testGroup "position mapping" + [ testGroup "toCurrent" + [ testCase "before" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 0) @?= PositionExact (Position 0 0) + , testCase "after, same line, same length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 3) @?= PositionExact (Position 0 3) + , testCase "after, same line, increased length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 0 3) @?= PositionExact (Position 0 4) + , testCase "after, same line, decreased length" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "a" + (Position 0 3) @?= PositionExact (Position 0 2) + , testCase "after, next line, no newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 1 3) @?= PositionExact (Position 1 3) + , testCase "after, next line, newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\ndef" + (Position 1 0) @?= PositionExact (Position 2 0) + , testCase "after, same line, newline" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd" + (Position 0 4) @?= PositionExact (Position 1 2) + , testCase "after, same line, newline + newline at end" $ + toCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd\n" + (Position 0 4) @?= PositionExact (Position 2 1) + , testCase "after, same line, newline + newline at end" $ + toCurrent + (Range (Position 0 1) (Position 0 1)) + "abc" + (Position 0 1) @?= PositionExact (Position 0 4) + ] + , testGroup "fromCurrent" + [ testCase "before" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 0) @?= PositionExact (Position 0 0) + , testCase "after, same line, same length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "ab" + (Position 0 3) @?= PositionExact (Position 0 3) + , testCase "after, same line, increased length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 0 4) @?= PositionExact (Position 0 3) + , testCase "after, same line, decreased length" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "a" + (Position 0 2) @?= PositionExact (Position 0 3) + , testCase "after, next line, no newline" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc" + (Position 1 3) @?= PositionExact (Position 1 3) + , testCase "after, next line, newline" $ 
+ fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\ndef" + (Position 2 0) @?= PositionExact (Position 1 0) + , testCase "after, same line, newline" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd" + (Position 1 2) @?= PositionExact (Position 0 4) + , testCase "after, same line, newline + newline at end" $ + fromCurrent + (Range (Position 0 1) (Position 0 3)) + "abc\nd\n" + (Position 2 1) @?= PositionExact (Position 0 4) + , testCase "after, same line, newline + newline at end" $ + fromCurrent + (Range (Position 0 1) (Position 0 1)) + "abc" + (Position 0 4) @?= PositionExact (Position 0 1) + ] + , adjustOption (\(QuickCheckTests i) -> QuickCheckTests (max 1000 i)) $ testGroup "properties" + [ testProperty "fromCurrent r t <=< toCurrent r t" $ do + -- Note that it is important to use suchThatMap on all values at once + -- instead of only using it on the position. Otherwise you can get + -- into situations where there is no position that can be mapped back + -- for the edit which will result in QuickCheck looping forever. + let gen = do + rope <- genRope + range <- genRange rope + PrintableText replacement <- arbitrary + oldPos <- genPosition rope + pure (range, replacement, oldPos) + forAll + (suchThatMap gen + (\(range, replacement, oldPos) -> positionResultToMaybe $ (range, replacement, oldPos,) <$> toCurrent range replacement oldPos)) $ + \(range, replacement, oldPos, newPos) -> + fromCurrent range replacement newPos === PositionExact oldPos + , testProperty "toCurrent r t <=< fromCurrent r t" $ do + let gen = do + rope <- genRope + range <- genRange rope + PrintableText replacement <- arbitrary + let newRope = applyChange rope (TextDocumentContentChangeEvent (Just range) Nothing replacement) + newPos <- genPosition newRope + pure (range, replacement, newPos) + forAll + (suchThatMap gen + (\(range, replacement, newPos) -> positionResultToMaybe $ (range, replacement, newPos,) <$> fromCurrent range replacement newPos)) $ + \(range, replacement, newPos, oldPos) -> + toCurrent range replacement oldPos === PositionExact newPos + ] + ] + +newtype PrintableText = PrintableText { getPrintableText :: T.Text } + deriving Show + +instance Arbitrary PrintableText where + arbitrary = PrintableText . T.pack . getPrintableString <$> arbitrary + + +genRope :: Gen Rope +genRope = Rope.fromText . getPrintableText <$> arbitrary + +genPosition :: Rope -> Gen Position +genPosition r = do + row <- choose (0, max 0 $ rows - 1) + let columns = Rope.columns (nthLine row r) + column <- choose (0, max 0 $ columns - 1) + pure $ Position row column + where rows = Rope.rows r + +genRange :: Rope -> Gen Range +genRange r = do + startPos@(Position startLine startColumn) <- genPosition r + let maxLineDiff = max 0 $ rows - 1 - startLine + endLine <- choose (startLine, startLine + maxLineDiff) + let columns = Rope.columns (nthLine endLine r) + endColumn <- + if startLine == endLine + then choose (startColumn, columns) + else choose (0, max 0 $ columns - 1) + pure $ Range startPos (Position endLine endColumn) + where rows = Rope.rows r + +-- | Get the ith line of a rope, starting from 0. Trailing newline not included. 
+nthLine :: Int -> Rope -> Rope +nthLine i r + | i < 0 = error $ "Negative line number: " <> show i + | i == 0 && Rope.rows r == 0 = r + | i >= Rope.rows r = error $ "Row number out of bounds: " <> show i <> "/" <> show (Rope.rows r) + | otherwise = Rope.takeWhile (/= '\n') $ fst $ Rope.splitAtLine 1 $ snd $ Rope.splitAtLine (i - 1) r + +getWatchedFilesSubscriptionsUntil :: forall end . (FromJSON end, Typeable end) => Session [Maybe Value] +getWatchedFilesSubscriptionsUntil = do + msgs <- manyTill (Just <$> message @RegisterCapabilityRequest <|> Nothing <$ anyMessage) (message @end) + return + [ args + | Just RequestMessage{_params = RegistrationParams (List regs)} <- msgs + , Registration _id WorkspaceDidChangeWatchedFiles args <- regs + ] + +-- | Version of 'System.IO.Extra.withTempDir' that canonicalizes the path +-- Which we need to do on macOS since the $TMPDIR can be in @/private/var@ or +-- @/var@ +withTempDir :: (FilePath -> IO a) -> IO a +withTempDir f = System.IO.Extra.withTempDir $ \dir -> do + dir' <- canonicalizePath dir + f dir' diff --git a/ghcide/test/manual/lhs/Bird.lhs b/ghcide/test/manual/lhs/Bird.lhs new file mode 100644 index 00000000000..a9ed4e2a57d --- /dev/null +++ b/ghcide/test/manual/lhs/Bird.lhs @@ -0,0 +1,19 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +\subsection{Bird-style LHS} + +> module Bird +> ( +> fly +> ) where + + + +what birds are able to do: + +> fly :: IO () +> fly = putStrLn "birds fly." + + diff --git a/ghcide/test/manual/lhs/Main.hs b/ghcide/test/manual/lhs/Main.hs new file mode 100644 index 00000000000..518912e2d6c --- /dev/null +++ b/ghcide/test/manual/lhs/Main.hs @@ -0,0 +1,12 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + +module Main + ( + main + ) where + +import Test (main) + + + diff --git a/ghcide/test/manual/lhs/Test.lhs b/ghcide/test/manual/lhs/Test.lhs new file mode 100644 index 00000000000..0e30d25a01c --- /dev/null +++ b/ghcide/test/manual/lhs/Test.lhs @@ -0,0 +1,36 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. +-- SPDX-License-Identifier: Apache-2.0 + + +\subsection{Testing LHS} + +\begin{code} +{-# LANGUAGE CPP #-} + +module Test + ( + main + ) where + + +import Bird + +\end{code} + +for this file, \emph{hlint} should be turned off. +\begin{code} +{-# ANN module ("HLint: ignore" :: String) #-} +\end{code} + +our main procedure + +\begin{code} + +main :: IO () +main = do + putStrLn "hello world." + fly + +\end{code} + + diff --git a/ghcide/test/preprocessor/Main.hs b/ghcide/test/preprocessor/Main.hs new file mode 100644 index 00000000000..560f62eeb41 --- /dev/null +++ b/ghcide/test/preprocessor/Main.hs @@ -0,0 +1,10 @@ + +module Main(main) where + +import System.Environment + +main :: IO () +main = do + _:input:output:_ <- getArgs + let f = map (\x -> if x == 'x' then 'y' else x) + writeFile output . f =<< readFile input diff --git a/ghcide/test/src/Development/IDE/Test.hs b/ghcide/test/src/Development/IDE/Test.hs new file mode 100644 index 00000000000..1a10a30690e --- /dev/null +++ b/ghcide/test/src/Development/IDE/Test.hs @@ -0,0 +1,182 @@ +-- Copyright (c) 2019 The DAML Authors. All rights reserved. 
+-- SPDX-License-Identifier: Apache-2.0 + +{-# LANGUAGE DuplicateRecordFields #-} + +module Development.IDE.Test + ( Cursor + , cursorPosition + , requireDiagnostic + , diagnostic + , expectDiagnostics + , expectDiagnosticsWithTags + , expectNoMoreDiagnostics + , expectCurrentDiagnostics + , checkDiagnosticsForDoc + , canonicalizeUri + , standardizeQuotes + ,flushMessages) where + +import Control.Applicative.Combinators +import Control.Lens hiding (List) +import Control.Monad +import Control.Monad.IO.Class +import Data.Bifunctor (second) +import qualified Data.Map.Strict as Map +import qualified Data.Text as T +import Language.Haskell.LSP.Test hiding (message) +import qualified Language.Haskell.LSP.Test as LspTest +import Language.Haskell.LSP.Types +import Language.Haskell.LSP.Types.Lens as Lsp +import System.Time.Extra +import Test.Tasty.HUnit +import System.Directory (canonicalizePath) +import Data.Maybe (fromJust) + + +-- | (0-based line number, 0-based column number) +type Cursor = (Int, Int) + +cursorPosition :: Cursor -> Position +cursorPosition (line, col) = Position line col + +requireDiagnostic :: List Diagnostic -> (DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag) -> Assertion +requireDiagnostic actuals expected@(severity, cursor, expectedMsg, expectedTag) = do + unless (any match actuals) $ + assertFailure $ + "Could not find " <> show expected <> + " in " <> show actuals + where + match :: Diagnostic -> Bool + match d = + Just severity == _severity d + && cursorPosition cursor == d ^. range . start + && standardizeQuotes (T.toLower expectedMsg) `T.isInfixOf` + standardizeQuotes (T.toLower $ d ^. message) + && hasTag expectedTag (d ^. tags) + + hasTag :: Maybe DiagnosticTag -> Maybe (List DiagnosticTag) -> Bool + hasTag Nothing _ = True + hasTag (Just _) Nothing = False + hasTag (Just actualTag) (Just (List tags)) = actualTag `elem` tags + +-- |wait for @timeout@ seconds and report an assertion failure +-- if any diagnostic messages arrive in that period +expectNoMoreDiagnostics :: Seconds -> Session () +expectNoMoreDiagnostics timeout = do + -- Give any further diagnostic messages time to arrive. + liftIO $ sleep timeout + -- Send a dummy message to provoke a response from the server. + -- This guarantees that we have at least one message to + -- process, so message won't block or timeout. + void $ sendRequest (CustomClientMethod "non-existent-method") () + handleMessages + where + handleMessages = handleDiagnostic <|> handleCustomMethodResponse <|> ignoreOthers + handleDiagnostic = do + diagsNot <- LspTest.message :: Session PublishDiagnosticsNotification + let fileUri = diagsNot ^. params . uri + actual = diagsNot ^. params . diagnostics + liftIO $ assertFailure $ + "Got unexpected diagnostics for " <> show fileUri <> + " got " <> show actual + ignoreOthers = void anyMessage >> handleMessages + +handleCustomMethodResponse :: Session () +handleCustomMethodResponse = + -- the CustomClientMethod triggers a RspCustomServer + -- handle that and then exit + void (LspTest.message :: Session CustomResponse) + +flushMessages :: Session () +flushMessages = do + void $ sendRequest (CustomClientMethod "non-existent-method") () + handleCustomMethodResponse <|> ignoreOthers + where + ignoreOthers = void anyMessage >> flushMessages + +-- | It is not possible to use 'expectDiagnostics []' to assert the absence of diagnostics, +-- only that existing diagnostics have been cleared. 
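+-- For instance, @expectDiagnostics [("Foo.hs", [])]@ (as used elsewhere in this
+-- test suite) waits for a diagnostics notification with an empty list for
+-- @Foo.hs@, i.e. for the previously reported diagnostics for that file to be
+-- cleared.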
+-- +-- Rather than trying to assert the absence of diagnostics, introduce an +-- expected diagnostic (e.g. a redundant import) and assert the singleton diagnostic. +expectDiagnostics :: [(FilePath, [(DiagnosticSeverity, Cursor, T.Text)])] -> Session () +expectDiagnostics + = expectDiagnosticsWithTags + . map (second (map (\(ds, c, t) -> (ds, c, t, Nothing)))) + +unwrapDiagnostic :: PublishDiagnosticsNotification -> (Uri, List Diagnostic) +unwrapDiagnostic diagsNot = (diagsNot^.params.uri, diagsNot^.params.diagnostics) + +expectDiagnosticsWithTags :: [(String, [(DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag)])] -> Session () +expectDiagnosticsWithTags expected = do + let f = getDocUri >=> liftIO . canonicalizeUri >=> pure . toNormalizedUri + next = unwrapDiagnostic <$> skipManyTill anyMessage diagnostic + expected' <- Map.fromListWith (<>) <$> traverseOf (traverse . _1) f expected + expectDiagnosticsWithTags' next expected' + +expectDiagnosticsWithTags' :: + MonadIO m => + m (Uri, List Diagnostic) -> + Map.Map NormalizedUri [(DiagnosticSeverity, Cursor, T.Text, Maybe DiagnosticTag)] -> + m () +expectDiagnosticsWithTags' next m | null m = do + (_,actual) <- next + case actual of + List [] -> + return () + _ -> + liftIO $ assertFailure $ "Got unexpected diagnostics:" <> show actual + +expectDiagnosticsWithTags' next expected = go expected + where + go m + | Map.null m = pure () + | otherwise = do + (fileUri, actual) <- next + canonUri <- liftIO $ toNormalizedUri <$> canonicalizeUri fileUri + case Map.lookup canonUri m of + Nothing -> do + liftIO $ + assertFailure $ + "Got diagnostics for " <> show fileUri + <> " but only expected diagnostics for " + <> show (Map.keys m) + <> " got " + <> show actual + Just expected -> do + liftIO $ mapM_ (requireDiagnostic actual) expected + liftIO $ + unless (length expected == length actual) $ + assertFailure $ + "Incorrect number of diagnostics for " <> show fileUri + <> ", expected " + <> show expected + <> " but got " + <> show actual + go $ Map.delete canonUri m + +expectCurrentDiagnostics :: TextDocumentIdentifier -> [(DiagnosticSeverity, Cursor, T.Text)] -> Session () +expectCurrentDiagnostics doc expected = do + diags <- getCurrentDiagnostics doc + checkDiagnosticsForDoc doc expected diags + +checkDiagnosticsForDoc :: TextDocumentIdentifier -> [(DiagnosticSeverity, Cursor, T.Text)] -> [Diagnostic] -> Session () +checkDiagnosticsForDoc TextDocumentIdentifier {_uri} expected obtained = do + let expected' = Map.fromList [(nuri, map (\(ds, c, t) -> (ds, c, t, Nothing)) expected)] + nuri = toNormalizedUri _uri + expectDiagnosticsWithTags' (return $ (_uri, List obtained)) expected' + +canonicalizeUri :: Uri -> IO Uri +canonicalizeUri uri = filePathToUri <$> canonicalizePath (fromJust (uriToFilePath uri)) + +diagnostic :: Session PublishDiagnosticsNotification +diagnostic = LspTest.message + +standardizeQuotes :: T.Text -> T.Text +standardizeQuotes msg = let + repl '‘' = '\'' + repl '’' = '\'' + repl '`' = '\'' + repl c = c + in T.map repl msg diff --git a/haskell-language-server.cabal b/haskell-language-server.cabal index 063146793d1..3b5b9153d6f 100644 --- a/haskell-language-server.cabal +++ b/haskell-language-server.cabal @@ -395,6 +395,7 @@ test-suite func-test build-depends: , bytestring , data-default + , hspec-expectations , lens , tasty , tasty-ant-xml >=1.1.6 diff --git a/hie-cabal.yaml b/hie-cabal.yaml index 324cf6fdb60..f106c0df7cf 100644 --- a/hie-cabal.yaml +++ b/hie-cabal.yaml @@ -1,64 +1,125 @@ # This is a sample hie.yaml file for 
opening haskell-language-server -# in hie, using cabal as the build system. To use is, copy it to a -# file called 'hie.yaml' +# in hie, using cabal as the build system. +# It was autogenerated by gen-hie. +# To use is, copy it to a file called 'hie.yaml' cradle: - multi: - - path: "./test/testdata/" - config: { cradle: { none: } } + cabal: + - path: "./ghcide/src" + component: "lib:ghcide" - - path: "./" - config: - cradle: - cabal: - - path: "./test/functional/" - component: "haskell-language-server:func-test" + - path: "./ghcide/session-loader" + component: "lib:ghcide" - - path: "./test/utils/" - component: "haskell-language-server:func-test" + - path: "./ghcide/test/preprocessor/Main.hs" + component: "ghcide:exe:ghcide-test-preprocessor" - - path: "./exe/Main.hs" - component: "haskell-language-server:exe:haskell-language-server" + - path: "./ghcide/bench/hist/Main.hs" + component: "ghcide:bench:benchHist" - - path: "./exe/Arguments.hs" - component: "haskell-language-server:exe:haskell-language-server" + - path: "./ghcide/bench/lib/Main.hs" + component: "ghcide:bench:benchHist" - - path: "./plugins/default/src" - component: "haskell-language-server:exe:haskell-language-server" + - path: "./ghcide/bench/hist/Experiments/Types.hs" + component: "ghcide:bench:benchHist" - - path: "./exe/Wrapper.hs" - component: "haskell-language-server:exe:haskell-language-server-wrapper" + - path: "./ghcide/bench/lib/Experiments/Types.hs" + component: "ghcide:bench:benchHist" - - path: "./src" - component: "lib:haskell-language-server" + - path: "./ghcide/exe/Main.hs" + component: "ghcide:exe:ghcide" - - path: "./dist-newstyle/" - component: "lib:haskell-language-server" + - path: "./ghcide/exe/Arguments.hs" + component: "ghcide:exe:ghcide" - - path: "./ghcide/src" - component: "ghcide:lib:ghcide" + - path: "./ghcide/exe/Paths_ghcide.hs" + component: "ghcide:exe:ghcide" - - path: "./ghcide/exe" - component: "ghcide:exe:ghcide" + - path: "./ghcide/test/cabal" + component: "ghcide:test:ghcide-tests" - - path: "./hls-plugin-api/src" - component: "hls-plugin-api" + - path: "./ghcide/test/exe" + component: "ghcide:test:ghcide-tests" -# Plugins: + - path: "./ghcide/test/src" + component: "ghcide:test:ghcide-tests" - - path: "./plugins/hls-class-plugin/src" - component: "hls-class-plugin" + - path: "./ghcide/bench/lib" + component: "ghcide:test:ghcide-tests" - - path: "./plugins/tactics/src" - component: "hls-tactics-plugin:lib:hls-tactics-plugin" + - path: "./ghcide/bench/lib/Main.hs" + component: "ghcide:exe:ghcide-bench" - - path: "./plugins/tactics/test" - component: "hls-tactics-plugin:test:tests" + - path: "./ghcide/bench/exe/Main.hs" + component: "ghcide:exe:ghcide-bench" - - path: "./plugins/hls-hlint-plugin/src" - component: "hls-hlint-plugin" + - path: "./ghcide/bench/lib/Experiments.hs" + component: "ghcide:exe:ghcide-bench" - - path: "./plugins/hls-retrie-plugin/src" - component: "hls-retrie-plugin" + - path: "./ghcide/bench/lib/Experiments/Types.hs" + component: "ghcide:exe:ghcide-bench" - - path: "./plugins/hls-explicit-imports-plugin/src" - component: "hls-explicit-imports-plugin" + - path: "./ghcide/bench/exe/Experiments.hs" + component: "ghcide:exe:ghcide-bench" + + - path: "./ghcide/bench/exe/Experiments/Types.hs" + component: "ghcide:exe:ghcide-bench" + + - path: "./src" + component: "lib:haskell-language-server" + + - path: "./exe/Main.hs" + component: "haskell-language-server:exe:haskell-language-server" + + - path: "./exe/Plugins.hs" + component: 
"haskell-language-server:exe:haskell-language-server" + + - path: "./exe/Wrapper.hs" + component: "haskell-language-server:exe:haskell-language-server-wrapper" + + - path: "./test/functional" + component: "haskell-language-server:test:func-test" + + - path: "./plugins/tactics/src" + component: "haskell-language-server:test:func-test" + + - path: "./test/wrapper" + component: "haskell-language-server:test:wrapper-test" + + - path: "./hie-compat/src-ghc86" + component: "lib:hie-compat" + + - path: "./hie-compat/src-ghc88" + component: "lib:hie-compat" + + - path: "./hie-compat/src-reexport" + component: "lib:hie-compat" + + - path: "./hie-compat/src-ghc810" + component: "lib:hie-compat" + + - path: "./hie-compat/src-reexport" + component: "lib:hie-compat" + + - path: "./hls-plugin-api/src" + component: "lib:hls-plugin-api" + + - path: "./plugins/hls-class-plugin/src" + component: "lib:hls-class-plugin" + + - path: "./plugins/hls-explicit-imports-plugin/src" + component: "lib:hls-explicit-imports-plugin" + + - path: "./plugins/hls-hlint-plugin/src" + component: "lib:hls-hlint-plugin" + + - path: "./plugins/hls-retrie-plugin/src" + component: "lib:hls-retrie-plugin" + + - path: "./plugins/tactics/src" + component: "lib:hls-tactics-plugin" + + - path: "./plugins/tactics/test" + component: "hls-tactics-plugin:test:tests" + + - path: "./shake-bench/src" + component: "lib:shake-bench" diff --git a/hie-compat/CHANGELOG.md b/hie-compat/CHANGELOG.md new file mode 100644 index 00000000000..82d590f7ab7 --- /dev/null +++ b/hie-compat/CHANGELOG.md @@ -0,0 +1,5 @@ +# Revision history for hie-compat + +## 0.1.0.0 -- 2020-10-19 + +* Initial Release diff --git a/hie-compat/LICENSE b/hie-compat/LICENSE new file mode 100644 index 00000000000..8775cb7967f --- /dev/null +++ b/hie-compat/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Zubin Duggal + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hie-compat/README.md b/hie-compat/README.md new file mode 100644 index 00000000000..08fddefac46 --- /dev/null +++ b/hie-compat/README.md @@ -0,0 +1,20 @@ +# hie-compat + +Mainly a backport of [HIE +Files](https://gitlab.haskell.org/ghc/ghc/-/wikis/hie-files) for ghc 8.6, along +with a few other backports of fixes useful for `ghcide` + +Fully compatible with `.hie` files natively produced by versions of GHC that support +them. 
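+
+A tool that consumes `.hie` files can depend on `hie-compat` and import the
+`Compat.*` modules in place of the version-specific `HieBin`/`HieTypes` modules
+from the `ghc` package, so the same code can build against each supported GHC.
+Below is a minimal sketch of such a consumer; the module and function names are
+illustrative only, and the `ghc`-package imports assume the GHC 8.10 module
+layout:
+
+```haskell
+module LoadHie (loadHie) where
+
+import Compat.HieBin   (HieFileResult (..), NameCacheUpdater (..), readHieFile)
+import Compat.HieTypes (HieFile (..))
+import Data.IORef      (atomicModifyIORef', newIORef)
+import NameCache       (initNameCache)
+import PrelInfo        (knownKeyNames)
+import UniqSupply      (mkSplitUniqSupply)
+
+-- Decode a .hie file written by the same version of GHC, returning the
+-- 'HieFile' (source path, HIE ASTs, exports, original source bytes).
+loadHie :: FilePath -> IO HieFile
+loadHie path = do
+  uniqs <- mkSplitUniqSupply 'z'
+  -- readHieFile takes a NameCacheUpdater so that Names can be shared
+  -- between successive reads; a plain IORef-backed cache is enough here.
+  nameCacheRef <- newIORef (initNameCache uniqs knownKeyNames)
+  let ncu = NCU (\upd -> atomicModifyIORef' nameCacheRef upd)
+  hie_file_result <$> readHieFile ncu path
+```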
+ +**THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC** + +Backports included: + +https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4037 + +https://gitlab.haskell.org/ghc/ghc/-/merge_requests/4068 + +https://gitlab.haskell.org/ghc/ghc/-/merge_requests/3199 + +https://gitlab.haskell.org/ghc/ghc/-/merge_requests/2578 diff --git a/hie-compat/hie-compat.cabal b/hie-compat/hie-compat.cabal new file mode 100644 index 00000000000..97784850281 --- /dev/null +++ b/hie-compat/hie-compat.cabal @@ -0,0 +1,45 @@ +cabal-version: 1.22 +name: hie-compat +version: 0.1.0.0 +synopsis: HIE files for GHC 8.6 and other HIE file backports +license: Apache-2.0 +description: + Backports for HIE files to GHC 8.6, along with a few other backports + of HIE file related fixes for ghcide. + + THIS DOES NOT LET YOU READ HIE FILES WITH MISMATCHED VERSIONS OF GHC +license-file: LICENSE +author: Zubin Duggal +maintainer: zubin.duggal@gmail.com +build-type: Simple +extra-source-files: CHANGELOG.md README.md +category: Development + +flag ghc-lib + description: build against ghc-lib instead of the ghc package + default: False + manual: True + +library + default-language: Haskell2010 + build-depends: + base < 4.15, array, bytestring, containers, directory, filepath, transformers + if flag(ghc-lib) + build-depends: ghc-lib + else + build-depends: ghc, ghc-boot + + exposed-modules: + Compat.HieAst + Compat.HieBin + Compat.HieTypes + Compat.HieDebug + Compat.HieUtils + + if (impl(ghc > 8.5) && impl(ghc < 8.7) && !flag(ghc-lib)) + hs-source-dirs: src-ghc86 + if (impl(ghc > 8.7) && impl(ghc < 8.10)) + hs-source-dirs: src-ghc88 src-reexport + if (impl(ghc > 8.9) && impl(ghc < 8.11) || flag(ghc-lib)) + hs-source-dirs: src-ghc810 src-reexport + diff --git a/hie-compat/src-ghc810/Compat/HieAst.hs b/hie-compat/src-ghc810/Compat/HieAst.hs new file mode 100644 index 00000000000..3b713cbe12d --- /dev/null +++ b/hie-compat/src-ghc810/Compat/HieAst.hs @@ -0,0 +1,1925 @@ +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} + +{- +Forked from GHC v8.10.1 to work around the readFile side effect in mkHiefile + +Main functions for .hie file generation +-} +{- HLINT ignore -} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DeriveDataTypeable #-} +module Compat.HieAst ( mkHieFile, enrichHie ) where + +import GhcPrelude + +import Avail ( Avails ) +import Bag ( Bag, bagToList ) +import BasicTypes +import BooleanFormula +import Class ( FunDep ) +import CoreUtils ( exprType ) +import ConLike ( conLikeName ) +import Desugar ( deSugarExpr ) +import FieldLabel +import GHC.Hs +import HscTypes +import Module ( ModuleName, ml_hs_file ) +import MonadUtils ( concatMapM, liftIO ) +import Name ( Name, nameSrcSpan ) +import NameEnv ( NameEnv, emptyNameEnv, extendNameEnv, lookupNameEnv ) +import SrcLoc +import TcHsSyn ( hsLitType, hsPatType ) +import Type ( mkVisFunTys, Type ) +import TysWiredIn ( mkListTy, mkSumTy ) +import Var ( Id, Var, setVarName, varName, varType ) +import TcRnTypes +import MkIface ( mkIfaceExports ) +import Panic + +import HieTypes +import HieUtils + +import qualified Data.Array as A +import qualified Data.ByteString as BS +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Data 
( Data, Typeable ) +import Data.List ( foldl1' ) +import Data.Maybe ( listToMaybe ) +import Control.Monad.Trans.Reader +import Control.Monad.Trans.Class ( lift ) + +{- Note [Updating HieAst for changes in the GHC AST] + +When updating the code in this file for changes in the GHC AST, you +need to pay attention to the following things: + +1) Symbols (Names/Vars/Modules) in the following categories: + + a) Symbols that appear in the source file that directly correspond to + something the user typed + b) Symbols that don't appear in the source, but should be in some sense + "visible" to a user, particularly via IDE tooling or the like. This + includes things like the names introduced by RecordWildcards (We record + all the names introduced by a (..) in HIE files), and will include implicit + parameters and evidence variables after one of my pending MRs lands. + +2) Subtrees that may contain such symbols, or correspond to a SrcSpan in + the file. This includes all `Located` things + +For 1), you need to call `toHie` for one of the following instances + +instance ToHie (Context (Located Name)) where ... +instance ToHie (Context (Located Var)) where ... +instance ToHie (IEContext (Located ModuleName)) where ... + +`Context` is a data type that looks like: + +data Context a = C ContextInfo a -- Used for names and bindings + +`ContextInfo` is defined in `HieTypes`, and looks like + +data ContextInfo + = Use -- ^ regular variable + | MatchBind + | IEThing IEType -- ^ import/export + | TyDecl + -- | Value binding + | ValBind + BindType -- ^ whether or not the binding is in an instance + Scope -- ^ scope over which the value is bound + (Maybe Span) -- ^ span of entire binding + ... + +It is used to annotate symbols in the .hie files with some extra information on +the context in which they occur and should be fairly self explanatory. You need +to select one that looks appropriate for the symbol usage. In very rare cases, +you might need to extend this sum type if none of the cases seem appropriate. + +So, given a `Located Name` that is just being "used", and not defined at a +particular location, you would do the following: + + toHie $ C Use located_name + +If you select one that corresponds to a binding site, you will need to +provide a `Scope` and a `Span` for your binding. Both of these are basically +`SrcSpans`. + +The `SrcSpan` in the `Scope` is supposed to span over the part of the source +where the symbol can be legally allowed to occur. For more details on how to +calculate this, see Note [Capturing Scopes and other non local information] +in HieAst. + +The binding `Span` is supposed to be the span of the entire binding for +the name. + +For a function definition `foo`: + +foo x = x + y + where y = x^2 + +The binding `Span` is the span of the entire function definition from `foo x` +to `x^2`. For a class definition, this is the span of the entire class, and +so on. If this isn't well defined for your bit of syntax (like a variable +bound by a lambda), then you can just supply a `Nothing` + +There is a test that checks that all symbols in the resulting HIE file +occur inside their stated `Scope`. This can be turned on by passing the +-fvalidate-ide-info flag to ghc along with -fwrite-ide-info to generate the +.hie file. 
+ +You may also want to provide a test in testsuite/test/hiefile that includes +a file containing your new construction, and tests that the calculated scope +is valid (by using -fvalidate-ide-info) + +For subtrees in the AST that may contain symbols, the procedure is fairly +straightforward. If you are extending the GHC AST, you will need to provide a +`ToHie` instance for any new types you may have introduced in the AST. + +Here are is an extract from the `ToHie` instance for (LHsExpr (GhcPass p)): + + toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of + HsVar _ (L _ var) -> + [ toHie $ C Use (L mspan var) + -- Patch up var location since typechecker removes it + ] + HsConLikeOut _ con -> + [ toHie $ C Use $ L mspan $ conLikeName con + ] + ... + HsApp _ a b -> + [ toHie a + , toHie b + ] + +If your subtree is `Located` or has a `SrcSpan` available, the output list +should contain a HieAst `Node` corresponding to the subtree. You can use +either `makeNode` or `getTypeNode` for this purpose, depending on whether it +makes sense to assign a `Type` to the subtree. After this, you just need +to concatenate the result of calling `toHie` on all subexpressions and +appropriately annotated symbols contained in the subtree. + +The code above from the ToHie instance of `LhsExpr (GhcPass p)` is supposed +to work for both the renamed and typechecked source. `getTypeNode` is from +the `HasType` class defined in this file, and it has different instances +for `GhcTc` and `GhcRn` that allow it to access the type of the expression +when given a typechecked AST: + +class Data a => HasType a where + getTypeNode :: a -> HieM [HieAST Type] +instance HasType (LHsExpr GhcTc) where + getTypeNode e@(L spn e') = ... -- Actually get the type for this expression +instance HasType (LHsExpr GhcRn) where + getTypeNode (L spn e) = makeNode e spn -- Fallback to a regular `makeNode` without recording the type + +If your subtree doesn't have a span available, you can omit the `makeNode` +call and just recurse directly in to the subexpressions. + +-} + +-- These synonyms match those defined in main/GHC.hs +type RenamedSource = ( HsGroup GhcRn, [LImportDecl GhcRn] + , Maybe [(LIE GhcRn, Avails)] + , Maybe LHsDocString ) +type TypecheckedSource = LHsBinds GhcTc + + +{- Note [Name Remapping] +The Typechecker introduces new names for mono names in AbsBinds. +We don't care about the distinction between mono and poly bindings, +so we replace all occurrences of the mono name with the poly name. +-} +newtype HieState = HieState + { name_remapping :: NameEnv Id + } + +initState :: HieState +initState = HieState emptyNameEnv + +class ModifyState a where -- See Note [Name Remapping] + addSubstitution :: a -> a -> HieState -> HieState + +instance ModifyState Name where + addSubstitution _ _ hs = hs + +instance ModifyState Id where + addSubstitution mono poly hs = + hs{name_remapping = extendNameEnv (name_remapping hs) (varName mono) poly} + +modifyState :: ModifyState (IdP p) => [ABExport p] -> HieState -> HieState +modifyState = foldr go id + where + go ABE{abe_poly=poly,abe_mono=mono} f = addSubstitution mono poly . f + go _ f = f + +type HieM = ReaderT HieState Hsc + +-- | Construct an 'HieFile' from the outputs of the typechecker. 
+mkHieFile :: ModSummary + -> TcGblEnv + -> RenamedSource + -> BS.ByteString -> Hsc HieFile +mkHieFile ms ts rs src = do + let tc_binds = tcg_binds ts + (asts', arr) <- getCompressedAsts tc_binds rs + let Just src_file = ml_hs_file $ ms_location ms + return $ HieFile + { hie_hs_file = src_file + , hie_module = ms_mod ms + , hie_types = arr + , hie_asts = asts' + -- mkIfaceExports sorts the AvailInfos for stability + , hie_exports = mkIfaceExports (tcg_exports ts) + , hie_hs_src = src + } + +getCompressedAsts :: TypecheckedSource -> RenamedSource + -> Hsc (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) +getCompressedAsts ts rs = do + asts <- enrichHie ts rs + return $ compressTypes asts + +enrichHie :: TypecheckedSource -> RenamedSource -> Hsc (HieASTs Type) +enrichHie ts (hsGrp, imports, exports, _) = flip runReaderT initState $ do + tasts <- toHie $ fmap (BC RegularBind ModuleScope) ts + rasts <- processGrp hsGrp + imps <- toHie $ filter (not . ideclImplicit . unLoc) imports + exps <- toHie $ fmap (map $ IEC Export . fst) exports + let spanFile children = case children of + [] -> mkRealSrcSpan (mkRealSrcLoc "" 1 1) (mkRealSrcLoc "" 1 1) + _ -> mkRealSrcSpan (realSrcSpanStart $ nodeSpan $ head children) + (realSrcSpanEnd $ nodeSpan $ last children) + + modulify xs = + Node (simpleNodeInfo "Module" "Module") (spanFile xs) xs + + asts = HieASTs + $ resolveTyVarScopes + $ M.map (modulify . mergeSortAsts) + $ M.fromListWith (++) + $ map (\x -> (srcSpanFile (nodeSpan x),[x])) flat_asts + + flat_asts = concat + [ tasts + , rasts + , imps + , exps + ] + return asts + where + processGrp grp = concatM + [ toHie $ fmap (RS ModuleScope ) hs_valds grp + , toHie $ hs_splcds grp + , toHie $ hs_tyclds grp + , toHie $ hs_derivds grp + , toHie $ hs_fixds grp + , toHie $ hs_defds grp + , toHie $ hs_fords grp + , toHie $ hs_warnds grp + , toHie $ hs_annds grp + , toHie $ hs_ruleds grp + ] + +getRealSpan :: SrcSpan -> Maybe Span +getRealSpan (RealSrcSpan sp) = Just sp +getRealSpan _ = Nothing + +grhss_span :: GRHSs p body -> SrcSpan +grhss_span (GRHSs _ xs bs) = foldl' combineSrcSpans (getLoc bs) (map getLoc xs) +grhss_span (XGRHSs _) = panic "XGRHS has no span" + +bindingsOnly :: [Context Name] -> [HieAST a] +bindingsOnly [] = [] +bindingsOnly (C c n : xs) = case nameSrcSpan n of + RealSrcSpan span -> Node nodeinfo span [] : bindingsOnly xs + where nodeinfo = NodeInfo S.empty [] (M.singleton (Right n) info) + info = mempty{identInfo = S.singleton c} + _ -> bindingsOnly xs + +concatM :: Monad m => [m [a]] -> m [a] +concatM xs = concat <$> sequence xs + +{- Note [Capturing Scopes and other non local information] +toHie is a local tranformation, but scopes of bindings cannot be known locally, +hence we have to push the relevant info down into the binding nodes. +We use the following types (*Context and *Scoped) to wrap things and +carry the required info +(Maybe Span) always carries the span of the entire binding, including rhs +-} +data Context a = C ContextInfo a -- Used for names and bindings + +data RContext a = RC RecFieldContext a +data RFContext a = RFC RecFieldContext (Maybe Span) a +-- ^ context for record fields + +data IEContext a = IEC IEType a +-- ^ context for imports/exports + +data BindContext a = BC BindType Scope a +-- ^ context for imports/exports + +data PatSynFieldContext a = PSC (Maybe Span) a +-- ^ context for pattern synonym fields. 
+ +data SigContext a = SC SigInfo a +-- ^ context for type signatures + +data SigInfo = SI SigType (Maybe Span) + +data SigType = BindSig | ClassSig | InstSig + +data RScoped a = RS Scope a +-- ^ Scope spans over everything to the right of a, (mostly) not +-- including a itself +-- (Includes a in a few special cases like recursive do bindings) or +-- let/where bindings + +-- | Pattern scope +data PScoped a = PS (Maybe Span) + Scope -- ^ use site of the pattern + Scope -- ^ pattern to the right of a, not including a + a + deriving (Typeable, Data) -- Pattern Scope + +{- Note [TyVar Scopes] +Due to -XScopedTypeVariables, type variables can be in scope quite far from +their original binding. We resolve the scope of these type variables +in a separate pass +-} +data TScoped a = TS TyVarScope a -- TyVarScope + +data TVScoped a = TVS TyVarScope Scope a -- TyVarScope +-- ^ First scope remains constant +-- Second scope is used to build up the scope of a tyvar over +-- things to its right, ala RScoped + +-- | Each element scopes over the elements to the right +listScopes :: Scope -> [Located a] -> [RScoped (Located a)] +listScopes _ [] = [] +listScopes rhsScope [pat] = [RS rhsScope pat] +listScopes rhsScope (pat : pats) = RS sc pat : pats' + where + pats'@((RS scope p):_) = listScopes rhsScope pats + sc = combineScopes scope $ mkScope $ getLoc p + +-- | 'listScopes' specialised to 'PScoped' things +patScopes + :: Maybe Span + -> Scope + -> Scope + -> [LPat (GhcPass p)] + -> [PScoped (LPat (GhcPass p))] +patScopes rsp useScope patScope xs = + map (\(RS sc a) -> PS rsp useScope sc (composeSrcSpan a)) $ + listScopes patScope (map dL xs) + +-- | 'listScopes' specialised to 'TVScoped' things +tvScopes + :: TyVarScope + -> Scope + -> [LHsTyVarBndr a] + -> [TVScoped (LHsTyVarBndr a)] +tvScopes tvScope rhsScope xs = + map (\(RS sc a)-> TVS tvScope sc a) $ listScopes rhsScope xs + +{- Note [Scoping Rules for SigPat] +Explicitly quantified variables in pattern type signatures are not +brought into scope in the rhs, but implicitly quantified variables +are (HsWC and HsIB). +This is unlike other signatures, where explicitly quantified variables +are brought into the RHS Scope +For example +foo :: forall a. ...; +foo = ... -- a is in scope here + +bar (x :: forall a. a -> a) = ... -- a is not in scope here +-- ^ a is in scope here (pattern body) + +bax (x :: a) = ... 
-- a is in scope here +Because of HsWC and HsIB pass on their scope to their children +we must wrap the LHsType in pattern signatures in a +Shielded explictly, so that the HsWC/HsIB scope is not passed +on the the LHsType +-} + +data Shielded a = SH Scope a -- Ignores its TScope, uses its own scope instead + +type family ProtectedSig a where + ProtectedSig GhcRn = HsWildCardBndrs GhcRn (HsImplicitBndrs + GhcRn + (Shielded (LHsType GhcRn))) + ProtectedSig GhcTc = NoExtField + +class ProtectSig a where + protectSig :: Scope -> LHsSigWcType (NoGhcTc a) -> ProtectedSig a + +instance (HasLoc a) => HasLoc (Shielded a) where + loc (SH _ a) = loc a + +instance (ToHie (TScoped a)) => ToHie (TScoped (Shielded a)) where + toHie (TS _ (SH sc a)) = toHie (TS (ResolvedScopes [sc]) a) + +instance ProtectSig GhcTc where + protectSig _ _ = noExtField + +instance ProtectSig GhcRn where + protectSig sc (HsWC a (HsIB b sig)) = + HsWC a (HsIB b (SH sc sig)) + protectSig _ (HsWC _ (XHsImplicitBndrs nec)) = noExtCon nec + protectSig _ (XHsWildCardBndrs nec) = noExtCon nec + +class HasLoc a where + -- ^ defined so that HsImplicitBndrs and HsWildCardBndrs can + -- know what their implicit bindings are scoping over + loc :: a -> SrcSpan + +instance HasLoc thing => HasLoc (TScoped thing) where + loc (TS _ a) = loc a + +instance HasLoc thing => HasLoc (PScoped thing) where + loc (PS _ _ _ a) = loc a + +instance HasLoc (LHsQTyVars GhcRn) where + loc (HsQTvs _ vs) = loc vs + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsImplicitBndrs a thing) where + loc (HsIB _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsWildCardBndrs a thing) where + loc (HsWC _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc (Located a) where + loc (L l _) = l + +instance HasLoc a => HasLoc [a] where + loc [] = noSrcSpan + loc xs = foldl1' combineSrcSpans $ map loc xs + +instance HasLoc a => HasLoc (FamEqn s a) where + loc (FamEqn _ a Nothing b _ c) = foldl1' combineSrcSpans [loc a, loc b, loc c] + loc (FamEqn _ a (Just tvs) b _ c) = foldl1' combineSrcSpans + [loc a, loc tvs, loc b, loc c] + loc _ = noSrcSpan +instance (HasLoc tm, HasLoc ty) => HasLoc (HsArg tm ty) where + loc (HsValArg tm) = loc tm + loc (HsTypeArg _ ty) = loc ty + loc (HsArgPar sp) = sp + +instance HasLoc (HsDataDefn GhcRn) where + loc def@(HsDataDefn{}) = loc $ dd_cons def + -- Only used for data family instances, so we only need rhs + -- Most probably the rest will be unhelpful anyway + loc _ = noSrcSpan + +{- Note [Real DataCon Name] +The typechecker subtitutes the conLikeWrapId for the name, but we don't want +this showing up in the hieFile, so we replace the name in the Id with the +original datacon name +See also Note [Data Constructor Naming] +-} +class HasRealDataConName p where + getRealDataCon :: XRecordCon p -> Located (IdP p) -> Located (IdP p) + +instance HasRealDataConName GhcRn where + getRealDataCon _ n = n +instance HasRealDataConName GhcTc where + getRealDataCon RecordConTc{rcon_con_like = con} (L sp var) = + L sp (setVarName var (conLikeName con)) + +-- | The main worker class +-- See Note [Updating HieAst for changes in the GHC AST] for more information +-- on how to add/modify instances for this. +class ToHie a where + toHie :: a -> HieM [HieAST Type] + +-- | Used to collect type info +class Data a => HasType a where + getTypeNode :: a -> HieM [HieAST Type] + +instance (ToHie a) => ToHie [a] where + toHie = concatMapM toHie + +instance (ToHie a) => ToHie (Bag a) where + toHie = toHie . 
bagToList + +instance (ToHie a) => ToHie (Maybe a) where + toHie = maybe (pure []) toHie + +instance ToHie (Context (Located NoExtField)) where + toHie _ = pure [] + +instance ToHie (TScoped NoExtField) where + toHie _ = pure [] + +instance ToHie (IEContext (Located ModuleName)) where + toHie (IEC c (L (RealSrcSpan span) mname)) = + pure $ [Node (NodeInfo S.empty [] idents) span []] + where details = mempty{identInfo = S.singleton (IEThing c)} + idents = M.singleton (Left mname) details + toHie _ = pure [] + +instance ToHie (Context (Located Var)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') + -> do + m <- asks name_remapping + let name = case lookupNameEnv m (varName name') of + Just var -> var + Nothing-> name' + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right $ varName name) + (IdentifierDetails (Just $ varType name') + (S.singleton context))) + span + []] + _ -> pure [] + +instance ToHie (Context (Located Name)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') -> do + m <- asks name_remapping + let name = case lookupNameEnv m name' of + Just var -> varName var + Nothing -> name' + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right name) + (IdentifierDetails Nothing + (S.singleton context))) + span + []] + _ -> pure [] + +-- | Dummy instances - never called +instance ToHie (TScoped (LHsSigWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped (LHsWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (SigContext (LSig GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped Type) where + toHie _ = pure [] + +instance HasType (LHsBind GhcRn) where + getTypeNode (L spn bind) = makeNode bind spn + +instance HasType (LHsBind GhcTc) where + getTypeNode (L spn bind) = case bind of + FunBind{fun_id = name} -> makeTypeNode bind spn (varType $ unLoc name) + _ -> makeNode bind spn + +instance HasType (Located (Pat GhcRn)) where + getTypeNode (dL -> L spn pat) = makeNode pat spn + +instance HasType (Located (Pat GhcTc)) where + getTypeNode (dL -> L spn opat) = makeTypeNode opat spn (hsPatType opat) + +instance HasType (LHsExpr GhcRn) where + getTypeNode (L spn e) = makeNode e spn + +-- | This instance tries to construct 'HieAST' nodes which include the type of +-- the expression. It is not yet possible to do this efficiently for all +-- expression forms, so we skip filling in the type for those inputs. +-- +-- 'HsApp', for example, doesn't have any type information available directly on +-- the node. Our next recourse would be to desugar it into a 'CoreExpr' then +-- query the type of that. Yet both the desugaring call and the type query both +-- involve recursive calls to the function and argument! This is particularly +-- problematic when you realize that the HIE traversal will eventually visit +-- those nodes too and ask for their types again. +-- +-- Since the above is quite costly, we just skip cases where computing the +-- expression's type is going to be expensive. 
+-- +-- See #16233 +instance HasType (LHsExpr GhcTc) where + getTypeNode e@(L spn e') = lift $ + -- Some expression forms have their type immediately available + let tyOpt = case e' of + HsLit _ l -> Just (hsLitType l) + HsOverLit _ o -> Just (overLitType o) + + HsLam _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsLamCase _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsCase _ _ (MG { mg_ext = groupTy }) -> Just (mg_res_ty groupTy) + + ExplicitList ty _ _ -> Just (mkListTy ty) + ExplicitSum ty _ _ _ -> Just (mkSumTy ty) + HsDo ty _ _ -> Just ty + HsMultiIf ty _ -> Just ty + + _ -> Nothing + + in + case tyOpt of + Just t -> makeTypeNode e' spn t + Nothing + | skipDesugaring e' -> fallback + | otherwise -> do + hs_env <- Hsc $ \e w -> return (e,w) + (_,mbe) <- liftIO $ deSugarExpr hs_env e + maybe fallback (makeTypeNode e' spn . exprType) mbe + where + fallback = makeNode e' spn + + matchGroupType :: MatchGroupTc -> Type + matchGroupType (MatchGroupTc args res) = mkVisFunTys args res + + -- | Skip desugaring of these expressions for performance reasons. + -- + -- See impact on Haddock output (esp. missing type annotations or links) + -- before marking more things here as 'False'. See impact on Haddock + -- performance before marking more things as 'True'. + skipDesugaring :: HsExpr a -> Bool + skipDesugaring e = case e of + HsVar{} -> False + HsUnboundVar{} -> False + HsConLikeOut{} -> False + HsRecFld{} -> False + HsOverLabel{} -> False + HsIPVar{} -> False + HsWrap{} -> False + _ -> True + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a (LHsExpr a)) + , ToHie (LHsExpr a) + , ToHie (Located (PatSynBind a a)) + , HasType (LHsBind a) + , ModifyState (IdP a) + , Data (HsBind a) + ) => ToHie (BindContext (LHsBind a)) where + toHie (BC context scope b@(L span bind)) = + concatM $ getTypeNode b : case bind of + FunBind{fun_id = name, fun_matches = matches} -> + [ toHie $ C (ValBind context scope $ getRealSpan span) name + , toHie matches + ] + PatBind{pat_lhs = lhs, pat_rhs = rhs} -> + [ toHie $ PS (getRealSpan span) scope NoScope lhs + , toHie rhs + ] + VarBind{var_rhs = expr} -> + [ toHie expr + ] + AbsBinds{abs_exports = xs, abs_binds = binds} -> + [ local (modifyState xs) $ -- Note [Name Remapping] + toHie $ fmap (BC context scope) binds + ] + PatSynBind _ psb -> + [ toHie $ L span psb -- PatSynBinds only occur at the top level + ] + XHsBindsLR _ -> [] + +instance ( ToHie (LMatch a body) + ) => ToHie (MatchGroup a body) where + toHie mg = concatM $ case mg of + MG{ mg_alts = (L span alts) , mg_origin = FromSource } -> + [ pure $ locOnly span + , toHie alts + ] + MG{} -> [] + XMatchGroup _ -> [] + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (HsPatSynDir a) + ) => ToHie (Located (PatSynBind a a)) where + toHie (L sp psb) = concatM $ case psb of + PSB{psb_id=var, psb_args=dets, psb_def=pat, psb_dir=dir} -> + [ toHie $ C (Decl PatSynDec $ getRealSpan sp) var + , toHie $ toBind dets + , toHie $ PS Nothing lhsScope NoScope pat + , toHie dir + ] + where + lhsScope = combineScopes varScope detScope + varScope = mkLScope var + detScope = case dets of + (PrefixCon args) -> foldr combineScopes NoScope $ map mkLScope args + (InfixCon a b) -> combineScopes (mkLScope a) (mkLScope b) + (RecCon r) -> foldr go NoScope r + go (RecordPatSynField a b) c = combineScopes c + $ combineScopes (mkLScope a) (mkLScope b) + detSpan = case detScope of + LocalScope a 
-> Just a + _ -> Nothing + toBind (PrefixCon args) = PrefixCon $ map (C Use) args + toBind (InfixCon a b) = InfixCon (C Use a) (C Use b) + toBind (RecCon r) = RecCon $ map (PSC detSpan) r + XPatSynBind _ -> [] + +instance ( ToHie (MatchGroup a (LHsExpr a)) + ) => ToHie (HsPatSynDir a) where + toHie dir = case dir of + ExplicitBidirectional mg -> toHie mg + _ -> pure [] + +instance ( a ~ GhcPass p + , ToHie body + , ToHie (HsMatchContext (NameOrRdrName (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a body) + , Data (Match a body) + ) => ToHie (LMatch (GhcPass p) body) where + toHie (L span m ) = concatM $ makeNode m span : case m of + Match{m_ctxt=mctx, m_pats = pats, m_grhss = grhss } -> + [ toHie mctx + , let rhsScope = mkScope $ grhss_span grhss + in toHie $ patScopes Nothing rhsScope NoScope pats + , toHie grhss + ] + XMatch _ -> [] + +instance ( ToHie (Context (Located a)) + ) => ToHie (HsMatchContext a) where + toHie (FunRhs{mc_fun=name}) = toHie $ C MatchBind name + toHie (StmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( ToHie (HsMatchContext a) + ) => ToHie (HsStmtContext a) where + toHie (PatGuard a) = toHie a + toHie (ParStmtCtxt a) = toHie a + toHie (TransStmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , ToHie (RContext (HsRecFields a (PScoped (LPat a)))) + , ToHie (LHsExpr a) + , ToHie (TScoped (LHsSigWcType a)) + , ProtectSig a + , ToHie (TScoped (ProtectedSig a)) + , HasType (LPat a) + , Data (HsSplice a) + ) => ToHie (PScoped (Located (Pat (GhcPass p)))) where + toHie (PS rsp scope pscope lpat@(dL -> L ospan opat)) = + concatM $ getTypeNode lpat : case opat of + WildPat _ -> + [] + VarPat _ lname -> + [ toHie $ C (PatternBind scope pscope rsp) lname + ] + LazyPat _ p -> + [ toHie $ PS rsp scope pscope p + ] + AsPat _ lname pat -> + [ toHie $ C (PatternBind scope + (combineScopes (mkLScope (dL pat)) pscope) + rsp) + lname + , toHie $ PS rsp scope pscope pat + ] + ParPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + BangPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + ListPat _ pats -> + [ toHie $ patScopes rsp scope pscope pats + ] + TuplePat _ pats _ -> + [ toHie $ patScopes rsp scope pscope pats + ] + SumPat _ pat _ _ -> + [ toHie $ PS rsp scope pscope pat + ] + ConPatIn c dets -> + [ toHie $ C Use c + , toHie $ contextify dets + ] + ConPatOut {pat_con = con, pat_args = dets}-> + [ toHie $ C Use $ fmap conLikeName con + , toHie $ contextify dets + ] + ViewPat _ expr pat -> + [ toHie expr + , toHie $ PS rsp scope pscope pat + ] + SplicePat _ sp -> + [ toHie $ L ospan sp + ] + LitPat _ _ -> + [] + NPat _ _ _ _ -> + [] + NPlusKPat _ n _ _ _ _ -> + [ toHie $ C (PatternBind scope pscope rsp) n + ] + SigPat _ pat sig -> + [ toHie $ PS rsp scope pscope pat + , let cscope = mkLScope (dL pat) in + toHie $ TS (ResolvedScopes [cscope, scope, pscope]) + (protectSig @a cscope sig) + -- See Note [Scoping Rules for SigPat] + ] + CoPat _ _ _ _ -> + [] + XPat _ -> [] + where + contextify (PrefixCon args) = PrefixCon $ patScopes rsp scope pscope args + contextify (InfixCon a b) = InfixCon a' b' + where [a', b'] = patScopes rsp scope pscope [a,b] + contextify (RecCon r) = RecCon $ RC RecFieldMatch $ contextify_rec r + contextify_rec (HsRecFields fds a) = HsRecFields (map go scoped_fds) a + where + go (RS fscope (L spn (HsRecField lbl pat pun))) = + L spn $ HsRecField lbl (PS rsp scope fscope pat) pun + scoped_fds = listScopes pscope fds + +instance ( ToHie body + , ToHie (LGRHS a body) + , ToHie (RScoped 
(LHsLocalBinds a)) + ) => ToHie (GRHSs a body) where + toHie grhs = concatM $ case grhs of + GRHSs _ grhss binds -> + [ toHie grhss + , toHie $ RS (mkScope $ grhss_span grhs) binds + ] + XGRHSs _ -> [] + +instance ( ToHie (Located body) + , ToHie (RScoped (GuardLStmt a)) + , Data (GRHS a (Located body)) + ) => ToHie (LGRHS a (Located body)) where + toHie (L span g) = concatM $ makeNode g span : case g of + GRHS _ guards body -> + [ toHie $ listScopes (mkLScope body) guards + , toHie body + ] + XGRHS _ -> [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , HasType (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (LGRHS a (LHsExpr a)) + , ToHie (RContext (HsRecordBinds a)) + , ToHie (RFContext (Located (AmbiguousFieldOcc a))) + , ToHie (ArithSeqInfo a) + , ToHie (LHsCmdTop a) + , ToHie (RScoped (GuardLStmt a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (TScoped (LHsWcType (NoGhcTc a))) + , ToHie (TScoped (LHsSigWcType (NoGhcTc a))) + , Data (HsExpr a) + , Data (HsSplice a) + , Data (HsTupArg a) + , Data (AmbiguousFieldOcc a) + , (HasRealDataConName a) + ) => ToHie (LHsExpr (GhcPass p)) where + toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of + HsVar _ (L _ var) -> + [ toHie $ C Use (L mspan var) + -- Patch up var location since typechecker removes it + ] + HsUnboundVar _ _ -> + [] + HsConLikeOut _ con -> + [ toHie $ C Use $ L mspan $ conLikeName con + ] + HsRecFld _ fld -> + [ toHie $ RFC RecFieldOcc Nothing (L mspan fld) + ] + HsOverLabel _ _ _ -> [] + HsIPVar _ _ -> [] + HsOverLit _ _ -> [] + HsLit _ _ -> [] + HsLam _ mg -> + [ toHie mg + ] + HsLamCase _ mg -> + [ toHie mg + ] + HsApp _ a b -> + [ toHie a + , toHie b + ] + HsAppType _ expr sig -> + [ toHie expr + , toHie $ TS (ResolvedScopes []) sig + ] + OpApp _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + NegApp _ a _ -> + [ toHie a + ] + HsPar _ a -> + [ toHie a + ] + SectionL _ a b -> + [ toHie a + , toHie b + ] + SectionR _ a b -> + [ toHie a + , toHie b + ] + ExplicitTuple _ args _ -> + [ toHie args + ] + ExplicitSum _ _ _ expr -> + [ toHie expr + ] + HsCase _ expr matches -> + [ toHie expr + , toHie matches + ] + HsIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsMultiIf _ grhss -> + [ toHie grhss + ] + HsLet _ binds expr -> + [ toHie $ RS (mkLScope expr) binds + , toHie expr + ] + HsDo _ _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + ExplicitList _ _ exprs -> + [ toHie exprs + ] + RecordCon {rcon_ext = mrealcon, rcon_con_name = name, rcon_flds = binds} -> + [ toHie $ C Use (getRealDataCon @a mrealcon name) + -- See Note [Real DataCon Name] + , toHie $ RC RecFieldAssign $ binds + ] + RecordUpd {rupd_expr = expr, rupd_flds = upds}-> + [ toHie expr + , toHie $ map (RC RecFieldAssign) upds + ] + ExprWithTySig _ expr sig -> + [ toHie expr + , toHie $ TS (ResolvedScopes [mkLScope expr]) sig + ] + ArithSeq _ _ info -> + [ toHie info + ] + HsSCC _ _ _ expr -> + [ toHie expr + ] + HsCoreAnn _ _ _ expr -> + [ toHie expr + ] + HsProc _ pat cmdtop -> + [ toHie $ PS Nothing (mkLScope cmdtop) NoScope pat + , toHie cmdtop + ] + HsStatic _ expr -> + [ toHie expr + ] + HsTick _ _ expr -> + [ toHie expr + ] + HsBinTick _ _ _ expr -> + [ toHie expr + ] + HsTickPragma _ _ _ _ expr -> + [ toHie expr + ] + HsWrap _ _ a -> + [ toHie $ L mspan a + ] + HsBracket _ b -> + [ toHie b + ] + HsRnBracketOut _ b p -> + [ toHie b + , toHie p + ] + HsTcBracketOut _ b p -> + [ toHie b + , toHie p + ] + HsSpliceE _ x -> + [ 
toHie $ L mspan x + ] + XExpr _ -> [] + +instance ( a ~ GhcPass p + , ToHie (LHsExpr a) + , Data (HsTupArg a) + ) => ToHie (LHsTupArg (GhcPass p)) where + toHie (L span arg) = concatM $ makeNode arg span : case arg of + Present _ expr -> + [ toHie expr + ] + Missing _ -> [] + XTupArg _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (RScoped (ApplicativeArg a)) + , ToHie (Located body) + , Data (StmtLR a a (Located body)) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (RScoped (LStmt (GhcPass p) (Located body))) where + toHie (RS scope (L span stmt)) = concatM $ makeNode stmt span : case stmt of + LastStmt _ body _ _ -> + [ toHie body + ] + BindStmt _ pat body _ _ -> + [ toHie $ PS (getRealSpan $ getLoc body) scope NoScope pat + , toHie body + ] + ApplicativeStmt _ stmts _ -> + [ concatMapM (toHie . RS scope . snd) stmts + ] + BodyStmt _ body _ _ -> + [ toHie body + ] + LetStmt _ binds -> + [ toHie $ RS scope binds + ] + ParStmt _ parstmts _ _ -> + [ concatMapM (\(ParStmtBlock _ stmts _ _) -> + toHie $ listScopes NoScope stmts) + parstmts + ] + TransStmt {trS_stmts = stmts, trS_using = using, trS_by = by} -> + [ toHie $ listScopes scope stmts + , toHie using + , toHie by + ] + RecStmt {recS_stmts = stmts} -> + [ toHie $ map (RS $ combineScopes scope (mkScope span)) stmts + ] + XStmtLR _ -> [] + +instance ( ToHie (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (LHsLocalBinds a)) where + toHie (RS scope (L sp binds)) = concatM $ makeNode binds sp : case binds of + EmptyLocalBinds _ -> [] + HsIPBinds _ _ -> [] + HsValBinds _ valBinds -> + [ toHie $ RS (combineScopes scope $ mkScope sp) + valBinds + ] + XHsLocalBindsLR _ -> [] + +instance ( ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (XXValBindsLR a a)) + ) => ToHie (RScoped (HsValBindsLR a a)) where + toHie (RS sc v) = concatM $ case v of + ValBinds _ binds sigs -> + [ toHie $ fmap (BC RegularBind sc) binds + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + XValBindsLR x -> [ toHie $ RS sc x ] + +instance ToHie (RScoped (NHsValBindsLR GhcTc)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] +instance ToHie (RScoped (NHsValBindsLR GhcRn)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . 
snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + +instance ( ToHie (RContext (LHsRecField a arg)) + ) => ToHie (RContext (HsRecFields a arg)) where + toHie (RC c (HsRecFields fields _)) = toHie $ map (RC c) fields + +instance ( ToHie (RFContext (Located label)) + , ToHie arg + , HasLoc arg + , Data label + , Data arg + ) => ToHie (RContext (LHsRecField' label arg)) where + toHie (RC c (L span recfld)) = concatM $ makeNode recfld span : case recfld of + HsRecField label expr _ -> + [ toHie $ RFC c (getRealSpan $ loc expr) label + , toHie expr + ] + +instance ToHie (RFContext (LFieldOcc GhcRn)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc name _ -> + [ toHie $ C (RecField c rhs) (L nspan name) + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (LFieldOcc GhcTc)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcRn))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous name _ -> + [ toHie $ C (RecField c rhs) $ L nspan name + ] + Ambiguous _name _ -> + [ ] + XAmbiguousFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcTc))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + Ambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XAmbiguousFieldOcc _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (StmtLR a a (Located (HsExpr a))) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (ApplicativeArg (GhcPass p))) where + toHie (RS sc (ApplicativeArgOne _ pat expr _ _)) = concatM + [ toHie $ PS Nothing sc NoScope pat + , toHie expr + ] + toHie (RS sc (ApplicativeArgMany _ stmts _ pat)) = concatM + [ toHie $ listScopes NoScope stmts + , toHie $ PS Nothing sc NoScope pat + ] + toHie (RS _ (XApplicativeArg _)) = pure [] + +instance (ToHie arg, ToHie rec) => ToHie (HsConDetails arg rec) where + toHie (PrefixCon args) = toHie args + toHie (RecCon rec) = toHie rec + toHie (InfixCon a b) = concatM [ toHie a, toHie b] + +instance ( ToHie (LHsCmd a) + , Data (HsCmdTop a) + ) => ToHie (LHsCmdTop a) where + toHie (L span top) = concatM $ makeNode top span : case top of + HsCmdTop _ cmd -> + [ toHie cmd + ] + XCmdTop _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (MatchGroup a (LHsCmd a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsCmd a) + , Data (HsCmdTop a) + , Data (StmtLR a a (Located (HsCmd a))) + , Data (HsLocalBinds a) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (LHsCmd (GhcPass p)) where + toHie (L span cmd) = concatM $ makeNode cmd span : case cmd of + HsCmdArrApp _ a b _ _ -> + [ toHie a + , toHie b + ] + HsCmdArrForm _ a _ _ cmdtops -> + [ toHie a + , toHie cmdtops + ] + HsCmdApp _ a b -> + [ toHie a + , toHie b + ] + HsCmdLam _ mg -> + [ toHie mg + ] + HsCmdPar _ a -> + [ toHie a + ] + HsCmdCase _ expr alts -> + [ toHie expr + , toHie alts + ] + HsCmdIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsCmdLet _ binds cmd' 
-> + [ toHie $ RS (mkLScope cmd') binds + , toHie cmd' + ] + HsCmdDo _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + HsCmdWrap _ _ _ -> [] + XCmd _ -> [] + +instance ToHie (TyClGroup GhcRn) where + toHie TyClGroup{ group_tyclds = classes + , group_roles = roles + , group_kisigs = sigs + , group_instds = instances } = + concatM + [ toHie classes + , toHie sigs + , toHie roles + , toHie instances + ] + toHie (XTyClGroup _) = pure [] + +instance ToHie (LTyClDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamDecl {tcdFam = fdecl} -> + [ toHie (L span fdecl) + ] + SynDecl {tcdLName = name, tcdTyVars = vars, tcdRhs = typ} -> + [ toHie $ C (Decl SynDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [mkScope $ getLoc typ]) vars + , toHie typ + ] + DataDecl {tcdLName = name, tcdTyVars = vars, tcdDataDefn = defn} -> + [ toHie $ C (Decl DataDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [quant_scope, rhs_scope]) vars + , toHie defn + ] + where + quant_scope = mkLScope $ dd_ctxt defn + rhs_scope = sig_sc `combineScopes` con_sc `combineScopes` deriv_sc + sig_sc = maybe NoScope mkLScope $ dd_kindSig defn + con_sc = foldr combineScopes NoScope $ map mkLScope $ dd_cons defn + deriv_sc = mkLScope $ dd_derivs defn + ClassDecl { tcdCtxt = context + , tcdLName = name + , tcdTyVars = vars + , tcdFDs = deps + , tcdSigs = sigs + , tcdMeths = meths + , tcdATs = typs + , tcdATDefs = deftyps + } -> + [ toHie $ C (Decl ClassDec $ getRealSpan span) name + , toHie context + , toHie $ TS (ResolvedScopes [context_scope, rhs_scope]) vars + , toHie deps + , toHie $ map (SC $ SI ClassSig $ getRealSpan span) sigs + , toHie $ fmap (BC InstanceBind ModuleScope) meths + , toHie typs + , concatMapM (pure . locOnly . getLoc) deftyps + , toHie deftyps + ] + where + context_scope = mkLScope context + rhs_scope = foldl1' combineScopes $ map mkScope + [ loc deps, loc sigs, loc (bagToList meths), loc typs, loc deftyps] + XTyClDecl _ -> [] + +instance ToHie (LFamilyDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamilyDecl _ info name vars _ sig inj -> + [ toHie $ C (Decl FamDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [rhsSpan]) vars + , toHie info + , toHie $ RS injSpan sig + , toHie inj + ] + where + rhsSpan = sigSpan `combineScopes` injSpan + sigSpan = mkScope $ getLoc sig + injSpan = maybe NoScope (mkScope . getLoc) inj + XFamilyDecl _ -> [] + +instance ToHie (FamilyInfo GhcRn) where + toHie (ClosedTypeFamily (Just eqns)) = concatM $ + [ concatMapM (pure . locOnly . 
getLoc) eqns + , toHie $ map go eqns + ] + where + go (L l ib) = TS (ResolvedScopes [mkScope l]) ib + toHie _ = pure [] + +instance ToHie (RScoped (LFamilyResultSig GhcRn)) where + toHie (RS sc (L span sig)) = concatM $ makeNode sig span : case sig of + NoSig _ -> + [] + KindSig _ k -> + [ toHie k + ] + TyVarSig _ bndr -> + [ toHie $ TVS (ResolvedScopes [sc]) NoScope bndr + ] + XFamilyResultSig _ -> [] + +instance ToHie (Located (FunDep (Located Name))) where + toHie (L span fd@(lhs, rhs)) = concatM $ + [ makeNode fd span + , toHie $ map (C Use) lhs + , toHie $ map (C Use) rhs + ] + +instance (ToHie rhs, HasLoc rhs) + => ToHie (TScoped (FamEqn GhcRn rhs)) where + toHie (TS _ f) = toHie f + +instance (ToHie rhs, HasLoc rhs) + => ToHie (FamEqn GhcRn rhs) where + toHie fe@(FamEqn _ var tybndrs pats _ rhs) = concatM $ + [ toHie $ C (Decl InstDec $ getRealSpan $ loc fe) var + , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs + , toHie pats + , toHie rhs + ] + where scope = combineScopes patsScope rhsScope + patsScope = mkScope (loc pats) + rhsScope = mkScope (loc rhs) + toHie (XFamEqn _) = pure [] + +instance ToHie (LInjectivityAnn GhcRn) where + toHie (L span ann) = concatM $ makeNode ann span : case ann of + InjectivityAnn lhs rhs -> + [ toHie $ C Use lhs + , toHie $ map (C Use) rhs + ] + +instance ToHie (HsDataDefn GhcRn) where + toHie (HsDataDefn _ _ ctx _ mkind cons derivs) = concatM + [ toHie ctx + , toHie mkind + , toHie cons + , toHie derivs + ] + toHie (XHsDataDefn _) = pure [] + +instance ToHie (HsDeriving GhcRn) where + toHie (L span clauses) = concatM + [ pure $ locOnly span + , toHie clauses + ] + +instance ToHie (LHsDerivingClause GhcRn) where + toHie (L span cl) = concatM $ makeNode cl span : case cl of + HsDerivingClause _ strat (L ispan tys) -> + [ toHie strat + , pure $ locOnly ispan + , toHie $ map (TS (ResolvedScopes [])) tys + ] + XHsDerivingClause _ -> [] + +instance ToHie (Located (DerivStrategy GhcRn)) where + toHie (L span strat) = concatM $ makeNode strat span : case strat of + StockStrategy -> [] + AnyclassStrategy -> [] + NewtypeStrategy -> [] + ViaStrategy s -> [ toHie $ TS (ResolvedScopes []) s ] + +instance ToHie (Located OverlapMode) where + toHie (L span _) = pure $ locOnly span + +instance ToHie (LConDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ConDeclGADT { con_names = names, con_qvars = qvars + , con_mb_cxt = ctx, con_args = args, con_res_ty = typ } -> + [ toHie $ map (C (Decl ConDec $ getRealSpan span)) names + , toHie $ TS (ResolvedScopes [ctxScope, rhsScope]) qvars + , toHie ctx + , toHie args + , toHie typ + ] + where + rhsScope = combineScopes argsScope tyScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope args + tyScope = mkLScope typ + ConDeclH98 { con_name = name, con_ex_tvs = qvars + , con_mb_cxt = ctx, con_args = dets } -> + [ toHie $ C (Decl ConDec $ getRealSpan span) name + , toHie $ tvScopes (ResolvedScopes []) rhsScope qvars + , toHie ctx + , toHie dets + ] + where + rhsScope = combineScopes ctxScope argsScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope dets + XConDecl _ -> [] + where condecl_scope args = case args of + PrefixCon xs -> foldr combineScopes NoScope $ map mkLScope xs + InfixCon a b -> combineScopes (mkLScope a) (mkLScope b) + RecCon x -> mkLScope x + +instance ToHie (Located [LConDeclField GhcRn]) where + toHie (L span decls) = concatM $ + [ pure $ locOnly span + , toHie decls + ] + +instance ( HasLoc thing + , ToHie (TScoped thing) + 
) => ToHie (TScoped (HsImplicitBndrs GhcRn thing)) where + toHie (TS sc (HsIB ibrn a)) = concatM $ + [ pure $ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) ibrn + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsImplicitBndrs _)) = pure [] + +instance ( HasLoc thing + , ToHie (TScoped thing) + ) => ToHie (TScoped (HsWildCardBndrs GhcRn thing)) where + toHie (TS sc (HsWC names a)) = concatM $ + [ pure $ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsWildCardBndrs _)) = pure [] + +instance ToHie (LStandaloneKindSig GhcRn) where + toHie (L sp sig) = concatM [makeNode sig sp, toHie sig] + +instance ToHie (StandaloneKindSig GhcRn) where + toHie sig = concatM $ case sig of + StandaloneKindSig _ name typ -> + [ toHie $ C TyDecl name + , toHie $ TS (ResolvedScopes []) typ + ] + XStandaloneKindSig _ -> [] + +instance ToHie (SigContext (LSig GhcRn)) where + toHie (SC (SI styp msp) (L sp sig)) = concatM $ makeNode sig sp : case sig of + TypeSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + PatSynSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + ClassOpSig _ _ names typ -> + [ case styp of + ClassSig -> toHie $ map (C $ ClassTyDecl $ getRealSpan sp) names + _ -> toHie $ map (C $ TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) msp) typ + ] + IdSig _ _ -> [] + FixSig _ fsig -> + [ toHie $ L sp fsig + ] + InlineSig _ name _ -> + [ toHie $ (C Use) name + ] + SpecSig _ name typs _ -> + [ toHie $ (C Use) name + , toHie $ map (TS (ResolvedScopes [])) typs + ] + SpecInstSig _ _ typ -> + [ toHie $ TS (ResolvedScopes []) typ + ] + MinimalSig _ _ form -> + [ toHie form + ] + SCCFunSig _ _ name mtxt -> + [ toHie $ (C Use) name + , pure $ maybe [] (locOnly . 
getLoc) mtxt + ] + CompleteMatchSig _ _ (L ispan names) typ -> + [ pure $ locOnly ispan + , toHie $ map (C Use) names + , toHie $ fmap (C Use) typ + ] + XSig _ -> [] + +instance ToHie (LHsType GhcRn) where + toHie x = toHie $ TS (ResolvedScopes []) x + +instance ToHie (TScoped (LHsType GhcRn)) where + toHie (TS tsc (L span t)) = concatM $ makeNode t span : case t of + HsForAllTy _ _ bndrs body -> + [ toHie $ tvScopes tsc (mkScope $ getLoc body) bndrs + , toHie body + ] + HsQualTy _ ctx body -> + [ toHie ctx + , toHie body + ] + HsTyVar _ _ var -> + [ toHie $ C Use var + ] + HsAppTy _ a b -> + [ toHie a + , toHie b + ] + HsAppKindTy _ ty ki -> + [ toHie ty + , toHie $ TS (ResolvedScopes []) ki + ] + HsFunTy _ a b -> + [ toHie a + , toHie b + ] + HsListTy _ a -> + [ toHie a + ] + HsTupleTy _ _ tys -> + [ toHie tys + ] + HsSumTy _ tys -> + [ toHie tys + ] + HsOpTy _ a op b -> + [ toHie a + , toHie $ C Use op + , toHie b + ] + HsParTy _ a -> + [ toHie a + ] + HsIParamTy _ ip ty -> + [ toHie ip + , toHie ty + ] + HsKindSig _ a b -> + [ toHie a + , toHie b + ] + HsSpliceTy _ a -> + [ toHie $ L span a + ] + HsDocTy _ a _ -> + [ toHie a + ] + HsBangTy _ _ ty -> + [ toHie ty + ] + HsRecTy _ fields -> + [ toHie fields + ] + HsExplicitListTy _ _ tys -> + [ toHie tys + ] + HsExplicitTupleTy _ tys -> + [ toHie tys + ] + HsTyLit _ _ -> [] + HsWildCardTy _ -> [] + HsStarTy _ _ -> [] + XHsType _ -> [] + +instance (ToHie tm, ToHie ty) => ToHie (HsArg tm ty) where + toHie (HsValArg tm) = toHie tm + toHie (HsTypeArg _ ty) = toHie ty + toHie (HsArgPar sp) = pure $ locOnly sp + +instance ToHie (TVScoped (LHsTyVarBndr GhcRn)) where + toHie (TVS tsc sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + UserTyVar _ var -> + [ toHie $ C (TyVarBind sc tsc) var + ] + KindedTyVar _ var kind -> + [ toHie $ C (TyVarBind sc tsc) var + , toHie kind + ] + XTyVarBndr _ -> [] + +instance ToHie (TScoped (LHsQTyVars GhcRn)) where + toHie (TS sc (HsQTvs implicits vars)) = concatM $ + [ pure $ bindingsOnly bindings + , toHie $ tvScopes sc NoScope vars + ] + where + varLoc = loc vars + bindings = map (C $ TyVarBind (mkScope varLoc) sc) implicits + toHie (TS _ (XLHsQTyVars _)) = pure [] + +instance ToHie (LHsContext GhcRn) where + toHie (L span tys) = concatM $ + [ pure $ locOnly span + , toHie tys + ] + +instance ToHie (LConDeclField GhcRn) where + toHie (L span field) = concatM $ makeNode field span : case field of + ConDeclField _ fields typ _ -> + [ toHie $ map (RFC RecFieldDecl (getRealSpan $ loc typ)) fields + , toHie typ + ] + XConDeclField _ -> [] + +instance ToHie (LHsExpr a) => ToHie (ArithSeqInfo a) where + toHie (From expr) = toHie expr + toHie (FromThen a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromTo a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromThenTo a b c) = concatM $ + [ toHie a + , toHie b + , toHie c + ] + +instance ToHie (LSpliceDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + SpliceDecl _ splice _ -> + [ toHie splice + ] + XSpliceDecl _ -> [] + +instance ToHie (HsBracket a) where + toHie _ = pure [] + +instance ToHie PendingRnSplice where + toHie _ = pure [] + +instance ToHie PendingTcSplice where + toHie _ = pure [] + +instance ToHie (LBooleanFormula (Located Name)) where + toHie (L span form) = concatM $ makeNode form span : case form of + Var a -> + [ toHie $ C Use a + ] + And forms -> + [ toHie forms + ] + Or forms -> + [ toHie forms + ] + Parens f -> + [ toHie f + ] + +instance ToHie (Located HsIPName) where + toHie (L span e) = 
makeNode e span + +instance ( ToHie (LHsExpr a) + , Data (HsSplice a) + ) => ToHie (Located (HsSplice a)) where + toHie (L span sp) = concatM $ makeNode sp span : case sp of + HsTypedSplice _ _ _ expr -> + [ toHie expr + ] + HsUntypedSplice _ _ _ expr -> + [ toHie expr + ] + HsQuasiQuote _ _ _ ispan _ -> + [ pure $ locOnly ispan + ] + HsSpliced _ _ _ -> + [] + HsSplicedT _ -> + [] + XSplice _ -> [] + +instance ToHie (LRoleAnnotDecl GhcRn) where + toHie (L span annot) = concatM $ makeNode annot span : case annot of + RoleAnnotDecl _ var roles -> + [ toHie $ C Use var + , concatMapM (pure . locOnly . getLoc) roles + ] + XRoleAnnotDecl _ -> [] + +instance ToHie (LInstDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ClsInstD _ d -> + [ toHie $ L span d + ] + DataFamInstD _ d -> + [ toHie $ L span d + ] + TyFamInstD _ d -> + [ toHie $ L span d + ] + XInstDecl _ -> [] + +instance ToHie (LClsInstDecl GhcRn) where + toHie (L span decl) = concatM + [ toHie $ TS (ResolvedScopes [mkScope span]) $ cid_poly_ty decl + , toHie $ fmap (BC InstanceBind ModuleScope) $ cid_binds decl + , toHie $ map (SC $ SI InstSig $ getRealSpan span) $ cid_sigs decl + , pure $ concatMap (locOnly . getLoc) $ cid_tyfam_insts decl + , toHie $ cid_tyfam_insts decl + , pure $ concatMap (locOnly . getLoc) $ cid_datafam_insts decl + , toHie $ cid_datafam_insts decl + , toHie $ cid_overlap_mode decl + ] + +instance ToHie (LDataFamInstDecl GhcRn) where + toHie (L sp (DataFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (LTyFamInstDecl GhcRn) where + toHie (L sp (TyFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (Context a) + => ToHie (PatSynFieldContext (RecordPatSynField a)) where + toHie (PSC sp (RecordPatSynField a b)) = concatM $ + [ toHie $ C (RecField RecFieldDecl sp) a + , toHie $ C Use b + ] + +instance ToHie (LDerivDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DerivDecl _ typ strat overlap -> + [ toHie $ TS (ResolvedScopes []) typ + , toHie strat + , toHie overlap + ] + XDerivDecl _ -> [] + +instance ToHie (LFixitySig GhcRn) where + toHie (L span sig) = concatM $ makeNode sig span : case sig of + FixitySig _ vars _ -> + [ toHie $ map (C Use) vars + ] + XFixitySig _ -> [] + +instance ToHie (LDefaultDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DefaultDecl _ typs -> + [ toHie typs + ] + XDefaultDecl _ -> [] + +instance ToHie (LForeignDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ForeignImport {fd_name = name, fd_sig_ty = sig, fd_fi = fi} -> + [ toHie $ C (ValBind RegularBind ModuleScope $ getRealSpan span) name + , toHie $ TS (ResolvedScopes []) sig + , toHie fi + ] + ForeignExport {fd_name = name, fd_sig_ty = sig, fd_fe = fe} -> + [ toHie $ C Use name + , toHie $ TS (ResolvedScopes []) sig + , toHie fe + ] + XForeignDecl _ -> [] + +instance ToHie ForeignImport where + toHie (CImport (L a _) (L b _) _ _ (L c _)) = pure $ concat $ + [ locOnly a + , locOnly b + , locOnly c + ] + +instance ToHie ForeignExport where + toHie (CExport (L a _) (L b _)) = pure $ concat $ + [ locOnly a + , locOnly b + ] + +instance ToHie (LWarnDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warnings _ _ warnings -> + [ toHie warnings + ] + XWarnDecls _ -> [] + +instance ToHie (LWarnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warning _ vars _ -> 
+ [ toHie $ map (C Use) vars + ] + XWarnDecl _ -> [] + +instance ToHie (LAnnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsAnnotation _ _ prov expr -> + [ toHie prov + , toHie expr + ] + XAnnDecl _ -> [] + +instance ToHie (Context (Located a)) => ToHie (AnnProvenance a) where + toHie (ValueAnnProvenance a) = toHie $ C Use a + toHie (TypeAnnProvenance a) = toHie $ C Use a + toHie ModuleAnnProvenance = pure [] + +instance ToHie (LRuleDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsRules _ _ rules -> + [ toHie rules + ] + XRuleDecls _ -> [] + +instance ToHie (LRuleDecl GhcRn) where + toHie (L _ (XRuleDecl _)) = pure [] + toHie (L span r@(HsRule _ rname _ tybndrs bndrs exprA exprB)) = concatM + [ makeNode r span + , pure $ locOnly $ getLoc rname + , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs + , toHie $ map (RS $ mkScope span) bndrs + , toHie exprA + , toHie exprB + ] + where scope = bndrs_sc `combineScopes` exprA_sc `combineScopes` exprB_sc + bndrs_sc = maybe NoScope mkLScope (listToMaybe bndrs) + exprA_sc = mkLScope exprA + exprB_sc = mkLScope exprB + +instance ToHie (RScoped (LRuleBndr GhcRn)) where + toHie (RS sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + RuleBndr _ var -> + [ toHie $ C (ValBind RegularBind sc Nothing) var + ] + RuleBndrSig _ var typ -> + [ toHie $ C (ValBind RegularBind sc Nothing) var + , toHie $ TS (ResolvedScopes [sc]) typ + ] + XRuleBndr _ -> [] + +instance ToHie (LImportDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ImportDecl { ideclName = name, ideclAs = as, ideclHiding = hidden } -> + [ toHie $ IEC Import name + , toHie $ fmap (IEC ImportAs) as + , maybe (pure []) goIE hidden + ] + XImportDecl _ -> [] + where + goIE (hiding, (L sp liens)) = concatM $ + [ pure $ locOnly sp + , toHie $ map (IEC c) liens + ] + where + c = if hiding then ImportHiding else Import + +instance ToHie (IEContext (LIE GhcRn)) where + toHie (IEC c (L span ie)) = concatM $ makeNode ie span : case ie of + IEVar _ n -> + [ toHie $ IEC c n + ] + IEThingAbs _ n -> + [ toHie $ IEC c n + ] + IEThingAll _ n -> + [ toHie $ IEC c n + ] + IEThingWith _ n _ ns flds -> + [ toHie $ IEC c n + , toHie $ map (IEC c) ns + , toHie $ map (IEC c) flds + ] + IEModuleContents _ n -> + [ toHie $ IEC c n + ] + IEGroup _ _ _ -> [] + IEDoc _ _ -> [] + IEDocNamed _ _ -> [] + XIE _ -> [] + +instance ToHie (IEContext (LIEWrappedName Name)) where + toHie (IEC c (L span iewn)) = concatM $ makeNode iewn span : case iewn of + IEName n -> + [ toHie $ C (IEThing c) n + ] + IEPattern p -> + [ toHie $ C (IEThing c) p + ] + IEType n -> + [ toHie $ C (IEThing c) n + ] + +instance ToHie (IEContext (Located (FieldLbl Name))) where + toHie (IEC c (L span lbl)) = concatM $ makeNode lbl span : case lbl of + FieldLabel _ _ n -> + [ toHie $ C (IEThing c) $ L span n + ] diff --git a/hie-compat/src-ghc810/Compat/HieBin.hs b/hie-compat/src-ghc810/Compat/HieBin.hs new file mode 100644 index 00000000000..1a6ff2bef1d --- /dev/null +++ b/hie-compat/src-ghc810/Compat/HieBin.hs @@ -0,0 +1,399 @@ +{- +Binary serialization for .hie files. 
+-} +{- HLINT ignore -} +{-# LANGUAGE ScopedTypeVariables #-} +module Compat.HieBin ( readHieFile, readHieFileWithVersion, HieHeader, writeHieFile, HieName(..), toHieName, HieFileResult(..), hieMagic, hieNameOcc,NameCacheUpdater(..)) where + +import GHC.Settings ( maybeRead ) + +import Config ( cProjectVersion ) +import Binary +import BinIface ( getDictFastString ) +import FastMutInt +import FastString ( FastString ) +import Module ( Module ) +import Name +import NameCache +import Outputable +import PrelInfo +import SrcLoc +import UniqSupply ( takeUniqFromSupply ) +import Unique +import UniqFM +import IfaceEnv + +import qualified Data.Array as A +import Data.IORef +import Data.ByteString ( ByteString ) +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BSC +import Data.List ( mapAccumR ) +import Data.Word ( Word8, Word32 ) +import Control.Monad ( replicateM, when ) +import System.Directory ( createDirectoryIfMissing ) +import System.FilePath ( takeDirectory ) + +import HieTypes + +-- | `Name`'s get converted into `HieName`'s before being written into @.hie@ +-- files. See 'toHieName' and 'fromHieName' for logic on how to convert between +-- these two types. +data HieName + = ExternalName !Module !OccName !SrcSpan + | LocalName !OccName !SrcSpan + | KnownKeyName !Unique + deriving (Eq) + +instance Ord HieName where + compare (ExternalName a b c) (ExternalName d e f) = compare (a,b,c) (d,e,f) + compare (LocalName a b) (LocalName c d) = compare (a,b) (c,d) + compare (KnownKeyName a) (KnownKeyName b) = nonDetCmpUnique a b + -- Not actually non determinstic as it is a KnownKey + compare ExternalName{} _ = LT + compare LocalName{} ExternalName{} = GT + compare LocalName{} _ = LT + compare KnownKeyName{} _ = GT + +instance Outputable HieName where + ppr (ExternalName m n sp) = text "ExternalName" <+> ppr m <+> ppr n <+> ppr sp + ppr (LocalName n sp) = text "LocalName" <+> ppr n <+> ppr sp + ppr (KnownKeyName u) = text "KnownKeyName" <+> ppr u + +hieNameOcc :: HieName -> OccName +hieNameOcc (ExternalName _ occ _) = occ +hieNameOcc (LocalName occ _) = occ +hieNameOcc (KnownKeyName u) = + case lookupKnownKeyName u of + Just n -> nameOccName n + Nothing -> pprPanic "hieNameOcc:unknown known-key unique" + (ppr (unpkUnique u)) + + +data HieSymbolTable = HieSymbolTable + { hie_symtab_next :: !FastMutInt + , hie_symtab_map :: !(IORef (UniqFM (Int, HieName))) + } + +data HieDictionary = HieDictionary + { hie_dict_next :: !FastMutInt -- The next index to use + , hie_dict_map :: !(IORef (UniqFM (Int,FastString))) -- indexed by FastString + } + +initBinMemSize :: Int +initBinMemSize = 1024*1024 + +-- | The header for HIE files - Capital ASCII letters "HIE". 
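+--
+-- Illustrative sketch only (not taken from the original module) of the
+-- on-disk layout produced by 'writeHieFile' below; field widths elided:
+--
+-- >   "HIE"              -- hieMagic, the three bytes [72,73,69]
+-- >   <hieVersion>\n     -- version line, written with 'putBinLine'
+-- >   <ghc version>\n    -- cProjectVersion of the producing compiler
+-- >   <dict ptr> <symtab ptr> <payload> <symbol table> <dictionary>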
+hieMagic :: [Word8] +hieMagic = [72,73,69] + +hieMagicLen :: Int +hieMagicLen = length hieMagic + +ghcVersion :: ByteString +ghcVersion = BSC.pack cProjectVersion + +putBinLine :: BinHandle -> ByteString -> IO () +putBinLine bh xs = do + mapM_ (putByte bh) $ BS.unpack xs + putByte bh 10 -- newline char + +-- | Write a `HieFile` to the given `FilePath`, with a proper header and +-- symbol tables for `Name`s and `FastString`s +writeHieFile :: FilePath -> HieFile -> IO () +writeHieFile hie_file_path hiefile = do + bh0 <- openBinMem initBinMemSize + + -- Write the header: hieHeader followed by the + -- hieVersion and the GHC version used to generate this file + mapM_ (putByte bh0) hieMagic + putBinLine bh0 $ BSC.pack $ show hieVersion + putBinLine bh0 $ ghcVersion + + -- remember where the dictionary pointer will go + dict_p_p <- tellBin bh0 + put_ bh0 dict_p_p + + -- remember where the symbol table pointer will go + symtab_p_p <- tellBin bh0 + put_ bh0 symtab_p_p + + -- Make some intial state + symtab_next <- newFastMutInt + writeFastMutInt symtab_next 0 + symtab_map <- newIORef emptyUFM + let hie_symtab = HieSymbolTable { + hie_symtab_next = symtab_next, + hie_symtab_map = symtab_map } + dict_next_ref <- newFastMutInt + writeFastMutInt dict_next_ref 0 + dict_map_ref <- newIORef emptyUFM + let hie_dict = HieDictionary { + hie_dict_next = dict_next_ref, + hie_dict_map = dict_map_ref } + + -- put the main thing + let bh = setUserData bh0 $ newWriteState (putName hie_symtab) + (putName hie_symtab) + (putFastString hie_dict) + put_ bh hiefile + + -- write the symtab pointer at the front of the file + symtab_p <- tellBin bh + putAt bh symtab_p_p symtab_p + seekBin bh symtab_p + + -- write the symbol table itself + symtab_next' <- readFastMutInt symtab_next + symtab_map' <- readIORef symtab_map + putSymbolTable bh symtab_next' symtab_map' + + -- write the dictionary pointer at the front of the file + dict_p <- tellBin bh + putAt bh dict_p_p dict_p + seekBin bh dict_p + + -- write the dictionary itself + dict_next <- readFastMutInt dict_next_ref + dict_map <- readIORef dict_map_ref + putDictionary bh dict_next dict_map + + -- and send the result to the file + createDirectoryIfMissing True (takeDirectory hie_file_path) + writeBinMem bh hie_file_path + return () + +data HieFileResult + = HieFileResult + { hie_file_result_version :: Integer + , hie_file_result_ghc_version :: ByteString + , hie_file_result :: HieFile + } + +type HieHeader = (Integer, ByteString) + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. Allows you to specify +-- which versions of hieFile to attempt to read. +-- `Left` case returns the failing header versions. +readHieFileWithVersion :: (HieHeader -> Bool) -> NameCacheUpdater -> FilePath -> IO (Either HieHeader HieFileResult) +readHieFileWithVersion readVersion ncu file = do + bh0 <- readBinMem file + + (hieVersion, ghcVersion) <- readHieFileHeader file bh0 + + if readVersion (hieVersion, ghcVersion) + then do + hieFile <- readHieFileContents bh0 ncu + return $ Right (HieFileResult hieVersion ghcVersion hieFile) + else return $ Left (hieVersion, ghcVersion) + + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. 
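+--
+-- A minimal, hypothetical call site (names other than the functions defined
+-- in this module are the caller's own) might look like:
+--
+-- > loadHie :: NameCacheUpdater -> FilePath -> IO HieFile
+-- > loadHie ncu path = hie_file_result <$> readHieFile ncu path
+-- >
+-- > -- or, accepting only files written with a matching hieVersion:
+-- > loadCompat :: NameCacheUpdater -> FilePath -> IO (Either HieHeader HieFileResult)
+-- > loadCompat = readHieFileWithVersion (\(v, _ghc) -> v == hieVersion)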
+readHieFile :: NameCacheUpdater -> FilePath -> IO HieFileResult +readHieFile ncu file = do + + bh0 <- readBinMem file + + (readHieVersion, ghcVersion) <- readHieFileHeader file bh0 + + -- Check if the versions match + when (readHieVersion /= hieVersion) $ + panic $ unwords ["readHieFile: hie file versions don't match for file:" + , file + , "Expected" + , show hieVersion + , "but got", show readHieVersion + ] + hieFile <- readHieFileContents bh0 ncu + return $ HieFileResult hieVersion ghcVersion hieFile + +readBinLine :: BinHandle -> IO ByteString +readBinLine bh = BS.pack . reverse <$> loop [] + where + loop acc = do + char <- get bh :: IO Word8 + if char == 10 -- ASCII newline '\n' + then return acc + else loop (char : acc) + +readHieFileHeader :: FilePath -> BinHandle -> IO HieHeader +readHieFileHeader file bh0 = do + -- Read the header + magic <- replicateM hieMagicLen (get bh0) + version <- BSC.unpack <$> readBinLine bh0 + case maybeRead version of + Nothing -> + panic $ unwords ["readHieFileHeader: hieVersion isn't an Integer:" + , show version + ] + Just readHieVersion -> do + ghcVersion <- readBinLine bh0 + + -- Check if the header is valid + when (magic /= hieMagic) $ + panic $ unwords ["readHieFileHeader: headers don't match for file:" + , file + , "Expected" + , show hieMagic + , "but got", show magic + ] + return (readHieVersion, ghcVersion) + +readHieFileContents :: BinHandle -> NameCacheUpdater -> IO HieFile +readHieFileContents bh0 ncu = do + + dict <- get_dictionary bh0 + + -- read the symbol table so we are capable of reading the actual data + bh1 <- do + let bh1 = setUserData bh0 $ newReadState (error "getSymtabName") + (getDictFastString dict) + symtab <- get_symbol_table bh1 + let bh1' = setUserData bh1 + $ newReadState (getSymTabName symtab) + (getDictFastString dict) + return bh1' + + -- load the actual data + hiefile <- get bh1 + return hiefile + where + get_dictionary bin_handle = do + dict_p <- get bin_handle + data_p <- tellBin bin_handle + seekBin bin_handle dict_p + dict <- getDictionary bin_handle + seekBin bin_handle data_p + return dict + + get_symbol_table bh1 = do + symtab_p <- get bh1 + data_p' <- tellBin bh1 + seekBin bh1 symtab_p + symtab <- getSymbolTable bh1 ncu + seekBin bh1 data_p' + return symtab + +putFastString :: HieDictionary -> BinHandle -> FastString -> IO () +putFastString HieDictionary { hie_dict_next = j_r, + hie_dict_map = out_r} bh f + = do + out <- readIORef out_r + let unique = getUnique f + case lookupUFM out unique of + Just (j, _) -> put_ bh (fromIntegral j :: Word32) + Nothing -> do + j <- readFastMutInt j_r + put_ bh (fromIntegral j :: Word32) + writeFastMutInt j_r (j + 1) + writeIORef out_r $! addToUFM out unique (j, f) + +putSymbolTable :: BinHandle -> Int -> UniqFM (Int,HieName) -> IO () +putSymbolTable bh next_off symtab = do + put_ bh next_off + let names = A.elems (A.array (0,next_off-1) (nonDetEltsUFM symtab)) + mapM_ (putHieName bh) names + +getSymbolTable :: BinHandle -> NameCacheUpdater -> IO SymbolTable +getSymbolTable bh ncu = do + sz <- get bh + od_names <- replicateM sz (getHieName bh) + updateNameCache ncu $ \nc -> + let arr = A.listArray (0,sz-1) names + (nc', names) = mapAccumR fromHieName nc od_names + in (nc',arr) + +getSymTabName :: SymbolTable -> BinHandle -> IO Name +getSymTabName st bh = do + i :: Word32 <- get bh + return $ st A.! 
(fromIntegral i) + +putName :: HieSymbolTable -> BinHandle -> Name -> IO () +putName (HieSymbolTable next ref) bh name = do + symmap <- readIORef ref + case lookupUFM symmap name of + Just (off, ExternalName mod occ (UnhelpfulSpan _)) + | isGoodSrcSpan (nameSrcSpan name) -> do + let hieName = ExternalName mod occ (nameSrcSpan name) + writeIORef ref $! addToUFM symmap name (off, hieName) + put_ bh (fromIntegral off :: Word32) + Just (off, LocalName _occ span) + | notLocal (toHieName name) || nameSrcSpan name /= span -> do + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + Just (off, _) -> put_ bh (fromIntegral off :: Word32) + Nothing -> do + off <- readFastMutInt next + writeFastMutInt next (off+1) + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + + where + notLocal :: HieName -> Bool + notLocal LocalName{} = False + notLocal _ = True + + +-- ** Converting to and from `HieName`'s + +toHieName :: Name -> HieName +toHieName name + | isKnownKeyName name = KnownKeyName (nameUnique name) + | isExternalName name = ExternalName (nameModule name) + (nameOccName name) + (nameSrcSpan name) + | otherwise = LocalName (nameOccName name) (nameSrcSpan name) + +fromHieName :: NameCache -> HieName -> (NameCache, Name) +fromHieName nc (ExternalName mod occ span) = + let cache = nsNames nc + in case lookupOrigNameCache cache mod occ of + Just name + | nameSrcSpan name == span -> (nc, name) + | otherwise -> + let name' = setNameLoc name span + new_cache = extendNameCache cache mod occ name' + in ( nc{ nsNames = new_cache }, name' ) + Nothing -> + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkExternalName uniq mod occ span + new_cache = extendNameCache cache mod occ name + in ( nc{ nsUniqs = us, nsNames = new_cache }, name ) +fromHieName nc (LocalName occ span) = + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkInternalName uniq occ span + in ( nc{ nsUniqs = us }, name ) +fromHieName nc (KnownKeyName u) = case lookupKnownKeyName u of + Nothing -> pprPanic "fromHieName:unknown known-key unique" + (ppr (unpkUnique u)) + Just n -> (nc, n) + +-- ** Reading and writing `HieName`'s + +putHieName :: BinHandle -> HieName -> IO () +putHieName bh (ExternalName mod occ span) = do + putByte bh 0 + put_ bh (mod, occ, span) +putHieName bh (LocalName occName span) = do + putByte bh 1 + put_ bh (occName, span) +putHieName bh (KnownKeyName uniq) = do + putByte bh 2 + put_ bh $ unpkUnique uniq + +getHieName :: BinHandle -> IO HieName +getHieName bh = do + t <- getByte bh + case t of + 0 -> do + (modu, occ, span) <- get bh + return $ ExternalName modu occ span + 1 -> do + (occ, span) <- get bh + return $ LocalName occ span + 2 -> do + (c,i) <- get bh + return $ KnownKeyName $ mkUnique c i + _ -> panic "HieBin.getHieName: invalid tag" diff --git a/hie-compat/src-ghc86/Compat/HieAst.hs b/hie-compat/src-ghc86/Compat/HieAst.hs new file mode 100644 index 00000000000..6b019a0dbfb --- /dev/null +++ b/hie-compat/src-ghc86/Compat/HieAst.hs @@ -0,0 +1,1783 @@ +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} +{- +Forked from GHC v8.8.1 to work around the readFile side effect in mkHiefile + +Main functions for .hie file generation +-} +{- HLINT ignore -} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +{-# 
LANGUAGE TypeApplications #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE DataKinds #-} +module Compat.HieAst ( mkHieFile, enrichHie ) where + +import Avail ( Avails ) +import Bag ( Bag, bagToList ) +import BasicTypes +import BooleanFormula +import Class ( FunDep ) +import CoreUtils ( exprType ) +import ConLike ( conLikeName ) +import Desugar ( deSugarExpr ) +import FieldLabel +import HsSyn +import HscTypes +import Module ( ModuleName, ml_hs_file ) +import MonadUtils ( concatMapM, liftIO ) +import Name ( Name, nameSrcSpan ) +import SrcLoc +import TcHsSyn ( hsLitType, hsPatType ) +import Type ( mkFunTys, Type ) +import TysWiredIn ( mkListTy, mkSumTy ) +import Var ( Id, Var, setVarName, varName, varType ) +import TcRnTypes +import MkIface ( mkIfaceExports ) + +import Compat.HieTypes +import Compat.HieUtils + +import qualified Data.Array as A +import qualified Data.ByteString as BS +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Data ( Data, Typeable ) +import Data.List (foldl', foldl1' ) +import Control.Monad.Trans.Reader +import Control.Monad.Trans.Class ( lift ) + +-- These synonyms match those defined in main/GHC.hs +type RenamedSource = ( HsGroup GhcRn, [LImportDecl GhcRn] + , Maybe [(LIE GhcRn, Avails)] + , Maybe LHsDocString ) +type TypecheckedSource = LHsBinds GhcTc + +-- | Marks that a field uses the GhcRn variant even when the pass +-- parameter is GhcTc. Useful for storing HsTypes in HsExprs, say, because +-- HsType GhcTc should never occur. +type family NoGhcTc (p :: *) where + -- this way, GHC can figure out that the result is a GhcPass + NoGhcTc (GhcPass pass) = GhcPass (NoGhcTcPass pass) + NoGhcTc other = other + +type family NoGhcTcPass (p :: Pass) :: Pass where + NoGhcTcPass 'Typechecked = 'Renamed + NoGhcTcPass other = other + +{- Note [Name Remapping] +The Typechecker introduces new names for mono names in AbsBinds. +We don't care about the distinction between mono and poly bindings, +so we replace all occurrences of the mono name with the poly name. +-} +newtype HieState = HieState + { name_remapping :: M.Map Name Id + } + +initState :: HieState +initState = HieState M.empty + +class ModifyState a where -- See Note [Name Remapping] + addSubstitution :: a -> a -> HieState -> HieState + +instance ModifyState Name where + addSubstitution _ _ hs = hs + +instance ModifyState Id where + addSubstitution mono poly hs = + hs{name_remapping = M.insert (varName mono) poly (name_remapping hs)} + +modifyState :: ModifyState (IdP p) => [ABExport p] -> HieState -> HieState +modifyState = foldr go id + where + go ABE{abe_poly=poly,abe_mono=mono} f = addSubstitution mono poly . f + go _ f = f + +type HieM = ReaderT HieState Hsc + +-- | Construct an 'HieFile' from the outputs of the typechecker. 
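+--
+-- Illustrative only: since this fork takes the module source as an explicit
+-- 'BS.ByteString' argument, a hypothetical caller reads the bytes itself
+-- rather than relying on 'mkHieFile' to re-read the file:
+--
+-- > makeHie :: ModSummary -> TcGblEnv -> RenamedSource -> Hsc HieFile
+-- > makeHie ms tcg rs = do
+-- >   src <- liftIO $ BS.readFile (msHsFilePath ms)  -- hypothetical way to obtain the source
+-- >   mkHieFile ms tcg rs src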
+mkHieFile :: ModSummary + -> TcGblEnv + -> RenamedSource + -> BS.ByteString + -> Hsc HieFile +mkHieFile ms ts rs src = do + let tc_binds = tcg_binds ts + (asts', arr) <- getCompressedAsts tc_binds rs + let Just src_file = ml_hs_file $ ms_location ms + return $ HieFile + { hie_hs_file = src_file + , hie_module = ms_mod ms + , hie_types = arr + , hie_asts = asts' + -- mkIfaceExports sorts the AvailInfos for stability + , hie_exports = mkIfaceExports (tcg_exports ts) + , hie_hs_src = src + } + +getCompressedAsts :: TypecheckedSource -> RenamedSource + -> Hsc (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) +getCompressedAsts ts rs = do + asts <- enrichHie ts rs + return $ compressTypes asts + +enrichHie :: TypecheckedSource -> RenamedSource -> Hsc (HieASTs Type) +enrichHie ts (hsGrp, imports, exports, _) = flip runReaderT initState $ do + tasts <- toHie $ fmap (BC RegularBind ModuleScope) ts + rasts <- processGrp hsGrp + imps <- toHie $ filter (not . ideclImplicit . unLoc) imports + exps <- toHie $ fmap (map $ IEC Export . fst) exports + let spanFile children = case children of + [] -> mkRealSrcSpan (mkRealSrcLoc "" 1 1) (mkRealSrcLoc "" 1 1) + _ -> mkRealSrcSpan (realSrcSpanStart $ nodeSpan $ head children) + (realSrcSpanEnd $ nodeSpan $ last children) + + modulify xs = + Node (simpleNodeInfo "Module" "Module") (spanFile xs) xs + + asts = HieASTs + $ resolveTyVarScopes + $ M.map (modulify . mergeSortAsts) + $ M.fromListWith (++) + $ map (\x -> (srcSpanFile (nodeSpan x),[x])) flat_asts + + flat_asts = concat + [ tasts + , rasts + , imps + , exps + ] + return asts + where + processGrp grp = concatM + [ toHie $ fmap (RS ModuleScope ) hs_valds grp + , toHie $ hs_splcds grp + , toHie $ hs_tyclds grp + , toHie $ hs_derivds grp + , toHie $ hs_fixds grp + , toHie $ hs_defds grp + , toHie $ hs_fords grp + , toHie $ hs_warnds grp + , toHie $ hs_annds grp + , toHie $ hs_ruleds grp + ] + +getRealSpan :: SrcSpan -> Maybe Span +getRealSpan (RealSrcSpan sp) = Just sp +getRealSpan _ = Nothing + +grhss_span :: GRHSs p body -> SrcSpan +grhss_span (GRHSs _ xs bs) = foldl' combineSrcSpans (getLoc bs) (map getLoc xs) +grhss_span (XGRHSs _) = error "XGRHS has no span" + +bindingsOnly :: [Context Name] -> [HieAST a] +bindingsOnly [] = [] +bindingsOnly (C c n : xs) = case nameSrcSpan n of + RealSrcSpan span -> Node nodeinfo span [] : bindingsOnly xs + where nodeinfo = NodeInfo S.empty [] (M.singleton (Right n) info) + info = mempty{identInfo = S.singleton c} + _ -> bindingsOnly xs + +concatM :: Monad m => [m [a]] -> m [a] +concatM xs = concat <$> sequence xs + +{- Note [Capturing Scopes and other non local information] +toHie is a local tranformation, but scopes of bindings cannot be known locally, +hence we have to push the relevant info down into the binding nodes. +We use the following types (*Context and *Scoped) to wrap things and +carry the required info +(Maybe Span) always carries the span of the entire binding, including rhs +-} +data Context a = C ContextInfo a -- Used for names and bindings + +data RContext a = RC RecFieldContext a +data RFContext a = RFC RecFieldContext (Maybe Span) a +-- ^ context for record fields + +data IEContext a = IEC IEType a +-- ^ context for imports/exports + +data BindContext a = BC BindType Scope a +-- ^ context for imports/exports + +data PatSynFieldContext a = PSC (Maybe Span) a +-- ^ context for pattern synonym fields. 
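+
+{- For illustration (not part of the original module): these wrappers are
+   applied at the call sites of 'toHie' to record what kind of occurrence is
+   being traversed, e.g.
+
+     toHie $ fmap (BC RegularBind ModuleScope) typechecked_binds
+     toHie $ fmap (map $ IEC Export . fst) exports
+     toHie $ C Use located_name
+
+   as in 'enrichHie' above; 'typechecked_binds', 'exports' and 'located_name'
+   are placeholders for the caller's values.
+-}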
+ +data SigContext a = SC SigInfo a +-- ^ context for type signatures + +data SigInfo = SI SigType (Maybe Span) + +data SigType = BindSig | ClassSig | InstSig + +data RScoped a = RS Scope a +-- ^ Scope spans over everything to the right of a, (mostly) not +-- including a itself +-- (Includes a in a few special cases like recursive do bindings) or +-- let/where bindings + +-- | Pattern scope +data PScoped a = PS (Maybe Span) + Scope -- ^ use site of the pattern + Scope -- ^ pattern to the right of a, not including a + a + deriving (Typeable, Data) -- Pattern Scope + +{- Note [TyVar Scopes] +Due to -XScopedTypeVariables, type variables can be in scope quite far from +their original binding. We resolve the scope of these type variables +in a separate pass +-} +data TScoped a = TS TyVarScope a -- TyVarScope + +data TVScoped a = TVS TyVarScope Scope a -- TyVarScope +-- ^ First scope remains constant +-- Second scope is used to build up the scope of a tyvar over +-- things to its right, ala RScoped + +-- | Each element scopes over the elements to the right +listScopes :: Scope -> [Located a] -> [RScoped (Located a)] +listScopes _ [] = [] +listScopes rhsScope [pat] = [RS rhsScope pat] +listScopes rhsScope (pat : pats) = RS sc pat : pats' + where + pats'@((RS scope p):_) = listScopes rhsScope pats + sc = combineScopes scope $ mkScope $ getLoc p + +-- | 'listScopes' specialised to 'PScoped' things +patScopes + :: Maybe Span + -> Scope + -> Scope + -> [LPat (GhcPass p)] + -> [PScoped (LPat (GhcPass p))] +patScopes rsp useScope patScope xs = + map (\(RS sc a) -> PS rsp useScope sc a) $ + listScopes patScope xs + +-- | 'listScopes' specialised to 'TVScoped' things +tvScopes + :: TyVarScope + -> Scope + -> [LHsTyVarBndr a] + -> [TVScoped (LHsTyVarBndr a)] +tvScopes tvScope rhsScope xs = + map (\(RS sc a)-> TVS tvScope sc a) $ listScopes rhsScope xs + +{- Note [Scoping Rules for SigPat] +Explicitly quantified variables in pattern type signatures are not +brought into scope in the rhs, but implicitly quantified variables +are (HsWC and HsIB). +This is unlike other signatures, where explicitly quantified variables +are brought into the RHS Scope +For example +foo :: forall a. ...; +foo = ... -- a is in scope here + +bar (x :: forall a. a -> a) = ... -- a is not in scope here +-- ^ a is in scope here (pattern body) + +bax (x :: a) = ... 
-- a is in scope here +Because of HsWC and HsIB pass on their scope to their children +we must wrap the LHsType in pattern signatures in a +Shielded explictly, so that the HsWC/HsIB scope is not passed +on the the LHsType +-} + +data Shielded a = SH Scope a -- Ignores its TScope, uses its own scope instead + +type family ProtectedSig a where + ProtectedSig GhcRn = HsWildCardBndrs GhcRn (HsImplicitBndrs + GhcRn + (Shielded (LHsType GhcRn))) + ProtectedSig GhcTc = NoExt + +class ProtectSig a where + protectSig :: Scope -> XSigPat a -> ProtectedSig a + +instance (HasLoc a) => HasLoc (Shielded a) where + loc (SH _ a) = loc a + +instance (ToHie (TScoped a)) => ToHie (TScoped (Shielded a)) where + toHie (TS _ (SH sc a)) = toHie (TS (ResolvedScopes [sc]) a) + +instance ProtectSig GhcTc where + protectSig _ _ = NoExt + +instance ProtectSig GhcRn where + protectSig sc (HsWC a (HsIB b sig)) = + HsWC a (HsIB b (SH sc sig)) + protectSig _ _ = error "protectSig not given HsWC (HsIB)" + +class HasLoc a where + -- ^ defined so that HsImplicitBndrs and HsWildCardBndrs can + -- know what their implicit bindings are scoping over + loc :: a -> SrcSpan + +instance HasLoc thing => HasLoc (TScoped thing) where + loc (TS _ a) = loc a + +instance HasLoc thing => HasLoc (PScoped thing) where + loc (PS _ _ _ a) = loc a + +instance HasLoc (LHsQTyVars GhcRn) where + loc (HsQTvs _ vs) = loc vs + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsImplicitBndrs a thing) where + loc (HsIB _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsWildCardBndrs a thing) where + loc (HsWC _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc (Located a) where + loc (L l _) = l + +instance HasLoc a => HasLoc [a] where + loc [] = noSrcSpan + loc xs = foldl1' combineSrcSpans $ map loc xs + +instance (HasLoc a, HasLoc b) => HasLoc (FamEqn s a b) where + loc (FamEqn _ a b _ c) = foldl1' combineSrcSpans [loc a, loc b, loc c] + loc _ = noSrcSpan +{- +instance (HasLoc tm, HasLoc ty) => HasLoc (HsArg tm ty) where + loc (HsValArg tm) = loc tm + loc (HsTypeArg _ ty) = loc ty + loc (HsArgPar sp) = sp +-} + +instance HasLoc (HsDataDefn GhcRn) where + loc def@(HsDataDefn{}) = loc $ dd_cons def + -- Only used for data family instances, so we only need rhs + -- Most probably the rest will be unhelpful anyway + loc _ = noSrcSpan + +-- | The main worker class +class ToHie a where + toHie :: a -> HieM [HieAST Type] + +-- | Used to collect type info +class Data a => HasType a where + getTypeNode :: a -> HieM [HieAST Type] + +instance (ToHie a) => ToHie [a] where + toHie = concatMapM toHie + +instance (ToHie a) => ToHie (Bag a) where + toHie = toHie . 
bagToList + +instance (ToHie a) => ToHie (Maybe a) where + toHie = maybe (pure []) toHie + +instance ToHie (Context (Located NoExt)) where + toHie _ = pure [] + +instance ToHie (TScoped NoExt) where + toHie _ = pure [] + +instance ToHie (IEContext (Located ModuleName)) where + toHie (IEC c (L (RealSrcSpan span) mname)) = + pure $ [Node (NodeInfo S.empty [] idents) span []] + where details = mempty{identInfo = S.singleton (IEThing c)} + idents = M.singleton (Left mname) details + toHie _ = pure [] + +instance ToHie (Context (Located Var)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') + -> do + m <- asks name_remapping + let name = M.findWithDefault name' (varName name') m + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right $ varName name) + (IdentifierDetails (Just $ varType name') + (S.singleton context))) + span + []] + _ -> pure [] + +instance ToHie (Context (Located Name)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') -> do + m <- asks name_remapping + let name = case M.lookup name' m of + Just var -> varName var + Nothing -> name' + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right name) + (IdentifierDetails Nothing + (S.singleton context))) + span + []] + _ -> pure [] + +-- | Dummy instances - never called +instance ToHie (TScoped (LHsSigWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped (LHsWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (SigContext (LSig GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped Type) where + toHie _ = pure [] + +instance HasType (LHsBind GhcRn) where + getTypeNode (L spn bind) = makeNode bind spn + +instance HasType (LHsBind GhcTc) where + getTypeNode (L spn bind) = case bind of + FunBind{fun_id = name} -> makeTypeNode bind spn (varType $ unLoc name) + _ -> makeNode bind spn + +instance HasType (LPat GhcRn) where + getTypeNode (L spn pat) = makeNode pat spn + +instance HasType (LPat GhcTc) where + getTypeNode (L spn opat) = makeTypeNode opat spn (hsPatType opat) + +instance HasType (LHsExpr GhcRn) where + getTypeNode (L spn e) = makeNode e spn + +-- | This instance tries to construct 'HieAST' nodes which include the type of +-- the expression. It is not yet possible to do this efficiently for all +-- expression forms, so we skip filling in the type for those inputs. +-- +-- 'HsApp', for example, doesn't have any type information available directly on +-- the node. Our next recourse would be to desugar it into a 'CoreExpr' then +-- query the type of that. Yet both the desugaring call and the type query both +-- involve recursive calls to the function and argument! This is particularly +-- problematic when you realize that the HIE traversal will eventually visit +-- those nodes too and ask for their types again. +-- +-- Since the above is quite costly, we just skip cases where computing the +-- expression's type is going to be expensive. 
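+--
+-- Concretely (an illustrative fragment mirroring the case analysis below):
+--
+-- > HsLit _ l -> Just (hsLitType l)   -- the type is available on the node
+-- > HsApp {}  -> Nothing              -- would need desugaring, which we skip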
+-- +-- See #16233 +instance HasType (LHsExpr GhcTc) where + getTypeNode e@(L spn e') = lift $ + -- Some expression forms have their type immediately available + let tyOpt = case e' of + HsLit _ l -> Just (hsLitType l) + HsOverLit _ o -> Just (overLitType o) + + HsLam _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsLamCase _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsCase _ _ (MG { mg_ext = groupTy }) -> Just (mg_res_ty groupTy) + + ExplicitList ty _ _ -> Just (mkListTy ty) + ExplicitSum ty _ _ _ -> Just (mkSumTy ty) + HsDo ty _ _ -> Just ty + HsMultiIf ty _ -> Just ty + + _ -> Nothing + + in + case tyOpt of + _ | skipDesugaring e' -> fallback + | otherwise -> do + hs_env <- Hsc $ \e w -> return (e,w) + (_,mbe) <- liftIO $ deSugarExpr hs_env e + maybe fallback (makeTypeNode e' spn . exprType) mbe + where + fallback = makeNode e' spn + + matchGroupType :: MatchGroupTc -> Type + matchGroupType (MatchGroupTc args res) = mkFunTys args res + + -- | Skip desugaring of these expressions for performance reasons. + -- + -- See impact on Haddock output (esp. missing type annotations or links) + -- before marking more things here as 'False'. See impact on Haddock + -- performance before marking more things as 'True'. + skipDesugaring :: HsExpr a -> Bool + skipDesugaring e = case e of + HsVar{} -> False + HsUnboundVar{} -> False + HsConLikeOut{} -> False + HsRecFld{} -> False + HsOverLabel{} -> False + HsIPVar{} -> False + HsWrap{} -> False + _ -> True + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a (LHsExpr a)) + , ToHie (LHsExpr a) + , ToHie (Located (PatSynBind a a)) + , HasType (LHsBind a) + , ModifyState (IdP a) + , Data (HsBind a) + ) => ToHie (BindContext (LHsBind a)) where + toHie (BC context scope b@(L span bind)) = + concatM $ getTypeNode b : case bind of + FunBind{fun_id = name, fun_matches = matches} -> + [ toHie $ C (ValBind context scope $ getRealSpan span) name + , toHie matches + ] + PatBind{pat_lhs = lhs, pat_rhs = rhs} -> + [ toHie $ PS (getRealSpan span) scope NoScope lhs + , toHie rhs + ] + VarBind{var_rhs = expr} -> + [ toHie expr + ] + AbsBinds{abs_exports = xs, abs_binds = binds} -> + [ local (modifyState xs) $ -- Note [Name Remapping] + toHie $ fmap (BC context scope) binds + ] + PatSynBind _ psb -> + [ toHie $ L span psb -- PatSynBinds only occur at the top level + ] + XHsBindsLR _ -> [] + +instance ( ToHie (LMatch a body) + ) => ToHie (MatchGroup a body) where + toHie mg = concatM $ case mg of + MG{ mg_alts = (L span alts) , mg_origin = FromSource } -> + [ pure $ locOnly span + , toHie alts + ] + MG{} -> [] + XMatchGroup _ -> [] + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (HsPatSynDir a) + ) => ToHie (Located (PatSynBind a a)) where + toHie (L sp psb) = concatM $ case psb of + PSB{psb_id=var, psb_args=dets, psb_def=pat, psb_dir=dir} -> + [ toHie $ C (Decl PatSynDec $ getRealSpan sp) var + , toHie $ toBind dets + , toHie $ PS Nothing lhsScope NoScope pat + , toHie dir + ] + where + lhsScope = combineScopes varScope detScope + varScope = mkLScope var + detScope = case dets of + (PrefixCon args) -> foldr combineScopes NoScope $ map mkLScope args + (InfixCon a b) -> combineScopes (mkLScope a) (mkLScope b) + (RecCon r) -> foldr go NoScope r + go (RecordPatSynField a b) c = combineScopes c + $ combineScopes (mkLScope a) (mkLScope b) + detSpan = case detScope of + LocalScope a -> Just a + _ -> Nothing + toBind (PrefixCon 
args) = PrefixCon $ map (C Use) args + toBind (InfixCon a b) = InfixCon (C Use a) (C Use b) + toBind (RecCon r) = RecCon $ map (PSC detSpan) r + XPatSynBind _ -> [] + +instance ( ToHie (MatchGroup a (LHsExpr a)) + ) => ToHie (HsPatSynDir a) where + toHie dir = case dir of + ExplicitBidirectional mg -> toHie mg + _ -> pure [] + +instance ( a ~ GhcPass p + , ToHie body + , ToHie (HsMatchContext (NameOrRdrName (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a body) + , Data (Match a body) + ) => ToHie (LMatch (GhcPass p) body) where + toHie (L span m ) = concatM $ makeNode m span : case m of + Match{m_ctxt=mctx, m_pats = pats, m_grhss = grhss } -> + [ toHie mctx + , let rhsScope = mkScope $ grhss_span grhss + in toHie $ patScopes Nothing rhsScope NoScope pats + , toHie grhss + ] + XMatch _ -> [] + +instance ( ToHie (Context (Located a)) + ) => ToHie (HsMatchContext a) where + toHie (FunRhs{mc_fun=name}) = toHie $ C MatchBind name + toHie (StmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( ToHie (HsMatchContext a) + ) => ToHie (HsStmtContext a) where + toHie (PatGuard a) = toHie a + toHie (ParStmtCtxt a) = toHie a + toHie (TransStmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , ToHie (RContext (HsRecFields a (PScoped (LPat a)))) + , ToHie (LHsExpr a) + , ToHie (TScoped (LHsSigWcType a)) + , ProtectSig a + , ToHie (TScoped (ProtectedSig a)) + , HasType (LPat a) + , Data (HsSplice a) + ) => ToHie (PScoped (LPat (GhcPass p))) where + toHie (PS rsp scope pscope lpat@(L ospan opat)) = + concatM $ getTypeNode lpat : case opat of + WildPat _ -> + [] + VarPat _ lname -> + [ toHie $ C (PatternBind scope pscope rsp) lname + ] + LazyPat _ p -> + [ toHie $ PS rsp scope pscope p + ] + AsPat _ lname pat -> + [ toHie $ C (PatternBind scope + (combineScopes (mkLScope pat) pscope) + rsp) + lname + , toHie $ PS rsp scope pscope pat + ] + ParPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + BangPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + ListPat _ pats -> + [ toHie $ patScopes rsp scope pscope pats + ] + TuplePat _ pats _ -> + [ toHie $ patScopes rsp scope pscope pats + ] + SumPat _ pat _ _ -> + [ toHie $ PS rsp scope pscope pat + ] + ConPatIn c dets -> + [ toHie $ C Use c + , toHie $ contextify dets + ] + ConPatOut {pat_con = con, pat_args = dets}-> + [ toHie $ C Use $ fmap conLikeName con + , toHie $ contextify dets + ] + ViewPat _ expr pat -> + [ toHie expr + , toHie $ PS rsp scope pscope pat + ] + SplicePat _ sp -> + [ toHie $ L ospan sp + ] + LitPat _ _ -> + [] + NPat _ _ _ _ -> + [] + NPlusKPat _ n _ _ _ _ -> + [ toHie $ C (PatternBind scope pscope rsp) n + ] + SigPat sig pat -> + [ toHie $ PS rsp scope pscope pat + , let cscope = mkLScope pat in + toHie $ TS (ResolvedScopes [cscope, scope, pscope]) + (protectSig @a cscope sig) + -- See Note [Scoping Rules for SigPat] + ] + CoPat _ _ _ _ -> + [] + XPat _ -> [] + where + contextify (PrefixCon args) = PrefixCon $ patScopes rsp scope pscope args + contextify (InfixCon a b) = InfixCon a' b' + where [a', b'] = patScopes rsp scope pscope [a,b] + contextify (RecCon r) = RecCon $ RC RecFieldMatch $ contextify_rec r + contextify_rec (HsRecFields fds a) = HsRecFields (map go scoped_fds) a + where + go (RS fscope (L spn (HsRecField lbl pat pun))) = + L spn $ HsRecField lbl (PS rsp scope fscope pat) pun + scoped_fds = listScopes pscope fds + +instance ( ToHie body + , ToHie (LGRHS a body) + , ToHie (RScoped (LHsLocalBinds a)) + ) => ToHie (GRHSs a body) where + toHie grhs = 
concatM $ case grhs of + GRHSs _ grhss binds -> + [ toHie grhss + , toHie $ RS (mkScope $ grhss_span grhs) binds + ] + XGRHSs _ -> [] + +instance ( ToHie (Located body) + , ToHie (RScoped (GuardLStmt a)) + , Data (GRHS a (Located body)) + ) => ToHie (LGRHS a (Located body)) where + toHie (L span g) = concatM $ makeNode g span : case g of + GRHS _ guards body -> + [ toHie $ listScopes (mkLScope body) guards + , toHie body + ] + XGRHS _ -> [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , HasType (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (LGRHS a (LHsExpr a)) + , ToHie (RContext (HsRecordBinds a)) + , ToHie (RFContext (Located (AmbiguousFieldOcc a))) + , ToHie (ArithSeqInfo a) + , ToHie (LHsCmdTop a) + , ToHie (RScoped (GuardLStmt a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (TScoped (LHsWcType (NoGhcTc a))) + , ToHie (TScoped (LHsSigWcType (NoGhcTc a))) + , ToHie (TScoped (XExprWithTySig (GhcPass p))) + , ToHie (TScoped (XAppTypeE (GhcPass p))) + , Data (HsExpr a) + , Data (HsSplice a) + , Data (HsTupArg a) + , Data (AmbiguousFieldOcc a) + ) => ToHie (LHsExpr (GhcPass p)) where + toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of + HsVar _ (L _ var) -> + [ toHie $ C Use (L mspan var) + -- Patch up var location since typechecker removes it + ] + HsUnboundVar _ _ -> + [] + HsConLikeOut _ con -> + [ toHie $ C Use $ L mspan $ conLikeName con + ] + HsRecFld _ fld -> + [ toHie $ RFC RecFieldOcc Nothing (L mspan fld) + ] + HsOverLabel _ _ _ -> [] + HsIPVar _ _ -> [] + HsOverLit _ _ -> [] + HsLit _ _ -> [] + HsLam _ mg -> + [ toHie mg + ] + HsLamCase _ mg -> + [ toHie mg + ] + HsApp _ a b -> + [ toHie a + , toHie b + ] + HsAppType sig expr -> + [ toHie expr + , toHie $ TS (ResolvedScopes []) sig + ] + OpApp _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + NegApp _ a _ -> + [ toHie a + ] + HsPar _ a -> + [ toHie a + ] + SectionL _ a b -> + [ toHie a + , toHie b + ] + SectionR _ a b -> + [ toHie a + , toHie b + ] + ExplicitTuple _ args _ -> + [ toHie args + ] + ExplicitSum _ _ _ expr -> + [ toHie expr + ] + HsCase _ expr matches -> + [ toHie expr + , toHie matches + ] + HsIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsMultiIf _ grhss -> + [ toHie grhss + ] + HsLet _ binds expr -> + [ toHie $ RS (mkLScope expr) binds + , toHie expr + ] + HsDo _ _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + ExplicitList _ _ exprs -> + [ toHie exprs + ] + RecordCon {rcon_con_name = name, rcon_flds = binds}-> + [ toHie $ C Use name + , toHie $ RC RecFieldAssign $ binds + ] + RecordUpd {rupd_expr = expr, rupd_flds = upds}-> + [ toHie expr + , toHie $ map (RC RecFieldAssign) upds + ] + ExprWithTySig sig expr -> + [ toHie expr + , toHie $ TS (ResolvedScopes [mkLScope expr]) sig + ] + ArithSeq _ _ info -> + [ toHie info + ] + HsSCC _ _ _ expr -> + [ toHie expr + ] + HsCoreAnn _ _ _ expr -> + [ toHie expr + ] + HsProc _ pat cmdtop -> + [ toHie $ PS Nothing (mkLScope cmdtop) NoScope pat + , toHie cmdtop + ] + HsStatic _ expr -> + [ toHie expr + ] + HsArrApp _ a b _ _ -> + [ toHie a + , toHie b + ] + HsArrForm _ expr _ cmds -> + [ toHie expr + , toHie cmds + ] + HsTick _ _ expr -> + [ toHie expr + ] + HsBinTick _ _ _ expr -> + [ toHie expr + ] + HsTickPragma _ _ _ _ expr -> + [ toHie expr + ] + HsWrap _ _ a -> + [ toHie $ L mspan a + ] + HsBracket _ b -> + [ toHie b + ] + HsRnBracketOut _ b p -> + [ toHie b + , toHie p + ] + HsTcBracketOut _ b p -> + [ toHie b + , toHie p + ] + 
HsSpliceE _ x -> + [ toHie $ L mspan x + ] + EWildPat _ -> [] + EAsPat _ a b -> + [ toHie $ C Use a + , toHie b + ] + EViewPat _ a b -> + [ toHie a + , toHie b + ] + ELazyPat _ a -> + [ toHie a + ] + XExpr _ -> [] + +instance ( a ~ GhcPass p + , ToHie (LHsExpr a) + , Data (HsTupArg a) + ) => ToHie (LHsTupArg (GhcPass p)) where + toHie (L span arg) = concatM $ makeNode arg span : case arg of + Present _ expr -> + [ toHie expr + ] + Missing _ -> [] + XTupArg _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (RScoped (ApplicativeArg a)) + , ToHie (Located body) + , Data (StmtLR a a (Located body)) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (RScoped (LStmt (GhcPass p) (Located body))) where + toHie (RS scope (L span stmt)) = concatM $ makeNode stmt span : case stmt of + LastStmt _ body _ _ -> + [ toHie body + ] + BindStmt _ pat body _ _ -> + [ toHie $ PS (getRealSpan $ getLoc body) scope NoScope pat + , toHie body + ] + ApplicativeStmt _ stmts _ -> + [ concatMapM (toHie . RS scope . snd) stmts + ] + BodyStmt _ body _ _ -> + [ toHie body + ] + LetStmt _ binds -> + [ toHie $ RS scope binds + ] + ParStmt _ parstmts _ _ -> + [ concatMapM (\(ParStmtBlock _ stmts _ _) -> + toHie $ listScopes NoScope stmts) + parstmts + ] + TransStmt {trS_stmts = stmts, trS_using = using, trS_by = by} -> + [ toHie $ listScopes scope stmts + , toHie using + , toHie by + ] + RecStmt {recS_stmts = stmts} -> + [ toHie $ map (RS $ combineScopes scope (mkScope span)) stmts + ] + XStmtLR _ -> [] + +instance ( ToHie (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (LHsLocalBinds a)) where + toHie (RS scope (L sp binds)) = concatM $ makeNode binds sp : case binds of + EmptyLocalBinds _ -> [] + HsIPBinds _ _ -> [] + HsValBinds _ valBinds -> + [ toHie $ RS (combineScopes scope $ mkScope sp) + valBinds + ] + XHsLocalBindsLR _ -> [] + +instance ( ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (XXValBindsLR a a)) + ) => ToHie (RScoped (HsValBindsLR a a)) where + toHie (RS sc v) = concatM $ case v of + ValBinds _ binds sigs -> + [ toHie $ fmap (BC RegularBind sc) binds + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + XValBindsLR x -> [ toHie $ RS sc x ] + +instance ToHie (RScoped (NHsValBindsLR GhcTc)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] +instance ToHie (RScoped (NHsValBindsLR GhcRn)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . 
snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + +instance ( ToHie (RContext (LHsRecField a arg)) + ) => ToHie (RContext (HsRecFields a arg)) where + toHie (RC c (HsRecFields fields _)) = toHie $ map (RC c) fields + +instance ( ToHie (RFContext (Located label)) + , ToHie arg + , HasLoc arg + , Data label + , Data arg + ) => ToHie (RContext (LHsRecField' label arg)) where + toHie (RC c (L span recfld)) = concatM $ makeNode recfld span : case recfld of + HsRecField label expr _ -> + [ toHie $ RFC c (getRealSpan $ loc expr) label + , toHie expr + ] + +instance ToHie (RFContext (LFieldOcc GhcRn)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc name _ -> + [ toHie $ C (RecField c rhs) (L nspan name) + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (LFieldOcc GhcTc)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcRn))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous name _ -> + [ toHie $ C (RecField c rhs) $ L nspan name + ] + Ambiguous _name _ -> + [ ] + XAmbiguousFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcTc))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + Ambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XAmbiguousFieldOcc _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (StmtLR a a (Located (HsExpr a))) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (ApplicativeArg (GhcPass p))) where + toHie (RS sc (ApplicativeArgOne _ pat expr _)) = concatM + [ toHie $ PS Nothing sc NoScope pat + , toHie expr + ] + toHie (RS sc (ApplicativeArgMany _ stmts _ pat)) = concatM + [ toHie $ listScopes NoScope stmts + , toHie $ PS Nothing sc NoScope pat + ] + toHie (RS _ (XApplicativeArg _)) = pure [] + +instance (ToHie arg, ToHie rec) => ToHie (HsConDetails arg rec) where + toHie (PrefixCon args) = toHie args + toHie (RecCon rec) = toHie rec + toHie (InfixCon a b) = concatM [ toHie a, toHie b] + +instance ( ToHie (LHsCmd a) + , Data (HsCmdTop a) + ) => ToHie (LHsCmdTop a) where + toHie (L span top) = concatM $ makeNode top span : case top of + HsCmdTop _ cmd -> + [ toHie cmd + ] + XCmdTop _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (MatchGroup a (LHsCmd a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsCmd a) + , Data (HsCmdTop a) + , Data (StmtLR a a (Located (HsCmd a))) + , Data (HsLocalBinds a) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (LHsCmd (GhcPass p)) where + toHie (L span cmd) = concatM $ makeNode cmd span : case cmd of + HsCmdArrApp _ a b _ _ -> + [ toHie a + , toHie b + ] + HsCmdArrForm _ a _ _ cmdtops -> + [ toHie a + , toHie cmdtops + ] + HsCmdApp _ a b -> + [ toHie a + , toHie b + ] + HsCmdLam _ mg -> + [ toHie mg + ] + HsCmdPar _ a -> + [ toHie a + ] + HsCmdCase _ expr alts -> + [ toHie expr + , toHie alts + ] + HsCmdIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsCmdLet _ binds cmd' -> 
+ [ toHie $ RS (mkLScope cmd') binds + , toHie cmd' + ] + HsCmdDo _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + HsCmdWrap _ _ _ -> [] + XCmd _ -> [] + +instance ToHie (TyClGroup GhcRn) where + toHie (TyClGroup _ classes roles instances) = concatM + [ toHie classes + , toHie roles + , toHie instances + ] + toHie (XTyClGroup _) = pure [] + +instance ToHie (LTyClDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamDecl {tcdFam = fdecl} -> + [ toHie (L span fdecl) + ] + SynDecl {tcdLName = name, tcdTyVars = vars, tcdRhs = typ} -> + [ toHie $ C (Decl SynDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [mkScope $ getLoc typ]) vars + , toHie typ + ] + DataDecl {tcdLName = name, tcdTyVars = vars, tcdDataDefn = defn} -> + [ toHie $ C (Decl DataDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [quant_scope, rhs_scope]) vars + , toHie defn + ] + where + quant_scope = mkLScope $ dd_ctxt defn + rhs_scope = sig_sc `combineScopes` con_sc `combineScopes` deriv_sc + sig_sc = maybe NoScope mkLScope $ dd_kindSig defn + con_sc = foldr combineScopes NoScope $ map mkLScope $ dd_cons defn + deriv_sc = mkLScope $ dd_derivs defn + ClassDecl { tcdCtxt = context + , tcdLName = name + , tcdTyVars = vars + , tcdFDs = deps + , tcdSigs = sigs + , tcdMeths = meths + , tcdATs = typs + , tcdATDefs = deftyps + } -> + [ toHie $ C (Decl ClassDec $ getRealSpan span) name + , toHie context + , toHie $ TS (ResolvedScopes [context_scope, rhs_scope]) vars + , toHie deps + , toHie $ map (SC $ SI ClassSig $ getRealSpan span) sigs + , toHie $ fmap (BC InstanceBind ModuleScope) meths + , toHie typs + , concatMapM (pure . locOnly . getLoc) deftyps + , toHie $ map (go . unLoc) deftyps + ] + where + context_scope = mkLScope context + rhs_scope = foldl1' combineScopes $ map mkScope + [ loc deps, loc sigs, loc (bagToList meths), loc typs, loc deftyps] + + go :: TyFamDefltEqn GhcRn + -> FamEqn GhcRn (TScoped (LHsQTyVars GhcRn)) (LHsType GhcRn) + go (FamEqn a var pat b rhs) = + FamEqn a var (TS (ResolvedScopes [mkLScope rhs]) pat) b rhs + go (XFamEqn NoExt) = XFamEqn NoExt + XTyClDecl _ -> [] + +instance ToHie (LFamilyDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamilyDecl _ info name vars _ sig inj -> + [ toHie $ C (Decl FamDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [rhsSpan]) vars + , toHie info + , toHie $ RS injSpan sig + , toHie inj + ] + where + rhsSpan = sigSpan `combineScopes` injSpan + sigSpan = mkScope $ getLoc sig + injSpan = maybe NoScope (mkScope . getLoc) inj + XFamilyDecl _ -> [] + +instance ToHie (FamilyInfo GhcRn) where + toHie (ClosedTypeFamily (Just eqns)) = concatM $ + [ concatMapM (pure . locOnly . 
getLoc) eqns + , toHie $ map go eqns + ] + where + go (L l ib) = TS (ResolvedScopes [mkScope l]) ib + toHie _ = pure [] + +instance ToHie (RScoped (LFamilyResultSig GhcRn)) where + toHie (RS sc (L span sig)) = concatM $ makeNode sig span : case sig of + NoSig _ -> + [] + KindSig _ k -> + [ toHie k + ] + TyVarSig _ bndr -> + [ toHie $ TVS (ResolvedScopes [sc]) NoScope bndr + ] + XFamilyResultSig _ -> [] + +instance ToHie (Located (FunDep (Located Name))) where + toHie (L span fd@(lhs, rhs)) = concatM $ + [ makeNode fd span + , toHie $ map (C Use) lhs + , toHie $ map (C Use) rhs + ] + +instance (ToHie pats, ToHie rhs, HasLoc pats, HasLoc rhs) + => ToHie (TScoped (FamEqn GhcRn pats rhs)) where + toHie (TS _ f) = toHie f + +instance ( ToHie pats + , ToHie rhs + , HasLoc pats + , HasLoc rhs + ) => ToHie (FamEqn GhcRn pats rhs) where + toHie fe@(FamEqn _ var pats _ rhs) = concatM $ + [ toHie $ C (Decl InstDec $ getRealSpan $ loc fe) var + , toHie pats + , toHie rhs + ] + toHie (XFamEqn _) = pure [] + +instance ToHie (LInjectivityAnn GhcRn) where + toHie (L span ann) = concatM $ makeNode ann span : case ann of + InjectivityAnn lhs rhs -> + [ toHie $ C Use lhs + , toHie $ map (C Use) rhs + ] + +instance ToHie (HsDataDefn GhcRn) where + toHie (HsDataDefn _ _ ctx _ mkind cons derivs) = concatM + [ toHie ctx + , toHie mkind + , toHie cons + , toHie derivs + ] + toHie (XHsDataDefn _) = pure [] + +instance ToHie (HsDeriving GhcRn) where + toHie (L span clauses) = concatM + [ pure $ locOnly span + , toHie clauses + ] + +instance ToHie (LHsDerivingClause GhcRn) where + toHie (L span cl) = concatM $ makeNode cl span : case cl of + HsDerivingClause _ strat (L ispan tys) -> + [ toHie strat + , pure $ locOnly ispan + , toHie $ map (TS (ResolvedScopes [])) tys + ] + XHsDerivingClause _ -> [] + +instance ToHie (Located (DerivStrategy GhcRn)) where + toHie (L span strat) = concatM $ makeNode strat span : case strat of + StockStrategy -> [] + AnyclassStrategy -> [] + NewtypeStrategy -> [] + ViaStrategy s -> [ toHie $ TS (ResolvedScopes []) s ] + +instance ToHie (Located OverlapMode) where + toHie (L span _) = pure $ locOnly span + +instance ToHie (LConDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ConDeclGADT { con_names = names, con_qvars = qvars + , con_mb_cxt = ctx, con_args = args, con_res_ty = typ } -> + [ toHie $ map (C (Decl ConDec $ getRealSpan span)) names + , toHie $ TS (ResolvedScopes [ctxScope, rhsScope]) qvars + , toHie ctx + , toHie args + , toHie typ + ] + where + rhsScope = combineScopes argsScope tyScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope args + tyScope = mkLScope typ + ConDeclH98 { con_name = name, con_ex_tvs = qvars + , con_mb_cxt = ctx, con_args = dets } -> + [ toHie $ C (Decl ConDec $ getRealSpan span) name + , toHie $ tvScopes (ResolvedScopes []) rhsScope qvars + , toHie ctx + , toHie dets + ] + where + rhsScope = combineScopes ctxScope argsScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope dets + XConDecl _ -> [] + where condecl_scope args = case args of + PrefixCon xs -> foldr combineScopes NoScope $ map mkLScope xs + InfixCon a b -> combineScopes (mkLScope a) (mkLScope b) + RecCon x -> mkLScope x + +instance ToHie (Located [LConDeclField GhcRn]) where + toHie (L span decls) = concatM $ + [ pure $ locOnly span + , toHie decls + ] + +instance ( HasLoc thing + , ToHie (TScoped thing) + ) => ToHie (TScoped (HsImplicitBndrs GhcRn thing)) where + toHie (TS sc (HsIB ibrn a)) = concatM $ + [ pure $ 
bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) $ (hsib_vars ibrn) + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsImplicitBndrs _)) = pure [] + +instance ( HasLoc thing + , ToHie (TScoped thing) + ) => ToHie (TScoped (HsWildCardBndrs GhcRn thing)) where + toHie (TS sc (HsWC names a)) = concatM $ + [ pure $ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsWildCardBndrs _)) = pure [] + +instance ToHie (SigContext (LSig GhcRn)) where + toHie (SC (SI styp msp) (L sp sig)) = concatM $ makeNode sig sp : case sig of + TypeSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + PatSynSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + ClassOpSig _ _ names typ -> + [ case styp of + ClassSig -> toHie $ map (C $ ClassTyDecl $ getRealSpan sp) names + _ -> toHie $ map (C $ TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) msp) typ + ] + IdSig _ _ -> [] + FixSig _ fsig -> + [ toHie $ L sp fsig + ] + InlineSig _ name _ -> + [ toHie $ (C Use) name + ] + SpecSig _ name typs _ -> + [ toHie $ (C Use) name + , toHie $ map (TS (ResolvedScopes [])) typs + ] + SpecInstSig _ _ typ -> + [ toHie $ TS (ResolvedScopes []) typ + ] + MinimalSig _ _ form -> + [ toHie form + ] + SCCFunSig _ _ name mtxt -> + [ toHie $ (C Use) name + , pure $ maybe [] (locOnly . getLoc) mtxt + ] + CompleteMatchSig _ _ (L ispan names) typ -> + [ pure $ locOnly ispan + , toHie $ map (C Use) names + , toHie $ fmap (C Use) typ + ] + XSig _ -> [] + +instance ToHie (LHsType GhcRn) where + toHie x = toHie $ TS (ResolvedScopes []) x + +instance ToHie (TScoped (LHsType GhcRn)) where + toHie (TS tsc (L span t)) = concatM $ makeNode t span : case t of + HsForAllTy _ bndrs body -> + [ toHie $ tvScopes tsc (mkScope $ getLoc body) bndrs + , toHie body + ] + HsQualTy _ ctx body -> + [ toHie ctx + , toHie body + ] + HsTyVar _ _ var -> + [ toHie $ C Use var + ] + HsAppTy _ a b -> + [ toHie a + , toHie b + ] + HsFunTy _ a b -> + [ toHie a + , toHie b + ] + HsListTy _ a -> + [ toHie a + ] + HsTupleTy _ _ tys -> + [ toHie tys + ] + HsSumTy _ tys -> + [ toHie tys + ] + HsOpTy _ a op b -> + [ toHie a + , toHie $ C Use op + , toHie b + ] + HsParTy _ a -> + [ toHie a + ] + HsIParamTy _ ip ty -> + [ toHie ip + , toHie ty + ] + HsKindSig _ a b -> + [ toHie a + , toHie b + ] + HsSpliceTy _ a -> + [ toHie $ L span a + ] + HsDocTy _ a _ -> + [ toHie a + ] + HsBangTy _ _ ty -> + [ toHie ty + ] + HsRecTy _ fields -> + [ toHie fields + ] + HsExplicitListTy _ _ tys -> + [ toHie tys + ] + HsExplicitTupleTy _ tys -> + [ toHie tys + ] + HsTyLit _ _ -> [] + HsWildCardTy _ -> [] + HsStarTy _ _ -> [] + XHsType _ -> [] + +{- +instance (ToHie tm, ToHie ty) => ToHie (HsArg tm ty) where + toHie (HsValArg tm) = toHie tm + toHie (HsTypeArg _ ty) = toHie ty + toHie (HsArgPar sp) = pure $ locOnly sp +-} + +instance ToHie (TVScoped (LHsTyVarBndr GhcRn)) where + toHie (TVS tsc sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + UserTyVar _ var -> + [ toHie $ C (TyVarBind sc tsc) var + ] + KindedTyVar _ var kind -> + [ toHie $ C (TyVarBind sc tsc) var + , toHie kind + ] + XTyVarBndr _ -> [] + +instance ToHie (TScoped (LHsQTyVars GhcRn)) where + toHie (TS sc (HsQTvs (HsQTvsRn implicits _) vars)) = concatM $ + [ pure $ bindingsOnly bindings + , toHie $ tvScopes sc NoScope vars + ] + where + varLoc = loc vars + bindings = map (C $ TyVarBind 
(mkScope varLoc) sc) implicits + toHie (TS _ (XLHsQTyVars _)) = pure [] + +instance ToHie (LHsContext GhcRn) where + toHie (L span tys) = concatM $ + [ pure $ locOnly span + , toHie tys + ] + +instance ToHie (LConDeclField GhcRn) where + toHie (L span field) = concatM $ makeNode field span : case field of + ConDeclField _ fields typ _ -> + [ toHie $ map (RFC RecFieldDecl (getRealSpan $ loc typ)) fields + , toHie typ + ] + XConDeclField _ -> [] + +instance ToHie (LHsExpr a) => ToHie (ArithSeqInfo a) where + toHie (From expr) = toHie expr + toHie (FromThen a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromTo a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromThenTo a b c) = concatM $ + [ toHie a + , toHie b + , toHie c + ] + +instance ToHie (LSpliceDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + SpliceDecl _ splice _ -> + [ toHie splice + ] + XSpliceDecl _ -> [] + +instance ToHie (HsBracket a) where + toHie _ = pure [] + +instance ToHie PendingRnSplice where + toHie _ = pure [] + +instance ToHie PendingTcSplice where + toHie _ = pure [] + +instance ToHie (LBooleanFormula (Located Name)) where + toHie (L span form) = concatM $ makeNode form span : case form of + Var a -> + [ toHie $ C Use a + ] + And forms -> + [ toHie forms + ] + Or forms -> + [ toHie forms + ] + Parens f -> + [ toHie f + ] + +instance ToHie (Located HsIPName) where + toHie (L span e) = makeNode e span + +instance ( ToHie (LHsExpr a) + , Data (HsSplice a) + ) => ToHie (Located (HsSplice a)) where + toHie (L span sp) = concatM $ makeNode sp span : case sp of + HsTypedSplice _ _ _ expr -> + [ toHie expr + ] + HsUntypedSplice _ _ _ expr -> + [ toHie expr + ] + HsQuasiQuote _ _ _ ispan _ -> + [ pure $ locOnly ispan + ] + HsSpliced _ _ _ -> + [] + XSplice _ -> [] + +instance ToHie (LRoleAnnotDecl GhcRn) where + toHie (L span annot) = concatM $ makeNode annot span : case annot of + RoleAnnotDecl _ var roles -> + [ toHie $ C Use var + , concatMapM (pure . locOnly . getLoc) roles + ] + XRoleAnnotDecl _ -> [] + +instance ToHie (LInstDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ClsInstD _ d -> + [ toHie $ L span d + ] + DataFamInstD _ d -> + [ toHie $ L span d + ] + TyFamInstD _ d -> + [ toHie $ L span d + ] + XInstDecl _ -> [] + +instance ToHie (LClsInstDecl GhcRn) where + toHie (L span decl) = concatM + [ toHie $ TS (ResolvedScopes [mkScope span]) $ cid_poly_ty decl + , toHie $ fmap (BC InstanceBind ModuleScope) $ cid_binds decl + , toHie $ map (SC $ SI InstSig $ getRealSpan span) $ cid_sigs decl + , pure $ concatMap (locOnly . getLoc) $ cid_tyfam_insts decl + , toHie $ cid_tyfam_insts decl + , pure $ concatMap (locOnly . 
getLoc) $ cid_datafam_insts decl + , toHie $ cid_datafam_insts decl + , toHie $ cid_overlap_mode decl + ] + +instance ToHie (LDataFamInstDecl GhcRn) where + toHie (L sp (DataFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (LTyFamInstDecl GhcRn) where + toHie (L sp (TyFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (Context a) + => ToHie (PatSynFieldContext (RecordPatSynField a)) where + toHie (PSC sp (RecordPatSynField a b)) = concatM $ + [ toHie $ C (RecField RecFieldDecl sp) a + , toHie $ C Use b + ] + +instance ToHie (LDerivDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DerivDecl _ typ strat overlap -> + [ toHie $ TS (ResolvedScopes []) typ + , toHie strat + , toHie overlap + ] + XDerivDecl _ -> [] + +instance ToHie (LFixitySig GhcRn) where + toHie (L span sig) = concatM $ makeNode sig span : case sig of + FixitySig _ vars _ -> + [ toHie $ map (C Use) vars + ] + XFixitySig _ -> [] + +instance ToHie (LDefaultDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DefaultDecl _ typs -> + [ toHie typs + ] + XDefaultDecl _ -> [] + +instance ToHie (LForeignDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ForeignImport {fd_name = name, fd_sig_ty = sig, fd_fi = fi} -> + [ toHie $ C (ValBind RegularBind ModuleScope $ getRealSpan span) name + , toHie $ TS (ResolvedScopes []) sig + , toHie fi + ] + ForeignExport {fd_name = name, fd_sig_ty = sig, fd_fe = fe} -> + [ toHie $ C Use name + , toHie $ TS (ResolvedScopes []) sig + , toHie fe + ] + XForeignDecl _ -> [] + +instance ToHie ForeignImport where + toHie (CImport (L a _) (L b _) _ _ (L c _)) = pure $ concat $ + [ locOnly a + , locOnly b + , locOnly c + ] + +instance ToHie ForeignExport where + toHie (CExport (L a _) (L b _)) = pure $ concat $ + [ locOnly a + , locOnly b + ] + +instance ToHie (LWarnDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warnings _ _ warnings -> + [ toHie warnings + ] + XWarnDecls _ -> [] + +instance ToHie (LWarnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warning _ vars _ -> + [ toHie $ map (C Use) vars + ] + XWarnDecl _ -> [] + +instance ToHie (LAnnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsAnnotation _ _ prov expr -> + [ toHie prov + , toHie expr + ] + XAnnDecl _ -> [] + +instance ToHie (Context (Located a)) => ToHie (AnnProvenance a) where + toHie (ValueAnnProvenance a) = toHie $ C Use a + toHie (TypeAnnProvenance a) = toHie $ C Use a + toHie ModuleAnnProvenance = pure [] + +instance ToHie (LRuleDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsRules _ _ rules -> + [ toHie rules + ] + XRuleDecls _ -> [] + +instance ToHie (LRuleDecl GhcRn) where + toHie (L _ (XRuleDecl _)) = pure [] + toHie (L span r@(HsRule _ rname _ bndrs exprA exprB)) = concatM + [ makeNode r span + , pure $ locOnly $ getLoc rname + , toHie $ map (RS $ mkScope span) bndrs + , toHie exprA + , toHie exprB + ] + +instance ToHie (RScoped (LRuleBndr GhcRn)) where + toHie (RS sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + RuleBndr _ var -> + [ toHie $ C (ValBind RegularBind sc Nothing) var + ] + RuleBndrSig _ var typ -> + [ toHie $ C (ValBind RegularBind sc Nothing) var + , toHie $ TS (ResolvedScopes [sc]) typ + ] + XRuleBndr _ -> [] + +instance ToHie (LImportDecl GhcRn) where + toHie (L span 
decl) = concatM $ makeNode decl span : case decl of + ImportDecl { ideclName = name, ideclAs = as, ideclHiding = hidden } -> + [ toHie $ IEC Import name + , toHie $ fmap (IEC ImportAs) as + , maybe (pure []) goIE hidden + ] + XImportDecl _ -> [] + where + goIE (hiding, (L sp liens)) = concatM $ + [ pure $ locOnly sp + , toHie $ map (IEC c) liens + ] + where + c = if hiding then ImportHiding else Import + +instance ToHie (IEContext (LIE GhcRn)) where + toHie (IEC c (L span ie)) = concatM $ makeNode ie span : case ie of + IEVar _ n -> + [ toHie $ IEC c n + ] + IEThingAbs _ n -> + [ toHie $ IEC c n + ] + IEThingAll _ n -> + [ toHie $ IEC c n + ] + IEThingWith _ n _ ns flds -> + [ toHie $ IEC c n + , toHie $ map (IEC c) ns + , toHie $ map (IEC c) flds + ] + IEModuleContents _ n -> + [ toHie $ IEC c n + ] + IEGroup _ _ _ -> [] + IEDoc _ _ -> [] + IEDocNamed _ _ -> [] + XIE _ -> [] + +instance ToHie (IEContext (LIEWrappedName Name)) where + toHie (IEC c (L span iewn)) = concatM $ makeNode iewn span : case iewn of + IEName n -> + [ toHie $ C (IEThing c) n + ] + IEPattern p -> + [ toHie $ C (IEThing c) p + ] + IEType n -> + [ toHie $ C (IEThing c) n + ] + +instance ToHie (IEContext (Located (FieldLbl Name))) where + toHie (IEC c (L span lbl)) = concatM $ makeNode lbl span : case lbl of + FieldLabel _ _ n -> + [ toHie $ C (IEThing c) $ L span n + ] + diff --git a/hie-compat/src-ghc86/Compat/HieBin.hs b/hie-compat/src-ghc86/Compat/HieBin.hs new file mode 100644 index 00000000000..94e9ad3e50f --- /dev/null +++ b/hie-compat/src-ghc86/Compat/HieBin.hs @@ -0,0 +1,388 @@ +{- +Binary serialization for .hie files. +-} +{-# LANGUAGE ScopedTypeVariables #-} +module Compat.HieBin ( readHieFile, readHieFileWithVersion, HieHeader, writeHieFile, HieName(..), toHieName, HieFileResult(..), hieMagic,NameCacheUpdater(..)) where + +import Config ( cProjectVersion ) +import Binary +import BinIface ( getDictFastString ) +import FastMutInt +import FastString ( FastString ) +import Module ( Module ) +import Name +import NameCache +import Outputable +import PrelInfo +import SrcLoc +import UniqSupply ( takeUniqFromSupply ) +import Util ( maybeRead ) +import Unique +import UniqFM +import IfaceEnv + +import qualified Data.Array as A +import Data.IORef +import Data.ByteString ( ByteString ) +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BSC +import Data.List ( mapAccumR ) +import Data.Word ( Word8, Word32 ) +import Control.Monad ( replicateM, when ) +import System.Directory ( createDirectoryIfMissing ) +import System.FilePath ( takeDirectory ) + +import Compat.HieTypes + +-- | `Name`'s get converted into `HieName`'s before being written into @.hie@ +-- files. See 'toHieName' and 'fromHieName' for logic on how to convert between +-- these two types. 
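{- Editor's sketch (illustrative, not part of the original patch): how the
   'Name' to 'HieName' conversion below behaves in practice. 'describeHieName'
   is a hypothetical helper written against 'toHieName' from this module; it
   assumes the usual 'Module' and 'OccName' imports ('moduleName',
   'moduleNameString', 'occNameString').

     describeHieName :: Name -> String
     describeHieName nm = case toHieName nm of
       ExternalName m occ _ -> "external "  ++ moduleNameString (moduleName m)
                                            ++ "." ++ occNameString occ
       LocalName occ _      -> "local "     ++ occNameString occ
       KnownKeyName u       -> "known-key " ++ show (unpkUnique u)

   External (top-level) names keep their defining module, local names keep
   only their 'OccName' and source span, and wired-in names are stored by
   'Unique' so they can be recovered with 'lookupKnownKeyName' on read.
-}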
+data HieName + = ExternalName !Module !OccName !SrcSpan + | LocalName !OccName !SrcSpan + | KnownKeyName !Unique + deriving (Eq) + +instance Ord HieName where + compare (ExternalName a b c) (ExternalName d e f) = compare (a,b,c) (d,e,f) + compare (LocalName a b) (LocalName c d) = compare (a,b) (c,d) + compare (KnownKeyName a) (KnownKeyName b) = nonDetCmpUnique a b + -- Not actually non determinstic as it is a KnownKey + compare ExternalName{} _ = LT + compare LocalName{} ExternalName{} = GT + compare LocalName{} _ = LT + compare KnownKeyName{} _ = GT + +instance Outputable HieName where + ppr (ExternalName m n sp) = text "ExternalName" <+> ppr m <+> ppr n <+> ppr sp + ppr (LocalName n sp) = text "LocalName" <+> ppr n <+> ppr sp + ppr (KnownKeyName u) = text "KnownKeyName" <+> ppr u + + +data HieSymbolTable = HieSymbolTable + { hie_symtab_next :: !FastMutInt + , hie_symtab_map :: !(IORef (UniqFM (Int, HieName))) + } + +data HieDictionary = HieDictionary + { hie_dict_next :: !FastMutInt -- The next index to use + , hie_dict_map :: !(IORef (UniqFM (Int,FastString))) -- indexed by FastString + } + +initBinMemSize :: Int +initBinMemSize = 1024*1024 + +-- | The header for HIE files - Capital ASCII letters "HIE". +hieMagic :: [Word8] +hieMagic = [72,73,69] + +hieMagicLen :: Int +hieMagicLen = length hieMagic + +ghcVersion :: ByteString +ghcVersion = BSC.pack cProjectVersion + +putBinLine :: BinHandle -> ByteString -> IO () +putBinLine bh xs = do + mapM_ (putByte bh) $ BS.unpack xs + putByte bh 10 -- newline char + +-- | Write a `HieFile` to the given `FilePath`, with a proper header and +-- symbol tables for `Name`s and `FastString`s +writeHieFile :: FilePath -> HieFile -> IO () +writeHieFile hie_file_path hiefile = do + bh0 <- openBinMem initBinMemSize + + -- Write the header: hieHeader followed by the + -- hieVersion and the GHC version used to generate this file + mapM_ (putByte bh0) hieMagic + putBinLine bh0 $ BSC.pack $ show hieVersion + putBinLine bh0 $ ghcVersion + + -- remember where the dictionary pointer will go + dict_p_p <- tellBin bh0 + put_ bh0 dict_p_p + + -- remember where the symbol table pointer will go + symtab_p_p <- tellBin bh0 + put_ bh0 symtab_p_p + + -- Make some intial state + symtab_next <- newFastMutInt + writeFastMutInt symtab_next 0 + symtab_map <- newIORef emptyUFM + let hie_symtab = HieSymbolTable { + hie_symtab_next = symtab_next, + hie_symtab_map = symtab_map } + dict_next_ref <- newFastMutInt + writeFastMutInt dict_next_ref 0 + dict_map_ref <- newIORef emptyUFM + let hie_dict = HieDictionary { + hie_dict_next = dict_next_ref, + hie_dict_map = dict_map_ref } + + -- put the main thing + let bh = setUserData bh0 $ newWriteState (putName hie_symtab) + (putName hie_symtab) + (putFastString hie_dict) + put_ bh hiefile + + -- write the symtab pointer at the front of the file + symtab_p <- tellBin bh + putAt bh symtab_p_p symtab_p + seekBin bh symtab_p + + -- write the symbol table itself + symtab_next' <- readFastMutInt symtab_next + symtab_map' <- readIORef symtab_map + putSymbolTable bh symtab_next' symtab_map' + + -- write the dictionary pointer at the front of the file + dict_p <- tellBin bh + putAt bh dict_p_p dict_p + seekBin bh dict_p + + -- write the dictionary itself + dict_next <- readFastMutInt dict_next_ref + dict_map <- readIORef dict_map_ref + putDictionary bh dict_next dict_map + + -- and send the result to the file + createDirectoryIfMissing True (takeDirectory hie_file_path) + writeBinMem bh hie_file_path + return () + +data HieFileResult + = 
HieFileResult + { hie_file_result_version :: Integer + , hie_file_result_ghc_version :: ByteString + , hie_file_result :: HieFile + } + +type HieHeader = (Integer, ByteString) + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. Allows you to specify +-- which versions of hieFile to attempt to read. +-- `Left` case returns the failing header versions. +readHieFileWithVersion :: (HieHeader -> Bool) -> NameCacheUpdater -> FilePath -> IO (Either HieHeader HieFileResult) +readHieFileWithVersion readVersion ncu file = do + bh0 <- readBinMem file + + (hieVersion, ghcVersion) <- readHieFileHeader file bh0 + + if readVersion (hieVersion, ghcVersion) + then do + hieFile <- readHieFileContents bh0 ncu + return $ Right (HieFileResult hieVersion ghcVersion hieFile) + else return $ Left (hieVersion, ghcVersion) + + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. +readHieFile :: NameCacheUpdater -> FilePath -> IO HieFileResult +readHieFile ncu file = do + + bh0 <- readBinMem file + + (readHieVersion, ghcVersion) <- readHieFileHeader file bh0 + + -- Check if the versions match + when (readHieVersion /= hieVersion) $ + panic $ unwords ["readHieFile: hie file versions don't match for file:" + , file + , "Expected" + , show hieVersion + , "but got", show readHieVersion + ] + hieFile <- readHieFileContents bh0 ncu + return $ HieFileResult hieVersion ghcVersion hieFile + +readBinLine :: BinHandle -> IO ByteString +readBinLine bh = BS.pack . reverse <$> loop [] + where + loop acc = do + char <- get bh :: IO Word8 + if char == 10 -- ASCII newline '\n' + then return acc + else loop (char : acc) + +readHieFileHeader :: FilePath -> BinHandle -> IO HieHeader +readHieFileHeader file bh0 = do + -- Read the header + magic <- replicateM hieMagicLen (get bh0) + version <- BSC.unpack <$> readBinLine bh0 + case maybeRead version of + Nothing -> + panic $ unwords ["readHieFileHeader: hieVersion isn't an Integer:" + , show version + ] + Just readHieVersion -> do + ghcVersion <- readBinLine bh0 + + -- Check if the header is valid + when (magic /= hieMagic) $ + panic $ unwords ["readHieFileHeader: headers don't match for file:" + , file + , "Expected" + , show hieMagic + , "but got", show magic + ] + return (readHieVersion, ghcVersion) + +readHieFileContents :: BinHandle -> NameCacheUpdater -> IO HieFile +readHieFileContents bh0 ncu = do + + dict <- get_dictionary bh0 + + -- read the symbol table so we are capable of reading the actual data + bh1 <- do + let bh1 = setUserData bh0 $ newReadState (error "getSymtabName") + (getDictFastString dict) + symtab <- get_symbol_table bh1 + let bh1' = setUserData bh1 + $ newReadState (getSymTabName symtab) + (getDictFastString dict) + return bh1' + + -- load the actual data + hiefile <- get bh1 + return hiefile + where + get_dictionary bin_handle = do + dict_p <- get bin_handle + data_p <- tellBin bin_handle + seekBin bin_handle dict_p + dict <- getDictionary bin_handle + seekBin bin_handle data_p + return dict + + get_symbol_table bh1 = do + symtab_p <- get bh1 + data_p' <- tellBin bh1 + seekBin bh1 symtab_p + symtab <- getSymbolTable bh1 ncu + seekBin bh1 data_p' + return symtab + +putFastString :: HieDictionary -> BinHandle -> FastString -> IO () +putFastString HieDictionary { hie_dict_next = j_r, + hie_dict_map = out_r} bh f + = do + out <- readIORef out_r + let unique = getUnique f + case lookupUFM out unique of + Just (j, _) -> put_ bh (fromIntegral j :: Word32) + Nothing -> do + j <- readFastMutInt j_r + put_ bh 
(fromIntegral j :: Word32) + writeFastMutInt j_r (j + 1) + writeIORef out_r $! addToUFM out unique (j, f) + +putSymbolTable :: BinHandle -> Int -> UniqFM (Int,HieName) -> IO () +putSymbolTable bh next_off symtab = do + put_ bh next_off + let names = A.elems (A.array (0,next_off-1) (nonDetEltsUFM symtab)) + mapM_ (putHieName bh) names + +getSymbolTable :: BinHandle -> NameCacheUpdater -> IO SymbolTable +getSymbolTable bh ncu = do + sz <- get bh + od_names <- replicateM sz (getHieName bh) + updateNameCache ncu $ \nc -> + let arr = A.listArray (0,sz-1) names + (nc', names) = mapAccumR fromHieName nc od_names + in (nc',arr) + +getSymTabName :: SymbolTable -> BinHandle -> IO Name +getSymTabName st bh = do + i :: Word32 <- get bh + return $ st A.! (fromIntegral i) + +putName :: HieSymbolTable -> BinHandle -> Name -> IO () +putName (HieSymbolTable next ref) bh name = do + symmap <- readIORef ref + case lookupUFM symmap name of + Just (off, ExternalName mod occ (UnhelpfulSpan _)) + | isGoodSrcSpan (nameSrcSpan name) -> do + let hieName = ExternalName mod occ (nameSrcSpan name) + writeIORef ref $! addToUFM symmap name (off, hieName) + put_ bh (fromIntegral off :: Word32) + Just (off, LocalName _occ span) + | notLocal (toHieName name) || nameSrcSpan name /= span -> do + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + Just (off, _) -> put_ bh (fromIntegral off :: Word32) + Nothing -> do + off <- readFastMutInt next + writeFastMutInt next (off+1) + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + + where + notLocal :: HieName -> Bool + notLocal LocalName{} = False + notLocal _ = True + + +-- ** Converting to and from `HieName`'s + +toHieName :: Name -> HieName +toHieName name + | isKnownKeyName name = KnownKeyName (nameUnique name) + | isExternalName name = ExternalName (nameModule name) + (nameOccName name) + (nameSrcSpan name) + | otherwise = LocalName (nameOccName name) (nameSrcSpan name) + +fromHieName :: NameCache -> HieName -> (NameCache, Name) +fromHieName nc (ExternalName mod occ span) = + let cache = nsNames nc + in case lookupOrigNameCache cache mod occ of + Just name + | nameSrcSpan name == span -> (nc, name) + | otherwise -> + let name' = setNameLoc name span + new_cache = extendNameCache cache mod occ name' + in ( nc{ nsNames = new_cache }, name' ) + Nothing -> + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkExternalName uniq mod occ span + new_cache = extendNameCache cache mod occ name + in ( nc{ nsUniqs = us, nsNames = new_cache }, name ) +fromHieName nc (LocalName occ span) = + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkInternalName uniq occ span + in ( nc{ nsUniqs = us }, name ) +fromHieName nc (KnownKeyName u) = case lookupKnownKeyName u of + Nothing -> pprPanic "fromHieName:unknown known-key unique" + (ppr (unpkUnique u)) + Just n -> (nc, n) + +-- ** Reading and writing `HieName`'s + +putHieName :: BinHandle -> HieName -> IO () +putHieName bh (ExternalName mod occ span) = do + putByte bh 0 + put_ bh (mod, occ, span) +putHieName bh (LocalName occName span) = do + putByte bh 1 + put_ bh (occName, span) +putHieName bh (KnownKeyName uniq) = do + putByte bh 2 + put_ bh $ unpkUnique uniq + +getHieName :: BinHandle -> IO HieName +getHieName bh = do + t <- getByte bh + case t of + 0 -> do + (modu, occ, span) <- get bh + return $ ExternalName modu occ span + 1 -> do + (occ, span) <- get bh + return $ LocalName occ span + 2 -> do + (c,i) <- get bh + return 
$ KnownKeyName $ mkUnique c i + _ -> panic "HieBin.getHieName: invalid tag" diff --git a/hie-compat/src-ghc86/Compat/HieDebug.hs b/hie-compat/src-ghc86/Compat/HieDebug.hs new file mode 100644 index 00000000000..76a43844667 --- /dev/null +++ b/hie-compat/src-ghc86/Compat/HieDebug.hs @@ -0,0 +1,145 @@ +{- +Functions to validate and check .hie file ASTs generated by GHC. +-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleContexts #-} +module Compat.HieDebug where + +import Prelude hiding ((<>)) +import SrcLoc +import Module +import FastString +import Outputable + +import Compat.HieTypes +import Compat.HieBin +import Compat.HieUtils + +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Function ( on ) +import Data.List ( sortOn ) +import Data.Foldable ( toList ) + +ppHies :: Outputable a => (HieASTs a) -> SDoc +ppHies (HieASTs asts) = M.foldrWithKey go "" asts + where + go k a rest = vcat $ + [ "File: " <> ppr k + , ppHie a + , rest + ] + +ppHie :: Outputable a => HieAST a -> SDoc +ppHie = go 0 + where + go n (Node inf sp children) = hang header n rest + where + rest = vcat $ map (go (n+2)) children + header = hsep + [ "Node" + , ppr sp + , ppInfo inf + ] + +ppInfo :: Outputable a => NodeInfo a -> SDoc +ppInfo ni = hsep + [ ppr $ toList $ nodeAnnotations ni + , ppr $ nodeType ni + , ppr $ M.toList $ nodeIdentifiers ni + ] + +type Diff a = a -> a -> [SDoc] + +diffFile :: Diff HieFile +diffFile = diffAsts eqDiff `on` (getAsts . hie_asts) + +diffAsts :: (Outputable a, Eq a) => Diff a -> Diff (M.Map FastString (HieAST a)) +diffAsts f = diffList (diffAst f) `on` M.elems + +diffAst :: (Outputable a, Eq a) => Diff a -> Diff (HieAST a) +diffAst diffType (Node info1 span1 xs1) (Node info2 span2 xs2) = + infoDiff ++ spanDiff ++ diffList (diffAst diffType) xs1 xs2 + where + spanDiff + | span1 /= span2 = [hsep ["Spans", ppr span1, "and", ppr span2, "differ"]] + | otherwise = [] + infoDiff + = (diffList eqDiff `on` (S.toAscList . nodeAnnotations)) info1 info2 + ++ (diffList diffType `on` nodeType) info1 info2 + ++ (diffIdents `on` nodeIdentifiers) info1 info2 + diffIdents a b = (diffList diffIdent `on` normalizeIdents) a b + diffIdent (a,b) (c,d) = diffName a c + ++ eqDiff b d + diffName (Right a) (Right b) = case (a,b) of + (ExternalName m o _, ExternalName m' o' _) -> eqDiff (m,o) (m',o') + (LocalName o _, ExternalName _ o' _) -> eqDiff o o' + _ -> eqDiff a b + diffName a b = eqDiff a b + +type DiffIdent = Either ModuleName HieName + +normalizeIdents :: NodeIdentifiers a -> [(DiffIdent,IdentifierDetails a)] +normalizeIdents = sortOn fst . map (first toHieName) . 
M.toList + where + first f (a,b) = (fmap f a, b) + +diffList :: Diff a -> Diff [a] +diffList f xs ys + | length xs == length ys = concat $ zipWith f xs ys + | otherwise = ["length of lists doesn't match"] + +eqDiff :: (Outputable a, Eq a) => Diff a +eqDiff a b + | a == b = [] + | otherwise = [hsep [ppr a, "and", ppr b, "do not match"]] + +validAst :: HieAST a -> Either SDoc () +validAst (Node _ span children) = do + checkContainment children + checkSorted children + mapM_ validAst children + where + checkSorted [] = return () + checkSorted [_] = return () + checkSorted (x:y:xs) + | nodeSpan x `leftOf` nodeSpan y = checkSorted (y:xs) + | otherwise = Left $ hsep + [ ppr $ nodeSpan x + , "is not to the left of" + , ppr $ nodeSpan y + ] + checkContainment [] = return () + checkContainment (x:xs) + | span `containsSpan` (nodeSpan x) = checkContainment xs + | otherwise = Left $ hsep + [ ppr $ span + , "does not contain" + , ppr $ nodeSpan x + ] + +-- | Look for any identifiers which occur outside of their supposed scopes. +-- Returns a list of error messages. +validateScopes :: M.Map FastString (HieAST a) -> [SDoc] +validateScopes asts = M.foldrWithKey (\k a b -> valid k a ++ b) [] refMap + where + refMap = generateReferencesMap asts + valid (Left _) _ = [] + valid (Right n) refs = concatMap inScope refs + where + mapRef = foldMap getScopeFromContext . identInfo . snd + scopes = case foldMap mapRef refs of + Just xs -> xs + Nothing -> [] + inScope (sp, dets) + | definedInAsts asts n + && any isOccurrence (identInfo dets) + = case scopes of + [] -> [] + _ -> if any (`scopeContainsSpan` sp) scopes + then [] + else return $ hsep $ + [ "Name", ppr n, "at position", ppr sp + , "doesn't occur in calculated scope", ppr scopes] + | otherwise = [] diff --git a/hie-compat/src-ghc86/Compat/HieTypes.hs b/hie-compat/src-ghc86/Compat/HieTypes.hs new file mode 100644 index 00000000000..cdf52adf404 --- /dev/null +++ b/hie-compat/src-ghc86/Compat/HieTypes.hs @@ -0,0 +1,534 @@ +{- +Types for the .hie file format are defined here. + +For more information see https://gitlab.haskell.org/ghc/ghc/wikis/hie-files +-} +{-# LANGUAGE DeriveTraversable #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# OPTIONS_GHC -Wno-orphans #-} +module Compat.HieTypes where + +import Config +import Binary +import FastString ( FastString ) +import IfaceType +import Module ( ModuleName, Module ) +import Name ( Name ) +import Outputable hiding ( (<>) ) +import SrcLoc +import Avail + +import qualified Data.Array as A +import qualified Data.Map as M +import qualified Data.Set as S +import Data.ByteString ( ByteString ) +import Data.Data ( Typeable, Data ) +import Data.Semigroup ( Semigroup(..) 
) +import Data.Word ( Word8 ) +import Control.Applicative ( (<|>) ) + +type Span = RealSrcSpan + +instance Binary RealSrcSpan where + put_ bh ss = do + put_ bh (srcSpanFile ss) + put_ bh (srcSpanStartLine ss) + put_ bh (srcSpanStartCol ss) + put_ bh (srcSpanEndLine ss) + put_ bh (srcSpanEndCol ss) + + get bh = do + f <- get bh + sl <- get bh + sc <- get bh + el <- get bh + ec <- get bh + return (mkRealSrcSpan (mkRealSrcLoc f sl sc) + (mkRealSrcLoc f el ec)) + +instance (A.Ix a, Binary a, Binary b) => Binary (A.Array a b) where + put_ bh arr = do + put_ bh $ A.bounds arr + put_ bh $ A.elems arr + get bh = do + bounds <- get bh + xs <- get bh + return $ A.listArray bounds xs + +-- | Current version of @.hie@ files +hieVersion :: Integer +hieVersion = read (cProjectVersionInt ++ cProjectPatchLevel) :: Integer + +{- | +GHC builds up a wealth of information about Haskell source as it compiles it. +@.hie@ files are a way of persisting some of this information to disk so that +external tools that need to work with haskell source don't need to parse, +typecheck, and rename all over again. These files contain: + + * a simplified AST + + * nodes are annotated with source positions and types + * identifiers are annotated with scope information + + * the raw bytes of the initial Haskell source + +Besides saving compilation cycles, @.hie@ files also offer a more stable +interface than the GHC API. +-} +data HieFile = HieFile + { hie_hs_file :: FilePath + -- ^ Initial Haskell source file path + + , hie_module :: Module + -- ^ The module this HIE file is for + + , hie_types :: A.Array TypeIndex HieTypeFlat + -- ^ Types referenced in the 'hie_asts'. + -- + -- See Note [Efficient serialization of redundant type info] + + , hie_asts :: HieASTs TypeIndex + -- ^ Type-annotated abstract syntax trees + + , hie_exports :: [AvailInfo] + -- ^ The names that this module exports + + , hie_hs_src :: ByteString + -- ^ Raw bytes of the initial Haskell source + } +instance Binary HieFile where + put_ bh hf = do + put_ bh $ hie_hs_file hf + put_ bh $ hie_module hf + put_ bh $ hie_types hf + put_ bh $ hie_asts hf + put_ bh $ hie_exports hf + put_ bh $ hie_hs_src hf + + get bh = HieFile + <$> get bh + <*> get bh + <*> get bh + <*> get bh + <*> get bh + <*> get bh + + +{- +Note [Efficient serialization of redundant type info] +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The type information in .hie files is highly repetitive and redundant. For +example, consider the expression + + const True 'a' + +There is a lot of shared structure between the types of subterms: + + * const True 'a' :: Bool + * const True :: Char -> Bool + * const :: Bool -> Char -> Bool + +Since all 3 of these types need to be stored in the .hie file, it is worth +making an effort to deduplicate this shared structure. The trick is to define +a new data type that is a flattened version of 'Type': + + data HieType a = HAppTy a a -- data Type = AppTy Type Type + | HFunTy a a -- | FunTy Type Type + | ... + + type TypeIndex = Int + +Types in the final AST are stored in an 'A.Array TypeIndex (HieType TypeIndex)', +where the 'TypeIndex's in the 'HieType' are references to other elements of the +array. Types recovered from GHC are deduplicated and stored in this compressed +form with sharing of subtrees. +-} + +type TypeIndex = Int + +-- | A flattened version of 'Type'. 
+-- +-- See Note [Efficient serialization of redundant type info] +data HieType a + = HTyVarTy Name + | HAppTy a a + | HTyConApp IfaceTyCon (HieArgs a) + | HForAllTy ((Name, a),ArgFlag) a + | HFunTy a a + | HQualTy a a -- ^ type with constraint: @t1 => t2@ (see 'IfaceDFunTy') + | HLitTy IfaceTyLit + | HCastTy a + | HCoercionTy + deriving (Functor, Foldable, Traversable, Eq) + +type HieTypeFlat = HieType TypeIndex + +-- | Roughly isomorphic to the original core 'Type'. +newtype HieTypeFix = Roll (HieType (HieTypeFix)) + +instance Binary (HieType TypeIndex) where + put_ bh (HTyVarTy n) = do + putByte bh 0 + put_ bh n + put_ bh (HAppTy a b) = do + putByte bh 1 + put_ bh a + put_ bh b + put_ bh (HTyConApp n xs) = do + putByte bh 2 + put_ bh n + put_ bh xs + put_ bh (HForAllTy bndr a) = do + putByte bh 3 + put_ bh bndr + put_ bh a + put_ bh (HFunTy a b) = do + putByte bh 4 + put_ bh a + put_ bh b + put_ bh (HQualTy a b) = do + putByte bh 5 + put_ bh a + put_ bh b + put_ bh (HLitTy l) = do + putByte bh 6 + put_ bh l + put_ bh (HCastTy a) = do + putByte bh 7 + put_ bh a + put_ bh (HCoercionTy) = putByte bh 8 + + get bh = do + (t :: Word8) <- get bh + case t of + 0 -> HTyVarTy <$> get bh + 1 -> HAppTy <$> get bh <*> get bh + 2 -> HTyConApp <$> get bh <*> get bh + 3 -> HForAllTy <$> get bh <*> get bh + 4 -> HFunTy <$> get bh <*> get bh + 5 -> HQualTy <$> get bh <*> get bh + 6 -> HLitTy <$> get bh + 7 -> HCastTy <$> get bh + 8 -> return HCoercionTy + _ -> panic "Binary (HieArgs Int): invalid tag" + + +-- | A list of type arguments along with their respective visibilities (ie. is +-- this an argument that would return 'True' for 'isVisibleArgFlag'?). +newtype HieArgs a = HieArgs [(Bool,a)] + deriving (Functor, Foldable, Traversable, Eq) + +instance Binary (HieArgs TypeIndex) where + put_ bh (HieArgs xs) = put_ bh xs + get bh = HieArgs <$> get bh + +-- | Mapping from filepaths (represented using 'FastString') to the +-- corresponding AST +newtype HieASTs a = HieASTs { getAsts :: (M.Map FastString (HieAST a)) } + deriving (Functor, Foldable, Traversable) + +instance Binary (HieASTs TypeIndex) where + put_ bh asts = put_ bh $ M.toAscList $ getAsts asts + get bh = HieASTs <$> fmap M.fromDistinctAscList (get bh) + + +data HieAST a = + Node + { nodeInfo :: NodeInfo a + , nodeSpan :: Span + , nodeChildren :: [HieAST a] + } deriving (Functor, Foldable, Traversable) + +instance Binary (HieAST TypeIndex) where + put_ bh ast = do + put_ bh $ nodeInfo ast + put_ bh $ nodeSpan ast + put_ bh $ nodeChildren ast + + get bh = Node + <$> get bh + <*> get bh + <*> get bh + + +-- | The information stored in one AST node. +-- +-- The type parameter exists to provide flexibility in representation of types +-- (see Note [Efficient serialization of redundant type info]). +data NodeInfo a = NodeInfo + { nodeAnnotations :: S.Set (FastString,FastString) + -- ^ (name of the AST node constructor, name of the AST node Type) + + , nodeType :: [a] + -- ^ The Haskell types of this node, if any. 
+ + , nodeIdentifiers :: NodeIdentifiers a + -- ^ All the identifiers and their details + } deriving (Functor, Foldable, Traversable) + +instance Binary (NodeInfo TypeIndex) where + put_ bh ni = do + put_ bh $ S.toAscList $ nodeAnnotations ni + put_ bh $ nodeType ni + put_ bh $ M.toList $ nodeIdentifiers ni + get bh = NodeInfo + <$> fmap (S.fromDistinctAscList) (get bh) + <*> get bh + <*> fmap (M.fromList) (get bh) + +type Identifier = Either ModuleName Name + +type NodeIdentifiers a = M.Map Identifier (IdentifierDetails a) + +-- | Information associated with every identifier +-- +-- We need to include types with identifiers because sometimes multiple +-- identifiers occur in the same span(Overloaded Record Fields and so on) +data IdentifierDetails a = IdentifierDetails + { identType :: Maybe a + , identInfo :: S.Set ContextInfo + } deriving (Eq, Functor, Foldable, Traversable) + +instance Outputable a => Outputable (IdentifierDetails a) where + ppr x = text "IdentifierDetails" <+> ppr (identType x) <+> ppr (identInfo x) + +instance Semigroup (IdentifierDetails a) where + d1 <> d2 = IdentifierDetails (identType d1 <|> identType d2) + (S.union (identInfo d1) (identInfo d2)) + +instance Monoid (IdentifierDetails a) where + mempty = IdentifierDetails Nothing S.empty + +instance Binary (IdentifierDetails TypeIndex) where + put_ bh dets = do + put_ bh $ identType dets + put_ bh $ S.toAscList $ identInfo dets + get bh = IdentifierDetails + <$> get bh + <*> fmap (S.fromDistinctAscList) (get bh) + + +-- | Different contexts under which identifiers exist +data ContextInfo + = Use -- ^ regular variable + | MatchBind + | IEThing IEType -- ^ import/export + | TyDecl + + -- | Value binding + | ValBind + BindType -- ^ whether or not the binding is in an instance + Scope -- ^ scope over which the value is bound + (Maybe Span) -- ^ span of entire binding + + -- | Pattern binding + -- + -- This case is tricky because the bound identifier can be used in two + -- distinct scopes. Consider the following example (with @-XViewPatterns@) + -- + -- @ + -- do (b, a, (a -> True)) <- bar + -- foo a + -- @ + -- + -- The identifier @a@ has two scopes: in the view pattern @(a -> True)@ and + -- in the rest of the @do@-block in @foo a@. + | PatternBind + Scope -- ^ scope /in the pattern/ (the variable bound can be used + -- further in the pattern) + Scope -- ^ rest of the scope outside the pattern + (Maybe Span) -- ^ span of entire binding + + | ClassTyDecl (Maybe Span) + + -- | Declaration + | Decl + DeclType -- ^ type of declaration + (Maybe Span) -- ^ span of entire binding + + -- | Type variable + | TyVarBind Scope TyVarScope + + -- | Record field + | RecField RecFieldContext (Maybe Span) + deriving (Eq, Ord, Show) + +instance Outputable ContextInfo where + ppr = text . 
show + +instance Binary ContextInfo where + put_ bh Use = putByte bh 0 + put_ bh (IEThing t) = do + putByte bh 1 + put_ bh t + put_ bh TyDecl = putByte bh 2 + put_ bh (ValBind bt sc msp) = do + putByte bh 3 + put_ bh bt + put_ bh sc + put_ bh msp + put_ bh (PatternBind a b c) = do + putByte bh 4 + put_ bh a + put_ bh b + put_ bh c + put_ bh (ClassTyDecl sp) = do + putByte bh 5 + put_ bh sp + put_ bh (Decl a b) = do + putByte bh 6 + put_ bh a + put_ bh b + put_ bh (TyVarBind a b) = do + putByte bh 7 + put_ bh a + put_ bh b + put_ bh (RecField a b) = do + putByte bh 8 + put_ bh a + put_ bh b + put_ bh MatchBind = putByte bh 9 + + get bh = do + (t :: Word8) <- get bh + case t of + 0 -> return Use + 1 -> IEThing <$> get bh + 2 -> return TyDecl + 3 -> ValBind <$> get bh <*> get bh <*> get bh + 4 -> PatternBind <$> get bh <*> get bh <*> get bh + 5 -> ClassTyDecl <$> get bh + 6 -> Decl <$> get bh <*> get bh + 7 -> TyVarBind <$> get bh <*> get bh + 8 -> RecField <$> get bh <*> get bh + 9 -> return MatchBind + _ -> panic "Binary ContextInfo: invalid tag" + + +-- | Types of imports and exports +data IEType + = Import + | ImportAs + | ImportHiding + | Export + deriving (Eq, Enum, Ord, Show) + +instance Binary IEType where + put_ bh b = putByte bh (fromIntegral (fromEnum b)) + get bh = do x <- getByte bh; pure $! (toEnum (fromIntegral x)) + + +data RecFieldContext + = RecFieldDecl + | RecFieldAssign + | RecFieldMatch + | RecFieldOcc + deriving (Eq, Enum, Ord, Show) + +instance Binary RecFieldContext where + put_ bh b = putByte bh (fromIntegral (fromEnum b)) + get bh = do x <- getByte bh; pure $! (toEnum (fromIntegral x)) + + +data BindType + = RegularBind + | InstanceBind + deriving (Eq, Ord, Show, Enum) + +instance Binary BindType where + put_ bh b = putByte bh (fromIntegral (fromEnum b)) + get bh = do x <- getByte bh; pure $! (toEnum (fromIntegral x)) + + +data DeclType + = FamDec -- ^ type or data family + | SynDec -- ^ type synonym + | DataDec -- ^ data declaration + | ConDec -- ^ constructor declaration + | PatSynDec -- ^ pattern synonym + | ClassDec -- ^ class declaration + | InstDec -- ^ instance declaration + deriving (Eq, Ord, Show, Enum) + +instance Binary DeclType where + put_ bh b = putByte bh (fromIntegral (fromEnum b)) + get bh = do x <- getByte bh; pure $! (toEnum (fromIntegral x)) + + +data Scope + = NoScope + | LocalScope Span + | ModuleScope + deriving (Eq, Ord, Show, Typeable, Data) + +instance Outputable Scope where + ppr NoScope = text "NoScope" + ppr (LocalScope sp) = text "LocalScope" <+> ppr sp + ppr ModuleScope = text "ModuleScope" + +instance Binary Scope where + put_ bh NoScope = putByte bh 0 + put_ bh (LocalScope span) = do + putByte bh 1 + put_ bh span + put_ bh ModuleScope = putByte bh 2 + + get bh = do + (t :: Word8) <- get bh + case t of + 0 -> return NoScope + 1 -> LocalScope <$> get bh + 2 -> return ModuleScope + _ -> panic "Binary Scope: invalid tag" + + +-- | Scope of a type variable. +-- +-- This warrants a data type apart from 'Scope' because of complexities +-- introduced by features like @-XScopedTypeVariables@ and @-XInstanceSigs@. For +-- example, consider: +-- +-- @ +-- foo, bar, baz :: forall a. a -> a +-- @ +-- +-- Here @a@ is in scope in all the definitions of @foo@, @bar@, and @baz@, so we +-- need a list of scopes to keep track of this. Furthermore, this list cannot be +-- computed until we resolve the binding sites of @foo@, @bar@, and @baz@. 
+-- +-- Consequently, @a@ starts with an @'UnresolvedScope' [foo, bar, baz] Nothing@ +-- which later gets resolved into a 'ResolvedScopes'. +data TyVarScope + = ResolvedScopes [Scope] + + -- | Unresolved scopes should never show up in the final @.hie@ file + | UnresolvedScope + [Name] -- ^ names of the definitions over which the scope spans + (Maybe Span) -- ^ the location of the instance/class declaration for + -- the case where the type variable is declared in a + -- method type signature + deriving (Eq, Ord) + +instance Show TyVarScope where + show (ResolvedScopes sc) = show sc + show _ = error "UnresolvedScope" + +instance Binary TyVarScope where + put_ bh (ResolvedScopes xs) = do + putByte bh 0 + put_ bh xs + put_ bh (UnresolvedScope ns span) = do + putByte bh 1 + put_ bh ns + put_ bh span + + get bh = do + (t :: Word8) <- get bh + case t of + 0 -> ResolvedScopes <$> get bh + 1 -> UnresolvedScope <$> get bh <*> get bh + _ -> panic "Binary TyVarScope: invalid tag" diff --git a/hie-compat/src-ghc86/Compat/HieUtils.hs b/hie-compat/src-ghc86/Compat/HieUtils.hs new file mode 100644 index 00000000000..519a8f50e56 --- /dev/null +++ b/hie-compat/src-ghc86/Compat/HieUtils.hs @@ -0,0 +1,451 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TupleSections #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleInstances #-} +module Compat.HieUtils where + +import CoreMap +import DynFlags ( DynFlags ) +import FastString ( FastString, mkFastString ) +import IfaceType +import Name hiding (varName) +import Outputable ( renderWithStyle, ppr, defaultUserStyle ) +import SrcLoc +import ToIface +import TyCon +import TyCoRep +import Type +import Var +import VarEnv + +import Compat.HieTypes + +import qualified Data.Map as M +import qualified Data.Set as S +import qualified Data.IntMap.Strict as IM +import qualified Data.Array as A +import Data.Data ( typeOf, typeRepTyCon, Data(toConstr) ) +import Data.Maybe ( maybeToList ) +import Data.Monoid +import Data.Traversable ( for ) +import Control.Monad.Trans.State.Strict hiding (get) + + +generateReferencesMap + :: Foldable f + => f (HieAST a) + -> M.Map Identifier [(Span, IdentifierDetails a)] +generateReferencesMap = foldr (\ast m -> M.unionWith (++) (go ast) m) M.empty + where + go ast = M.unionsWith (++) (this : map go (nodeChildren ast)) + where + this = fmap (pure . 
(nodeSpan ast,)) $ nodeIdentifiers $ nodeInfo ast + +renderHieType :: DynFlags -> HieTypeFix -> String +renderHieType df ht = renderWithStyle df (ppr $ hieTypeToIface ht) sty + where sty = defaultUserStyle df + +resolveVisibility :: Type -> [Type] -> [(Bool,Type)] +resolveVisibility kind ty_args + = go (mkEmptyTCvSubst in_scope) kind ty_args + where + in_scope = mkInScopeSet (tyCoVarsOfTypes ty_args) + + go _ _ [] = [] + go env ty ts + | Just ty' <- coreView ty + = go env ty' ts + go env (ForAllTy (TvBndr tv vis) res) (t:ts) + | isVisibleArgFlag vis = (True , t) : ts' + | otherwise = (False, t) : ts' + where + ts' = go (extendTvSubst env tv t) res ts + + go env (FunTy _ res) (t:ts) -- No type-class args in tycon apps + = (True,t) : (go env res ts) + + go env (TyVarTy tv) ts + | Just ki <- lookupTyVar env tv = go env ki ts + go env kind (t:ts) = (True, t) : (go env kind ts) -- Ill-kinded + +foldType :: (HieType a -> a) -> HieTypeFix -> a +foldType f (Roll t) = f $ fmap (foldType f) t + +hieTypeToIface :: HieTypeFix -> IfaceType +hieTypeToIface = foldType go + where + go (HTyVarTy n) = IfaceTyVar $ occNameFS $ getOccName n + go (HAppTy a b) = IfaceAppTy a b + go (HLitTy l) = IfaceLitTy l + go (HForAllTy ((n,k),af) t) = let b = (occNameFS $ getOccName n, k) + in IfaceForAllTy (TvBndr b af) t + go (HFunTy a b) = IfaceFunTy a b + go (HQualTy pred b) = IfaceDFunTy pred b + go (HCastTy a) = a + go HCoercionTy = IfaceTyVar "" + go (HTyConApp a xs) = IfaceTyConApp a (hieToIfaceArgs xs) + + -- This isn't fully faithful - we can't produce the 'Inferred' case + hieToIfaceArgs :: HieArgs IfaceType -> IfaceTcArgs + hieToIfaceArgs (HieArgs xs) = go' xs + where + go' [] = ITC_Nil + go' ((True ,x):xs) = ITC_Vis x $ go' xs + go' ((False,x):xs) = ITC_Invis x $ go' xs + +data HieTypeState + = HTS + { tyMap :: !(TypeMap TypeIndex) + , htyTable :: !(IM.IntMap HieTypeFlat) + , freshIndex :: !TypeIndex + } + +initialHTS :: HieTypeState +initialHTS = HTS emptyTypeMap IM.empty 0 + +freshTypeIndex :: State HieTypeState TypeIndex +freshTypeIndex = do + index <- gets freshIndex + modify' $ \hts -> hts { freshIndex = index+1 } + return index + +compressTypes + :: HieASTs Type + -> (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) +compressTypes asts = (a, arr) + where + (a, (HTS _ m i)) = flip runState initialHTS $ + for asts $ \typ -> do + i <- getTypeIndex typ + return i + arr = A.array (0,i-1) (IM.toList m) + +recoverFullType :: TypeIndex -> A.Array TypeIndex HieTypeFlat -> HieTypeFix +recoverFullType i m = go i + where + go i = Roll $ fmap go (m A.! 
i) + +getTypeIndex :: Type -> State HieTypeState TypeIndex +getTypeIndex t + | otherwise = do + tm <- gets tyMap + case lookupTypeMap tm t of + Just i -> return i + Nothing -> do + ht <- go t + extendHTS t ht + where + extendHTS t ht = do + i <- freshTypeIndex + modify' $ \(HTS tm tt fi) -> + HTS (extendTypeMap tm t i) (IM.insert i ht tt) fi + return i + + go (TyVarTy v) = return $ HTyVarTy $ varName v + go (AppTy a b) = do + ai <- getTypeIndex a + bi <- getTypeIndex b + return $ HAppTy ai bi + go (TyConApp f xs) = do + let visArgs = HieArgs $ resolveVisibility (tyConKind f) xs + is <- mapM getTypeIndex visArgs + return $ HTyConApp (toIfaceTyCon f) is + go (ForAllTy (TvBndr v a) t) = do + k <- getTypeIndex (varType v) + i <- getTypeIndex t + return $ HForAllTy ((varName v,k),a) i + go (FunTy a b) = do + ai <- getTypeIndex a + bi <- getTypeIndex b + return $ if isPredTy a + then HQualTy ai bi + else HFunTy ai bi + go (LitTy a) = return $ HLitTy $ toIfaceTyLit a + go (CastTy t _) = do + i <- getTypeIndex t + return $ HCastTy i + go (CoercionTy _) = return HCoercionTy + +resolveTyVarScopes :: M.Map FastString (HieAST a) -> M.Map FastString (HieAST a) +resolveTyVarScopes asts = M.map go asts + where + go ast = resolveTyVarScopeLocal ast asts + +resolveTyVarScopeLocal :: HieAST a -> M.Map FastString (HieAST a) -> HieAST a +resolveTyVarScopeLocal ast asts = go ast + where + resolveNameScope dets = dets{identInfo = + S.map resolveScope (identInfo dets)} + resolveScope (TyVarBind sc (UnresolvedScope names Nothing)) = + TyVarBind sc $ ResolvedScopes + [ LocalScope binding + | name <- names + , Just binding <- [getNameBinding name asts] + ] + resolveScope (TyVarBind sc (UnresolvedScope names (Just sp))) = + TyVarBind sc $ ResolvedScopes + [ LocalScope binding + | name <- names + , Just binding <- [getNameBindingInClass name sp asts] + ] + resolveScope scope = scope + go (Node info span children) = Node info' span $ map go children + where + info' = info { nodeIdentifiers = idents } + idents = M.map resolveNameScope $ nodeIdentifiers info + +getNameBinding :: Name -> M.Map FastString (HieAST a) -> Maybe Span +getNameBinding n asts = do + (_,msp) <- getNameScopeAndBinding n asts + msp + +getNameScope :: Name -> M.Map FastString (HieAST a) -> Maybe [Scope] +getNameScope n asts = do + (scopes,_) <- getNameScopeAndBinding n asts + return scopes + +getNameBindingInClass + :: Name + -> Span + -> M.Map FastString (HieAST a) + -> Maybe Span +getNameBindingInClass n sp asts = do + ast <- M.lookup (srcSpanFile sp) asts + getFirst $ foldMap First $ do + child <- flattenAst ast + dets <- maybeToList + $ M.lookup (Right n) $ nodeIdentifiers $ nodeInfo child + let binding = foldMap (First . getBindSiteFromContext) (identInfo dets) + return (getFirst binding) + +getNameScopeAndBinding + :: Name + -> M.Map FastString (HieAST a) + -> Maybe ([Scope], Maybe Span) +getNameScopeAndBinding n asts = case nameSrcSpan n of + RealSrcSpan sp -> do -- @Maybe + ast <- M.lookup (srcSpanFile sp) asts + defNode <- selectLargestContainedBy sp ast + getFirst $ foldMap First $ do -- @[] + node <- flattenAst defNode + dets <- maybeToList + $ M.lookup (Right n) $ nodeIdentifiers $ nodeInfo node + scopes <- maybeToList $ foldMap getScopeFromContext (identInfo dets) + let binding = foldMap (First . 
getBindSiteFromContext) (identInfo dets) + return $ Just (scopes, getFirst binding) + _ -> Nothing + +getScopeFromContext :: ContextInfo -> Maybe [Scope] +getScopeFromContext (ValBind _ sc _) = Just [sc] +getScopeFromContext (PatternBind a b _) = Just [a, b] +getScopeFromContext (ClassTyDecl _) = Just [ModuleScope] +getScopeFromContext (Decl _ _) = Just [ModuleScope] +getScopeFromContext (TyVarBind a (ResolvedScopes xs)) = Just $ a:xs +getScopeFromContext (TyVarBind a _) = Just [a] +getScopeFromContext _ = Nothing + +getBindSiteFromContext :: ContextInfo -> Maybe Span +getBindSiteFromContext (ValBind _ _ sp) = sp +getBindSiteFromContext (PatternBind _ _ sp) = sp +getBindSiteFromContext _ = Nothing + +flattenAst :: HieAST a -> [HieAST a] +flattenAst n = + n : concatMap flattenAst (nodeChildren n) + +smallestContainingSatisfying + :: Span + -> (HieAST a -> Bool) + -> HieAST a + -> Maybe (HieAST a) +smallestContainingSatisfying sp cond node + | nodeSpan node `containsSpan` sp = getFirst $ mconcat + [ foldMap (First . smallestContainingSatisfying sp cond) $ + nodeChildren node + , First $ if cond node then Just node else Nothing + ] + | sp `containsSpan` nodeSpan node = Nothing + | otherwise = Nothing + +selectLargestContainedBy :: Span -> HieAST a -> Maybe (HieAST a) +selectLargestContainedBy sp node + | sp `containsSpan` nodeSpan node = Just node + | nodeSpan node `containsSpan` sp = + getFirst $ foldMap (First . selectLargestContainedBy sp) $ + nodeChildren node + | otherwise = Nothing + +selectSmallestContaining :: Span -> HieAST a -> Maybe (HieAST a) +selectSmallestContaining sp node + | nodeSpan node `containsSpan` sp = getFirst $ mconcat + [ foldMap (First . selectSmallestContaining sp) $ nodeChildren node + , First (Just node) + ] + | sp `containsSpan` nodeSpan node = Nothing + | otherwise = Nothing + +definedInAsts :: M.Map FastString (HieAST a) -> Name -> Bool +definedInAsts asts n = case nameSrcSpan n of + RealSrcSpan sp -> srcSpanFile sp `elem` M.keys asts + _ -> False + +isOccurrence :: ContextInfo -> Bool +isOccurrence Use = True +isOccurrence _ = False + +scopeContainsSpan :: Scope -> Span -> Bool +scopeContainsSpan NoScope _ = False +scopeContainsSpan ModuleScope _ = True +scopeContainsSpan (LocalScope a) b = a `containsSpan` b + +-- | One must contain the other. Leaf nodes cannot contain anything +combineAst :: HieAST Type -> HieAST Type -> HieAST Type +combineAst a@(Node aInf aSpn xs) b@(Node bInf bSpn ys) + | aSpn == bSpn = Node (aInf `combineNodeInfo` bInf) aSpn (mergeAsts xs ys) + | aSpn `containsSpan` bSpn = combineAst b a +combineAst a (Node xs span children) = Node xs span (insertAst a children) + +-- | Insert an AST in a sorted list of disjoint Asts +insertAst :: HieAST Type -> [HieAST Type] -> [HieAST Type] +insertAst x = mergeAsts [x] + +-- | Merge two nodes together. +-- +-- Precondition and postcondition: elements in 'nodeType' are ordered. +combineNodeInfo :: NodeInfo Type -> NodeInfo Type -> NodeInfo Type +(NodeInfo as ai ad) `combineNodeInfo` (NodeInfo bs bi bd) = + NodeInfo (S.union as bs) (mergeSorted ai bi) (M.unionWith (<>) ad bd) + where + mergeSorted :: [Type] -> [Type] -> [Type] + mergeSorted la@(a:as) lb@(b:bs) = case nonDetCmpType a b of + LT -> a : mergeSorted as lb + EQ -> a : mergeSorted as bs + GT -> b : mergeSorted la bs + mergeSorted as [] = as + mergeSorted [] bs = bs + + +{- | Merge two sorted, disjoint lists of ASTs, combining when necessary. 
+ +In the absence of position-altering pragmas (ex: @# line "file.hs" 3@), +different nodes in an AST tree should either have disjoint spans (in +which case you can say for sure which one comes first) or one span +should be completely contained in the other (in which case the contained +span corresponds to some child node). + +However, since Haskell does have position-altering pragmas it /is/ +possible for spans to be overlapping. Here is an example of a source file +in which @foozball@ and @quuuuuux@ have overlapping spans: + +@ +module Baz where + +# line 3 "Baz.hs" +foozball :: Int +foozball = 0 + +# line 3 "Baz.hs" +bar, quuuuuux :: Int +bar = 1 +quuuuuux = 2 +@ + +In these cases, we just do our best to produce sensible `HieAST`'s. The blame +should be laid at the feet of whoever wrote the line pragmas in the first place +(usually the C preprocessor...). +-} +mergeAsts :: [HieAST Type] -> [HieAST Type] -> [HieAST Type] +mergeAsts xs [] = xs +mergeAsts [] ys = ys +mergeAsts xs@(a:as) ys@(b:bs) + | span_a `containsSpan` span_b = mergeAsts (combineAst a b : as) bs + | span_b `containsSpan` span_a = mergeAsts as (combineAst a b : bs) + | span_a `rightOf` span_b = b : mergeAsts xs bs + | span_a `leftOf` span_b = a : mergeAsts as ys + + -- These cases are to work around ASTs that are not fully disjoint + | span_a `startsRightOf` span_b = b : mergeAsts as ys + | otherwise = a : mergeAsts as ys + where + span_a = nodeSpan a + span_b = nodeSpan b + +rightOf :: Span -> Span -> Bool +rightOf s1 s2 + = (srcSpanStartLine s1, srcSpanStartCol s1) + >= (srcSpanEndLine s2, srcSpanEndCol s2) + && (srcSpanFile s1 == srcSpanFile s2) + +leftOf :: Span -> Span -> Bool +leftOf s1 s2 + = (srcSpanEndLine s1, srcSpanEndCol s1) + <= (srcSpanStartLine s2, srcSpanStartCol s2) + && (srcSpanFile s1 == srcSpanFile s2) + +startsRightOf :: Span -> Span -> Bool +startsRightOf s1 s2 + = (srcSpanStartLine s1, srcSpanStartCol s1) + >= (srcSpanStartLine s2, srcSpanStartCol s2) + +-- | combines and sorts ASTs using a merge sort +mergeSortAsts :: [HieAST Type] -> [HieAST Type] +mergeSortAsts = go . map pure + where + go [] = [] + go [xs] = xs + go xss = go (mergePairs xss) + mergePairs [] = [] + mergePairs [xs] = [xs] + mergePairs (xs:ys:xss) = mergeAsts xs ys : mergePairs xss + +simpleNodeInfo :: FastString -> FastString -> NodeInfo a +simpleNodeInfo cons typ = NodeInfo (S.singleton (cons, typ)) [] M.empty + +locOnly :: SrcSpan -> [HieAST a] +locOnly (RealSrcSpan span) = + [Node e span []] + where e = NodeInfo S.empty [] M.empty +locOnly _ = [] + +mkScope :: SrcSpan -> Scope +mkScope (RealSrcSpan sp) = LocalScope sp +mkScope _ = NoScope + +mkLScope :: Located a -> Scope +mkLScope = mkScope . getLoc + +combineScopes :: Scope -> Scope -> Scope +combineScopes ModuleScope _ = ModuleScope +combineScopes _ ModuleScope = ModuleScope +combineScopes NoScope x = x +combineScopes x NoScope = x +combineScopes (LocalScope a) (LocalScope b) = + mkScope $ combineSrcSpans (RealSrcSpan a) (RealSrcSpan b) + +{-# INLINEABLE makeNode #-} +makeNode + :: (Applicative m, Data a) + => a -- ^ helps fill in 'nodeAnnotations' (with 'Data') + -> SrcSpan -- ^ return an empty list if this is unhelpful + -> m [HieAST b] +makeNode x spn = pure $ case spn of + RealSrcSpan span -> [Node (simpleNodeInfo cons typ) span []] + _ -> [] + where + cons = mkFastString . show . toConstr $ x + typ = mkFastString . show . typeRepTyCon . 
typeOf $ x + +{-# INLINEABLE makeTypeNode #-} +makeTypeNode + :: (Applicative m, Data a) + => a -- ^ helps fill in 'nodeAnnotations' (with 'Data') + -> SrcSpan -- ^ return an empty list if this is unhelpful + -> Type -- ^ type to associate with the node + -> m [HieAST Type] +makeTypeNode x spn etyp = pure $ case spn of + RealSrcSpan span -> + [Node (NodeInfo (S.singleton (cons,typ)) [etyp] M.empty) span []] + _ -> [] + where + cons = mkFastString . show . toConstr $ x + typ = mkFastString . show . typeRepTyCon . typeOf $ x diff --git a/hie-compat/src-ghc88/Compat/HieAst.hs b/hie-compat/src-ghc88/Compat/HieAst.hs new file mode 100644 index 00000000000..c9092184b1b --- /dev/null +++ b/hie-compat/src-ghc88/Compat/HieAst.hs @@ -0,0 +1,1786 @@ +{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-} +{- +Forked from GHC v8.8.1 to work around the readFile side effect in mkHiefile + +Main functions for .hie file generation +-} +{- HLINT ignore -} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE TypeSynonymInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ViewPatterns #-} +{-# LANGUAGE DeriveDataTypeable #-} +module Compat.HieAst ( mkHieFile, enrichHie ) where + +import Avail ( Avails ) +import Bag ( Bag, bagToList ) +import BasicTypes +import BooleanFormula +import Class ( FunDep ) +import CoreUtils ( exprType ) +import ConLike ( conLikeName ) +import Desugar ( deSugarExpr ) +import FieldLabel +import HsSyn +import HscTypes +import Module ( ModuleName, ml_hs_file ) +import MonadUtils ( concatMapM, liftIO ) +import Name ( Name, nameSrcSpan ) +import SrcLoc +import TcHsSyn ( hsLitType, hsPatType ) +import Type ( mkFunTys, Type ) +import TysWiredIn ( mkListTy, mkSumTy ) +import Var ( Id, Var, setVarName, varName, varType ) +import TcRnTypes +import MkIface ( mkIfaceExports ) + +import HieTypes +import HieUtils + +import qualified Data.Array as A +import qualified Data.ByteString as BS +import qualified Data.Map as M +import qualified Data.Set as S +import Data.Data ( Data, Typeable ) +import Data.List (foldl', foldl1' ) +import Data.Maybe ( listToMaybe ) +import Control.Monad.Trans.Reader +import Control.Monad.Trans.Class ( lift ) + +-- These synonyms match those defined in main/GHC.hs +type RenamedSource = ( HsGroup GhcRn, [LImportDecl GhcRn] + , Maybe [(LIE GhcRn, Avails)] + , Maybe LHsDocString ) +type TypecheckedSource = LHsBinds GhcTc + + +{- Note [Name Remapping] +The Typechecker introduces new names for mono names in AbsBinds. +We don't care about the distinction between mono and poly bindings, +so we replace all occurrences of the mono name with the poly name. +-} +newtype HieState = HieState + { name_remapping :: M.Map Name Id + } + +initState :: HieState +initState = HieState M.empty + +class ModifyState a where -- See Note [Name Remapping] + addSubstitution :: a -> a -> HieState -> HieState + +instance ModifyState Name where + addSubstitution _ _ hs = hs + +instance ModifyState Id where + addSubstitution mono poly hs = + hs{name_remapping = M.insert (varName mono) poly (name_remapping hs)} + +modifyState :: ModifyState (IdP p) => [ABExport p] -> HieState -> HieState +modifyState = foldr go id + where + go ABE{abe_poly=poly,abe_mono=mono} f = addSubstitution mono poly . 
f + go _ f = f + +type HieM = ReaderT HieState Hsc + +-- | Construct an 'HieFile' from the outputs of the typechecker. +mkHieFile :: ModSummary + -> TcGblEnv + -> RenamedSource + -> BS.ByteString + -> Hsc HieFile +mkHieFile ms ts rs src = do + let tc_binds = tcg_binds ts + (asts', arr) <- getCompressedAsts tc_binds rs + let Just src_file = ml_hs_file $ ms_location ms + return $ HieFile + { hie_hs_file = src_file + , hie_module = ms_mod ms + , hie_types = arr + , hie_asts = asts' + -- mkIfaceExports sorts the AvailInfos for stability + , hie_exports = mkIfaceExports (tcg_exports ts) + , hie_hs_src = src + } + +getCompressedAsts :: TypecheckedSource -> RenamedSource + -> Hsc (HieASTs TypeIndex, A.Array TypeIndex HieTypeFlat) +getCompressedAsts ts rs = do + asts <- enrichHie ts rs + return $ compressTypes asts + +enrichHie :: TypecheckedSource -> RenamedSource -> Hsc (HieASTs Type) +enrichHie ts (hsGrp, imports, exports, _) = flip runReaderT initState $ do + tasts <- toHie $ fmap (BC RegularBind ModuleScope) ts + rasts <- processGrp hsGrp + imps <- toHie $ filter (not . ideclImplicit . unLoc) imports + exps <- toHie $ fmap (map $ IEC Export . fst) exports + let spanFile children = case children of + [] -> mkRealSrcSpan (mkRealSrcLoc "" 1 1) (mkRealSrcLoc "" 1 1) + _ -> mkRealSrcSpan (realSrcSpanStart $ nodeSpan $ head children) + (realSrcSpanEnd $ nodeSpan $ last children) + + modulify xs = + Node (simpleNodeInfo "Module" "Module") (spanFile xs) xs + + asts = HieASTs + $ resolveTyVarScopes + $ M.map (modulify . mergeSortAsts) + $ M.fromListWith (++) + $ map (\x -> (srcSpanFile (nodeSpan x),[x])) flat_asts + + flat_asts = concat + [ tasts + , rasts + , imps + , exps + ] + return asts + where + processGrp grp = concatM + [ toHie $ fmap (RS ModuleScope ) hs_valds grp + , toHie $ hs_splcds grp + , toHie $ hs_tyclds grp + , toHie $ hs_derivds grp + , toHie $ hs_fixds grp + , toHie $ hs_defds grp + , toHie $ hs_fords grp + , toHie $ hs_warnds grp + , toHie $ hs_annds grp + , toHie $ hs_ruleds grp + ] + +getRealSpan :: SrcSpan -> Maybe Span +getRealSpan (RealSrcSpan sp) = Just sp +getRealSpan _ = Nothing + +grhss_span :: GRHSs p body -> SrcSpan +grhss_span (GRHSs _ xs bs) = foldl' combineSrcSpans (getLoc bs) (map getLoc xs) +grhss_span (XGRHSs _) = error "XGRHS has no span" + +bindingsOnly :: [Context Name] -> [HieAST a] +bindingsOnly [] = [] +bindingsOnly (C c n : xs) = case nameSrcSpan n of + RealSrcSpan span -> Node nodeinfo span [] : bindingsOnly xs + where nodeinfo = NodeInfo S.empty [] (M.singleton (Right n) info) + info = mempty{identInfo = S.singleton c} + _ -> bindingsOnly xs + +concatM :: Monad m => [m [a]] -> m [a] +concatM xs = concat <$> sequence xs + +{- Note [Capturing Scopes and other non local information] +toHie is a local tranformation, but scopes of bindings cannot be known locally, +hence we have to push the relevant info down into the binding nodes. +We use the following types (*Context and *Scoped) to wrap things and +carry the required info +(Maybe Span) always carries the span of the entire binding, including rhs +-} +data Context a = C ContextInfo a -- Used for names and bindings + +data RContext a = RC RecFieldContext a +data RFContext a = RFC RecFieldContext (Maybe Span) a +-- ^ context for record fields + +data IEContext a = IEC IEType a +-- ^ context for imports/exports + +data BindContext a = BC BindType Scope a +-- ^ context for imports/exports + +data PatSynFieldContext a = PSC (Maybe Span) a +-- ^ context for pattern synonym fields. 
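{- Editor's sketch (illustrative, not part of the original patch): the
   *Context and *Scoped wrappers above exist so that call sites can push
   non-local information (binding kind, scope, import/export role, ...) down
   into 'toHie', as described in Note [Capturing Scopes and other non local
   information]. The pattern, using calls that appear elsewhere in this
   module:

     -- each typechecked binding is traversed as a regular, module-scoped bind
     toHie $ fmap (BC RegularBind ModuleScope) ts

     -- each item of the export list is traversed in an Export context
     toHie $ fmap (map $ IEC Export . fst) exports

     -- the value bindings of the renamed group are scoped over the module
     toHie $ fmap (RS ModuleScope) hs_valds grp
-}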
+ +data SigContext a = SC SigInfo a +-- ^ context for type signatures + +data SigInfo = SI SigType (Maybe Span) + +data SigType = BindSig | ClassSig | InstSig + +data RScoped a = RS Scope a +-- ^ Scope spans over everything to the right of a, (mostly) not +-- including a itself +-- (Includes a in a few special cases like recursive do bindings) or +-- let/where bindings + +-- | Pattern scope +data PScoped a = PS (Maybe Span) + Scope -- ^ use site of the pattern + Scope -- ^ pattern to the right of a, not including a + a + deriving (Typeable, Data) -- Pattern Scope + +{- Note [TyVar Scopes] +Due to -XScopedTypeVariables, type variables can be in scope quite far from +their original binding. We resolve the scope of these type variables +in a separate pass +-} +data TScoped a = TS TyVarScope a -- TyVarScope + +data TVScoped a = TVS TyVarScope Scope a -- TyVarScope +-- ^ First scope remains constant +-- Second scope is used to build up the scope of a tyvar over +-- things to its right, ala RScoped + +-- | Each element scopes over the elements to the right +listScopes :: Scope -> [Located a] -> [RScoped (Located a)] +listScopes _ [] = [] +listScopes rhsScope [pat] = [RS rhsScope pat] +listScopes rhsScope (pat : pats) = RS sc pat : pats' + where + pats'@((RS scope p):_) = listScopes rhsScope pats + sc = combineScopes scope $ mkScope $ getLoc p + +-- | 'listScopes' specialised to 'PScoped' things +patScopes + :: Maybe Span + -> Scope + -> Scope + -> [LPat (GhcPass p)] + -> [PScoped (LPat (GhcPass p))] +patScopes rsp useScope patScope xs = + map (\(RS sc a) -> PS rsp useScope sc (unLoc a)) $ + listScopes patScope (map dL xs) + +-- | 'listScopes' specialised to 'TVScoped' things +tvScopes + :: TyVarScope + -> Scope + -> [LHsTyVarBndr a] + -> [TVScoped (LHsTyVarBndr a)] +tvScopes tvScope rhsScope xs = + map (\(RS sc a)-> TVS tvScope sc a) $ listScopes rhsScope xs + +{- Note [Scoping Rules for SigPat] +Explicitly quantified variables in pattern type signatures are not +brought into scope in the rhs, but implicitly quantified variables +are (HsWC and HsIB). +This is unlike other signatures, where explicitly quantified variables +are brought into the RHS Scope +For example +foo :: forall a. ...; +foo = ... -- a is in scope here + +bar (x :: forall a. a -> a) = ... -- a is not in scope here +-- ^ a is in scope here (pattern body) + +bax (x :: a) = ... 
-- a is in scope here +Because of HsWC and HsIB pass on their scope to their children +we must wrap the LHsType in pattern signatures in a +Shielded explictly, so that the HsWC/HsIB scope is not passed +on the the LHsType +-} + +data Shielded a = SH Scope a -- Ignores its TScope, uses its own scope instead + +type family ProtectedSig a where + ProtectedSig GhcRn = HsWildCardBndrs GhcRn (HsImplicitBndrs + GhcRn + (Shielded (LHsType GhcRn))) + ProtectedSig GhcTc = NoExt + +class ProtectSig a where + protectSig :: Scope -> LHsSigWcType (NoGhcTc a) -> ProtectedSig a + +instance (HasLoc a) => HasLoc (Shielded a) where + loc (SH _ a) = loc a + +instance (ToHie (TScoped a)) => ToHie (TScoped (Shielded a)) where + toHie (TS _ (SH sc a)) = toHie (TS (ResolvedScopes [sc]) a) + +instance ProtectSig GhcTc where + protectSig _ _ = NoExt + +instance ProtectSig GhcRn where + protectSig sc (HsWC a (HsIB b sig)) = + HsWC a (HsIB b (SH sc sig)) + protectSig _ _ = error "protectSig not given HsWC (HsIB)" + +class HasLoc a where + -- ^ defined so that HsImplicitBndrs and HsWildCardBndrs can + -- know what their implicit bindings are scoping over + loc :: a -> SrcSpan + +instance HasLoc thing => HasLoc (TScoped thing) where + loc (TS _ a) = loc a + +instance HasLoc thing => HasLoc (PScoped thing) where + loc (PS _ _ _ a) = loc a + +instance HasLoc (LHsQTyVars GhcRn) where + loc (HsQTvs _ vs) = loc vs + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsImplicitBndrs a thing) where + loc (HsIB _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc thing => HasLoc (HsWildCardBndrs a thing) where + loc (HsWC _ a) = loc a + loc _ = noSrcSpan + +instance HasLoc (Located a) where + loc (L l _) = l + +instance HasLoc a => HasLoc [a] where + loc [] = noSrcSpan + loc xs = foldl1' combineSrcSpans $ map loc xs + +instance (HasLoc a, HasLoc b) => HasLoc (FamEqn s a b) where + loc (FamEqn _ a Nothing b _ c) = foldl1' combineSrcSpans [loc a, loc b, loc c] + loc (FamEqn _ a (Just tvs) b _ c) = foldl1' combineSrcSpans + [loc a, loc tvs, loc b, loc c] + loc _ = noSrcSpan +instance (HasLoc tm, HasLoc ty) => HasLoc (HsArg tm ty) where + loc (HsValArg tm) = loc tm + loc (HsTypeArg _ ty) = loc ty + loc (HsArgPar sp) = sp + +instance HasLoc (HsDataDefn GhcRn) where + loc def@(HsDataDefn{}) = loc $ dd_cons def + -- Only used for data family instances, so we only need rhs + -- Most probably the rest will be unhelpful anyway + loc _ = noSrcSpan + +instance HasLoc (Pat (GhcPass a)) where + loc (dL -> L l _) = l + +-- | The main worker class +class ToHie a where + toHie :: a -> HieM [HieAST Type] + +-- | Used to collect type info +class Data a => HasType a where + getTypeNode :: a -> HieM [HieAST Type] + +instance (ToHie a) => ToHie [a] where + toHie = concatMapM toHie + +instance (ToHie a) => ToHie (Bag a) where + toHie = toHie . 
bagToList + +instance (ToHie a) => ToHie (Maybe a) where + toHie = maybe (pure []) toHie + +instance ToHie (Context (Located NoExt)) where + toHie _ = pure [] + +instance ToHie (TScoped NoExt) where + toHie _ = pure [] + +instance ToHie (IEContext (Located ModuleName)) where + toHie (IEC c (L (RealSrcSpan span) mname)) = + pure $ [Node (NodeInfo S.empty [] idents) span []] + where details = mempty{identInfo = S.singleton (IEThing c)} + idents = M.singleton (Left mname) details + toHie _ = pure [] + +instance ToHie (Context (Located Var)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') + -> do + m <- asks name_remapping + let name = M.findWithDefault name' (varName name') m + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right $ varName name) + (IdentifierDetails (Just $ varType name') + (S.singleton context))) + span + []] + _ -> pure [] + +instance ToHie (Context (Located Name)) where + toHie c = case c of + C context (L (RealSrcSpan span) name') -> do + m <- asks name_remapping + let name = case M.lookup name' m of + Just var -> varName var + Nothing -> name' + pure + [Node + (NodeInfo S.empty [] $ + M.singleton (Right name) + (IdentifierDetails Nothing + (S.singleton context))) + span + []] + _ -> pure [] + +-- | Dummy instances - never called +instance ToHie (TScoped (LHsSigWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped (LHsWcType GhcTc)) where + toHie _ = pure [] +instance ToHie (SigContext (LSig GhcTc)) where + toHie _ = pure [] +instance ToHie (TScoped Type) where + toHie _ = pure [] + +instance HasType (LHsBind GhcRn) where + getTypeNode (L spn bind) = makeNode bind spn + +instance HasType (LHsBind GhcTc) where + getTypeNode (L spn bind) = case bind of + FunBind{fun_id = name} -> makeTypeNode bind spn (varType $ unLoc name) + _ -> makeNode bind spn + +instance HasType (LPat GhcRn) where + getTypeNode (dL -> L spn pat) = makeNode pat spn + +instance HasType (LPat GhcTc) where + getTypeNode (dL -> L spn opat) = makeTypeNode opat spn (hsPatType opat) + +instance HasType (LHsExpr GhcRn) where + getTypeNode (L spn e) = makeNode e spn + +-- | This instance tries to construct 'HieAST' nodes which include the type of +-- the expression. It is not yet possible to do this efficiently for all +-- expression forms, so we skip filling in the type for those inputs. +-- +-- 'HsApp', for example, doesn't have any type information available directly on +-- the node. Our next recourse would be to desugar it into a 'CoreExpr' then +-- query the type of that. Yet both the desugaring call and the type query both +-- involve recursive calls to the function and argument! This is particularly +-- problematic when you realize that the HIE traversal will eventually visit +-- those nodes too and ask for their types again. +-- +-- Since the above is quite costly, we just skip cases where computing the +-- expression's type is going to be expensive. 
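+-- As a concrete illustration: a bare variable occurrence is desugared (that
+-- is cheap) to recover its type, whereas an application such as @f x@ is
+-- recorded without a type here, because desugaring it would recursively
+-- desugar @f@ and @x@, which the traversal visits again anyway.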
+-- +-- See #16233 +instance HasType (LHsExpr GhcTc) where + getTypeNode e@(L spn e') = lift $ + -- Some expression forms have their type immediately available + let tyOpt = case e' of + HsLit _ l -> Just (hsLitType l) + HsOverLit _ o -> Just (overLitType o) + + HsLam _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsLamCase _ (MG { mg_ext = groupTy }) -> Just (matchGroupType groupTy) + HsCase _ _ (MG { mg_ext = groupTy }) -> Just (mg_res_ty groupTy) + + ExplicitList ty _ _ -> Just (mkListTy ty) + ExplicitSum ty _ _ _ -> Just (mkSumTy ty) + HsDo ty _ _ -> Just ty + HsMultiIf ty _ -> Just ty + + _ -> Nothing + + in + case tyOpt of + _ | skipDesugaring e' -> fallback + | otherwise -> do + hs_env <- Hsc $ \e w -> return (e,w) + (_,mbe) <- liftIO $ deSugarExpr hs_env e + maybe fallback (makeTypeNode e' spn . exprType) mbe + where + fallback = makeNode e' spn + + matchGroupType :: MatchGroupTc -> Type + matchGroupType (MatchGroupTc args res) = mkFunTys args res + + -- | Skip desugaring of these expressions for performance reasons. + -- + -- See impact on Haddock output (esp. missing type annotations or links) + -- before marking more things here as 'False'. See impact on Haddock + -- performance before marking more things as 'True'. + skipDesugaring :: HsExpr a -> Bool + skipDesugaring e = case e of + HsVar{} -> False + HsUnboundVar{} -> False + HsConLikeOut{} -> False + HsRecFld{} -> False + HsOverLabel{} -> False + HsIPVar{} -> False + HsWrap{} -> False + _ -> True + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a (LHsExpr a)) + , ToHie (LHsExpr a) + , ToHie (Located (PatSynBind a a)) + , HasType (LHsBind a) + , ModifyState (IdP a) + , Data (HsBind a) + ) => ToHie (BindContext (LHsBind a)) where + toHie (BC context scope b@(L span bind)) = + concatM $ getTypeNode b : case bind of + FunBind{fun_id = name, fun_matches = matches} -> + [ toHie $ C (ValBind context scope $ getRealSpan span) name + , toHie matches + ] + PatBind{pat_lhs = lhs, pat_rhs = rhs} -> + [ toHie $ PS (getRealSpan span) scope NoScope lhs + , toHie rhs + ] + VarBind{var_rhs = expr} -> + [ toHie expr + ] + AbsBinds{abs_exports = xs, abs_binds = binds} -> + [ local (modifyState xs) $ -- Note [Name Remapping] + toHie $ fmap (BC context scope) binds + ] + PatSynBind _ psb -> + [ toHie $ L span psb -- PatSynBinds only occur at the top level + ] + XHsBindsLR _ -> [] + +instance ( ToHie (LMatch a body) + ) => ToHie (MatchGroup a body) where + toHie mg = concatM $ case mg of + MG{ mg_alts = (L span alts) , mg_origin = FromSource } -> + [ pure $ locOnly span + , toHie alts + ] + MG{} -> [] + XMatchGroup _ -> [] + +instance ( ToHie (Context (Located (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (HsPatSynDir a) + ) => ToHie (Located (PatSynBind a a)) where + toHie (L sp psb) = concatM $ case psb of + PSB{psb_id=var, psb_args=dets, psb_def=pat, psb_dir=dir} -> + [ toHie $ C (Decl PatSynDec $ getRealSpan sp) var + , toHie $ toBind dets + , toHie $ PS Nothing lhsScope NoScope pat + , toHie dir + ] + where + lhsScope = combineScopes varScope detScope + varScope = mkLScope var + detScope = case dets of + (PrefixCon args) -> foldr combineScopes NoScope $ map mkLScope args + (InfixCon a b) -> combineScopes (mkLScope a) (mkLScope b) + (RecCon r) -> foldr go NoScope r + go (RecordPatSynField a b) c = combineScopes c + $ combineScopes (mkLScope a) (mkLScope b) + detSpan = case detScope of + LocalScope a -> Just a + _ -> Nothing + toBind (PrefixCon 
args) = PrefixCon $ map (C Use) args + toBind (InfixCon a b) = InfixCon (C Use a) (C Use b) + toBind (RecCon r) = RecCon $ map (PSC detSpan) r + XPatSynBind _ -> [] + +instance ( ToHie (MatchGroup a (LHsExpr a)) + ) => ToHie (HsPatSynDir a) where + toHie dir = case dir of + ExplicitBidirectional mg -> toHie mg + _ -> pure [] + +instance ( a ~ GhcPass p + , ToHie body + , ToHie (HsMatchContext (NameOrRdrName (IdP a))) + , ToHie (PScoped (LPat a)) + , ToHie (GRHSs a body) + , Data (Match a body) + ) => ToHie (LMatch (GhcPass p) body) where + toHie (L span m ) = concatM $ makeNode m span : case m of + Match{m_ctxt=mctx, m_pats = pats, m_grhss = grhss } -> + [ toHie mctx + , let rhsScope = mkScope $ grhss_span grhss + in toHie $ patScopes Nothing rhsScope NoScope pats + , toHie grhss + ] + XMatch _ -> [] + +instance ( ToHie (Context (Located a)) + ) => ToHie (HsMatchContext a) where + toHie (FunRhs{mc_fun=name}) = toHie $ C MatchBind name + toHie (StmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( ToHie (HsMatchContext a) + ) => ToHie (HsStmtContext a) where + toHie (PatGuard a) = toHie a + toHie (ParStmtCtxt a) = toHie a + toHie (TransStmtCtxt a) = toHie a + toHie _ = pure [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , ToHie (RContext (HsRecFields a (PScoped (LPat a)))) + , ToHie (LHsExpr a) + , ToHie (TScoped (LHsSigWcType a)) + , ProtectSig a + , ToHie (TScoped (ProtectedSig a)) + , HasType (LPat a) + , Data (HsSplice a) + ) => ToHie (PScoped (LPat (GhcPass p))) where + toHie (PS rsp scope pscope lpat@(dL -> L ospan opat)) = + concatM $ getTypeNode lpat : case opat of + WildPat _ -> + [] + VarPat _ lname -> + [ toHie $ C (PatternBind scope pscope rsp) lname + ] + LazyPat _ p -> + [ toHie $ PS rsp scope pscope p + ] + AsPat _ lname pat -> + [ toHie $ C (PatternBind scope + (combineScopes (mkLScope (dL pat)) pscope) + rsp) + lname + , toHie $ PS rsp scope pscope pat + ] + ParPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + BangPat _ pat -> + [ toHie $ PS rsp scope pscope pat + ] + ListPat _ pats -> + [ toHie $ patScopes rsp scope pscope pats + ] + TuplePat _ pats _ -> + [ toHie $ patScopes rsp scope pscope pats + ] + SumPat _ pat _ _ -> + [ toHie $ PS rsp scope pscope pat + ] + ConPatIn c dets -> + [ toHie $ C Use c + , toHie $ contextify dets + ] + ConPatOut {pat_con = con, pat_args = dets}-> + [ toHie $ C Use $ fmap conLikeName con + , toHie $ contextify dets + ] + ViewPat _ expr pat -> + [ toHie expr + , toHie $ PS rsp scope pscope pat + ] + SplicePat _ sp -> + [ toHie $ L ospan sp + ] + LitPat _ _ -> + [] + NPat _ _ _ _ -> + [] + NPlusKPat _ n _ _ _ _ -> + [ toHie $ C (PatternBind scope pscope rsp) n + ] + SigPat _ pat sig -> + [ toHie $ PS rsp scope pscope pat + , let cscope = mkLScope (dL pat) in + toHie $ TS (ResolvedScopes [cscope, scope, pscope]) + (protectSig @a cscope sig) + -- See Note [Scoping Rules for SigPat] + ] + CoPat _ _ _ _ -> + [] + XPat _ -> [] + where + contextify (PrefixCon args) = PrefixCon $ patScopes rsp scope pscope args + contextify (InfixCon a b) = InfixCon a' b' + where [a', b'] = patScopes rsp scope pscope [a,b] + contextify (RecCon r) = RecCon $ RC RecFieldMatch $ contextify_rec r + contextify_rec (HsRecFields fds a) = HsRecFields (map go scoped_fds) a + where + go (RS fscope (L spn (HsRecField lbl pat pun))) = + L spn $ HsRecField lbl (PS rsp scope fscope pat) pun + scoped_fds = listScopes pscope fds + +instance ( ToHie body + , ToHie (LGRHS a body) + , ToHie (RScoped (LHsLocalBinds a)) + ) => ToHie (GRHSs a body) where + 
toHie grhs = concatM $ case grhs of + GRHSs _ grhss binds -> + [ toHie grhss + , toHie $ RS (mkScope $ grhss_span grhs) binds + ] + XGRHSs _ -> [] + +instance ( ToHie (Located body) + , ToHie (RScoped (GuardLStmt a)) + , Data (GRHS a (Located body)) + ) => ToHie (LGRHS a (Located body)) where + toHie (L span g) = concatM $ makeNode g span : case g of + GRHS _ guards body -> + [ toHie $ listScopes (mkLScope body) guards + , toHie body + ] + XGRHS _ -> [] + +instance ( a ~ GhcPass p + , ToHie (Context (Located (IdP a))) + , HasType (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (MatchGroup a (LHsExpr a)) + , ToHie (LGRHS a (LHsExpr a)) + , ToHie (RContext (HsRecordBinds a)) + , ToHie (RFContext (Located (AmbiguousFieldOcc a))) + , ToHie (ArithSeqInfo a) + , ToHie (LHsCmdTop a) + , ToHie (RScoped (GuardLStmt a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (TScoped (LHsWcType (NoGhcTc a))) + , ToHie (TScoped (LHsSigWcType (NoGhcTc a))) + , Data (HsExpr a) + , Data (HsSplice a) + , Data (HsTupArg a) + , Data (AmbiguousFieldOcc a) + ) => ToHie (LHsExpr (GhcPass p)) where + toHie e@(L mspan oexpr) = concatM $ getTypeNode e : case oexpr of + HsVar _ (L _ var) -> + [ toHie $ C Use (L mspan var) + -- Patch up var location since typechecker removes it + ] + HsUnboundVar _ _ -> + [] + HsConLikeOut _ con -> + [ toHie $ C Use $ L mspan $ conLikeName con + ] + HsRecFld _ fld -> + [ toHie $ RFC RecFieldOcc Nothing (L mspan fld) + ] + HsOverLabel _ _ _ -> [] + HsIPVar _ _ -> [] + HsOverLit _ _ -> [] + HsLit _ _ -> [] + HsLam _ mg -> + [ toHie mg + ] + HsLamCase _ mg -> + [ toHie mg + ] + HsApp _ a b -> + [ toHie a + , toHie b + ] + HsAppType _ expr sig -> + [ toHie expr + , toHie $ TS (ResolvedScopes []) sig + ] + OpApp _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + NegApp _ a _ -> + [ toHie a + ] + HsPar _ a -> + [ toHie a + ] + SectionL _ a b -> + [ toHie a + , toHie b + ] + SectionR _ a b -> + [ toHie a + , toHie b + ] + ExplicitTuple _ args _ -> + [ toHie args + ] + ExplicitSum _ _ _ expr -> + [ toHie expr + ] + HsCase _ expr matches -> + [ toHie expr + , toHie matches + ] + HsIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsMultiIf _ grhss -> + [ toHie grhss + ] + HsLet _ binds expr -> + [ toHie $ RS (mkLScope expr) binds + , toHie expr + ] + HsDo _ _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + ExplicitList _ _ exprs -> + [ toHie exprs + ] + RecordCon {rcon_con_name = name, rcon_flds = binds}-> + [ toHie $ C Use name + , toHie $ RC RecFieldAssign $ binds + ] + RecordUpd {rupd_expr = expr, rupd_flds = upds}-> + [ toHie expr + , toHie $ map (RC RecFieldAssign) upds + ] + ExprWithTySig _ expr sig -> + [ toHie expr + , toHie $ TS (ResolvedScopes [mkLScope expr]) sig + ] + ArithSeq _ _ info -> + [ toHie info + ] + HsSCC _ _ _ expr -> + [ toHie expr + ] + HsCoreAnn _ _ _ expr -> + [ toHie expr + ] + HsProc _ pat cmdtop -> + [ toHie $ PS Nothing (mkLScope cmdtop) NoScope pat + , toHie cmdtop + ] + HsStatic _ expr -> + [ toHie expr + ] + HsArrApp _ a b _ _ -> + [ toHie a + , toHie b + ] + HsArrForm _ expr _ cmds -> + [ toHie expr + , toHie cmds + ] + HsTick _ _ expr -> + [ toHie expr + ] + HsBinTick _ _ _ expr -> + [ toHie expr + ] + HsTickPragma _ _ _ _ expr -> + [ toHie expr + ] + HsWrap _ _ a -> + [ toHie $ L mspan a + ] + HsBracket _ b -> + [ toHie b + ] + HsRnBracketOut _ b p -> + [ toHie b + , toHie p + ] + HsTcBracketOut _ b p -> + [ toHie b + , toHie p + ] + HsSpliceE _ x -> + [ toHie $ L mspan x + ] + EWildPat _ -> [] + EAsPat _ a b 
-> + [ toHie $ C Use a + , toHie b + ] + EViewPat _ a b -> + [ toHie a + , toHie b + ] + ELazyPat _ a -> + [ toHie a + ] + XExpr _ -> [] + +instance ( a ~ GhcPass p + , ToHie (LHsExpr a) + , Data (HsTupArg a) + ) => ToHie (LHsTupArg (GhcPass p)) where + toHie (L span arg) = concatM $ makeNode arg span : case arg of + Present _ expr -> + [ toHie expr + ] + Missing _ -> [] + XTupArg _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (LHsLocalBinds a)) + , ToHie (RScoped (ApplicativeArg a)) + , ToHie (Located body) + , Data (StmtLR a a (Located body)) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (RScoped (LStmt (GhcPass p) (Located body))) where + toHie (RS scope (L span stmt)) = concatM $ makeNode stmt span : case stmt of + LastStmt _ body _ _ -> + [ toHie body + ] + BindStmt _ pat body _ _ -> + [ toHie $ PS (getRealSpan $ getLoc body) scope NoScope pat + , toHie body + ] + ApplicativeStmt _ stmts _ -> + [ concatMapM (toHie . RS scope . snd) stmts + ] + BodyStmt _ body _ _ -> + [ toHie body + ] + LetStmt _ binds -> + [ toHie $ RS scope binds + ] + ParStmt _ parstmts _ _ -> + [ concatMapM (\(ParStmtBlock _ stmts _ _) -> + toHie $ listScopes NoScope stmts) + parstmts + ] + TransStmt {trS_stmts = stmts, trS_using = using, trS_by = by} -> + [ toHie $ listScopes scope stmts + , toHie using + , toHie by + ] + RecStmt {recS_stmts = stmts} -> + [ toHie $ map (RS $ combineScopes scope (mkScope span)) stmts + ] + XStmtLR _ -> [] + +instance ( ToHie (LHsExpr a) + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (LHsLocalBinds a)) where + toHie (RS scope (L sp binds)) = concatM $ makeNode binds sp : case binds of + EmptyLocalBinds _ -> [] + HsIPBinds _ _ -> [] + HsValBinds _ valBinds -> + [ toHie $ RS (combineScopes scope $ mkScope sp) + valBinds + ] + XHsLocalBindsLR _ -> [] + +instance ( ToHie (BindContext (LHsBind a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (XXValBindsLR a a)) + ) => ToHie (RScoped (HsValBindsLR a a)) where + toHie (RS sc v) = concatM $ case v of + ValBinds _ binds sigs -> + [ toHie $ fmap (BC RegularBind sc) binds + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + XValBindsLR x -> [ toHie $ RS sc x ] + +instance ToHie (RScoped (NHsValBindsLR GhcTc)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] +instance ToHie (RScoped (NHsValBindsLR GhcRn)) where + toHie (RS sc (NValBinds binds sigs)) = concatM $ + [ toHie (concatMap (map (BC RegularBind sc) . bagToList . 
snd) binds) + , toHie $ fmap (SC (SI BindSig Nothing)) sigs + ] + +instance ( ToHie (RContext (LHsRecField a arg)) + ) => ToHie (RContext (HsRecFields a arg)) where + toHie (RC c (HsRecFields fields _)) = toHie $ map (RC c) fields + +instance ( ToHie (RFContext (Located label)) + , ToHie arg + , HasLoc arg + , Data label + , Data arg + ) => ToHie (RContext (LHsRecField' label arg)) where + toHie (RC c (L span recfld)) = concatM $ makeNode recfld span : case recfld of + HsRecField label expr _ -> + [ toHie $ RFC c (getRealSpan $ loc expr) label + , toHie expr + ] + +instance ToHie (RFContext (LFieldOcc GhcRn)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc name _ -> + [ toHie $ C (RecField c rhs) (L nspan name) + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (LFieldOcc GhcTc)) where + toHie (RFC c rhs (L nspan f)) = concatM $ case f of + FieldOcc var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcRn))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous name _ -> + [ toHie $ C (RecField c rhs) $ L nspan name + ] + Ambiguous _name _ -> + [ ] + XAmbiguousFieldOcc _ -> [] + +instance ToHie (RFContext (Located (AmbiguousFieldOcc GhcTc))) where + toHie (RFC c rhs (L nspan afo)) = concatM $ case afo of + Unambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + Ambiguous var _ -> + let var' = setVarName var (varName var) + in [ toHie $ C (RecField c rhs) (L nspan var') + ] + XAmbiguousFieldOcc _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (StmtLR a a (Located (HsExpr a))) + , Data (HsLocalBinds a) + ) => ToHie (RScoped (ApplicativeArg (GhcPass p))) where + toHie (RS sc (ApplicativeArgOne _ pat expr _)) = concatM + [ toHie $ PS Nothing sc NoScope pat + , toHie expr + ] + toHie (RS sc (ApplicativeArgMany _ stmts _ pat)) = concatM + [ toHie $ listScopes NoScope stmts + , toHie $ PS Nothing sc NoScope pat + ] + toHie (RS _ (XApplicativeArg _)) = pure [] + +instance (ToHie arg, ToHie rec) => ToHie (HsConDetails arg rec) where + toHie (PrefixCon args) = toHie args + toHie (RecCon rec) = toHie rec + toHie (InfixCon a b) = concatM [ toHie a, toHie b] + +instance ( ToHie (LHsCmd a) + , Data (HsCmdTop a) + ) => ToHie (LHsCmdTop a) where + toHie (L span top) = concatM $ makeNode top span : case top of + HsCmdTop _ cmd -> + [ toHie cmd + ] + XCmdTop _ -> [] + +instance ( a ~ GhcPass p + , ToHie (PScoped (LPat a)) + , ToHie (BindContext (LHsBind a)) + , ToHie (LHsExpr a) + , ToHie (MatchGroup a (LHsCmd a)) + , ToHie (SigContext (LSig a)) + , ToHie (RScoped (HsValBindsLR a a)) + , Data (HsCmd a) + , Data (HsCmdTop a) + , Data (StmtLR a a (Located (HsCmd a))) + , Data (HsLocalBinds a) + , Data (StmtLR a a (Located (HsExpr a))) + ) => ToHie (LHsCmd (GhcPass p)) where + toHie (L span cmd) = concatM $ makeNode cmd span : case cmd of + HsCmdArrApp _ a b _ _ -> + [ toHie a + , toHie b + ] + HsCmdArrForm _ a _ _ cmdtops -> + [ toHie a + , toHie cmdtops + ] + HsCmdApp _ a b -> + [ toHie a + , toHie b + ] + HsCmdLam _ mg -> + [ toHie mg + ] + HsCmdPar _ a -> + [ toHie a + ] + HsCmdCase _ expr alts -> + [ toHie expr + , toHie alts + ] + HsCmdIf _ _ a b c -> + [ toHie a + , toHie b + , toHie c + ] + HsCmdLet _ binds cmd' -> 
+ [ toHie $ RS (mkLScope cmd') binds + , toHie cmd' + ] + HsCmdDo _ (L ispan stmts) -> + [ pure $ locOnly ispan + , toHie $ listScopes NoScope stmts + ] + HsCmdWrap _ _ _ -> [] + XCmd _ -> [] + +instance ToHie (TyClGroup GhcRn) where + toHie (TyClGroup _ classes roles instances) = concatM + [ toHie classes + , toHie roles + , toHie instances + ] + toHie (XTyClGroup _) = pure [] + +instance ToHie (LTyClDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamDecl {tcdFam = fdecl} -> + [ toHie (L span fdecl) + ] + SynDecl {tcdLName = name, tcdTyVars = vars, tcdRhs = typ} -> + [ toHie $ C (Decl SynDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [mkScope $ getLoc typ]) vars + , toHie typ + ] + DataDecl {tcdLName = name, tcdTyVars = vars, tcdDataDefn = defn} -> + [ toHie $ C (Decl DataDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [quant_scope, rhs_scope]) vars + , toHie defn + ] + where + quant_scope = mkLScope $ dd_ctxt defn + rhs_scope = sig_sc `combineScopes` con_sc `combineScopes` deriv_sc + sig_sc = maybe NoScope mkLScope $ dd_kindSig defn + con_sc = foldr combineScopes NoScope $ map mkLScope $ dd_cons defn + deriv_sc = mkLScope $ dd_derivs defn + ClassDecl { tcdCtxt = context + , tcdLName = name + , tcdTyVars = vars + , tcdFDs = deps + , tcdSigs = sigs + , tcdMeths = meths + , tcdATs = typs + , tcdATDefs = deftyps + } -> + [ toHie $ C (Decl ClassDec $ getRealSpan span) name + , toHie context + , toHie $ TS (ResolvedScopes [context_scope, rhs_scope]) vars + , toHie deps + , toHie $ map (SC $ SI ClassSig $ getRealSpan span) sigs + , toHie $ fmap (BC InstanceBind ModuleScope) meths + , toHie typs + , concatMapM (pure . locOnly . getLoc) deftyps + , toHie $ map (go . unLoc) deftyps + ] + where + context_scope = mkLScope context + rhs_scope = foldl1' combineScopes $ map mkScope + [ loc deps, loc sigs, loc (bagToList meths), loc typs, loc deftyps] + + go :: TyFamDefltEqn GhcRn + -> FamEqn GhcRn (TScoped (LHsQTyVars GhcRn)) (LHsType GhcRn) + go (FamEqn a var bndrs pat b rhs) = + FamEqn a var bndrs (TS (ResolvedScopes [mkLScope rhs]) pat) b rhs + go (XFamEqn NoExt) = XFamEqn NoExt + XTyClDecl _ -> [] + +instance ToHie (LFamilyDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + FamilyDecl _ info name vars _ sig inj -> + [ toHie $ C (Decl FamDec $ getRealSpan span) name + , toHie $ TS (ResolvedScopes [rhsSpan]) vars + , toHie info + , toHie $ RS injSpan sig + , toHie inj + ] + where + rhsSpan = sigSpan `combineScopes` injSpan + sigSpan = mkScope $ getLoc sig + injSpan = maybe NoScope (mkScope . getLoc) inj + XFamilyDecl _ -> [] + +instance ToHie (FamilyInfo GhcRn) where + toHie (ClosedTypeFamily (Just eqns)) = concatM $ + [ concatMapM (pure . locOnly . 
getLoc) eqns + , toHie $ map go eqns + ] + where + go (L l ib) = TS (ResolvedScopes [mkScope l]) ib + toHie _ = pure [] + +instance ToHie (RScoped (LFamilyResultSig GhcRn)) where + toHie (RS sc (L span sig)) = concatM $ makeNode sig span : case sig of + NoSig _ -> + [] + KindSig _ k -> + [ toHie k + ] + TyVarSig _ bndr -> + [ toHie $ TVS (ResolvedScopes [sc]) NoScope bndr + ] + XFamilyResultSig _ -> [] + +instance ToHie (Located (FunDep (Located Name))) where + toHie (L span fd@(lhs, rhs)) = concatM $ + [ makeNode fd span + , toHie $ map (C Use) lhs + , toHie $ map (C Use) rhs + ] + +instance (ToHie pats, ToHie rhs, HasLoc pats, HasLoc rhs) + => ToHie (TScoped (FamEqn GhcRn pats rhs)) where + toHie (TS _ f) = toHie f + +instance ( ToHie pats + , ToHie rhs + , HasLoc pats + , HasLoc rhs + ) => ToHie (FamEqn GhcRn pats rhs) where + toHie fe@(FamEqn _ var tybndrs pats _ rhs) = concatM $ + [ toHie $ C (Decl InstDec $ getRealSpan $ loc fe) var + , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs + , toHie pats + , toHie rhs + ] + where scope = combineScopes patsScope rhsScope + patsScope = mkScope (loc pats) + rhsScope = mkScope (loc rhs) + toHie (XFamEqn _) = pure [] + +instance ToHie (LInjectivityAnn GhcRn) where + toHie (L span ann) = concatM $ makeNode ann span : case ann of + InjectivityAnn lhs rhs -> + [ toHie $ C Use lhs + , toHie $ map (C Use) rhs + ] + +instance ToHie (HsDataDefn GhcRn) where + toHie (HsDataDefn _ _ ctx _ mkind cons derivs) = concatM + [ toHie ctx + , toHie mkind + , toHie cons + , toHie derivs + ] + toHie (XHsDataDefn _) = pure [] + +instance ToHie (HsDeriving GhcRn) where + toHie (L span clauses) = concatM + [ pure $ locOnly span + , toHie clauses + ] + +instance ToHie (LHsDerivingClause GhcRn) where + toHie (L span cl) = concatM $ makeNode cl span : case cl of + HsDerivingClause _ strat (L ispan tys) -> + [ toHie strat + , pure $ locOnly ispan + , toHie $ map (TS (ResolvedScopes [])) tys + ] + XHsDerivingClause _ -> [] + +instance ToHie (Located (DerivStrategy GhcRn)) where + toHie (L span strat) = concatM $ makeNode strat span : case strat of + StockStrategy -> [] + AnyclassStrategy -> [] + NewtypeStrategy -> [] + ViaStrategy s -> [ toHie $ TS (ResolvedScopes []) s ] + +instance ToHie (Located OverlapMode) where + toHie (L span _) = pure $ locOnly span + +instance ToHie (LConDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ConDeclGADT { con_names = names, con_qvars = qvars + , con_mb_cxt = ctx, con_args = args, con_res_ty = typ } -> + [ toHie $ map (C (Decl ConDec $ getRealSpan span)) names + , toHie $ TS (ResolvedScopes [ctxScope, rhsScope]) qvars + , toHie ctx + , toHie args + , toHie typ + ] + where + rhsScope = combineScopes argsScope tyScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope args + tyScope = mkLScope typ + ConDeclH98 { con_name = name, con_ex_tvs = qvars + , con_mb_cxt = ctx, con_args = dets } -> + [ toHie $ C (Decl ConDec $ getRealSpan span) name + , toHie $ tvScopes (ResolvedScopes []) rhsScope qvars + , toHie ctx + , toHie dets + ] + where + rhsScope = combineScopes ctxScope argsScope + ctxScope = maybe NoScope mkLScope ctx + argsScope = condecl_scope dets + XConDecl _ -> [] + where condecl_scope args = case args of + PrefixCon xs -> foldr combineScopes NoScope $ map mkLScope xs + InfixCon a b -> combineScopes (mkLScope a) (mkLScope b) + RecCon x -> mkLScope x + +instance ToHie (Located [LConDeclField GhcRn]) where + toHie (L span decls) = concatM $ + [ pure $ locOnly span + , 
toHie decls + ] + +instance ( HasLoc thing + , ToHie (TScoped thing) + ) => ToHie (TScoped (HsImplicitBndrs GhcRn thing)) where + toHie (TS sc (HsIB ibrn a)) = concatM $ + [ pure $ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) ibrn + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsImplicitBndrs _)) = pure [] + +instance ( HasLoc thing + , ToHie (TScoped thing) + ) => ToHie (TScoped (HsWildCardBndrs GhcRn thing)) where + toHie (TS sc (HsWC names a)) = concatM $ + [ pure $ bindingsOnly $ map (C $ TyVarBind (mkScope span) sc) names + , toHie $ TS sc a + ] + where span = loc a + toHie (TS _ (XHsWildCardBndrs _)) = pure [] + +instance ToHie (SigContext (LSig GhcRn)) where + toHie (SC (SI styp msp) (L sp sig)) = concatM $ makeNode sig sp : case sig of + TypeSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + PatSynSig _ names typ -> + [ toHie $ map (C TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) Nothing) typ + ] + ClassOpSig _ _ names typ -> + [ case styp of + ClassSig -> toHie $ map (C $ ClassTyDecl $ getRealSpan sp) names + _ -> toHie $ map (C $ TyDecl) names + , toHie $ TS (UnresolvedScope (map unLoc names) msp) typ + ] + IdSig _ _ -> [] + FixSig _ fsig -> + [ toHie $ L sp fsig + ] + InlineSig _ name _ -> + [ toHie $ (C Use) name + ] + SpecSig _ name typs _ -> + [ toHie $ (C Use) name + , toHie $ map (TS (ResolvedScopes [])) typs + ] + SpecInstSig _ _ typ -> + [ toHie $ TS (ResolvedScopes []) typ + ] + MinimalSig _ _ form -> + [ toHie form + ] + SCCFunSig _ _ name mtxt -> + [ toHie $ (C Use) name + , pure $ maybe [] (locOnly . getLoc) mtxt + ] + CompleteMatchSig _ _ (L ispan names) typ -> + [ pure $ locOnly ispan + , toHie $ map (C Use) names + , toHie $ fmap (C Use) typ + ] + XSig _ -> [] + +instance ToHie (LHsType GhcRn) where + toHie x = toHie $ TS (ResolvedScopes []) x + +instance ToHie (TScoped (LHsType GhcRn)) where + toHie (TS tsc (L span t)) = concatM $ makeNode t span : case t of + HsForAllTy _ bndrs body -> + [ toHie $ tvScopes tsc (mkScope $ getLoc body) bndrs + , toHie body + ] + HsQualTy _ ctx body -> + [ toHie ctx + , toHie body + ] + HsTyVar _ _ var -> + [ toHie $ C Use var + ] + HsAppTy _ a b -> + [ toHie a + , toHie b + ] + HsAppKindTy _ ty ki -> + [ toHie ty + , toHie $ TS (ResolvedScopes []) ki + ] + HsFunTy _ a b -> + [ toHie a + , toHie b + ] + HsListTy _ a -> + [ toHie a + ] + HsTupleTy _ _ tys -> + [ toHie tys + ] + HsSumTy _ tys -> + [ toHie tys + ] + HsOpTy _ a op b -> + [ toHie a + , toHie $ C Use op + , toHie b + ] + HsParTy _ a -> + [ toHie a + ] + HsIParamTy _ ip ty -> + [ toHie ip + , toHie ty + ] + HsKindSig _ a b -> + [ toHie a + , toHie b + ] + HsSpliceTy _ a -> + [ toHie $ L span a + ] + HsDocTy _ a _ -> + [ toHie a + ] + HsBangTy _ _ ty -> + [ toHie ty + ] + HsRecTy _ fields -> + [ toHie fields + ] + HsExplicitListTy _ _ tys -> + [ toHie tys + ] + HsExplicitTupleTy _ tys -> + [ toHie tys + ] + HsTyLit _ _ -> [] + HsWildCardTy _ -> [] + HsStarTy _ _ -> [] + XHsType _ -> [] + +instance (ToHie tm, ToHie ty) => ToHie (HsArg tm ty) where + toHie (HsValArg tm) = toHie tm + toHie (HsTypeArg _ ty) = toHie ty + toHie (HsArgPar sp) = pure $ locOnly sp + +instance ToHie (TVScoped (LHsTyVarBndr GhcRn)) where + toHie (TVS tsc sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + UserTyVar _ var -> + [ toHie $ C (TyVarBind sc tsc) var + ] + KindedTyVar _ var kind -> + [ toHie $ C (TyVarBind sc tsc) var + , toHie kind + ] + XTyVarBndr _ -> [] + +instance 
ToHie (TScoped (LHsQTyVars GhcRn)) where + toHie (TS sc (HsQTvs (HsQTvsRn implicits _) vars)) = concatM $ + [ pure $ bindingsOnly bindings + , toHie $ tvScopes sc NoScope vars + ] + where + varLoc = loc vars + bindings = map (C $ TyVarBind (mkScope varLoc) sc) implicits + toHie (TS _ (XLHsQTyVars _)) = pure [] + +instance ToHie (LHsContext GhcRn) where + toHie (L span tys) = concatM $ + [ pure $ locOnly span + , toHie tys + ] + +instance ToHie (LConDeclField GhcRn) where + toHie (L span field) = concatM $ makeNode field span : case field of + ConDeclField _ fields typ _ -> + [ toHie $ map (RFC RecFieldDecl (getRealSpan $ loc typ)) fields + , toHie typ + ] + XConDeclField _ -> [] + +instance ToHie (LHsExpr a) => ToHie (ArithSeqInfo a) where + toHie (From expr) = toHie expr + toHie (FromThen a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromTo a b) = concatM $ + [ toHie a + , toHie b + ] + toHie (FromThenTo a b c) = concatM $ + [ toHie a + , toHie b + , toHie c + ] + +instance ToHie (LSpliceDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + SpliceDecl _ splice _ -> + [ toHie splice + ] + XSpliceDecl _ -> [] + +instance ToHie (HsBracket a) where + toHie _ = pure [] + +instance ToHie PendingRnSplice where + toHie _ = pure [] + +instance ToHie PendingTcSplice where + toHie _ = pure [] + +instance ToHie (LBooleanFormula (Located Name)) where + toHie (L span form) = concatM $ makeNode form span : case form of + Var a -> + [ toHie $ C Use a + ] + And forms -> + [ toHie forms + ] + Or forms -> + [ toHie forms + ] + Parens f -> + [ toHie f + ] + +instance ToHie (Located HsIPName) where + toHie (L span e) = makeNode e span + +instance ( ToHie (LHsExpr a) + , Data (HsSplice a) + ) => ToHie (Located (HsSplice a)) where + toHie (L span sp) = concatM $ makeNode sp span : case sp of + HsTypedSplice _ _ _ expr -> + [ toHie expr + ] + HsUntypedSplice _ _ _ expr -> + [ toHie expr + ] + HsQuasiQuote _ _ _ ispan _ -> + [ pure $ locOnly ispan + ] + HsSpliced _ _ _ -> + [] + HsSplicedT _ -> + [] + XSplice _ -> [] + +instance ToHie (LRoleAnnotDecl GhcRn) where + toHie (L span annot) = concatM $ makeNode annot span : case annot of + RoleAnnotDecl _ var roles -> + [ toHie $ C Use var + , concatMapM (pure . locOnly . getLoc) roles + ] + XRoleAnnotDecl _ -> [] + +instance ToHie (LInstDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ClsInstD _ d -> + [ toHie $ L span d + ] + DataFamInstD _ d -> + [ toHie $ L span d + ] + TyFamInstD _ d -> + [ toHie $ L span d + ] + XInstDecl _ -> [] + +instance ToHie (LClsInstDecl GhcRn) where + toHie (L span decl) = concatM + [ toHie $ TS (ResolvedScopes [mkScope span]) $ cid_poly_ty decl + , toHie $ fmap (BC InstanceBind ModuleScope) $ cid_binds decl + , toHie $ map (SC $ SI InstSig $ getRealSpan span) $ cid_sigs decl + , pure $ concatMap (locOnly . getLoc) $ cid_tyfam_insts decl + , toHie $ cid_tyfam_insts decl + , pure $ concatMap (locOnly . 
getLoc) $ cid_datafam_insts decl + , toHie $ cid_datafam_insts decl + , toHie $ cid_overlap_mode decl + ] + +instance ToHie (LDataFamInstDecl GhcRn) where + toHie (L sp (DataFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (LTyFamInstDecl GhcRn) where + toHie (L sp (TyFamInstDecl d)) = toHie $ TS (ResolvedScopes [mkScope sp]) d + +instance ToHie (Context a) + => ToHie (PatSynFieldContext (RecordPatSynField a)) where + toHie (PSC sp (RecordPatSynField a b)) = concatM $ + [ toHie $ C (RecField RecFieldDecl sp) a + , toHie $ C Use b + ] + +instance ToHie (LDerivDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DerivDecl _ typ strat overlap -> + [ toHie $ TS (ResolvedScopes []) typ + , toHie strat + , toHie overlap + ] + XDerivDecl _ -> [] + +instance ToHie (LFixitySig GhcRn) where + toHie (L span sig) = concatM $ makeNode sig span : case sig of + FixitySig _ vars _ -> + [ toHie $ map (C Use) vars + ] + XFixitySig _ -> [] + +instance ToHie (LDefaultDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + DefaultDecl _ typs -> + [ toHie typs + ] + XDefaultDecl _ -> [] + +instance ToHie (LForeignDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ForeignImport {fd_name = name, fd_sig_ty = sig, fd_fi = fi} -> + [ toHie $ C (ValBind RegularBind ModuleScope $ getRealSpan span) name + , toHie $ TS (ResolvedScopes []) sig + , toHie fi + ] + ForeignExport {fd_name = name, fd_sig_ty = sig, fd_fe = fe} -> + [ toHie $ C Use name + , toHie $ TS (ResolvedScopes []) sig + , toHie fe + ] + XForeignDecl _ -> [] + +instance ToHie ForeignImport where + toHie (CImport (L a _) (L b _) _ _ (L c _)) = pure $ concat $ + [ locOnly a + , locOnly b + , locOnly c + ] + +instance ToHie ForeignExport where + toHie (CExport (L a _) (L b _)) = pure $ concat $ + [ locOnly a + , locOnly b + ] + +instance ToHie (LWarnDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warnings _ _ warnings -> + [ toHie warnings + ] + XWarnDecls _ -> [] + +instance ToHie (LWarnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + Warning _ vars _ -> + [ toHie $ map (C Use) vars + ] + XWarnDecl _ -> [] + +instance ToHie (LAnnDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsAnnotation _ _ prov expr -> + [ toHie prov + , toHie expr + ] + XAnnDecl _ -> [] + +instance ToHie (Context (Located a)) => ToHie (AnnProvenance a) where + toHie (ValueAnnProvenance a) = toHie $ C Use a + toHie (TypeAnnProvenance a) = toHie $ C Use a + toHie ModuleAnnProvenance = pure [] + +instance ToHie (LRuleDecls GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + HsRules _ _ rules -> + [ toHie rules + ] + XRuleDecls _ -> [] + +instance ToHie (LRuleDecl GhcRn) where + toHie (L _ (XRuleDecl _)) = pure [] + toHie (L span r@(HsRule _ rname _ tybndrs bndrs exprA exprB)) = concatM + [ makeNode r span + , pure $ locOnly $ getLoc rname + , toHie $ fmap (tvScopes (ResolvedScopes []) scope) tybndrs + , toHie $ map (RS $ mkScope span) bndrs + , toHie exprA + , toHie exprB + ] + where scope = bndrs_sc `combineScopes` exprA_sc `combineScopes` exprB_sc + bndrs_sc = maybe NoScope mkLScope (listToMaybe bndrs) + exprA_sc = mkLScope exprA + exprB_sc = mkLScope exprB + +instance ToHie (RScoped (LRuleBndr GhcRn)) where + toHie (RS sc (L span bndr)) = concatM $ makeNode bndr span : case bndr of + RuleBndr _ var -> + [ 
toHie $ C (ValBind RegularBind sc Nothing) var + ] + RuleBndrSig _ var typ -> + [ toHie $ C (ValBind RegularBind sc Nothing) var + , toHie $ TS (ResolvedScopes [sc]) typ + ] + XRuleBndr _ -> [] + +instance ToHie (LImportDecl GhcRn) where + toHie (L span decl) = concatM $ makeNode decl span : case decl of + ImportDecl { ideclName = name, ideclAs = as, ideclHiding = hidden } -> + [ toHie $ IEC Import name + , toHie $ fmap (IEC ImportAs) as + , maybe (pure []) goIE hidden + ] + XImportDecl _ -> [] + where + goIE (hiding, (L sp liens)) = concatM $ + [ pure $ locOnly sp + , toHie $ map (IEC c) liens + ] + where + c = if hiding then ImportHiding else Import + +instance ToHie (IEContext (LIE GhcRn)) where + toHie (IEC c (L span ie)) = concatM $ makeNode ie span : case ie of + IEVar _ n -> + [ toHie $ IEC c n + ] + IEThingAbs _ n -> + [ toHie $ IEC c n + ] + IEThingAll _ n -> + [ toHie $ IEC c n + ] + IEThingWith _ n _ ns flds -> + [ toHie $ IEC c n + , toHie $ map (IEC c) ns + , toHie $ map (IEC c) flds + ] + IEModuleContents _ n -> + [ toHie $ IEC c n + ] + IEGroup _ _ _ -> [] + IEDoc _ _ -> [] + IEDocNamed _ _ -> [] + XIE _ -> [] + +instance ToHie (IEContext (LIEWrappedName Name)) where + toHie (IEC c (L span iewn)) = concatM $ makeNode iewn span : case iewn of + IEName n -> + [ toHie $ C (IEThing c) n + ] + IEPattern p -> + [ toHie $ C (IEThing c) p + ] + IEType n -> + [ toHie $ C (IEThing c) n + ] + +instance ToHie (IEContext (Located (FieldLbl Name))) where + toHie (IEC c (L span lbl)) = concatM $ makeNode lbl span : case lbl of + FieldLabel _ _ n -> + [ toHie $ C (IEThing c) $ L span n + ] + diff --git a/hie-compat/src-ghc88/Compat/HieBin.hs b/hie-compat/src-ghc88/Compat/HieBin.hs new file mode 100644 index 00000000000..859fc0f07d0 --- /dev/null +++ b/hie-compat/src-ghc88/Compat/HieBin.hs @@ -0,0 +1,389 @@ +{- +Binary serialization for .hie files. +-} +{- HLINT ignore -} +{-# LANGUAGE ScopedTypeVariables #-} +module Compat.HieBin ( readHieFile, readHieFileWithVersion, HieHeader, writeHieFile, HieName(..), toHieName, HieFileResult(..), hieMagic,NameCacheUpdater(..)) where + +import Config ( cProjectVersion ) +import Binary +import BinIface ( getDictFastString ) +import FastMutInt +import FastString ( FastString ) +import Module ( Module ) +import Name +import NameCache +import Outputable +import PrelInfo +import SrcLoc +import UniqSupply ( takeUniqFromSupply ) +import Util ( maybeRead ) +import Unique +import UniqFM +import IfaceEnv + +import qualified Data.Array as A +import Data.IORef +import Data.ByteString ( ByteString ) +import qualified Data.ByteString as BS +import qualified Data.ByteString.Char8 as BSC +import Data.List ( mapAccumR ) +import Data.Word ( Word8, Word32 ) +import Control.Monad ( replicateM, when ) +import System.Directory ( createDirectoryIfMissing ) +import System.FilePath ( takeDirectory ) + +import HieTypes + +-- | `Name`'s get converted into `HieName`'s before being written into @.hie@ +-- files. See 'toHieName' and 'fromHieName' for logic on how to convert between +-- these two types. 
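+-- Concretely (see 'toHieName' below): an external name keeps its defining
+-- module, 'OccName' and 'SrcSpan'; a local name keeps only the 'OccName' and
+-- 'SrcSpan'; and a known-key name is stored by its 'Unique' alone.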
+data HieName + = ExternalName !Module !OccName !SrcSpan + | LocalName !OccName !SrcSpan + | KnownKeyName !Unique + deriving (Eq) + +instance Ord HieName where + compare (ExternalName a b c) (ExternalName d e f) = compare (a,b,c) (d,e,f) + compare (LocalName a b) (LocalName c d) = compare (a,b) (c,d) + compare (KnownKeyName a) (KnownKeyName b) = nonDetCmpUnique a b + -- Not actually non determinstic as it is a KnownKey + compare ExternalName{} _ = LT + compare LocalName{} ExternalName{} = GT + compare LocalName{} _ = LT + compare KnownKeyName{} _ = GT + +instance Outputable HieName where + ppr (ExternalName m n sp) = text "ExternalName" <+> ppr m <+> ppr n <+> ppr sp + ppr (LocalName n sp) = text "LocalName" <+> ppr n <+> ppr sp + ppr (KnownKeyName u) = text "KnownKeyName" <+> ppr u + + +data HieSymbolTable = HieSymbolTable + { hie_symtab_next :: !FastMutInt + , hie_symtab_map :: !(IORef (UniqFM (Int, HieName))) + } + +data HieDictionary = HieDictionary + { hie_dict_next :: !FastMutInt -- The next index to use + , hie_dict_map :: !(IORef (UniqFM (Int,FastString))) -- indexed by FastString + } + +initBinMemSize :: Int +initBinMemSize = 1024*1024 + +-- | The header for HIE files - Capital ASCII letters "HIE". +hieMagic :: [Word8] +hieMagic = [72,73,69] + +hieMagicLen :: Int +hieMagicLen = length hieMagic + +ghcVersion :: ByteString +ghcVersion = BSC.pack cProjectVersion + +putBinLine :: BinHandle -> ByteString -> IO () +putBinLine bh xs = do + mapM_ (putByte bh) $ BS.unpack xs + putByte bh 10 -- newline char + +-- | Write a `HieFile` to the given `FilePath`, with a proper header and +-- symbol tables for `Name`s and `FastString`s +writeHieFile :: FilePath -> HieFile -> IO () +writeHieFile hie_file_path hiefile = do + bh0 <- openBinMem initBinMemSize + + -- Write the header: hieHeader followed by the + -- hieVersion and the GHC version used to generate this file + mapM_ (putByte bh0) hieMagic + putBinLine bh0 $ BSC.pack $ show hieVersion + putBinLine bh0 $ ghcVersion + + -- remember where the dictionary pointer will go + dict_p_p <- tellBin bh0 + put_ bh0 dict_p_p + + -- remember where the symbol table pointer will go + symtab_p_p <- tellBin bh0 + put_ bh0 symtab_p_p + + -- Make some intial state + symtab_next <- newFastMutInt + writeFastMutInt symtab_next 0 + symtab_map <- newIORef emptyUFM + let hie_symtab = HieSymbolTable { + hie_symtab_next = symtab_next, + hie_symtab_map = symtab_map } + dict_next_ref <- newFastMutInt + writeFastMutInt dict_next_ref 0 + dict_map_ref <- newIORef emptyUFM + let hie_dict = HieDictionary { + hie_dict_next = dict_next_ref, + hie_dict_map = dict_map_ref } + + -- put the main thing + let bh = setUserData bh0 $ newWriteState (putName hie_symtab) + (putName hie_symtab) + (putFastString hie_dict) + put_ bh hiefile + + -- write the symtab pointer at the front of the file + symtab_p <- tellBin bh + putAt bh symtab_p_p symtab_p + seekBin bh symtab_p + + -- write the symbol table itself + symtab_next' <- readFastMutInt symtab_next + symtab_map' <- readIORef symtab_map + putSymbolTable bh symtab_next' symtab_map' + + -- write the dictionary pointer at the front of the file + dict_p <- tellBin bh + putAt bh dict_p_p dict_p + seekBin bh dict_p + + -- write the dictionary itself + dict_next <- readFastMutInt dict_next_ref + dict_map <- readIORef dict_map_ref + putDictionary bh dict_next dict_map + + -- and send the result to the file + createDirectoryIfMissing True (takeDirectory hie_file_path) + writeBinMem bh hie_file_path + return () + +data HieFileResult + = 
HieFileResult + { hie_file_result_version :: Integer + , hie_file_result_ghc_version :: ByteString + , hie_file_result :: HieFile + } + +type HieHeader = (Integer, ByteString) + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. Allows you to specify +-- which versions of hieFile to attempt to read. +-- `Left` case returns the failing header versions. +readHieFileWithVersion :: (HieHeader -> Bool) -> NameCacheUpdater -> FilePath -> IO (Either HieHeader HieFileResult) +readHieFileWithVersion readVersion ncu file = do + bh0 <- readBinMem file + + (hieVersion, ghcVersion) <- readHieFileHeader file bh0 + + if readVersion (hieVersion, ghcVersion) + then do + hieFile <- readHieFileContents bh0 ncu + return $ Right (HieFileResult hieVersion ghcVersion hieFile) + else return $ Left (hieVersion, ghcVersion) + + +-- | Read a `HieFile` from a `FilePath`. Can use +-- an existing `NameCache`. +readHieFile :: NameCacheUpdater -> FilePath -> IO HieFileResult +readHieFile ncu file = do + + bh0 <- readBinMem file + + (readHieVersion, ghcVersion) <- readHieFileHeader file bh0 + + -- Check if the versions match + when (readHieVersion /= hieVersion) $ + panic $ unwords ["readHieFile: hie file versions don't match for file:" + , file + , "Expected" + , show hieVersion + , "but got", show readHieVersion + ] + hieFile <- readHieFileContents bh0 ncu + return $ HieFileResult hieVersion ghcVersion hieFile + +readBinLine :: BinHandle -> IO ByteString +readBinLine bh = BS.pack . reverse <$> loop [] + where + loop acc = do + char <- get bh :: IO Word8 + if char == 10 -- ASCII newline '\n' + then return acc + else loop (char : acc) + +readHieFileHeader :: FilePath -> BinHandle -> IO HieHeader +readHieFileHeader file bh0 = do + -- Read the header + magic <- replicateM hieMagicLen (get bh0) + version <- BSC.unpack <$> readBinLine bh0 + case maybeRead version of + Nothing -> + panic $ unwords ["readHieFileHeader: hieVersion isn't an Integer:" + , show version + ] + Just readHieVersion -> do + ghcVersion <- readBinLine bh0 + + -- Check if the header is valid + when (magic /= hieMagic) $ + panic $ unwords ["readHieFileHeader: headers don't match for file:" + , file + , "Expected" + , show hieMagic + , "but got", show magic + ] + return (readHieVersion, ghcVersion) + +readHieFileContents :: BinHandle -> NameCacheUpdater -> IO HieFile +readHieFileContents bh0 ncu = do + + dict <- get_dictionary bh0 + + -- read the symbol table so we are capable of reading the actual data + bh1 <- do + let bh1 = setUserData bh0 $ newReadState (error "getSymtabName") + (getDictFastString dict) + symtab <- get_symbol_table bh1 + let bh1' = setUserData bh1 + $ newReadState (getSymTabName symtab) + (getDictFastString dict) + return bh1' + + -- load the actual data + hiefile <- get bh1 + return hiefile + where + get_dictionary bin_handle = do + dict_p <- get bin_handle + data_p <- tellBin bin_handle + seekBin bin_handle dict_p + dict <- getDictionary bin_handle + seekBin bin_handle data_p + return dict + + get_symbol_table bh1 = do + symtab_p <- get bh1 + data_p' <- tellBin bh1 + seekBin bh1 symtab_p + symtab <- getSymbolTable bh1 ncu + seekBin bh1 data_p' + return symtab + +putFastString :: HieDictionary -> BinHandle -> FastString -> IO () +putFastString HieDictionary { hie_dict_next = j_r, + hie_dict_map = out_r} bh f + = do + out <- readIORef out_r + let unique = getUnique f + case lookupUFM out unique of + Just (j, _) -> put_ bh (fromIntegral j :: Word32) + Nothing -> do + j <- readFastMutInt j_r + put_ bh 
(fromIntegral j :: Word32) + writeFastMutInt j_r (j + 1) + writeIORef out_r $! addToUFM out unique (j, f) + +putSymbolTable :: BinHandle -> Int -> UniqFM (Int,HieName) -> IO () +putSymbolTable bh next_off symtab = do + put_ bh next_off + let names = A.elems (A.array (0,next_off-1) (nonDetEltsUFM symtab)) + mapM_ (putHieName bh) names + +getSymbolTable :: BinHandle -> NameCacheUpdater -> IO SymbolTable +getSymbolTable bh ncu = do + sz <- get bh + od_names <- replicateM sz (getHieName bh) + updateNameCache ncu $ \nc -> + let arr = A.listArray (0,sz-1) names + (nc', names) = mapAccumR fromHieName nc od_names + in (nc',arr) + +getSymTabName :: SymbolTable -> BinHandle -> IO Name +getSymTabName st bh = do + i :: Word32 <- get bh + return $ st A.! (fromIntegral i) + +putName :: HieSymbolTable -> BinHandle -> Name -> IO () +putName (HieSymbolTable next ref) bh name = do + symmap <- readIORef ref + case lookupUFM symmap name of + Just (off, ExternalName mod occ (UnhelpfulSpan _)) + | isGoodSrcSpan (nameSrcSpan name) -> do + let hieName = ExternalName mod occ (nameSrcSpan name) + writeIORef ref $! addToUFM symmap name (off, hieName) + put_ bh (fromIntegral off :: Word32) + Just (off, LocalName _occ span) + | notLocal (toHieName name) || nameSrcSpan name /= span -> do + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + Just (off, _) -> put_ bh (fromIntegral off :: Word32) + Nothing -> do + off <- readFastMutInt next + writeFastMutInt next (off+1) + writeIORef ref $! addToUFM symmap name (off, toHieName name) + put_ bh (fromIntegral off :: Word32) + + where + notLocal :: HieName -> Bool + notLocal LocalName{} = False + notLocal _ = True + + +-- ** Converting to and from `HieName`'s + +toHieName :: Name -> HieName +toHieName name + | isKnownKeyName name = KnownKeyName (nameUnique name) + | isExternalName name = ExternalName (nameModule name) + (nameOccName name) + (nameSrcSpan name) + | otherwise = LocalName (nameOccName name) (nameSrcSpan name) + +fromHieName :: NameCache -> HieName -> (NameCache, Name) +fromHieName nc (ExternalName mod occ span) = + let cache = nsNames nc + in case lookupOrigNameCache cache mod occ of + Just name + | nameSrcSpan name == span -> (nc, name) + | otherwise -> + let name' = setNameLoc name span + new_cache = extendNameCache cache mod occ name' + in ( nc{ nsNames = new_cache }, name' ) + Nothing -> + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkExternalName uniq mod occ span + new_cache = extendNameCache cache mod occ name + in ( nc{ nsUniqs = us, nsNames = new_cache }, name ) +fromHieName nc (LocalName occ span) = + let (uniq, us) = takeUniqFromSupply (nsUniqs nc) + name = mkInternalName uniq occ span + in ( nc{ nsUniqs = us }, name ) +fromHieName nc (KnownKeyName u) = case lookupKnownKeyName u of + Nothing -> pprPanic "fromHieName:unknown known-key unique" + (ppr (unpkUnique u)) + Just n -> (nc, n) + +-- ** Reading and writing `HieName`'s + +putHieName :: BinHandle -> HieName -> IO () +putHieName bh (ExternalName mod occ span) = do + putByte bh 0 + put_ bh (mod, occ, span) +putHieName bh (LocalName occName span) = do + putByte bh 1 + put_ bh (occName, span) +putHieName bh (KnownKeyName uniq) = do + putByte bh 2 + put_ bh $ unpkUnique uniq + +getHieName :: BinHandle -> IO HieName +getHieName bh = do + t <- getByte bh + case t of + 0 -> do + (modu, occ, span) <- get bh + return $ ExternalName modu occ span + 1 -> do + (occ, span) <- get bh + return $ LocalName occ span + 2 -> do + (c,i) <- get bh + return 
$ KnownKeyName $ mkUnique c i + _ -> panic "HieBin.getHieName: invalid tag" diff --git a/hie-compat/src-reexport/Compat/HieDebug.hs b/hie-compat/src-reexport/Compat/HieDebug.hs new file mode 100644 index 00000000000..32da665b6d2 --- /dev/null +++ b/hie-compat/src-reexport/Compat/HieDebug.hs @@ -0,0 +1,3 @@ +module Compat.HieDebug + ( module HieDebug ) where +import HieDebug diff --git a/hie-compat/src-reexport/Compat/HieTypes.hs b/hie-compat/src-reexport/Compat/HieTypes.hs new file mode 100644 index 00000000000..7185fb10bdd --- /dev/null +++ b/hie-compat/src-reexport/Compat/HieTypes.hs @@ -0,0 +1,3 @@ +module Compat.HieTypes + ( module HieTypes ) where +import HieTypes diff --git a/hie-compat/src-reexport/Compat/HieUtils.hs b/hie-compat/src-reexport/Compat/HieUtils.hs new file mode 100644 index 00000000000..c4c401e2693 --- /dev/null +++ b/hie-compat/src-reexport/Compat/HieUtils.hs @@ -0,0 +1,3 @@ +module Compat.HieUtils + ( module HieUtils ) where +import HieUtils diff --git a/hie-stack.yaml b/hie-stack.yaml index 1673b48e54b..1c03904013f 100644 --- a/hie-stack.yaml +++ b/hie-stack.yaml @@ -1,6 +1,7 @@ # This is a sample hie.yaml file for opening haskell-language-server # in hie, using stack as the build system. To use is, copy it to a # file called 'hie.yaml' +# TODO regenerate this file using gen-hie cradle: multi: - path: "./test/testdata" diff --git a/nix/default.nix b/nix/default.nix index 9eef54b1524..507370a8dad 100644 --- a/nix/default.nix +++ b/nix/default.nix @@ -17,8 +17,8 @@ let haskellPackages.extend (pkgs.haskell.lib.packageSourceOverrides { haskell-language-server = gitignoreSource ../.; ghcide = gitignoreSource ../ghcide; - shake-bench = gitignoreSource ../ghcide/shake-bench; - hie-compat = gitignoreSource ../ghcide/hie-compat; + shake-bench = gitignoreSource ../shake-bench; + hie-compat = gitignoreSource ../hie-compat; hls-plugin-api = gitignoreSource ../hls-plugin-api; hls-class-plugin = gitignoreSource ../plugins/hls-class-plugin; hls-explicit-imports-plugin = gitignoreSource ../plugins/hls-explicit-imports-plugin; diff --git a/plugins/default/src/Ide/Plugin/Pragmas.hs b/plugins/default/src/Ide/Plugin/Pragmas.hs index d043a06aaea..b53be452940 100644 --- a/plugins/default/src/Ide/Plugin/Pragmas.hs +++ b/plugins/default/src/Ide/Plugin/Pragmas.hs @@ -17,7 +17,6 @@ import qualified Data.HashMap.Strict as H import qualified Data.Text as T import Development.IDE as D import qualified GHC.Generics as Generics -import Ide.Plugin import Ide.Types import Language.Haskell.LSP.Types import qualified Language.Haskell.LSP.Types as J @@ -32,19 +31,12 @@ import qualified Language.Haskell.LSP.VFS as VFS descriptor :: PluginId -> PluginDescriptor descriptor plId = (defaultPluginDescriptor plId) - { pluginCommands = commands - , pluginCodeActionProvider = Just codeActionProvider + { pluginCodeActionProvider = Just codeActionProvider , pluginCompletionProvider = Just completion } -- --------------------------------------------------------------------- -commands :: [PluginCommand] -commands = [ PluginCommand "addPragma" "add the given pragma" addPragmaCmd - ] - --- --------------------------------------------------------------------- - -- | Parameters for the addPragma PluginCommand. data AddPragmaParams = AddPragmaParams { file :: J.Uri -- ^ Uri of the file to add the pragma to @@ -56,9 +48,9 @@ data AddPragmaParams = AddPragmaParams -- Pragma is added to the first line of the Uri. -- It is assumed that the pragma name is a valid pragma, -- thus, not validated. 
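+-- For example, @mkPragmaEdit uri "TupleSections"@ (an illustrative pragma
+-- name) produces a 'WorkspaceEdit' whose single 'TextEdit' inserts the
+-- requested pragma at the first line ('Position' 0 0) of the given document.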
-addPragmaCmd :: CommandFunction AddPragmaParams -addPragmaCmd _lf _ide (AddPragmaParams uri pragmaName) = do - let +-- mkPragmaEdit :: CommandFunction AddPragmaParams +mkPragmaEdit :: Uri -> T.Text -> WorkspaceEdit +mkPragmaEdit uri pragmaName = res where pos = J.Position 0 0 textEdits = J.List [J.TextEdit (J.Range pos pos) @@ -67,13 +59,12 @@ addPragmaCmd _lf _ide (AddPragmaParams uri pragmaName) = do res = J.WorkspaceEdit (Just $ H.singleton uri textEdits) Nothing - return (Right Null, Just (WorkspaceApplyEdit, ApplyWorkspaceEditParams res)) -- --------------------------------------------------------------------- -- | Offer to add a missing Language Pragma to the top of a file. -- Pragmas are defined by a curated list of known pragmas, see 'possiblePragmas'. codeActionProvider :: CodeActionProvider -codeActionProvider _ state plId docId _ (J.CodeActionContext (J.List diags) _monly) = do +codeActionProvider _ state _plId docId _ (J.CodeActionContext (J.List diags) _monly) = do let mFile = docId ^. J.uri & uriToFilePath <&> toNormalizedFilePath' pm <- fmap join $ runAction "addPragma" state $ getParsedModule `traverse` mFile let dflags = ms_hspp_opts . pm_mod_summary <$> pm @@ -81,19 +72,16 @@ codeActionProvider _ state plId docId _ (J.CodeActionContext (J.List diags) _mon ghcDiags = filter (\d -> d ^. J.source == Just "typecheck") diags -- Get all potential Pragmas for all diagnostics. pragmas = concatMap (\d -> genPragma dflags (d ^. J.message)) ghcDiags - -- cmds <- mapM mkCommand ("FooPragma":pragmas) - cmds <- mapM mkCommand pragmas + cmds <- mapM mkCodeAction pragmas return $ Right $ List cmds where - mkCommand pragmaName = do + mkCodeAction pragmaName = do let - -- | Code Action for the given command. - codeAction :: J.Command -> J.CAResult - codeAction cmd = J.CACodeAction $ J.CodeAction title (Just J.CodeActionQuickFix) (Just (J.List [])) Nothing (Just cmd) + codeAction = J.CACodeAction $ J.CodeAction title (Just J.CodeActionQuickFix) (Just (J.List [])) (Just edit) Nothing title = "Add \"" <> pragmaName <> "\"" - cmdParams = [toJSON (AddPragmaParams (docId ^. J.uri) pragmaName)] - cmd <- mkLspCommand plId "addPragma" title (Just cmdParams) - return $ codeAction cmd + edit = mkPragmaEdit (docId ^. J.uri) pragmaName + return codeAction + genPragma mDynflags target | Just dynFlags <- mDynflags, -- GHC does not export 'OnOff', so we have to view it as string diff --git a/shake-bench/LICENSE b/shake-bench/LICENSE new file mode 100644 index 00000000000..b4f377fc105 --- /dev/null +++ b/shake-bench/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020-2021 Jose Iborra Lopez + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/shake-bench/shake-bench.cabal b/shake-bench/shake-bench.cabal new file mode 100644 index 00000000000..b966907ccef --- /dev/null +++ b/shake-bench/shake-bench.cabal @@ -0,0 +1,44 @@ +cabal-version: 2.2 +name: shake-bench +version: 0.1.0.0 +synopsis: Build rules for historical benchmarking +license: Apache-2.0 +license-file: LICENSE +author: Pepe Iborra +maintainer: pepeiborra@gmail.com +category: Development +build-type: Simple +description: + A library of Shake rules to build and run benchmarks for multiple revisions of a project. + An example of usage can be found in the ghcide benchmark suite. + +library + exposed-modules: Development.Benchmark.Rules + hs-source-dirs: src + build-depends: + aeson, + base == 4.*, + Chart, + Chart-diagrams, + diagrams, + diagrams-svg, + directory, + extra >= 1.7.2, + filepath, + shake, + text + default-language: Haskell2010 + default-extensions: + BangPatterns + DeriveFunctor + DeriveGeneric + FlexibleContexts + GeneralizedNewtypeDeriving + LambdaCase + NamedFieldPuns + RecordWildCards + ScopedTypeVariables + StandaloneDeriving + TupleSections + TypeApplications + ViewPatterns diff --git a/shake-bench/src/Development/Benchmark/Rules.hs b/shake-bench/src/Development/Benchmark/Rules.hs new file mode 100644 index 00000000000..6870aeb85c1 --- /dev/null +++ b/shake-bench/src/Development/Benchmark/Rules.hs @@ -0,0 +1,568 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE ApplicativeDo #-} +{-# LANGUAGE ConstraintKinds #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE TypeFamilies #-} + +{- | + This module provides a bunch of Shake rules to build multiple revisions of a + project and analyse their performance. + + It assumes a project bench suite composed of examples that runs a fixed set + of experiments on every example. + + Your code must implement all of the GetFoo oracles and the IsExample class, + instantiate the Shake rules, and probably 'want' a set of targets. + + The results of the benchmarks and the analysis are recorded in the file + system, using the following structure: + + <build-folder> + ├── binaries + │ └── <version> + │  ├── ghc.path - path to ghc used to build the executable + │  └── <executable> - binary for this version + │  └── commitid - Git commit id for this reference + ├─ <example> + │ ├── results.csv - aggregated results for all the versions + │ └── <version> + │   ├── <experiment>.benchmark-gcStats - RTS -s output + │   ├── <experiment>.csv - stats for the experiment + │   ├── <experiment>.svg - Graph of bytes over elapsed time + │   ├── <experiment>.diff.svg - idem, including the previous version + │   ├── <experiment>.log - bench stdout + │   └── results.csv - results of all the experiments for the example + ├── results.csv - aggregated results of all the experiments and versions + └── <experiment>.svg - graph of bytes over elapsed time, for all the included versions + + For diff graphs, the "previous version" is the preceding entry in the list of versions + in the config file. A possible improvement is to obtain this info via `git rev-list`.
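  As a rough orientation, a downstream Shakefile might wire these rules up as in the
  sketch below. The sketch is illustrative only: the example type, the executable name,
  the experiment names, the versions-file path and the benchmark CLI flags are
  placeholders assumed here, not anything this module prescribes.

  > {-# LANGUAGE DeriveAnyClass, DerivingStrategies, DuplicateRecordFields #-}
  > {-# LANGUAGE GeneralizedNewtypeDeriving, RecordWildCards, TypeApplications, TypeFamilies #-}
  > import Data.Aeson (decodeFileStrict)
  > import Data.List (find)
  > import Data.Maybe (fromMaybe)
  > import Development.Benchmark.Rules
  > import Development.Shake
  > import Development.Shake.Classes (Binary, Hashable, NFData)
  >
  > -- A minimal example type; any type with an IsExample instance will do
  > newtype MyExample = MyExample String
  >   deriving newtype (Binary, Eq, Hashable, NFData, Show)
  > instance IsExample MyExample where getExampleName (MyExample name) = name
  >
  > -- The user supplies the RuleResult instances required by RuleResultForExample
  > type instance RuleResult GetExample  = Maybe MyExample
  > type instance RuleResult GetExamples = [MyExample]
  >
  > main :: IO ()
  > main = shakeArgs shakeOptions $ do
  >   let build    = "bench-results"
  >       examples = [MyExample "my-example"]
  >   -- the GetFoo oracles feed the rules below
  >   _ <- addOracle $ \(GetExperiments ()) -> return [Unescaped "edit", Unescaped "hover"]
  >   _ <- addOracle $ \(GetExamples ())    -> return examples
  >   _ <- addOracle $ \(GetExample name)   -> return $ find ((== name) . getExampleName) examples
  >   _ <- addOracle $ \(GetBuildSystem ()) -> return Cabal
  >   _ <- addOracle $ \(GetVersions ())    -> liftIO $
  >          -- a JSON list of versions, parsed via the FromJSON instance of GitCommit
  >          fromMaybe (error "cannot parse the versions file") <$> decodeFileStrict "bench/versions.json"
  >   benchResource <- newResource "benchmark" 1
  >   buildRules build MkBuildRules
  >     { findGhc        = findGhcForBuildSystem
  >     , executableName = "my-exe"
  >     , buildProject   = \_bs opts out ->
  >         -- build the project in the cwd and drop the binary in 'out'
  >         command_ opts "cabal" ["install", "exe:my-exe", "--installdir=" <> out]
  >     }
  >   benchRules build benchResource MkBenchRules
  >     { executableName = "my-exe"
  >     , benchProject   = \_bs opts BenchProject{..} ->
  >         -- the flags understood by the benchmark executable are an assumption
  >         command_ opts exePath (exeExtraArgs ++ ["--csv", outcsv, escaped experiment])
  >     }
  >   csvRules @MyExample build
  >   svgRules build
  >   action $ allTargets @MyExample build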
+ -} +module Development.Benchmark.Rules + ( + buildRules, MkBuildRules(..), + benchRules, MkBenchRules(..), BenchProject(..), + csvRules, + svgRules, + allTargets, + GetExample(..), GetExamples(..), + IsExample(..), RuleResultForExample, + GetExperiments(..), + GetVersions(..), + GetCommitId(..), + GetBuildSystem(..), + BuildSystem(..), findGhcForBuildSystem, + Escaped(..), Unescaped(..), escapeExperiment, unescapeExperiment, + GitCommit + + ) where + +import Control.Applicative +import Control.Monad +import Data.Aeson (FromJSON (..), + ToJSON (..), + Value (..), (.!=), + (.:?)) +import Data.List (find, transpose) +import Data.List.Extra (lower) +import Data.Maybe (fromMaybe) +import Data.Text (Text) +import qualified Data.Text as T +import Development.Shake +import Development.Shake.Classes (Binary, Hashable, + NFData, Typeable) +import GHC.Exts (IsList (toList), + fromList) +import GHC.Generics (Generic) +import GHC.Stack (HasCallStack) +import qualified Graphics.Rendering.Chart.Backend.Diagrams as E +import Graphics.Rendering.Chart.Easy ((.=)) +import qualified Graphics.Rendering.Chart.Easy as E +import System.Directory (findExecutable, createDirectoryIfMissing) +import System.FilePath +import qualified Text.ParserCombinators.ReadP as P +import Text.Read (Read (..), get, + readMaybe, + readP_to_Prec) + +newtype GetExperiments = GetExperiments () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetVersions = GetVersions () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetParent = GetParent Text deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetCommitId = GetCommitId String deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetBuildSystem = GetBuildSystem () deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetExample = GetExample String deriving newtype (Binary, Eq, Hashable, NFData, Show) +newtype GetExamples = GetExamples () deriving newtype (Binary, Eq, Hashable, NFData, Show) + +type instance RuleResult GetExperiments = [Unescaped String] +type instance RuleResult GetVersions = [GitCommit] +type instance RuleResult GetParent = Text +type instance RuleResult GetCommitId = String +type instance RuleResult GetBuildSystem = BuildSystem + +type RuleResultForExample e = + ( RuleResult GetExample ~ Maybe e + , RuleResult GetExamples ~ [e] + , IsExample e) + +-- | Knowledge needed to run an example +class (Binary e, Eq e, Hashable e, NFData e, Show e, Typeable e) => IsExample e where + getExampleName :: e -> String + +-------------------------------------------------------------------------------- + +allTargets :: RuleResultForExample e => FilePath -> Action () +allTargets buildFolder = do + experiments <- askOracle $ GetExperiments () + examples <- askOracle $ GetExamples () + versions <- askOracle $ GetVersions () + need $ + [buildFolder </> getExampleName e </> "results.csv" | e <- examples ] ++ + [buildFolder </> "results.csv"] + ++ [ buildFolder </> getExampleName ex </> escaped (escapeExperiment e) <.> "svg" + | e <- experiments + , ex <- examples + ] + ++ [ buildFolder + </> getExampleName ex + </> T.unpack (humanName ver) + </> escaped (escapeExperiment e) <.> mode <.> "svg" + | e <- experiments, + ex <- examples, + ver <- versions, + mode <- ["", "diff"] + ] + +-------------------------------------------------------------------------------- +type OutputFolder = FilePath + +data MkBuildRules buildSystem = MkBuildRules + { -- | Return the path to the GHC executable to use for the project found in the cwd + findGhc :: buildSystem -> FilePath ->
IO FilePath + -- | Name of the binary produced by 'buildProject' + , executableName :: String + -- | Build the project found in the cwd and save the build artifacts in the output folder + , buildProject :: buildSystem + -> [CmdOption] + -> OutputFolder + -> Action () + } + +-- | Rules that drive a build system to build various revisions of a project +buildRules :: FilePattern -> MkBuildRules BuildSystem -> Rules () +-- TODO generalize BuildSystem +buildRules build MkBuildRules{..} = do + -- query git for the commitid for a version + build -/- "binaries/*/commitid" %> \out -> do + alwaysRerun + + let [_,_,ver,_] = splitDirectories out + mbEntry <- find ((== T.pack ver) . humanName) <$> askOracle (GetVersions ()) + let gitThing :: String + gitThing = maybe ver (T.unpack . gitName) mbEntry + Stdout commitid <- command [] "git" ["rev-list", "-n", "1", gitThing] + writeFileChanged out $ init commitid + + -- build rules for HEAD + priority 10 $ [ build -/- "binaries/HEAD/" <> executableName + , build -/- "binaries/HEAD/ghc.path" + ] + &%> \[out, ghcpath] -> do + liftIO $ createDirectoryIfMissing True $ dropFileName out + -- TODO more precise dependency tracking + need =<< getDirectoryFiles "." ["//*.hs", "*.cabal"] + buildSystem <- askOracle $ GetBuildSystem () + buildProject buildSystem [Cwd "."] (takeDirectory out) + ghcLoc <- liftIO $ findGhc buildSystem "." + writeFile' ghcpath ghcLoc + + -- build rules for non HEAD revisions + [build -/- "binaries/*/" <> executableName + ,build -/- "binaries/*/ghc.path" + ] &%> \[out, ghcPath] -> do + let [_, _binaries, _ver, _] = splitDirectories out + liftIO $ createDirectoryIfMissing True $ dropFileName out + commitid <- readFile' $ takeDirectory out </> "commitid" + cmd_ $ "git worktree add bench-temp " ++ commitid + buildSystem <- askOracle $ GetBuildSystem () + flip actionFinally (cmd_ ("git worktree remove bench-temp --force" :: String)) $ do + ghcLoc <- liftIO $ findGhc buildSystem "bench-temp" + buildProject buildSystem [Cwd "bench-temp"] (".." </> takeDirectory out) + writeFile' ghcPath ghcLoc + +-------------------------------------------------------------------------------- +data MkBenchRules buildSystem example = MkBenchRules + { benchProject :: buildSystem -> [CmdOption] -> BenchProject example -> Action () + -- | Name of the executable to benchmark.
Should match the one used in 'MkBuildRules' + , executableName :: String + } + +data BenchProject example = BenchProject + { outcsv :: FilePath -- ^ where to save the CSV output + , exePath :: FilePath -- ^ where to find the executable for benchmarking + , exeExtraArgs :: [String] -- ^ extra args for the executable + , example :: example -- ^ example to benchmark + , experiment :: Escaped String -- ^ experiment to run + } + +-- TODO generalize BuildSystem +benchRules :: RuleResultForExample example => FilePattern -> Resource -> MkBenchRules BuildSystem example -> Rules () +benchRules build benchResource MkBenchRules{..} = do + -- run an experiment + priority 0 $ + [ build -/- "*/*/*.csv", + build -/- "*/*/*.benchmark-gcStats", + build -/- "*/*/*.log" + ] + &%> \[outcsv, outGc, outLog] -> do + let [_, exampleName, ver, exp] = splitDirectories outcsv + example <- fromMaybe (error $ "Unknown example " <> exampleName) + <$> askOracle (GetExample exampleName) + buildSystem <- askOracle $ GetBuildSystem () + liftIO $ createDirectoryIfMissing True $ dropFileName outcsv + let exePath = build </> "binaries" </> ver </> executableName + exeExtraArgs = ["+RTS", "-I0.5", "-S" <> takeFileName outGc, "-RTS"] + ghcPath = build </> "binaries" </> ver </> "ghc.path" + experiment = Escaped $ dropExtension exp + need [exePath, ghcPath] + ghcPath <- readFile' ghcPath + withResource benchResource 1 $ do + benchProject buildSystem + [ EchoStdout False, + FileStdout outLog, + RemEnv "NIX_GHC_LIBDIR", + RemEnv "GHC_PACKAGE_PATH", + AddPath [takeDirectory ghcPath, "."] [] + ] + BenchProject{..} + cmd_ Shell $ "mv *.benchmark-gcStats " <> dropFileName outcsv + + +-------------------------------------------------------------------------------- + +-- | Rules to aggregate the CSV output of individual experiments +csvRules :: forall example . RuleResultForExample example => FilePattern -> Rules () +csvRules build = do + -- build results for every experiment*example + build -/- "*/*/results.csv" %> \out -> do + experiments <- askOracle $ GetExperiments () + + let allResultFiles = [takeDirectory out </> escaped (escapeExperiment e) <.> "csv" | e <- experiments] + allResults <- traverse readFileLines allResultFiles + + let header = head $ head allResults + results = map tail allResults + writeFileChanged out $ unlines $ header : concat results + + -- aggregate all experiments for an example + build -/- "*/results.csv" %> \out -> do + versions <- map (T.unpack .
humanName) <$> askOracle (GetVersions ()) + let example = takeFileName $ takeDirectory out + allResultFiles = + [build </> example </> v </> "results.csv" | v <- versions] + + allResults <- traverse readFileLines allResultFiles + + let header = head $ head allResults + results = map tail allResults + header' = "version, " <> header + results' = zipWith (\v -> map (\l -> v <> ", " <> l)) versions results + + writeFileChanged out $ unlines $ header' : interleave results' + + -- aggregate all examples + build -/- "results.csv" %> \out -> do + examples <- map (getExampleName @example) <$> askOracle (GetExamples ()) + let allResultFiles = [build </> e </> "results.csv" | e <- examples] + + allResults <- traverse readFileLines allResultFiles + + let header = head $ head allResults + results = map tail allResults + header' = "example, " <> header + results' = zipWith (\e -> map (\l -> e <> ", " <> l)) examples results + + writeFileChanged out $ unlines $ header' : concat results' + +-------------------------------------------------------------------------------- + +-- | Rules to produce charts for the GC stats +svgRules :: FilePattern -> Rules () +svgRules build = do + + _ <- addOracle $ \(GetParent name) -> findPrev name <$> askOracle (GetVersions ()) + + -- chart GC stats for an experiment on a given revision + priority 1 $ + build -/- "*/*/*.svg" %> \out -> do + let [b, example, ver, exp] = splitDirectories out + runLog <- loadRunLog b example (Escaped $ dropExtension exp) ver + let diagram = Diagram Live [runLog] title + title = ver <> " live bytes over time" + plotDiagram True diagram out + + -- chart of GC stats for an experiment on this and the previous revision + priority 2 $ + build -/- "*/*/*.diff.svg" %> \out -> do + let [b, example, ver, exp_] = splitDirectories out + exp = Escaped $ dropExtension $ dropExtension exp_ + prev <- askOracle $ GetParent $ T.pack ver + + runLog <- loadRunLog b example exp ver + runLogPrev <- loadRunLog b example exp $ T.unpack prev + + let diagram = Diagram Live [runLog, runLogPrev] title + title = show (unescapeExperiment exp) <> " - live bytes over time compared" + plotDiagram True diagram out + + -- aggregated chart of GC stats for all the revisions + build -/- "*/*.svg" %> \out -> do + let exp = Escaped $ dropExtension $ takeFileName out + example = takeFileName $ takeDirectory out + versions <- askOracle $ GetVersions () + + runLogs <- forM (filter include versions) $ \v -> do + loadRunLog build example exp $ T.unpack $ humanName v + + let diagram = Diagram Live runLogs title + title = show (unescapeExperiment exp) <> " - live bytes over time" + plotDiagram False diagram out + + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- + +-- | Default build system that handles Cabal and Stack +data BuildSystem = Cabal | Stack + deriving (Eq, Read, Show, Generic) + deriving (Binary, Hashable, NFData) + +findGhcForBuildSystem :: BuildSystem -> FilePath -> IO FilePath +findGhcForBuildSystem Cabal _cwd = + liftIO $ fromMaybe (error "ghc is not in the PATH") <$> findExecutable "ghc" +findGhcForBuildSystem Stack cwd = do + Stdout ghcLoc <- cmd [Cwd cwd] ("stack exec which ghc" :: String) + return ghcLoc + +instance FromJSON BuildSystem where + parseJSON x = fromString .
lower <$> parseJSON x + where + fromString "stack" = Stack + fromString "cabal" = Cabal + fromString other = error $ "Unknown build system: " <> other + +instance ToJSON BuildSystem where + toJSON = toJSON . show + +-------------------------------------------------------------------------------- + +data GitCommit = GitCommit + { -- | A git hash, tag or branch name (e.g. v0.1.0) + gitName :: Text, + -- | A human understandable name (e.g. fix-collisions-leak) + name :: Maybe Text, + -- | The human understandable name of the parent, if specified explicitly + parent :: Maybe Text, + -- | Whether to include this version in the top chart + include :: Bool + } + deriving (Binary, Eq, Hashable, Generic, NFData, Show) + +instance FromJSON GitCommit where + parseJSON (String s) = pure $ GitCommit s Nothing Nothing True + parseJSON (Object (toList -> [(name, String gitName)])) = + pure $ GitCommit gitName (Just name) Nothing True + parseJSON (Object (toList -> [(name, Object props)])) = + GitCommit + <$> props .:? "git" .!= name + <*> pure (Just name) + <*> props .:? "parent" + <*> props .:? "include" .!= True + parseJSON _ = empty + +instance ToJSON GitCommit where + toJSON GitCommit {..} = + case name of + Nothing -> String gitName + Just n -> Object $ fromList [(n, String gitName)] + +humanName :: GitCommit -> Text +humanName GitCommit {..} = fromMaybe gitName name + +findPrev :: Text -> [GitCommit] -> Text +findPrev name (x : y : xx) + | humanName y == name = humanName x + | otherwise = findPrev name (y : xx) +findPrev name _ = name + +-------------------------------------------------------------------------------- + +-- | A line in the output of -S +data Frame = Frame + { allocated, copied, live :: !Int, + user, elapsed, totUser, totElapsed :: !Double, + generation :: !Int + } + deriving (Show) + +instance Read Frame where + readPrec = do + spaces + allocated <- readPrec @Int <* spaces + copied <- readPrec @Int <* spaces + live <- readPrec @Int <* spaces + user <- readPrec @Double <* spaces + elapsed <- readPrec @Double <* spaces + totUser <- readPrec @Double <* spaces + totElapsed <- readPrec @Double <* spaces + _ <- readPrec @Int <* spaces + _ <- readPrec @Int <* spaces + "(Gen:  " <- replicateM 7 get + generation <- readPrec @Int + ')' <- get + return Frame {..} + where + spaces = readP_to_Prec $ const P.skipSpaces + +-- | A file path containing the output of -S for a given run +data RunLog = RunLog + { runVersion :: !String, + _runExample :: !String, + _runExperiment :: !String, + runFrames :: ![Frame], + runSuccess :: !Bool + } + +loadRunLog :: HasCallStack => FilePath -> String -> Escaped FilePath -> FilePath -> Action RunLog +loadRunLog buildF example exp ver = do + let log_fp = buildF </> example </> ver </> escaped exp <.> "benchmark-gcStats" + csv_fp = replaceExtension log_fp "csv" + log <- readFileLines log_fp + csv <- readFileLines csv_fp + let frames = + [ f + | l <- log, + Just f <- [readMaybe l], + -- filter out gen 0 events as there are too many + generation f == 1 + ] + -- TODO this assumes a certain structure in the CSV file + success = case map (T.split (== ',') .
T.pack) csv of + [_header, _name:s:_] | Just s <- readMaybe (T.unpack s) -> s + _ -> error $ "Cannot parse: " <> csv_fp + return $ RunLog ver example (dropExtension $ escaped exp) frames success + +-------------------------------------------------------------------------------- + +data TraceMetric = Allocated | Copied | Live | User | Elapsed + deriving (Generic, Enum, Bounded, Read) + +instance Show TraceMetric where + show Allocated = "Allocated bytes" + show Copied = "Copied bytes" + show Live = "Live bytes" + show User = "User time" + show Elapsed = "Elapsed time" + +frameMetric :: TraceMetric -> Frame -> Double +frameMetric Allocated = fromIntegral . allocated +frameMetric Copied = fromIntegral . copied +frameMetric Live = fromIntegral . live +frameMetric Elapsed = elapsed +frameMetric User = user + +data Diagram = Diagram + { traceMetric :: TraceMetric, + runLogs :: [RunLog], + title :: String + } + deriving (Generic) + +plotDiagram :: Bool -> Diagram -> FilePath -> Action () +plotDiagram includeFailed t@Diagram {traceMetric, runLogs} out = do + let extract = frameMetric traceMetric + liftIO $ E.toFile E.def out $ do + E.layout_title .= title t + E.setColors myColors + forM_ runLogs $ \rl -> + when (includeFailed || runSuccess rl) $ E.plot $ do + lplot <- E.line + (runVersion rl ++ if runSuccess rl then "" else " (FAILED)") + [ [ (totElapsed f, extract f) + | f <- runFrames rl + ] + ] + return (lplot E.& E.plot_lines_style . E.line_width E.*~ 2) + +-------------------------------------------------------------------------------- + +newtype Escaped a = Escaped {escaped :: a} + +newtype Unescaped a = Unescaped {unescaped :: a} + deriving newtype (Show, FromJSON, ToJSON, Eq, NFData, Binary, Hashable) + +escapeExperiment :: Unescaped String -> Escaped String +escapeExperiment = Escaped . map f . unescaped + where + f ' ' = '_' + f other = other + +unescapeExperiment :: Escaped String -> Unescaped String +unescapeExperiment = Unescaped . map f . escaped + where + f '_' = ' ' + f other = other + +-------------------------------------------------------------------------------- + +(-/-) :: FilePattern -> FilePattern -> FilePattern +a -/- b = a <> "/" <> b + +interleave :: [[a]] -> [a] +interleave = concat . transpose + +-------------------------------------------------------------------------------- + +myColors :: [E.AlphaColour Double] +myColors = map E.opaque + [ E.blue + , E.green + , E.red + , E.orange + , E.yellow + , E.violet + , E.black + , E.gold + , E.brown + , E.hotpink + , E.aliceblue + , E.aqua + , E.beige + , E.bisque + , E.blueviolet + , E.burlywood + , E.cadetblue + , E.chartreuse + , E.coral + , E.crimson + , E.darkblue + , E.darkgray + , E.darkgreen + , E.darkkhaki + , E.darkmagenta + , E.deeppink + , E.dodgerblue + , E.firebrick + , E.forestgreen + , E.fuchsia + , E.greenyellow + , E.lightsalmon + , E.seagreen + , E.olive + , E.sandybrown + , E.sienna + , E.peru + ] diff --git a/stack-8.10.1.yaml b/stack-8.10.1.yaml index 22391e54616..978dfd883ad 100644 --- a/stack-8.10.1.yaml +++ b/stack-8.10.1.yaml @@ -2,8 +2,9 @@ resolver: nightly-2020-08-16 # Last 8.10.1 packages: - . 
-- ./ghcide/hie-compat +- ./hie-compat - ./ghcide/ +# - ./shake-bench - ./hls-plugin-api - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin @@ -40,6 +41,14 @@ extra-deps: - semigroups-0.18.5 - temporary-1.2.1.1 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + flags: haskell-language-server: pedantic: true diff --git a/stack-8.10.2.yaml b/stack-8.10.2.yaml index 9b7a16c630d..7e0b7786940 100644 --- a/stack-8.10.2.yaml +++ b/stack-8.10.2.yaml @@ -2,9 +2,10 @@ resolver: nightly-2020-12-09 packages: - . -- ./ghcide/hie-compat +- ./hie-compat - ./ghcide/ - ./hls-plugin-api +# - ./shake-bench - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin - ./plugins/hls-hlint-plugin @@ -32,6 +33,14 @@ extra-deps: - semigroups-0.18.5 - temporary-1.2.1.1 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + flags: haskell-language-server: pedantic: true diff --git a/stack-8.6.4.yaml b/stack-8.6.4.yaml index 0ef3a38e8f4..f70faab22a2 100644 --- a/stack-8.6.4.yaml +++ b/stack-8.6.4.yaml @@ -3,8 +3,9 @@ compiler: ghc-8.6.4 packages: - . - - ./ghcide/hie-compat + - ./hie-compat - ./ghcide/ +# - ./shake-bench - ./hls-plugin-api - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin @@ -86,6 +87,14 @@ flags: retrie: BuildExecutable: false +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + # allow-newer: true nix: diff --git a/stack-8.6.5.yaml b/stack-8.6.5.yaml index 785a71aab36..c16d891e461 100644 --- a/stack-8.6.5.yaml +++ b/stack-8.6.5.yaml @@ -2,9 +2,10 @@ resolver: lts-14.27 # Last 8.6.5 packages: - . - - ./ghcide/hie-compat + - ./hie-compat - ./ghcide/ - ./hls-plugin-api +# - ./shake-bench - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin - ./plugins/hls-hlint-plugin @@ -78,6 +79,13 @@ extra-deps: - with-utf8-1.0.2.1@sha256:95c02fffa643ddbeb092359802a512007c3e644cd509809f4716ad54592c437b,3057 - th-env-0.1.0.2@sha256:d8f1f37f42a8f1a22404d7d0579528af18f5dac7232cca6bdbd5117c115a0ad5,1370 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci flags: haskell-language-server: diff --git a/stack-8.8.2.yaml b/stack-8.8.2.yaml index 42e1b1bc513..feb54527d69 100644 --- a/stack-8.8.2.yaml +++ b/stack-8.8.2.yaml @@ -2,9 +2,10 @@ resolver: lts-15.3 # Last 8.8.2 packages: - . - - ./ghcide/hie-compat + - ./hie-compat - ./ghcide/ - ./hls-plugin-api + - ./shake-bench - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin - ./plugins/hls-hlint-plugin @@ -63,6 +64,14 @@ extra-deps: - with-utf8-1.0.2.1@sha256:95c02fffa643ddbeb092359802a512007c3e644cd509809f4716ad54592c437b,3057 - th-env-0.1.0.2@sha256:d8f1f37f42a8f1a22404d7d0579528af18f5dac7232cca6bdbd5117c115a0ad5,1370 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + flags: haskell-language-server: pedantic: true diff --git a/stack-8.8.3.yaml b/stack-8.8.3.yaml index eaf22cdd576..b187c4296d8 100644 --- a/stack-8.8.3.yaml +++ b/stack-8.8.3.yaml @@ -2,8 +2,9 @@ resolver: lts-16.11 # Last 8.8.3 packages: - . 
-- ./ghcide/hie-compat +- ./hie-compat - ./ghcide/ +- ./shake-bench - ./hls-plugin-api - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin @@ -53,6 +54,14 @@ extra-deps: - stylish-haskell-0.12.2.0 - temporary-1.2.1.1 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + flags: haskell-language-server: pedantic: true diff --git a/stack-8.8.4.yaml b/stack-8.8.4.yaml index 811f443b701..ea9f3ce40a7 100644 --- a/stack-8.8.4.yaml +++ b/stack-8.8.4.yaml @@ -2,8 +2,9 @@ resolver: lts-16.25 packages: - . -- ./ghcide/hie-compat +- ./hie-compat - ./ghcide/ +- ./shake-bench - ./hls-plugin-api - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin @@ -49,6 +50,14 @@ extra-deps: - stylish-haskell-0.12.2.0 - temporary-1.2.1.1 +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + flags: haskell-language-server: pedantic: true diff --git a/stack.yaml b/stack.yaml index fcaf10a1abc..11822b61f82 100644 --- a/stack.yaml +++ b/stack.yaml @@ -2,8 +2,9 @@ resolver: lts-14.27 # Last 8.6.5 packages: - . -- ./ghcide/hie-compat -- ./ghcide/ +- ./hie-compat +- ./ghcide +- ./shake-bench - ./hls-plugin-api - ./plugins/hls-class-plugin - ./plugins/hls-explicit-imports-plugin @@ -82,6 +83,14 @@ flags: retrie: BuildExecutable: false +configure-options: + ghcide: + - --disable-library-for-ghci + haskell-language-server: + - --disable-library-for-ghci + heapsize: + - --disable-library-for-ghci + # allow-newer: true nix: diff --git a/test/functional/FunctionalCodeAction.hs b/test/functional/FunctionalCodeAction.hs index 23a356d1ec1..db67adb9cc8 100644 --- a/test/functional/FunctionalCodeAction.hs +++ b/test/functional/FunctionalCodeAction.hs @@ -19,6 +19,8 @@ import Language.Haskell.LSP.Types import qualified Language.Haskell.LSP.Types.Lens as L import qualified Language.Haskell.LSP.Types.Capabilities as C import Test.Hls.Util +import Test.Hspec.Expectations + import Test.Tasty import Test.Tasty.ExpectedFailure (ignoreTestBecause, expectFailBecause) import Test.Tasty.HUnit @@ -293,17 +295,19 @@ redundantImportTests = testGroup "redundant import code actions" [ runSession hlsCommand fullCaps "test/testdata/redundantImportTest/" $ do doc <- openDoc "src/CodeActionRedundant.hs" "haskell" - diags <- waitForDiagnosticsFrom doc + diags <- waitForDiagnosticsFromSource doc "typecheck" liftIO $ expectDiagnostic diags ["The import of", "Data.List", "is redundant"] mActions <- getAllCodeActions doc - let allActions@[removeAction, removeAllAction, makeAllExplicitAction] = map fromAction mActions + let allActions = map fromAction mActions + actionTitles = map (view L.title) allActions + + liftIO $ actionTitles `shouldContain` ["Remove import", "Remove all redundant imports"] + + let Just removeAction = find (\x -> x ^. L.title == "Remove import") allActions liftIO $ do - removeAction ^. L.title @?= "Remove import" - removeAllAction ^. L.title @?= "Remove all redundant imports" - makeAllExplicitAction ^. L.title @?= "Make all imports explicit" forM_ allActions $ \a -> a ^. L.kind @?= Just CodeActionQuickFix forM_ allActions $ \a -> a ^. L.command @?= Nothing forM_ allActions $ \a -> isJust (a ^. L.edit) @? 
"Has edit" @@ -318,7 +322,7 @@ redundantImportTests = testGroup "redundant import code actions" [ , testCase "doesn't touch other imports" $ runSession hlsCommand noLiteralCaps "test/testdata/redundantImportTest/" $ do doc <- openDoc "src/MultipleImports.hs" "haskell" - _ <- waitForDiagnosticsFrom doc + _ <- waitForDiagnosticsFromSource doc "typecheck" CACommand cmd : _ <- getAllCodeActions doc executeCommand cmd contents <- documentContents doc diff --git a/test/utils/Test/Hls/Util.hs b/test/utils/Test/Hls/Util.hs index 3d69fa41575..9fcd5331e95 100644 --- a/test/utils/Test/Hls/Util.hs +++ b/test/utils/Test/Hls/Util.hs @@ -160,7 +160,7 @@ logFilePath = "hls-" ++ show ghcVersion ++ ".log" hlsCommand :: String hlsCommand = unsafePerformIO $ do testExe <- fromMaybe "haskell-language-server" <$> lookupEnv "HLS_TEST_EXE" - pure $ testExe ++ " --lsp -d -l test-logs/" ++ logFilePath + pure $ testExe ++ " --lsp -d -j2 -l test-logs/" ++ logFilePath hlsCommandVomit :: String hlsCommandVomit = hlsCommand ++ " --vomit"