author     cocreature <>                   2019-09-11 12:08:00 (GMT)
committer  hdiff <hdiff@hdiff.luite.com>   2019-09-11 12:08:00 (GMT)
commit     535390c7c6fa496f5de683cf8b80474e0b847403
tree       3592ab93c16a1e0d08fe7037400e2e54212754fd
version    0.0.1
-rw-r--r--  LICENSE  201
-rw-r--r--  cbits/getmodtime.c  21
-rw-r--r--  exe/Arguments.hs  27
-rw-r--r--  exe/Main.hs  142
-rw-r--r--  ghcide.cabal  169
-rw-r--r--  src/Development/IDE/Core/Compile.hs  427
-rw-r--r--  src/Development/IDE/Core/Debouncer.hs  45
-rw-r--r--  src/Development/IDE/Core/FileStore.hs  211
-rw-r--r--  src/Development/IDE/Core/OfInterest.hs  80
-rw-r--r--  src/Development/IDE/Core/PositionMapping.hs  84
-rw-r--r--  src/Development/IDE/Core/RuleTypes.hs  135
-rw-r--r--  src/Development/IDE/Core/Rules.hs  377
-rw-r--r--  src/Development/IDE/Core/Service.hs  95
-rw-r--r--  src/Development/IDE/Core/Shake.hs  674
-rw-r--r--  src/Development/IDE/GHC/CPP.hs  197
-rw-r--r--  src/Development/IDE/GHC/Compat.hs  78
-rw-r--r--  src/Development/IDE/GHC/Error.hs  152
-rw-r--r--  src/Development/IDE/GHC/Orphans.hs  59
-rw-r--r--  src/Development/IDE/GHC/Util.hs  133
-rw-r--r--  src/Development/IDE/GHC/Warnings.hs  42
-rw-r--r--  src/Development/IDE/Import/DependencyInformation.hs  289
-rw-r--r--  src/Development/IDE/Import/FindImports.hs  126
-rw-r--r--  src/Development/IDE/LSP/CodeAction.hs  97
-rw-r--r--  src/Development/IDE/LSP/Definition.hs  43
-rw-r--r--  src/Development/IDE/LSP/Hover.hs  47
-rw-r--r--  src/Development/IDE/LSP/LanguageServer.hs  187
-rw-r--r--  src/Development/IDE/LSP/Notifications.hs  55
-rw-r--r--  src/Development/IDE/LSP/Protocol.hs  23
-rw-r--r--  src/Development/IDE/LSP/Server.hs  40
-rw-r--r--  src/Development/IDE/Spans/AtPoint.hs  144
-rw-r--r--  src/Development/IDE/Spans/Calculate.hs  187
-rw-r--r--  src/Development/IDE/Spans/Documentation.hs  92
-rw-r--r--  src/Development/IDE/Spans/Type.hs  61
-rw-r--r--  src/Development/IDE/Types/Diagnostics.hs  112
-rw-r--r--  src/Development/IDE/Types/Location.hs  100
-rw-r--r--  src/Development/IDE/Types/Logger.hs  50
-rw-r--r--  src/Development/IDE/Types/Options.hs  82
-rw-r--r--  test/cabal/Development/IDE/Test/Runfiles.hs  12
-rw-r--r--  test/exe/Main.hs  214
-rw-r--r--  test/src/Development/IDE/Test.hs  105
40 files changed, 5415 insertions, 0 deletions
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d1f5c90
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2019 Digital Asset (Switzerland) GmbH and/or its affiliates
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/cbits/getmodtime.c b/cbits/getmodtime.c
new file mode 100644
index 0000000..0973b52
--- /dev/null
+++ b/cbits/getmodtime.c
@@ -0,0 +1,21 @@
+// Copyright (c) 2019 The DAML Authors. All rights reserved.
+// SPDX-License-Identifier: Apache-2.0
+
+#include <sys/stat.h>
+#include <time.h>
+int getmodtime(const char* pathname, time_t* sec, long* nsec) {
+ struct stat s;
+ int r = stat(pathname, &s);
+ if (r != 0) {
+ return r;
+ }
+#ifdef __APPLE__
+ *sec = s.st_mtimespec.tv_sec;
+ *nsec = s.st_mtimespec.tv_nsec;
+#else
+ *sec = s.st_mtim.tv_sec;
+ *nsec = s.st_mtim.tv_nsec;
+#endif
+ return 0;
+}
+
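
Editor's note: a minimal standalone sketch (not part of this commit) of how the C helper above is bound and consumed from Haskell. The actual binding used by ghcide appears verbatim in Development.IDE.Core.FileStore further down in this diff; the wrapper getModTime here is only illustrative.

import Foreign.C.String (CString, withCString)
import Foreign.C.Types (CLong, CTime)
import Foreign.Marshal.Alloc (alloca)
import Foreign.Ptr (Ptr)
import Foreign.Storable (peek)

foreign import ccall "getmodtime" c_getModTime :: CString -> Ptr CTime -> Ptr CLong -> IO Int

-- | Return (seconds, nanoseconds) of the file's mtime, or Nothing if stat failed.
getModTime :: FilePath -> IO (Maybe (CTime, CLong))
getModTime f =
    withCString f $ \cf ->
    alloca $ \secPtr ->
    alloca $ \nsecPtr -> do
        r <- c_getModTime cf secPtr nsecPtr
        if r /= 0
            then pure Nothing                                   -- stat returned an error
            else Just <$> ((,) <$> peek secPtr <*> peek nsecPtr)
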
diff --git a/exe/Arguments.hs b/exe/Arguments.hs
new file mode 100644
index 0000000..88fe14c
--- /dev/null
+++ b/exe/Arguments.hs
@@ -0,0 +1,27 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Arguments(Arguments(..), getArguments) where
+
+import Options.Applicative
+
+
+data Arguments = Arguments
+ {argLSP :: Bool
+ ,argsCwd :: Maybe FilePath
+ ,argFiles :: [FilePath]
+ }
+
+getArguments :: IO Arguments
+getArguments = execParser opts
+ where
+ opts = info (arguments <**> helper)
+ ( fullDesc
+ <> progDesc "Used as a test bed to check your IDE will work"
+ <> header "ghcide - the core of a Haskell IDE")
+
+arguments :: Parser Arguments
+arguments = Arguments
+ <$> switch (long "lsp" <> help "Start talking to an LSP server")
+ <*> optional (strOption $ long "cwd" <> metavar "DIR" <> help "Change to this directory")
+ <*> many (argument str (metavar "FILES/DIRS..."))
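
Editor's note: a minimal sketch (not part of this commit) of exercising the same optparse-applicative parser purely, e.g. from a test. Since the module above only exports Arguments and getArguments, the parser is restated here under the same field names.

import Options.Applicative

data Arguments = Arguments
    { argLSP   :: Bool
    , argsCwd  :: Maybe FilePath
    , argFiles :: [FilePath]
    } deriving Show

arguments :: Parser Arguments
arguments = Arguments
    <$> switch (long "lsp" <> help "Start talking to an LSP server")
    <*> optional (strOption $ long "cwd" <> metavar "DIR")
    <*> many (argument str (metavar "FILES/DIRS..."))

-- e.g. parseArgs ["--lsp", "--cwd", "/tmp", "A.hs"]
--   == Just (Arguments True (Just "/tmp") ["A.hs"])
parseArgs :: [String] -> Maybe Arguments
parseArgs = getParseResult . execParserPure defaultPrefs (info (arguments <**> helper) fullDesc)
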
diff --git a/exe/Main.hs b/exe/Main.hs
new file mode 100644
index 0000000..756ca7f
--- /dev/null
+++ b/exe/Main.hs
@@ -0,0 +1,142 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+{-# OPTIONS_GHC -Wno-dodgy-imports #-} -- GHC no longer exports def in GHC 8.6 and above
+
+module Main(main) where
+
+import Arguments
+import Data.Maybe
+import Data.List.Extra
+import System.FilePath
+import Control.Concurrent.Extra
+import Control.Monad.Extra
+import Data.Default
+import System.Time.Extra
+import Development.IDE.Core.FileStore
+import Development.IDE.Core.OfInterest
+import Development.IDE.Core.Service
+import Development.IDE.Core.Rules
+import Development.IDE.Core.Shake
+import Development.IDE.Core.RuleTypes
+import Development.IDE.LSP.Protocol
+import Development.IDE.Types.Location
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Options
+import Development.IDE.Types.Logger
+import Development.IDE.GHC.Util
+import qualified Data.Text as T
+import qualified Data.Text.IO as T
+import Language.Haskell.LSP.Messages
+import Linker
+import System.Info
+import Data.Version
+import Development.IDE.LSP.LanguageServer
+import System.Directory.Extra as IO
+import System.Environment
+import System.IO
+import Development.Shake hiding (Env)
+import qualified Data.Set as Set
+
+import GHC hiding (def)
+import qualified GHC.Paths
+
+import HIE.Bios
+
+-- Set the GHC libdir to the nix libdir if it's present.
+getLibdir :: IO FilePath
+getLibdir = fromMaybe GHC.Paths.libdir <$> lookupEnv "NIX_GHC_LIBDIR"
+
+main :: IO ()
+main = do
+ -- WARNING: If you write to stdout before runLanguageServer
+ -- then the language server will not work
+ hPutStrLn stderr $ "Starting ghcide (GHC v" ++ showVersion compilerVersion ++ ")"
+ Arguments{..} <- getArguments
+
+ -- lock to avoid overlapping output on stdout
+ lock <- newLock
+ let logger = Logger $ \pri msg -> withLock lock $
+ T.putStrLn $ T.pack ("[" ++ upper (show pri) ++ "] ") <> msg
+
+ whenJust argsCwd setCurrentDirectory
+
+ dir <- getCurrentDirectory
+ hPutStrLn stderr dir
+
+ if argLSP then do
+ t <- offsetTime
+ hPutStrLn stderr "Starting LSP server..."
+ runLanguageServer def def $ \event vfs caps -> do
+ t <- t
+ hPutStrLn stderr $ "Started LSP server in " ++ showDuration t
+ let options = (defaultIdeOptions $ loadEnvironment dir)
+ { optReportProgress = clientSupportsProgress caps }
+ initialise (mainRule >> action kick) event logger options vfs
+ else do
+ -- Note that this whole section needs to change once we have genuine
+ -- multi environment support. Needs rewriting in terms of loadEnvironment.
+ putStrLn "[1/6] Finding hie-bios cradle"
+ cradle <- findCradle (dir <> "/")
+ print cradle
+
+ putStrLn "\n[2/6] Converting Cradle to GHC session"
+ env <- newSession' cradle
+
+ putStrLn "\n[3/6] Initialising IDE session"
+ vfs <- makeVFSHandle
+ ide <- initialise mainRule (showEvent lock) logger (defaultIdeOptions $ return $ const $ return env) vfs
+
+ putStrLn "\n[4/6] Finding interesting files"
+ files <- nubOrd <$> expandFiles (argFiles ++ ["." | null argFiles])
+ putStrLn $ "Found " ++ show (length files) ++ " files"
+
+ putStrLn "\n[5/6] Setting interesting files"
+ setFilesOfInterest ide $ Set.fromList $ map toNormalizedFilePath files
+
+ putStrLn "\n[6/6] Loading interesting files"
+ results <- runActionSync ide $ uses TypeCheck $ map toNormalizedFilePath files
+ let (worked, failed) = partition fst $ zip (map isJust results) files
+ putStrLn $ "Files that worked: " ++ show (length worked)
+ putStrLn $ "Files that failed: " ++ show (length failed)
+ putStr $ unlines $ map ((++) " * " . snd) failed
+
+ putStrLn "Done"
+
+
+expandFiles :: [FilePath] -> IO [FilePath]
+expandFiles = concatMapM $ \x -> do
+ b <- IO.doesFileExist x
+ if b then return [x] else do
+ let recurse "." = True
+ recurse x | "." `isPrefixOf` takeFileName x = False -- skip .git etc
+ recurse x = takeFileName x `notElem` ["dist","dist-newstyle"] -- cabal directories
+ files <- filter (\x -> takeExtension x `elem` [".hs",".lhs"]) <$> listFilesInside (return . recurse) x
+ when (null files) $
+ fail $ "Couldn't find any .hs/.lhs files inside directory: " ++ x
+ return files
+
+
+kick :: Action ()
+kick = do
+ files <- getFilesOfInterest
+ void $ uses TypeCheck $ Set.toList files
+
+-- | Print an LSP event.
+showEvent :: Lock -> FromServerMessage -> IO ()
+showEvent _ (EventFileDiagnostics _ []) = return ()
+showEvent lock (EventFileDiagnostics (toNormalizedFilePath -> file) diags) =
+ withLock lock $ T.putStrLn $ showDiagnosticsColored $ map (file,) diags
+showEvent lock e = withLock lock $ print e
+
+newSession' :: Cradle -> IO HscEnvEq
+newSession' cradle = getLibdir >>= \libdir -> do
+ env <- runGhc (Just libdir) $ do
+ initializeFlagsWithCradle "" cradle
+ getSession
+ initDynLinker env
+ newHscEnvEq env
+
+loadEnvironment :: FilePath -> IO (FilePath -> Action HscEnvEq)
+loadEnvironment dir = do
+ res <- liftIO $ newSession' =<< findCradle (dir <> "/")
+ return $ const $ return res
diff --git a/ghcide.cabal b/ghcide.cabal
new file mode 100644
index 0000000..0955866
--- /dev/null
+++ b/ghcide.cabal
@@ -0,0 +1,169 @@
+cabal-version: 1.20
+build-type: Simple
+category: Development
+name: ghcide
+version: 0.0.1
+license: Apache-2.0
+license-file: LICENSE
+author: Digital Asset
+maintainer: Digital Asset
+copyright: Digital Asset 2018-2019
+synopsis: The core of an IDE
+description:
+ A library for building Haskell IDEs on top of the GHC API.
+homepage: https://github.com/digital-asset/daml#readme
+bug-reports: https://github.com/digital-asset/daml/issues
+tested-with: GHC==8.6.5
+
+source-repository head
+ type: git
+ location: https://github.com/digital-asset/daml.git
+
+library
+ default-language: Haskell2010
+ build-depends:
+ aeson,
+ async,
+ base == 4.*,
+ binary,
+ bytestring,
+ containers,
+ data-default,
+ deepseq,
+ directory,
+ extra,
+ filepath,
+ ghc-boot-th,
+ ghc-boot,
+ ghc >= 8.4,
+ hashable,
+ haskell-lsp-types,
+ haskell-lsp >= 0.15,
+ mtl,
+ network-uri,
+ prettyprinter-ansi-terminal,
+ prettyprinter-ansi-terminal,
+ prettyprinter,
+ rope-utf16-splay,
+ safe-exceptions,
+ shake >= 0.17.5,
+ sorted-list,
+ stm,
+ syb,
+ text,
+ time,
+ transformers,
+ unordered-containers,
+ utf8-string
+ if !os(windows)
+ build-depends:
+ unix
+ c-sources:
+ cbits/getmodtime.c
+
+ cpp-options: -DGHC_STABLE
+ default-extensions:
+ BangPatterns
+ DeriveFunctor
+ DeriveGeneric
+ GeneralizedNewtypeDeriving
+ LambdaCase
+ NamedFieldPuns
+ OverloadedStrings
+ RecordWildCards
+ ScopedTypeVariables
+ StandaloneDeriving
+ TupleSections
+ TypeApplications
+ ViewPatterns
+
+ hs-source-dirs:
+ src
+ exposed-modules:
+ Development.IDE.Core.FileStore
+ Development.IDE.Core.OfInterest
+ Development.IDE.Core.PositionMapping
+ Development.IDE.Core.Rules
+ Development.IDE.Core.RuleTypes
+ Development.IDE.Core.Service
+ Development.IDE.Core.Shake
+ Development.IDE.GHC.Util
+ Development.IDE.Import.DependencyInformation
+ Development.IDE.LSP.LanguageServer
+ Development.IDE.LSP.Protocol
+ Development.IDE.LSP.Server
+ Development.IDE.Types.Diagnostics
+ Development.IDE.Types.Location
+ Development.IDE.Types.Logger
+ Development.IDE.Types.Options
+ other-modules:
+ Development.IDE.Core.Debouncer
+ Development.IDE.Core.Compile
+ Development.IDE.GHC.Compat
+ Development.IDE.GHC.CPP
+ Development.IDE.GHC.Error
+ Development.IDE.GHC.Orphans
+ Development.IDE.GHC.Warnings
+ Development.IDE.Import.FindImports
+ Development.IDE.LSP.CodeAction
+ Development.IDE.LSP.Definition
+ Development.IDE.LSP.Hover
+ Development.IDE.LSP.Notifications
+ Development.IDE.Spans.AtPoint
+ Development.IDE.Spans.Calculate
+ Development.IDE.Spans.Documentation
+ Development.IDE.Spans.Type
+
+executable ghcide
+ default-language: Haskell2010
+ hs-source-dirs: exe
+ ghc-options: -threaded
+ main-is: Main.hs
+ build-depends:
+ base == 4.*,
+ containers,
+ data-default,
+ directory,
+ extra,
+ filepath,
+ ghc-paths,
+ ghc,
+ haskell-lsp,
+ hie-bios,
+ ghcide,
+ optparse-applicative,
+ shake,
+ text
+ other-modules:
+ Arguments
+
+ default-extensions:
+ RecordWildCards
+ TupleSections
+ ViewPatterns
+
+test-suite ghcide-tests
+ type: exitcode-stdio-1.0
+ default-language: Haskell2010
+ build-tool-depends:
+ ghcide:ghcide
+ build-depends:
+ base,
+ containers,
+ extra,
+ filepath,
+ haskell-lsp-types,
+ lens,
+ lsp-test,
+ parser-combinators,
+ tasty,
+ tasty-hunit,
+ text
+ hs-source-dirs: test/cabal test/exe test/src
+ ghc-options: -threaded
+ main-is: Main.hs
+ other-modules:
+ Development.IDE.Test
+ Development.IDE.Test.Runfiles
+ default-extensions:
+ OverloadedStrings
diff --git a/src/Development/IDE/Core/Compile.hs b/src/Development/IDE/Core/Compile.hs
new file mode 100644
index 0000000..31f2b01
--- /dev/null
+++ b/src/Development/IDE/Core/Compile.hs
@@ -0,0 +1,427 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE CPP #-}
+
+-- | Based on https://ghc.haskell.org/trac/ghc/wiki/Commentary/Compiler/API.
+-- Given a list of paths to find libraries, and a file to compile, produce a list of 'CoreModule' values.
+module Development.IDE.Core.Compile
+ ( TcModuleResult(..)
+ , compileModule
+ , parseModule
+ , typecheckModule
+ , computePackageDeps
+ , addRelativeImport
+ ) where
+
+import Development.IDE.Core.RuleTypes
+import Development.IDE.GHC.CPP
+import Development.IDE.GHC.Error
+import Development.IDE.GHC.Warnings
+import Development.IDE.Types.Diagnostics
+import Development.IDE.GHC.Orphans()
+import Development.IDE.GHC.Util
+import Development.IDE.GHC.Compat
+import qualified GHC.LanguageExtensions.Type as GHC
+import Development.IDE.Types.Options
+import Development.IDE.Types.Location
+
+import GHC hiding (parseModule, typecheckModule)
+import qualified Parser
+import Lexer
+import ErrUtils
+
+import qualified GHC
+import Panic
+import GhcMonad
+import GhcPlugins as GHC hiding (fst3, (<>))
+import qualified HeaderInfo as Hdr
+import MkIface
+import StringBuffer as SB
+import TidyPgm
+import qualified GHC.LanguageExtensions as LangExt
+
+import Control.Monad.Extra
+import Control.Monad.Except
+import Control.Monad.Trans.Except
+import Data.Function
+import Data.Ord
+import qualified Data.Text as T
+import Data.IORef
+import Data.List.Extra
+import Data.Maybe
+import Data.Tuple.Extra
+import qualified Data.Map.Strict as Map
+import System.FilePath
+import System.IO.Extra
+import Data.Char
+
+import SysTools (Option (..), runUnlit)
+
+
+-- | Given a string buffer, return a pre-processed @ParsedModule@.
+parseModule
+ :: IdeOptions
+ -> HscEnv
+ -> FilePath
+ -> Maybe SB.StringBuffer
+ -> IO ([FileDiagnostic], Maybe ParsedModule)
+parseModule IdeOptions{..} env file =
+ fmap (either (, Nothing) (second Just)) .
+ -- We need packages since imports fail to resolve otherwise.
+ runGhcEnv env . runExceptT . parseFileContents optPreprocessor file
+
+
+-- | Given a package identifier, return the packages it depends on.
+computePackageDeps
+ :: HscEnv
+ -> InstalledUnitId
+ -> IO (Either [FileDiagnostic] [InstalledUnitId])
+computePackageDeps env pkg = do
+ let dflags = hsc_dflags env
+ case lookupInstalledPackage dflags pkg of
+ Nothing -> return $ Left [ideErrorText (toNormalizedFilePath noFilePath) $
+ T.pack $ "unknown package: " ++ show pkg]
+ Just pkgInfo -> return $ Right $ depends pkgInfo
+
+
+-- | Typecheck a single module using the supplied dependencies and packages.
+typecheckModule
+ :: HscEnv
+ -> [TcModuleResult]
+ -> ParsedModule
+ -> IO ([FileDiagnostic], Maybe TcModuleResult)
+typecheckModule packageState deps pm =
+ fmap (either (, Nothing) (second Just)) $
+ runGhcEnv packageState $
+ catchSrcErrors "typecheck" $ do
+ setupEnv deps
+ (warnings, tcm) <- withWarnings "typecheck" $ \tweak ->
+ GHC.typecheckModule pm{pm_mod_summary = tweak $ pm_mod_summary pm}
+ tcm2 <- mkTcModuleResult tcm
+ return (warnings, tcm2)
+
+-- | Compile a single type-checked module to a 'CoreModule' value, or
+-- provide errors.
+compileModule
+ :: HscEnv
+ -> [TcModuleResult]
+ -> TcModuleResult
+ -> IO ([FileDiagnostic], Maybe CoreModule)
+compileModule packageState deps tmr =
+ fmap (either (, Nothing) (second Just)) $
+ runGhcEnv packageState $
+ catchSrcErrors "compile" $ do
+ setupEnv (deps ++ [tmr])
+
+ let tm = tmrModule tmr
+ session <- getSession
+ (warnings,desugar) <- withWarnings "compile" $ \tweak -> do
+ let pm = tm_parsed_module tm
+ let pm' = pm{pm_mod_summary = tweak $ pm_mod_summary pm}
+ let tm' = tm{tm_parsed_module = pm'}
+ GHC.dm_core_module <$> GHC.desugarModule tm'
+
+ -- give variables unique OccNames
+ (tidy, details) <- liftIO $ tidyProgram session desugar
+
+ let core = CoreModule
+ (cg_module tidy)
+ (md_types details)
+ (cg_binds tidy)
+ (mg_safe_haskell desugar)
+
+ return (warnings, core)
+
+
+addRelativeImport :: ParsedModule -> DynFlags -> DynFlags
+addRelativeImport modu dflags = dflags
+ {importPaths = nubOrd $ maybeToList (moduleImportPaths modu) ++ importPaths dflags}
+
+mkTcModuleResult
+ :: GhcMonad m
+ => TypecheckedModule
+ -> m TcModuleResult
+mkTcModuleResult tcm = do
+ session <- getSession
+ (iface, _) <- liftIO $ mkIfaceTc session Nothing Sf_None details tcGblEnv
+ let mod_info = HomeModInfo iface details Nothing
+ return $ TcModuleResult tcm mod_info
+ where
+ (tcGblEnv, details) = tm_internals_ tcm
+
+-- | Setup the environment that GHC needs according to our
+-- best understanding (!)
+setupEnv :: GhcMonad m => [TcModuleResult] -> m ()
+setupEnv tmsIn = do
+ -- if both a .hs-boot file and a .hs file appear here, we want to make sure that the .hs file
+ -- takes precedence, so put the .hs-boot file earlier in the list
+ let isSourceFile = (==HsBootFile) . ms_hsc_src . pm_mod_summary . tm_parsed_module . tmrModule
+ tms = sortBy (compare `on` Down . isSourceFile) tmsIn
+
+ session <- getSession
+
+ let mss = map (pm_mod_summary . tm_parsed_module . tmrModule) tms
+
+ -- set the target and module graph in the session
+ let graph = mkModuleGraph mss
+ setSession session { hsc_mod_graph = graph }
+
+ -- Make modules available for others that import them,
+ -- by putting them in the finder cache.
+ let ims = map (InstalledModule (thisInstalledUnitId $ hsc_dflags session) . moduleName . ms_mod) mss
+ ifrs = zipWith (\ms -> InstalledFound (ms_location ms)) mss ims
+ liftIO $ modifyIORef (hsc_FC session) $ \fc ->
+ foldl' (\fc (im, ifr) -> GHC.extendInstalledModuleEnv fc im ifr) fc
+ $ zip ims ifrs
+
+ -- load dependent modules, which must be in topological order.
+ mapM_ loadModuleHome tms
+
+
+-- | Load a module, quickly. Input doesn't need to be desugared.
+-- A module must be loaded before dependent modules can be typechecked.
+-- This variant of loadModuleHome will *never* cause recompilation, it just
+-- modifies the session.
+loadModuleHome
+ :: (GhcMonad m)
+ => TcModuleResult
+ -> m ()
+loadModuleHome tmr = modifySession $ \e ->
+ e { hsc_HPT = addToHpt (hsc_HPT e) mod mod_info }
+ where
+ ms = pm_mod_summary . tm_parsed_module . tmrModule $ tmr
+ mod_info = tmrModInfo tmr
+ mod = ms_mod_name ms
+
+
+
+-- | GhcMonad function to chase imports of a module given as a StringBuffer. Returns given module's
+-- name and its imports.
+getImportsParsed :: DynFlags ->
+ GHC.ParsedSource ->
+ Either [FileDiagnostic] (GHC.ModuleName, [(Bool, (Maybe FastString, Located GHC.ModuleName))])
+getImportsParsed dflags (L loc parsed) = do
+ let modName = maybe (GHC.mkModuleName "Main") GHC.unLoc $ GHC.hsmodName parsed
+
+ -- most of these corner cases are also present in https://hackage.haskell.org/package/ghc-8.6.1/docs/src/HeaderInfo.html#getImports
+ -- but we want to avoid parsing the module twice
+ let implicit_prelude = xopt GHC.ImplicitPrelude dflags
+ implicit_imports = Hdr.mkPrelImports modName loc implicit_prelude $ GHC.hsmodImports parsed
+
+ -- filter out imports that come from packages
+ return (modName, [(ideclSource i, (fmap sl_fs $ ideclPkgQual i, ideclName i))
+ | i <- map GHC.unLoc $ implicit_imports ++ GHC.hsmodImports parsed
+ , GHC.moduleNameString (GHC.unLoc $ ideclName i) /= "GHC.Prim"
+ ])
+
+
+-- | Produce a module summary from a StringBuffer.
+getModSummaryFromBuffer
+ :: GhcMonad m
+ => FilePath
+ -> SB.StringBuffer
+ -> DynFlags
+ -> GHC.ParsedSource
+ -> ExceptT [FileDiagnostic] m ModSummary
+getModSummaryFromBuffer fp contents dflags parsed = do
+ (modName, imports) <- liftEither $ getImportsParsed dflags parsed
+
+ let modLoc = ModLocation
+ { ml_hs_file = Just fp
+ , ml_hi_file = derivedFile "hi"
+ , ml_obj_file = derivedFile "o"
+#ifndef GHC_STABLE
+ , ml_hie_file = derivedFile "hie"
+#endif
+ -- This does not consider the dflags configuration
+ -- (-osuf and -hisuf, object and hi dir.s).
+ -- However, we anyway don't want to generate them.
+ }
+ InstalledUnitId unitId = thisInstalledUnitId dflags
+ return $ ModSummary
+ { ms_mod = mkModule (fsToUnitId unitId) modName
+ , ms_location = modLoc
+ , ms_hs_date = error "Rules should not depend on ms_hs_date"
+ -- When we are working with a virtual file we do not have a file date.
+ -- To avoid silent issues where something is not processed because the date
+ -- has not changed, we make sure that things blow up if they depend on the
+ -- date.
+ , ms_textual_imps = [imp | (False, imp) <- imports]
+ , ms_hspp_file = fp
+ , ms_hspp_opts = dflags
+ , ms_hspp_buf = Just contents
+
+ -- defaults:
+ , ms_hsc_src = sourceType
+ , ms_obj_date = Nothing
+ , ms_iface_date = Nothing
+#ifndef GHC_STABLE
+ , ms_hie_date = Nothing
+#endif
+ , ms_srcimps = [imp | (True, imp) <- imports]
+ , ms_parsed_mod = Nothing
+ }
+ where
+ (sourceType, derivedFile) =
+ let (stem, ext) = splitExtension fp in
+ if "-boot" `isSuffixOf` ext
+ then (HsBootFile, \newExt -> stem <.> newExt ++ "-boot")
+ else (HsSrcFile , \newExt -> stem <.> newExt)
+
+-- | Run (unlit) literate haskell preprocessor on a file, or buffer if set
+runLhs :: DynFlags -> FilePath -> Maybe SB.StringBuffer -> IO SB.StringBuffer
+runLhs dflags filename contents = withTempDir $ \dir -> do
+ let fout = dir </> takeFileName filename <.> "unlit"
+ filesrc <- case contents of
+ Nothing -> return filename
+ Just cnts -> do
+ let fsrc = dir </> takeFileName filename <.> "literate"
+ withBinaryFile fsrc WriteMode $ \h ->
+ hPutStringBuffer h cnts
+ return fsrc
+ unlit filesrc fout
+ SB.hGetStringBuffer fout
+ where
+ unlit filein fileout = SysTools.runUnlit dflags (args filein fileout)
+ args filein fileout = [
+ SysTools.Option "-h"
+ , SysTools.Option (escape filename) -- name this file
+ , SysTools.FileOption "" filein -- input file
+ , SysTools.FileOption "" fileout ] -- output file
+ -- taken from ghc's DriverPipeline.hs
+ escape ('\\':cs) = '\\':'\\': escape cs
+ escape ('\"':cs) = '\\':'\"': escape cs
+ escape ('\'':cs) = '\\':'\'': escape cs
+ escape (c:cs) = c : escape cs
+ escape [] = []
+
+-- | Run CPP on a file
+runCpp :: DynFlags -> FilePath -> Maybe SB.StringBuffer -> IO SB.StringBuffer
+runCpp dflags filename contents = withTempDir $ \dir -> do
+ let out = dir </> takeFileName filename <.> "out"
+ case contents of
+ Nothing -> do
+ -- Happy case, file is not modified, so run CPP on it in-place
+ -- which also makes things like relative #include files work
+ -- and means location information is correct
+ doCpp dflags True filename out
+ liftIO $ SB.hGetStringBuffer out
+
+ Just contents -> do
+ -- Sad path, we have to create a version of the path in a temp dir
+ -- __FILE__ macro is wrong, ignoring that for now (likely not a real issue)
+
+ -- Relative includes aren't going to work, so we fix that by adding to the include path.
+ dflags <- return $ addIncludePathsQuote (takeDirectory filename) dflags
+
+ -- Location information is wrong, so we fix that by patching it afterwards.
+ let inp = dir </> "___GHCIDE_MAGIC___"
+ withBinaryFile inp WriteMode $ \h ->
+ hPutStringBuffer h contents
+ doCpp dflags True inp out
+
+ -- Fix up the filename in lines like:
+ -- # 1 "C:/Temp/extra-dir-914611385186/___GHCIDE_MAGIC___"
+ let tweak x
+ | Just x <- stripPrefix "# " x
+ , "___GHCIDE_MAGIC___" `isInfixOf` x
+ , let num = takeWhile (not . isSpace) x
+ -- important to use /, and never \ for paths, even on Windows, since then C escapes them
+ -- and GHC gets all confused
+ = "# " <> num <> " \"" <> map (\x -> if isPathSeparator x then '/' else x) filename <> "\""
+ | otherwise = x
+ stringToStringBuffer . unlines . map tweak . lines <$> readFileUTF8' out
+
+-- | Given a buffer, flags, file path and module summary, produce a
+-- parsed module (or errors) and any parse warnings.
+parseFileContents
+ :: GhcMonad m
+ => (GHC.ParsedSource -> ([(GHC.SrcSpan, String)], GHC.ParsedSource))
+ -> FilePath -- ^ the filename (for source locations)
+ -> Maybe SB.StringBuffer -- ^ Haskell module source text (full Unicode is supported)
+ -> ExceptT [FileDiagnostic] m ([FileDiagnostic], ParsedModule)
+parseFileContents preprocessor filename mbContents = do
+ let loc = mkRealSrcLoc (mkFastString filename) 1 1
+ contents <- liftIO $ maybe (hGetStringBuffer filename) return mbContents
+ let isOnDisk = isNothing mbContents
+
+ -- unlit content if literate Haskell ending
+ (isOnDisk, contents) <- if ".lhs" `isSuffixOf` filename
+ then do
+ dflags <- getDynFlags
+ newcontent <- liftIO $ runLhs dflags filename mbContents
+ return (False, newcontent)
+ else return (isOnDisk, contents)
+
+ dflags <- ExceptT $ parsePragmasIntoDynFlags filename contents
+ (contents, dflags) <-
+ if not $ xopt LangExt.Cpp dflags then
+ return (contents, dflags)
+ else do
+ contents <- liftIO $ runCpp dflags filename $ if isOnDisk then Nothing else Just contents
+ dflags <- ExceptT $ parsePragmasIntoDynFlags filename contents
+ return (contents, dflags)
+
+ case unP Parser.parseModule (mkPState dflags contents loc) of
+ PFailed _ locErr msgErr ->
+ throwE $ diagFromErrMsg "parser" dflags $ mkPlainErrMsg dflags locErr msgErr
+ POk pst rdr_module ->
+ let hpm_annotations =
+ (Map.fromListWith (++) $ annotations pst,
+ Map.fromList ((noSrcSpan,comment_q pst)
+ :annotations_comments pst))
+ (warns, errs) = getMessages pst dflags
+ in
+ do
+ -- Just because we got a `POk`, it doesn't mean there
+ -- weren't errors! To clarify, the GHC parser
+ -- distinguishes between fatal and non-fatal
+ -- errors. Non-fatal errors are the sort that don't
+ -- prevent parsing from continuing (that is, a parse
+ -- tree can still be produced despite the error so that
+ -- further errors/warnings can be collected). Fatal
+ -- errors are those from which a parse tree just can't
+ -- be produced.
+ unless (null errs) $
+ throwE $ diagFromErrMsgs "parser" dflags $ snd $ getMessages pst dflags
+
+ -- Ok, we got here. It's safe to continue.
+ let (errs, parsed) = preprocessor rdr_module
+ unless (null errs) $ throwE $ diagFromStrings "parser" errs
+ ms <- getModSummaryFromBuffer filename contents dflags parsed
+ let pm =
+ ParsedModule {
+ pm_mod_summary = ms
+ , pm_parsed_source = parsed
+ , pm_extra_src_files=[] -- src imports not allowed
+ , pm_annotations = hpm_annotations
+ }
+ warnings = diagFromErrMsgs "parser" dflags warns
+ pure (warnings, pm)
+
+
+-- | This reads the pragma information directly from the provided buffer.
+parsePragmasIntoDynFlags
+ :: GhcMonad m
+ => FilePath
+ -> SB.StringBuffer
+ -> m (Either [FileDiagnostic] DynFlags)
+parsePragmasIntoDynFlags fp contents = catchSrcErrors "pragmas" $ do
+ dflags0 <- getSessionDynFlags
+ let opts = Hdr.getOptions dflags0 contents fp
+ (dflags, _, _) <- parseDynamicFilePragma dflags0 opts
+ return dflags
+
+-- | Run something in a Ghc monad and catch the errors (SourceErrors and
+-- compiler-internal exceptions like Panic or InstallationError).
+catchSrcErrors :: GhcMonad m => T.Text -> m a -> m (Either [FileDiagnostic] a)
+catchSrcErrors fromWhere ghcM = do
+ dflags <- getDynFlags
+ handleGhcException (ghcExceptionToDiagnostics dflags) $
+ handleSourceError (sourceErrorToDiagnostics dflags) $
+ Right <$> ghcM
+ where
+ ghcExceptionToDiagnostics dflags = return . Left . diagFromGhcException fromWhere dflags
+ sourceErrorToDiagnostics dflags = return . Left . diagFromErrMsgs fromWhere dflags . srcErrorMessages
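
Editor's note: a small standalone sketch mirroring the (sourceType, derivedFile) logic inside getModSummaryFromBuffer above, to make the .hs-boot handling concrete: boot sources keep the "-boot" suffix on their derived interface/object file names.

import Data.List (isSuffixOf)
import System.FilePath (splitExtension, (<.>))

-- derivedFileFor "src/A.hs"      "hi" == "src/A.hi"
-- derivedFileFor "src/A.hs-boot" "hi" == "src/A.hi-boot"
derivedFileFor :: FilePath -> String -> FilePath
derivedFileFor fp newExt =
    let (stem, ext) = splitExtension fp
    in if "-boot" `isSuffixOf` ext
         then stem <.> newExt ++ "-boot"
         else stem <.> newExt
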
diff --git a/src/Development/IDE/Core/Debouncer.hs b/src/Development/IDE/Core/Debouncer.hs
new file mode 100644
index 0000000..f1d989f
--- /dev/null
+++ b/src/Development/IDE/Core/Debouncer.hs
@@ -0,0 +1,45 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Core.Debouncer
+ ( Debouncer
+ , newDebouncer
+ , registerEvent
+ ) where
+
+import Control.Concurrent.Extra
+import Control.Concurrent.Async
+import Control.Exception
+import Control.Monad.Extra
+import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as Map
+import System.Time.Extra
+
+-- | A debouncer can be used to avoid triggering many events
+-- (e.g. diagnostics) for the same key (e.g. the same file)
+-- within a short timeframe. This is accomplished
+-- by delaying each event for a given time. If another event
+-- is registered for the same key within that timeframe,
+-- only the new event will fire.
+newtype Debouncer k = Debouncer (Var (Map k (Async ())))
+
+-- | Create a new empty debouncer.
+newDebouncer :: IO (Debouncer k)
+newDebouncer = do
+ m <- newVar Map.empty
+ pure $ Debouncer m
+
+-- | Register an event that will fire after the given delay if no other event
+-- for the same key gets registered until then.
+--
+-- If there is a pending event for the same key, the pending event will be killed.
+-- Events are run unmasked so it is up to the user of `registerEvent`
+-- to mask if required.
+registerEvent :: Ord k => Debouncer k -> Seconds -> k -> IO () -> IO ()
+registerEvent (Debouncer d) delay k fire = modifyVar_ d $ \m -> mask_ $ do
+ whenJust (Map.lookup k m) cancel
+ a <- asyncWithUnmask $ \unmask -> unmask $ do
+ sleep delay
+ fire
+ modifyVar_ d (pure . Map.delete k)
+ pure $ Map.insert k a m
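
Editor's note: a minimal usage sketch of the API above (not part of this commit). Two events registered for the same key within the delay window supersede each other, so only the second action runs; sendDiagnosticsFor is a stand-in for a real action.

import Development.IDE.Core.Debouncer

example :: IO ()
example = do
    debouncer <- newDebouncer
    let file = "src/Foo.hs" :: FilePath
    registerEvent debouncer 0.1 file (sendDiagnosticsFor file) -- cancelled by the next call
    registerEvent debouncer 0.1 file (sendDiagnosticsFor file) -- fires after 0.1s
  where
    sendDiagnosticsFor f = putStrLn ("publishing diagnostics for " ++ f)
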
diff --git a/src/Development/IDE/Core/FileStore.hs b/src/Development/IDE/Core/FileStore.hs
new file mode 100644
index 0000000..00086fc
--- /dev/null
+++ b/src/Development/IDE/Core/FileStore.hs
@@ -0,0 +1,211 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Development.IDE.Core.FileStore(
+ getFileExists, getFileContents,
+ setBufferModified,
+ setSomethingModified,
+ fileStoreRules,
+ VFSHandle,
+ makeVFSHandle,
+ makeLSPVFSHandle,
+ ) where
+
+import StringBuffer
+import Development.IDE.GHC.Orphans()
+import Development.IDE.GHC.Util
+
+import Control.Concurrent.Extra
+import qualified Data.Map.Strict as Map
+import Data.Maybe
+import qualified Data.Text as T
+import Control.Monad.Extra
+import qualified System.Directory as Dir
+import Development.Shake
+import Development.Shake.Classes
+import Development.IDE.Core.Shake
+import Control.Exception
+import GHC.Generics
+import Data.Either.Extra
+import System.IO.Error
+import qualified Data.ByteString.Char8 as BS
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+import qualified Data.Rope.UTF16 as Rope
+
+#ifdef mingw32_HOST_OS
+import Data.Time
+#else
+import Foreign.C.String
+import Foreign.C.Types
+import Foreign.Marshal (alloca)
+import Foreign.Ptr
+import Foreign.Storable
+import qualified System.Posix.Error as Posix
+#endif
+
+import Language.Haskell.LSP.Core
+import Language.Haskell.LSP.VFS
+
+-- | haskell-lsp manages the VFS internally and automatically so we cannot use
+-- the builtin VFS without spinning up an LSP server. To be able to test things
+-- like `setBufferModified` we abstract over the VFS implementation.
+data VFSHandle = VFSHandle
+ { getVirtualFile :: NormalizedUri -> IO (Maybe VirtualFile)
+ -- ^ get the contents of a virtual file
+ , setVirtualFileContents :: Maybe (NormalizedUri -> Maybe T.Text -> IO ())
+ -- ^ set a specific file to a value. If Nothing then we are ignoring these
+ -- signals anyway so can just say something was modified
+ }
+
+instance IsIdeGlobal VFSHandle
+
+makeVFSHandle :: IO VFSHandle
+makeVFSHandle = do
+ vfsVar <- newVar (1, Map.empty)
+ pure VFSHandle
+ { getVirtualFile = \uri -> do
+ (_nextVersion, vfs) <- readVar vfsVar
+ pure $ Map.lookup uri vfs
+ , setVirtualFileContents = Just $ \uri content ->
+ modifyVar_ vfsVar $ \(nextVersion, vfs) -> pure $ (nextVersion + 1, ) $
+ case content of
+ Nothing -> Map.delete uri vfs
+ Just content -> Map.insert uri (VirtualFile nextVersion (Rope.fromText content) Nothing) vfs
+ }
+
+makeLSPVFSHandle :: LspFuncs c -> VFSHandle
+makeLSPVFSHandle lspFuncs = VFSHandle
+ { getVirtualFile = getVirtualFileFunc lspFuncs
+ , setVirtualFileContents = Nothing
+ }
+
+
+-- | Get the contents of a file, either dirty (if the buffer is modified) or Nothing to mean use from disk.
+type instance RuleResult GetFileContents = (FileVersion, Maybe StringBuffer)
+
+-- | Does the file exist.
+type instance RuleResult GetFileExists = Bool
+
+
+data GetFileExists = GetFileExists
+ deriving (Eq, Show, Generic)
+instance Hashable GetFileExists
+instance NFData GetFileExists
+
+data GetFileContents = GetFileContents
+ deriving (Eq, Show, Generic)
+instance Hashable GetFileContents
+instance NFData GetFileContents
+
+
+getFileExistsRule :: VFSHandle -> Rules ()
+getFileExistsRule vfs =
+ defineEarlyCutoff $ \GetFileExists file -> do
+ alwaysRerun
+ res <- liftIO $ handle (\(_ :: IOException) -> return False) $
+ (isJust <$> getVirtualFile vfs (filePathToUri' file)) ||^
+ Dir.doesFileExist (fromNormalizedFilePath file)
+ return (Just $ if res then BS.singleton '1' else BS.empty, ([], Just res))
+
+
+getModificationTimeRule :: VFSHandle -> Rules ()
+getModificationTimeRule vfs =
+ defineEarlyCutoff $ \GetModificationTime file -> do
+ let file' = fromNormalizedFilePath file
+ let wrap time = (Just time, ([], Just $ ModificationTime time))
+ alwaysRerun
+ mbVirtual <- liftIO $ getVirtualFile vfs $ filePathToUri' file
+ case mbVirtual of
+ Just (VirtualFile ver _ _) -> pure (Just $ BS.pack $ show ver, ([], Just $ VFSVersion ver))
+ Nothing -> liftIO $ fmap wrap (getModTime file')
+ `catch` \(e :: IOException) -> do
+ let err | isDoesNotExistError e = "File does not exist: " ++ file'
+ | otherwise = "IO error while reading " ++ file' ++ ", " ++ displayException e
+ return (Nothing, ([ideErrorText file $ T.pack err], Nothing))
+ where
+ -- Dir.getModificationTime is surprisingly slow since it performs
+ -- a ton of conversions. Since we do not actually care about
+ -- the format of the time, we can get away with something cheaper.
+ -- For now, we only try to do this on Unix systems where it seems to get the
+ -- time spent checking file modifications (which happens on every change)
+ -- from > 0.5s to ~0.15s.
+ -- We might also want to try speeding this up on Windows at some point.
+ getModTime :: FilePath -> IO BS.ByteString
+ getModTime f =
+#ifdef mingw32_HOST_OS
+ do time <- Dir.getModificationTime f
+ pure $! BS.pack $ show (toModifiedJulianDay $ utctDay time, diffTimeToPicoseconds $ utctDayTime time)
+#else
+ withCString f $ \f' ->
+ alloca $ \secPtr ->
+ alloca $ \nsecPtr -> do
+ Posix.throwErrnoPathIfMinus1Retry_ "getmodtime" f $ c_getModTime f' secPtr nsecPtr
+ sec <- peek secPtr
+ nsec <- peek nsecPtr
+ pure $! BS.pack $ show sec <> "." <> show nsec
+
+-- Sadly even unix’s getFileStatus + modificationTimeHiRes is still about twice as slow
+-- as doing the FFI call ourselves :(.
+foreign import ccall "getmodtime" c_getModTime :: CString -> Ptr CTime -> Ptr CLong -> IO Int
+#endif
+
+getFileContentsRule :: VFSHandle -> Rules ()
+getFileContentsRule vfs =
+ define $ \GetFileContents file -> do
+ -- need to depend on modification time to introduce a dependency with Cutoff
+ time <- use_ GetModificationTime file
+ res <- liftIO $ ideTryIOException file $ do
+ mbVirtual <- getVirtualFile vfs $ filePathToUri' file
+ pure $ textToStringBuffer . Rope.toText . _text <$> mbVirtual
+ case res of
+ Left err -> return ([err], Nothing)
+ Right contents -> return ([], Just (time, contents))
+
+ideTryIOException :: NormalizedFilePath -> IO a -> IO (Either FileDiagnostic a)
+ideTryIOException fp act =
+ mapLeft
+ (\(e :: IOException) -> ideErrorText fp $ T.pack $ show e)
+ <$> try act
+
+
+getFileContents :: NormalizedFilePath -> Action (FileVersion, Maybe StringBuffer)
+getFileContents = use_ GetFileContents
+
+getFileExists :: NormalizedFilePath -> Action Bool
+getFileExists =
+ -- we deliberately wrap the file as a FilePath WITHOUT mkAbsolute
+ -- so that if the file doesn't exist, is on a shared drive that is unmounted etc we get a properly
+ -- cached 'No' rather than an exception in the wrong place
+ use_ GetFileExists
+
+
+fileStoreRules :: VFSHandle -> Rules ()
+fileStoreRules vfs = do
+ addIdeGlobal vfs
+ getModificationTimeRule vfs
+ getFileContentsRule vfs
+ getFileExistsRule vfs
+
+
+-- | Notify the compiler service that a particular file has been modified.
+-- Use 'Nothing' to say the file is no longer in the virtual file system
+-- but should be sourced from disk, or 'Just' to give its new value.
+setBufferModified :: IdeState -> NormalizedFilePath -> Maybe T.Text -> IO ()
+setBufferModified state absFile contents = do
+ VFSHandle{..} <- getIdeGlobalState state
+ whenJust setVirtualFileContents $ \set ->
+ set (filePathToUri' absFile) contents
+ void $ shakeRun state []
+
+-- | Note that some buffer somewhere has been modified, but don't say what.
+-- Only valid if the virtual file system was initialised by LSP, as that
+-- independently tracks which files are modified.
+setSomethingModified :: IdeState -> IO ()
+setSomethingModified state = do
+ VFSHandle{..} <- getIdeGlobalState state
+ when (isJust setVirtualFileContents) $
+ fail "setSomethingModified can't be called on this type of VFSHandle"
+ void $ shakeRun state []
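
Editor's note: a minimal sketch (not part of this commit) of driving the VFS abstraction above, assuming an IdeState whose VFSHandle was created with makeVFSHandle (the non-LSP path in exe/Main.hs); the file path is hypothetical. An in-memory edit is pushed first, then the file is marked as coming from disk again.

import qualified Data.Text as T
import Development.IDE.Core.FileStore (setBufferModified)
import Development.IDE.Core.Shake (IdeState)
import Development.IDE.Types.Location (toNormalizedFilePath)

editThenRevert :: IdeState -> IO ()
editThenRevert ide = do
    let file = toNormalizedFilePath "src/Foo.hs"
    setBufferModified ide file (Just (T.pack "module Foo where\n")) -- buffer now shadows the disk copy
    setBufferModified ide file Nothing                              -- revert to reading from disk
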
diff --git a/src/Development/IDE/Core/OfInterest.hs b/src/Development/IDE/Core/OfInterest.hs
new file mode 100644
index 0000000..d7d4cfc
--- /dev/null
+++ b/src/Development/IDE/Core/OfInterest.hs
@@ -0,0 +1,80 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE FlexibleInstances #-}
+
+-- | A Shake implementation of the compiler service, built
+-- using the "Shaker" abstraction layer for in-memory use.
+--
+module Development.IDE.Core.OfInterest(
+ ofInterestRules,
+ getFilesOfInterest, setFilesOfInterest, modifyFilesOfInterest,
+ ) where
+
+import Control.Concurrent.Extra
+import Control.Monad.Except
+import Data.Hashable
+import Control.DeepSeq
+import GHC.Generics
+import Data.Typeable
+import qualified Data.ByteString.UTF8 as BS
+import Control.Exception
+import Development.IDE.Types.Location
+import Development.IDE.Types.Logger
+import Data.Set (Set)
+import qualified Data.Set as Set
+import qualified Data.Text as T
+import Data.Tuple.Extra
+import Development.Shake
+
+import Development.IDE.Core.Shake
+
+
+
+newtype OfInterestVar = OfInterestVar (Var (Set NormalizedFilePath))
+instance IsIdeGlobal OfInterestVar
+
+
+type instance RuleResult GetFilesOfInterest = Set NormalizedFilePath
+
+
+data GetFilesOfInterest = GetFilesOfInterest
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetFilesOfInterest
+instance NFData GetFilesOfInterest
+
+
+ofInterestRules :: Rules ()
+ofInterestRules = do
+ addIdeGlobal . OfInterestVar =<< liftIO (newVar Set.empty)
+ defineEarlyCutoff $ \GetFilesOfInterest _file -> assert (null $ fromNormalizedFilePath _file) $ do
+ alwaysRerun
+ filesOfInterest <- getFilesOfInterestUntracked
+ pure (Just $ BS.fromString $ show filesOfInterest, ([], Just filesOfInterest))
+
+
+getFilesOfInterest :: Action (Set NormalizedFilePath)
+getFilesOfInterest = useNoFile_ GetFilesOfInterest
+
+
+
+------------------------------------------------------------
+-- Exposed API
+
+-- | Set the files-of-interest which will be built and kept-up-to-date.
+setFilesOfInterest :: IdeState -> Set NormalizedFilePath -> IO ()
+setFilesOfInterest state files = modifyFilesOfInterest state (const files)
+
+getFilesOfInterestUntracked :: Action (Set NormalizedFilePath)
+getFilesOfInterestUntracked = do
+ OfInterestVar var <- getIdeGlobalAction
+ liftIO $ readVar var
+
+modifyFilesOfInterest :: IdeState -> (Set NormalizedFilePath -> Set NormalizedFilePath) -> IO ()
+modifyFilesOfInterest state f = do
+ OfInterestVar var <- getIdeGlobalState state
+ files <- modifyVar var $ pure . dupe . f
+ logDebug (ideLogger state) $ "Set files of interest to: " <> T.pack (show $ Set.toList files)
+ void $ shakeRun state []
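
Editor's note: a minimal sketch (not part of this commit), assuming an existing IdeState: add a single file to the files of interest without replacing the whole set, using modifyFilesOfInterest from the module above.

import qualified Data.Set as Set
import Development.IDE.Core.OfInterest (modifyFilesOfInterest)
import Development.IDE.Core.Shake (IdeState)
import Development.IDE.Types.Location (NormalizedFilePath)

addFileOfInterest :: IdeState -> NormalizedFilePath -> IO ()
addFileOfInterest ide file = modifyFilesOfInterest ide (Set.insert file)
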
diff --git a/src/Development/IDE/Core/PositionMapping.hs b/src/Development/IDE/Core/PositionMapping.hs
new file mode 100644
index 0000000..f995295
--- /dev/null
+++ b/src/Development/IDE/Core/PositionMapping.hs
@@ -0,0 +1,84 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+module Development.IDE.Core.PositionMapping
+ ( PositionMapping(..)
+ , toCurrentRange
+ , fromCurrentRange
+ , applyChange
+ , idMapping
+ -- toCurrent and fromCurrent are mainly exposed for testing
+ , toCurrent
+ , fromCurrent
+ ) where
+
+import Control.Monad
+import qualified Data.Text as T
+import Language.Haskell.LSP.Types
+
+data PositionMapping = PositionMapping
+ { toCurrentPosition :: !(Position -> Maybe Position)
+ , fromCurrentPosition :: !(Position -> Maybe Position)
+ }
+
+toCurrentRange :: PositionMapping -> Range -> Maybe Range
+toCurrentRange mapping (Range a b) =
+ Range <$> toCurrentPosition mapping a <*> toCurrentPosition mapping b
+
+fromCurrentRange :: PositionMapping -> Range -> Maybe Range
+fromCurrentRange mapping (Range a b) =
+ Range <$> fromCurrentPosition mapping a <*> fromCurrentPosition mapping b
+
+idMapping :: PositionMapping
+idMapping = PositionMapping Just Just
+
+applyChange :: PositionMapping -> TextDocumentContentChangeEvent -> PositionMapping
+applyChange posMapping (TextDocumentContentChangeEvent (Just r) _ t) = PositionMapping
+ { toCurrentPosition = toCurrent r t <=< toCurrentPosition posMapping
+ , fromCurrentPosition = fromCurrentPosition posMapping <=< fromCurrent r t
+ }
+applyChange posMapping _ = posMapping
+
+toCurrent :: Range -> T.Text -> Position -> Maybe Position
+toCurrent (Range (Position startLine startColumn) (Position endLine endColumn)) t (Position line column)
+ | line < startLine || line == startLine && column < startColumn =
+ -- Position is before the change and thereby unchanged.
+ Just $ Position line column
+ | line > endLine || line == endLine && column >= endColumn =
+ -- Position is after the change so increase line and column number
+ -- as necessary.
+ Just $ Position (line + lineDiff) newColumn
+ | otherwise = Nothing
+ -- Position is in the region that was changed.
+ where
+ lineDiff = linesNew - linesOld
+ linesNew = T.count "\n" t
+ linesOld = endLine - startLine
+ newEndColumn
+ | linesNew == 0 = startColumn + T.length t
+ | otherwise = T.length $ T.takeWhileEnd (/= '\n') t
+ newColumn
+ | line == endLine = column + newEndColumn - endColumn
+ | otherwise = column
+
+fromCurrent :: Range -> T.Text -> Position -> Maybe Position
+fromCurrent (Range (Position startLine startColumn) (Position endLine endColumn)) t (Position line column)
+ | line < startLine || line == startLine && column < startColumn =
+ -- Position is before the change and thereby unchanged
+ Just $ Position line column
+ | line > newEndLine || line == newEndLine && column >= newEndColumn =
+ -- Position is after the change so increase line and column number
+ -- as necessary.
+ Just $ Position (line - lineDiff) newColumn
+ | otherwise = Nothing
+ -- Position is in the region that was changed.
+ where
+ lineDiff = linesNew - linesOld
+ linesNew = T.count "\n" t
+ linesOld = endLine - startLine
+ newEndLine = endLine + lineDiff
+ newEndColumn
+ | linesNew == 0 = startColumn + T.length t
+ | otherwise = T.length $ T.takeWhileEnd (/= '\n') t
+ newColumn
+ | line == newEndLine = column - (newEndColumn - endColumn)
+ | otherwise = column
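
Editor's note: a small worked example of toCurrent using the definitions above. Replacing the range (1,0)-(1,3) with the two-character text "ab" makes the edit one character shorter than the original span, so a position at column 5 on that line (after the edit) shifts left by one column.

import qualified Data.Text as T
import Development.IDE.Core.PositionMapping (toCurrent)
import Language.Haskell.LSP.Types (Position (..), Range (..))

example :: Maybe Position
example = toCurrent (Range (Position 1 0) (Position 1 3)) (T.pack "ab") (Position 1 5)
-- == Just (Position 1 4)
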
diff --git a/src/Development/IDE/Core/RuleTypes.hs b/src/Development/IDE/Core/RuleTypes.hs
new file mode 100644
index 0000000..8cefdf9
--- /dev/null
+++ b/src/Development/IDE/Core/RuleTypes.hs
@@ -0,0 +1,135 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE TypeFamilies #-}
+
+-- | A Shake implementation of the compiler service, built
+-- using the "Shaker" abstraction layer for in-memory use.
+--
+module Development.IDE.Core.RuleTypes(
+ module Development.IDE.Core.RuleTypes
+ ) where
+
+import Control.DeepSeq
+import Development.IDE.Import.DependencyInformation
+import Development.IDE.GHC.Util
+import Development.IDE.Types.Location
+import Data.Hashable
+import Data.Typeable
+import qualified Data.Set as S
+import Development.Shake
+import GHC.Generics (Generic)
+
+import GHC
+import Module (InstalledUnitId)
+import HscTypes (HomeModInfo)
+import Development.IDE.GHC.Compat
+
+import Development.IDE.Spans.Type
+
+
+-- NOTATION
+-- Foo+ means Foo for the dependencies
+-- Foo* means Foo for me and Foo+
+
+-- | The parse tree for the file using GetFileContents
+type instance RuleResult GetParsedModule = ParsedModule
+
+-- | The dependency information produced by following the imports recursively.
+-- This rule will succeed even if there is an error, e.g., a module could not be located,
+-- a module could not be parsed or an import cycle.
+type instance RuleResult GetDependencyInformation = DependencyInformation
+
+-- | Transitive module and pkg dependencies based on the information produced by GetDependencyInformation.
+-- This rule is also responsible for calling ReportImportCycles for each file in the transitive closure.
+type instance RuleResult GetDependencies = TransitiveDependencies
+
+-- | Contains the typechecked module and the 'HomeModInfo' for
+-- that module.
+data TcModuleResult = TcModuleResult
+ { tmrModule :: TypecheckedModule
+ , tmrModInfo :: HomeModInfo
+ }
+instance Show TcModuleResult where
+ show = show . pm_mod_summary . tm_parsed_module . tmrModule
+
+instance NFData TcModuleResult where
+ rnf = rwhnf
+
+-- | The type checked version of this file, requires TypeCheck+
+type instance RuleResult TypeCheck = TcModuleResult
+
+-- | Information about what spans occur where, requires TypeCheck
+type instance RuleResult GetSpanInfo = [SpanInfo]
+
+-- | Convert to Core, requires TypeCheck*
+type instance RuleResult GenerateCore = CoreModule
+
+-- | A GHC session that we reuse.
+type instance RuleResult GhcSession = HscEnvEq
+
+-- | Resolve the imports in a module to the file path of a module
+-- in the same package or the package id of another package.
+type instance RuleResult GetLocatedImports = ([(Located ModuleName, Maybe NormalizedFilePath)], S.Set InstalledUnitId)
+
+-- | This rule is used to report import cycles. It depends on GetDependencyInformation.
+-- We cannot report the cycles directly from GetDependencyInformation since
+-- we can only report diagnostics for the current file.
+type instance RuleResult ReportImportCycles = ()
+
+-- | Read the given HIE file.
+type instance RuleResult GetHieFile = HieFile
+
+
+data GetParsedModule = GetParsedModule
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetParsedModule
+instance NFData GetParsedModule
+
+data GetLocatedImports = GetLocatedImports
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetLocatedImports
+instance NFData GetLocatedImports
+
+data GetDependencyInformation = GetDependencyInformation
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetDependencyInformation
+instance NFData GetDependencyInformation
+
+data ReportImportCycles = ReportImportCycles
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable ReportImportCycles
+instance NFData ReportImportCycles
+
+data GetDependencies = GetDependencies
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetDependencies
+instance NFData GetDependencies
+
+data TypeCheck = TypeCheck
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable TypeCheck
+instance NFData TypeCheck
+
+data GetSpanInfo = GetSpanInfo
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetSpanInfo
+instance NFData GetSpanInfo
+
+data GenerateCore = GenerateCore
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GenerateCore
+instance NFData GenerateCore
+
+data GhcSession = GhcSession
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GhcSession
+instance NFData GhcSession
+
+-- Note that we embed the filepath here instead of using the filepath associated with Shake keys.
+-- Otherwise we will garbage collect the result since files in package dependencies will not be declared reachable.
+data GetHieFile = GetHieFile FilePath
+ deriving (Eq, Show, Typeable, Generic)
+instance Hashable GetHieFile
+instance NFData GetHieFile
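The declarations above all follow the same pattern: a unit data type deriving Eq, Show, Typeable and Generic, Hashable and NFData instances, and a RuleResult type instance fixing the value type. A hypothetical sketch of adding a new key (GetModuleNameExample is not part of ghcide):

-- Hypothetical key, for illustration only: maps a file to its module name.
data GetModuleNameExample = GetModuleNameExample
    deriving (Eq, Show, Typeable, Generic)
instance Hashable GetModuleNameExample
instance NFData GetModuleNameExample

-- | The module name of the file, as a plain String.
type instance RuleResult GetModuleNameExample = String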
diff --git a/src/Development/IDE/Core/Rules.hs b/src/Development/IDE/Core/Rules.hs
new file mode 100644
index 0000000..9176ab4
--- /dev/null
+++ b/src/Development/IDE/Core/Rules.hs
@@ -0,0 +1,377 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+
+-- | A Shake implementation of the compiler service, built
+-- using the "Shaker" abstraction layer for in-memory use.
+--
+module Development.IDE.Core.Rules(
+ IdeState, GetDependencies(..), GetParsedModule(..), TransitiveDependencies(..),
+ Priority(..),
+ priorityTypeCheck,
+ priorityGenerateCore,
+ priorityFilesOfInterest,
+ runAction, useE, useNoFileE, usesE,
+ toIdeResult, defineNoFile,
+ mainRule,
+ getGhcCore,
+ getAtPoint,
+ getDefinition,
+ getDependencies,
+ getParsedModule,
+ fileFromParsedModule,
+ writeIfacesAndHie,
+ ) where
+
+import Control.Monad.Except
+import Control.Monad.Trans.Maybe
+import Development.IDE.Core.Compile
+import Development.IDE.Types.Options
+import Development.IDE.Spans.Calculate
+import Development.IDE.Import.DependencyInformation
+import Development.IDE.Import.FindImports
+import Development.IDE.Core.FileStore
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+import Development.IDE.GHC.Util
+import Data.Coerce
+import Data.Either.Extra
+import Data.Maybe
+import Data.Foldable
+import qualified Data.IntMap.Strict as IntMap
+import qualified Data.IntSet as IntSet
+import Data.List
+import qualified Data.Set as Set
+import qualified Data.Text as T
+import Development.IDE.GHC.Error
+import Development.Shake hiding (Diagnostic)
+import Development.IDE.Core.RuleTypes
+
+import GHC hiding (parseModule, typecheckModule)
+import Development.IDE.GHC.Compat
+import UniqSupply
+import NameCache
+import HscTypes
+import GHC.Generics(Generic)
+
+import qualified Development.IDE.Spans.AtPoint as AtPoint
+import Development.IDE.Core.Service
+import Development.IDE.Core.Shake
+import Development.Shake.Classes
+import System.Directory
+import System.FilePath
+import MkIface
+
+-- | Convert a rule that can only produce errors or a result into the more
+-- general IdeResult type, which supports producing warnings alongside a result.
+toIdeResult :: Either [FileDiagnostic] v -> IdeResult v
+toIdeResult = either (, Nothing) (([],) . Just)
+
+-- | useE is useful to implement functions that aren’t rules but need short-circuiting,
+-- e.g. getDefinition.
+useE :: IdeRule k v => k -> NormalizedFilePath -> MaybeT Action v
+useE k = MaybeT . use k
+
+useNoFileE :: IdeRule k v => k -> MaybeT Action v
+useNoFileE k = useE k ""
+
+usesE :: IdeRule k v => k -> [NormalizedFilePath] -> MaybeT Action [v]
+usesE k = MaybeT . fmap sequence . uses k
+
+defineNoFile :: IdeRule k v => (k -> Action v) -> Rules ()
+defineNoFile f = define $ \k file -> do
+ if file == "" then do res <- f k; return ([], Just res) else
+ fail $ "Rule " ++ show k ++ " should always be called with the empty string for a file"
+
+
+------------------------------------------------------------
+-- Exposed API
+
+
+-- | Generate the GHC Core for the supplied file and its dependencies.
+getGhcCore :: NormalizedFilePath -> Action (Maybe [CoreModule])
+getGhcCore file = runMaybeT $ do
+ files <- transitiveModuleDeps <$> useE GetDependencies file
+ pms <- usesE GetParsedModule $ files ++ [file]
+ usesE GenerateCore $ map fileFromParsedModule pms
+
+
+
+-- | Get all transitive file dependencies of a given module.
+-- Does not include the file itself.
+getDependencies :: NormalizedFilePath -> Action (Maybe [NormalizedFilePath])
+getDependencies file = fmap transitiveModuleDeps <$> use GetDependencies file
+
+-- | Try to get hover text for the name under point.
+getAtPoint :: NormalizedFilePath -> Position -> Action (Maybe (Maybe Range, [T.Text]))
+getAtPoint file pos = fmap join $ runMaybeT $ do
+ opts <- lift getIdeOptions
+ files <- transitiveModuleDeps <$> useE GetDependencies file
+ tms <- usesE TypeCheck (file : files)
+ spans <- useE GetSpanInfo file
+ return $ AtPoint.atPoint opts (map tmrModule tms) spans pos
+
+-- | Goto Definition.
+getDefinition :: NormalizedFilePath -> Position -> Action (Maybe Location)
+getDefinition file pos = fmap join $ runMaybeT $ do
+ spans <- useE GetSpanInfo file
+ pkgState <- hscEnv <$> useE GhcSession file
+ opts <- lift getIdeOptions
+ let getHieFile x = useNoFile (GetHieFile x)
+ lift $ AtPoint.gotoDefinition getHieFile opts pkgState spans pos
+
+-- | Parse the contents of a Haskell file.
+getParsedModule :: NormalizedFilePath -> Action (Maybe ParsedModule)
+getParsedModule file = use GetParsedModule file
+
+-- | Write interface files and hie files to the location specified by the given options.
+writeIfacesAndHie ::
+ NormalizedFilePath -> [NormalizedFilePath] -> Action (Maybe [NormalizedFilePath])
+writeIfacesAndHie ifDir files =
+ runMaybeT $ do
+ tcms <- usesE TypeCheck files
+ fmap concat $ forM (zip files tcms) $ \(file, tcm) -> do
+ session <- lift $ hscEnv <$> use_ GhcSession file
+ liftIO $ writeTcm session tcm
+ where
+ writeTcm session tcm =
+ do
+ let fp =
+ fromNormalizedFilePath ifDir </>
+ (ms_hspp_file $
+ pm_mod_summary $ tm_parsed_module $ tmrModule tcm)
+ createDirectoryIfMissing True (takeDirectory fp)
+ let ifaceFp = replaceExtension fp ".hi"
+ let hieFp = replaceExtension fp ".hie"
+ writeIfaceFile
+ (hsc_dflags session)
+ ifaceFp
+ (hm_iface $ tmrModInfo tcm)
+ hieFile <-
+ liftIO $
+ runHsc session $
+ mkHieFile
+ (pm_mod_summary $ tm_parsed_module $ tmrModule tcm)
+ (fst $ tm_internals_ $ tmrModule tcm)
+ (fromJust $ tm_renamed_source $ tmrModule tcm)
+ writeHieFile hieFp hieFile
+ pure [toNormalizedFilePath ifaceFp, toNormalizedFilePath hieFp]
+
+------------------------------------------------------------
+-- Rules
+-- These typically go from key to value and are oracles.
+
+priorityTypeCheck :: Priority
+priorityTypeCheck = Priority 0
+
+priorityGenerateCore :: Priority
+priorityGenerateCore = Priority (-1)
+
+priorityFilesOfInterest :: Priority
+priorityFilesOfInterest = Priority (-2)
+
+getParsedModuleRule :: Rules ()
+getParsedModuleRule =
+ define $ \GetParsedModule file -> do
+ (_, contents) <- getFileContents file
+ packageState <- hscEnv <$> use_ GhcSession file
+ opt <- getIdeOptions
+ liftIO $ parseModule opt packageState (fromNormalizedFilePath file) contents
+
+getLocatedImportsRule :: Rules ()
+getLocatedImportsRule =
+ define $ \GetLocatedImports file -> do
+ pm <- use_ GetParsedModule file
+ let ms = pm_mod_summary pm
+ let imports = [(False, imp) | imp <- ms_textual_imps ms] ++ [(True, imp) | imp <- ms_srcimps ms]
+ env <- hscEnv <$> useNoFile_ GhcSession
+ let dflags = addRelativeImport pm $ hsc_dflags env
+ opt <- getIdeOptions
+ (diags, imports') <- fmap unzip $ forM imports $ \(isSource, (mbPkgName, modName)) -> do
+ diagOrImp <- locateModule dflags (optExtensions opt) getFileExists modName mbPkgName isSource
+ case diagOrImp of
+ Left diags -> pure (diags, Left (modName, Nothing))
+ Right (FileImport path) -> pure ([], Left (modName, Just path))
+ Right (PackageImport pkgId) -> liftIO $ do
+ diagsOrPkgDeps <- computePackageDeps env pkgId
+ case diagsOrPkgDeps of
+ Left diags -> pure (diags, Right Nothing)
+ Right pkgIds -> pure ([], Right $ Just $ pkgId : pkgIds)
+ let (moduleImports, pkgImports) = partitionEithers imports'
+ case sequence pkgImports of
+ Nothing -> pure (concat diags, Nothing)
+ Just pkgImports -> pure (concat diags, Just (moduleImports, Set.fromList $ concat pkgImports))
+
+
+-- | Given a target file path, construct the raw dependency results by following
+-- imports recursively.
+rawDependencyInformation :: NormalizedFilePath -> Action RawDependencyInformation
+rawDependencyInformation f = do
+ let (initialId, initialMap) = getPathId f emptyPathIdMap
+ go (IntSet.singleton $ getFilePathId initialId)
+ (RawDependencyInformation IntMap.empty initialMap)
+ where
+ go fs rawDepInfo =
+ case IntSet.minView fs of
+ -- Queue is empty
+ Nothing -> pure rawDepInfo
+ -- Pop f from the queue and process it
+ Just (f, fs) -> do
+ let fId = FilePathId f
+ importsOrErr <- use GetLocatedImports $ idToPath (rawPathIdMap rawDepInfo) fId
+ case importsOrErr of
+ Nothing ->
+ -- File doesn’t parse
+ let rawDepInfo' = insertImport fId (Left ModuleParseError) rawDepInfo
+ in go fs rawDepInfo'
+ Just (modImports, pkgImports) -> do
+ let f :: PathIdMap -> (a, Maybe NormalizedFilePath) -> (PathIdMap, (a, Maybe FilePathId))
+ f pathMap (imp, mbPath) = case mbPath of
+ Nothing -> (pathMap, (imp, Nothing))
+ Just path ->
+ let (pathId, pathMap') = getPathId path pathMap
+ in (pathMap', (imp, Just pathId))
+ -- Convert paths in imports to ids and update the path map
+ let (pathIdMap, modImports') = mapAccumL f (rawPathIdMap rawDepInfo) modImports
+ -- Files that we haven’t seen before are added to the queue.
+ let newFiles =
+ IntSet.fromList (coerce $ mapMaybe snd modImports')
+ IntSet.\\ IntMap.keysSet (rawImports rawDepInfo)
+ let rawDepInfo' = insertImport fId (Right $ ModuleImports modImports' pkgImports) rawDepInfo
+ go (newFiles `IntSet.union` fs) (rawDepInfo' { rawPathIdMap = pathIdMap })
+
+getDependencyInformationRule :: Rules ()
+getDependencyInformationRule =
+ define $ \GetDependencyInformation file -> do
+ rawDepInfo <- rawDependencyInformation file
+ pure ([], Just $ processDependencyInformation rawDepInfo)
+
+reportImportCyclesRule :: Rules ()
+reportImportCyclesRule =
+ define $ \ReportImportCycles file -> fmap (\errs -> if null errs then ([], Just ()) else (errs, Nothing)) $ do
+ DependencyInformation{..} <- use_ GetDependencyInformation file
+ let fileId = pathToId depPathIdMap file
+ case IntMap.lookup (getFilePathId fileId) depErrorNodes of
+ Nothing -> pure []
+ Just errs -> do
+ let cycles = mapMaybe (cycleErrorInFile fileId) (toList errs)
+ -- Convert cycles of files into cycles of module names
+ forM cycles $ \(imp, files) -> do
+ modNames <- forM files $ \fileId -> do
+ let file = idToPath depPathIdMap fileId
+ getModuleName file
+ pure $ toDiag imp $ sort modNames
+ where cycleErrorInFile f (PartOfCycle imp fs)
+ | f `elem` fs = Just (imp, fs)
+ cycleErrorInFile _ _ = Nothing
+ toDiag imp mods = (fp ,) $ Diagnostic
+ { _range = (_range :: Location -> Range) loc
+ , _severity = Just DsError
+ , _source = Just "Import cycle detection"
+ , _message = "Cyclic module dependency between " <> showCycle mods
+ , _code = Nothing
+ , _relatedInformation = Nothing
+ }
+ where loc = srcSpanToLocation (getLoc imp)
+ fp = toNormalizedFilePath $ srcSpanToFilename (getLoc imp)
+ getModuleName file = do
+ pm <- use_ GetParsedModule file
+ pure (moduleNameString . moduleName . ms_mod $ pm_mod_summary pm)
+ showCycle mods = T.intercalate ", " (map T.pack mods)
+
+-- returns all transitive dependencies in topological order.
+-- NOTE: result does not include the argument file.
+getDependenciesRule :: Rules ()
+getDependenciesRule =
+ define $ \GetDependencies file -> do
+ depInfo@DependencyInformation{..} <- use_ GetDependencyInformation file
+ let allFiles = reachableModules depInfo
+ _ <- uses_ ReportImportCycles allFiles
+ return ([], transitiveDeps depInfo file)
+
+-- Source SpanInfo is used by AtPoint and Goto Definition.
+getSpanInfoRule :: Rules ()
+getSpanInfoRule =
+ define $ \GetSpanInfo file -> do
+ tc <- use_ TypeCheck file
+ (fileImports, _) <- use_ GetLocatedImports file
+ packageState <- hscEnv <$> use_ GhcSession file
+ x <- liftIO $ getSrcSpanInfos packageState fileImports tc
+ return ([], Just x)
+
+-- Typechecks a module.
+typeCheckRule :: Rules ()
+typeCheckRule =
+ define $ \TypeCheck file -> do
+ pm <- use_ GetParsedModule file
+ deps <- use_ GetDependencies file
+ tms <- uses_ TypeCheck (transitiveModuleDeps deps)
+ setPriority priorityTypeCheck
+ packageState <- hscEnv <$> use_ GhcSession file
+ liftIO $ typecheckModule packageState tms pm
+
+
+generateCoreRule :: Rules ()
+generateCoreRule =
+ define $ \GenerateCore file -> do
+ deps <- use_ GetDependencies file
+ (tm:tms) <- uses_ TypeCheck (file:transitiveModuleDeps deps)
+ setPriority priorityGenerateCore
+ packageState <- hscEnv <$> use_ GhcSession file
+ liftIO $ compileModule packageState tms tm
+
+
+-- A local rule type to get caching. We want to use newCache, but it has
+-- issues with ThreadKilled exceptions, so we lift it to a full rule.
+-- https://github.com/digital-asset/daml/pull/2808#issuecomment-529639547
+type instance RuleResult GhcSessionIO = GhcSessionFun
+
+data GhcSessionIO = GhcSessionIO deriving (Eq, Show, Typeable, Generic)
+instance Hashable GhcSessionIO
+instance NFData GhcSessionIO
+
+newtype GhcSessionFun = GhcSessionFun (FilePath -> Action HscEnvEq)
+instance Show GhcSessionFun where show _ = "GhcSessionFun"
+instance NFData GhcSessionFun where rnf !_ = ()
+
+
+loadGhcSession :: Rules ()
+loadGhcSession = do
+ defineNoFile $ \GhcSessionIO -> do
+ opts <- getIdeOptions
+ liftIO $ GhcSessionFun <$> optGhcSession opts
+ define $ \GhcSession file -> do
+ GhcSessionFun fun <- useNoFile_ GhcSessionIO
+ val <- fun $ fromNormalizedFilePath file
+ return ([], Just val)
+
+
+getHieFileRule :: Rules ()
+getHieFileRule =
+ defineNoFile $ \(GetHieFile f) -> do
+ u <- liftIO $ mkSplitUniqSupply 'a'
+ let nameCache = initNameCache u []
+ liftIO $ fmap (hie_file_result . fst) $ readHieFile nameCache f
+
+-- | A rule that wires per-file rules together
+mainRule :: Rules ()
+mainRule = do
+ getParsedModuleRule
+ getLocatedImportsRule
+ getDependencyInformationRule
+ reportImportCyclesRule
+ getDependenciesRule
+ typeCheckRule
+ getSpanInfoRule
+ generateCoreRule
+ loadGhcSession
+ getHieFileRule
+
+------------------------------------------------------------
+
+fileFromParsedModule :: ParsedModule -> NormalizedFilePath
+fileFromParsedModule = toNormalizedFilePath . ms_hspp_file . pm_mod_summary
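As a sketch of the rule-definition pattern used throughout this module, the hypothetical GetModuleNameExample key sketched after RuleTypes.hs above could be given a rule like the one below; define, use_, GetParsedModule and the GHC accessors are real names, while the rule itself is illustrative only.

-- Hypothetical rule, for illustration only: compute the module name of a file
-- from its parsed module, reporting no diagnostics of its own.
getModuleNameExampleRule :: Rules ()
getModuleNameExampleRule =
    define $ \GetModuleNameExample file -> do
        pm <- use_ GetParsedModule file
        let name = moduleNameString . moduleName . ms_mod $ pm_mod_summary pm
        return ([], Just name)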
diff --git a/src/Development/IDE/Core/Service.hs b/src/Development/IDE/Core/Service.hs
new file mode 100644
index 0000000..278ec83
--- /dev/null
+++ b/src/Development/IDE/Core/Service.hs
@@ -0,0 +1,95 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE FlexibleInstances #-}
+
+-- | A Shake implementation of the compiler service, built
+-- using the "Shaker" abstraction layer for in-memory use.
+--
+module Development.IDE.Core.Service(
+ getIdeOptions,
+ IdeState, initialise, shutdown,
+ runAction,
+ runActionSync,
+ writeProfile,
+ getDiagnostics, unsafeClearDiagnostics,
+ ideLogger,
+ updatePositionMapping,
+ ) where
+
+import Control.Concurrent.Extra
+import Control.Concurrent.Async
+import Control.Monad.Except
+import Development.IDE.Types.Options (IdeOptions(..))
+import Development.IDE.Core.FileStore
+import Development.IDE.Core.OfInterest
+import Development.IDE.Types.Logger
+import Development.Shake
+import Data.Either.Extra
+import qualified Language.Haskell.LSP.Messages as LSP
+
+import Development.IDE.Core.Shake
+
+
+
+newtype GlobalIdeOptions = GlobalIdeOptions IdeOptions
+instance IsIdeGlobal GlobalIdeOptions
+
+------------------------------------------------------------
+-- Exposed API
+
+-- | Initialise the Compiler Service.
+initialise :: Rules ()
+ -> (LSP.FromServerMessage -> IO ())
+ -> Logger
+ -> IdeOptions
+ -> VFSHandle
+ -> IO IdeState
+initialise mainRule toDiags logger options vfs =
+ shakeOpen
+ toDiags
+ logger
+ (optShakeProfiling options)
+ (optReportProgress options)
+ (shakeOptions { shakeThreads = optThreads options
+ , shakeFiles = "/dev/null"
+ }) $ do
+ addIdeGlobal $ GlobalIdeOptions options
+ fileStoreRules vfs
+ ofInterestRules
+ mainRule
+
+writeProfile :: IdeState -> FilePath -> IO ()
+writeProfile = shakeProfile
+
+-- | Shut down the Compiler Service.
+shutdown :: IdeState -> IO ()
+shutdown = shakeShut
+
+-- This will return as soon as the result of the action is
+-- available. There might still be other rules running at this point,
+-- e.g., the ofInterestRule.
+runAction :: IdeState -> Action a -> IO a
+runAction ide action = do
+ bar <- newBarrier
+ res <- shakeRun ide [do v <- action; liftIO $ signalBarrier bar v; return v]
+ -- shakeRun might throw an exception (either through action or a default rule),
+ -- in which case action may not complete successfully, and signalBarrier might not be called.
+ -- Therefore we wait for either res (which propagates the exception) or the barrier.
+ -- Importantly, if the barrier does finish, cancelling res only kills waiting for the result,
+ -- it doesn't kill the actual work
+ fmap fromEither $ race (head <$> res) $ waitBarrier bar
+
+
+-- | `runActionSync` is similar to `runAction` but it will
+-- wait for all rules (so in particular the `ofInterestRule`) to
+-- finish running. This is mainly useful in tests, where you want
+-- to wait for all rules to fire so you can check diagnostics.
+runActionSync :: IdeState -> Action a -> IO a
+runActionSync s act = fmap head $ join $ shakeRun s [act]
+
+getIdeOptions :: Action IdeOptions
+getIdeOptions = do
+ GlobalIdeOptions x <- getIdeGlobalAction
+ return x
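A sketch of how a client might drive this module, assuming it already has an event handler, Logger, IdeOptions and VFSHandle in hand and imports mainRule from Development.IDE.Core.Rules; a production client would wrap this in bracket so shutdown also runs when the action throws.

-- Sketch only: start the service, run one Action, and shut down again.
withIdeOnce :: (LSP.FromServerMessage -> IO ())
            -> Logger -> IdeOptions -> VFSHandle
            -> Action a -> IO a
withIdeOnce toDiags logger options vfs act = do
    ide <- initialise mainRule toDiags logger options vfs
    res <- runAction ide act
    shutdown ide
    pure res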
diff --git a/src/Development/IDE/Core/Shake.hs b/src/Development/IDE/Core/Shake.hs
new file mode 100644
index 0000000..69c7b02
--- /dev/null
+++ b/src/Development/IDE/Core/Shake.hs
@@ -0,0 +1,674 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE ExistentialQuantification #-}
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE ConstraintKinds #-}
+
+-- | A Shake implementation of the compiler service.
+--
+-- There are two primary locations where data lives, and both of
+-- these contain much the same data:
+--
+-- * The Shake database (inside 'shakeDb') stores a map of shake keys
+-- to shake values. In our case, the keys are all of type 'Q' and the values of type 'A'.
+-- During a single run all the values in the Shake database are consistent,
+-- so they can be used in conjunction with each other, e.g. in 'uses'.
+--
+-- * The 'Values' type stores a map of keys to values. These values are
+-- always stored as real Haskell values, whereas Shake serialises all 'A' values
+-- between runs. To deserialise a Shake value, we just consult Values.
+module Development.IDE.Core.Shake(
+ IdeState,
+ IdeRule, IdeResult, GetModificationTime(..),
+ shakeOpen, shakeShut,
+ shakeRun,
+ shakeProfile,
+ use, useWithStale, useNoFile, uses, usesWithStale,
+ use_, useNoFile_, uses_,
+ define, defineEarlyCutoff,
+ getDiagnostics, unsafeClearDiagnostics,
+ IsIdeGlobal, addIdeGlobal, getIdeGlobalState, getIdeGlobalAction,
+ garbageCollect,
+ setPriority,
+ sendEvent,
+ ideLogger,
+ actionLogger,
+ FileVersion(..),
+ Priority(..),
+ updatePositionMapping
+ ) where
+
+import Development.Shake hiding (ShakeValue)
+import Development.Shake.Database
+import Development.Shake.Classes
+import Development.Shake.Rule
+import qualified Data.HashMap.Strict as HMap
+import qualified Data.Map.Strict as Map
+import qualified Data.Map.Merge.Strict as Map
+import qualified Data.ByteString.Char8 as BS
+import Data.Dynamic
+import Data.Maybe
+import Data.Map.Strict (Map)
+import Data.List.Extra
+import qualified Data.Set as Set
+import qualified Data.Text as T
+import Data.Tuple.Extra
+import Data.Unique
+import Development.IDE.Core.Debouncer
+import Development.IDE.Core.PositionMapping
+import Development.IDE.Types.Logger hiding (Priority)
+import Language.Haskell.LSP.Diagnostics
+import qualified Data.SortedList as SL
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+import Development.IDE.Types.Options
+import Control.Concurrent.Extra
+import Control.Exception
+import Control.DeepSeq
+import System.Time.Extra
+import Data.Typeable
+import qualified Language.Haskell.LSP.Messages as LSP
+import qualified Language.Haskell.LSP.Types as LSP
+import System.FilePath hiding (makeRelative)
+import qualified Development.Shake as Shake
+import Control.Monad.Extra
+import Data.Time
+import GHC.Generics
+import System.IO.Unsafe
+import Numeric.Extra
+import Language.Haskell.LSP.Types
+
+
+-- information we stash inside the shakeExtra field
+data ShakeExtras = ShakeExtras
+ {eventer :: LSP.FromServerMessage -> IO ()
+ ,debouncer :: Debouncer NormalizedUri
+ ,logger :: Logger
+ ,globals :: Var (HMap.HashMap TypeRep Dynamic)
+ ,state :: Var Values
+ ,diagnostics :: Var DiagnosticStore
+ ,publishedDiagnostics :: Var (Map NormalizedUri [Diagnostic])
+ -- ^ This represents the set of diagnostics that we have published.
+ -- Due to debouncing not every change might get published.
+ ,positionMapping :: Var (Map NormalizedUri (Map TextDocumentVersion PositionMapping))
+ -- ^ Map from a text document version to a PositionMapping that describes how to map
+ -- positions in a version of that document to positions in the latest version
+ }
+
+getShakeExtras :: Action ShakeExtras
+getShakeExtras = do
+ Just x <- getShakeExtra @ShakeExtras
+ return x
+
+getShakeExtrasRules :: Rules ShakeExtras
+getShakeExtrasRules = do
+ Just x <- getShakeExtraRules @ShakeExtras
+ return x
+
+
+
+class Typeable a => IsIdeGlobal a where
+
+addIdeGlobal :: IsIdeGlobal a => a -> Rules ()
+addIdeGlobal x@(typeOf -> ty) = do
+ ShakeExtras{globals} <- getShakeExtrasRules
+ liftIO $ modifyVar_ globals $ \mp -> case HMap.lookup ty mp of
+ Just _ -> error $ "Can't addIdeGlobal twice on the same type, got " ++ show ty
+ Nothing -> return $! HMap.insert ty (toDyn x) mp
+
+
+getIdeGlobalExtras :: forall a . IsIdeGlobal a => ShakeExtras -> IO a
+getIdeGlobalExtras ShakeExtras{globals} = do
+ Just x <- HMap.lookup (typeRep (Proxy :: Proxy a)) <$> readVar globals
+ return $ fromDyn x $ error "Serious error, corrupt globals"
+
+getIdeGlobalAction :: forall a . IsIdeGlobal a => Action a
+getIdeGlobalAction = liftIO . getIdeGlobalExtras =<< getShakeExtras
+
+getIdeGlobalState :: forall a . IsIdeGlobal a => IdeState -> IO a
+getIdeGlobalState = getIdeGlobalExtras . shakeExtras
+
+
+-- | The state of all the values.
+type Values = HMap.HashMap (NormalizedFilePath, Key) (Value Dynamic)
+
+-- | Key type
+data Key = forall k . (Typeable k, Hashable k, Eq k, Show k) => Key k
+
+instance Show Key where
+ show (Key k) = show k
+
+instance Eq Key where
+ Key k1 == Key k2 | Just k2' <- cast k2 = k1 == k2'
+ | otherwise = False
+
+instance Hashable Key where
+ hashWithSalt salt (Key key) = hashWithSalt salt key
+
+-- | The result of an IDE operation. Warnings and errors are in the Diagnostic,
+-- and a value is in the Maybe. For operations that throw an error you
+-- expect a non-empty list of diagnostics, at least one of which is an error,
+-- and a Nothing. For operations that succeed you expect perhaps some warnings
+-- and a Just. For operations that depend on other failing operations you may
+-- get empty diagnostics and a Nothing, to indicate this phase throws no fresh
+-- errors but still failed.
+--
+-- A rule on a file should only return diagnostics for that given file. It should
+-- not propagate diagnostic errors through multiple phases.
+type IdeResult v = ([FileDiagnostic], Maybe v)
+
+data Value v
+ = Succeeded TextDocumentVersion v
+ | Stale TextDocumentVersion v
+ | Failed
+ deriving (Functor, Generic, Show)
+
+instance NFData v => NFData (Value v)
+
+-- | Convert a Value to a Maybe. This will only return `Just` for
+-- up-to-date results, not for stale values.
+currentValue :: Value v -> Maybe v
+currentValue (Succeeded _ v) = Just v
+currentValue (Stale _ _) = Nothing
+currentValue Failed = Nothing
+
+-- | Return the most recent, potentially stale, value and a PositionMapping
+-- for the version of that value.
+lastValue :: NormalizedFilePath -> Value v -> Action (Maybe (v, PositionMapping))
+lastValue file v = do
+ ShakeExtras{positionMapping} <- getShakeExtras
+ allMappings <- liftIO $ readVar positionMapping
+ pure $ case v of
+ Succeeded ver v -> Just (v, mappingForVersion allMappings file ver)
+ Stale ver v -> Just (v, mappingForVersion allMappings file ver)
+ Failed -> Nothing
+
+valueVersion :: Value v -> Maybe TextDocumentVersion
+valueVersion = \case
+ Succeeded ver _ -> Just ver
+ Stale ver _ -> Just ver
+ Failed -> Nothing
+
+mappingForVersion
+ :: Map NormalizedUri (Map TextDocumentVersion PositionMapping)
+ -> NormalizedFilePath
+ -> TextDocumentVersion
+ -> PositionMapping
+mappingForVersion allMappings file ver =
+ fromMaybe idMapping $
+ Map.lookup ver =<<
+ Map.lookup (filePathToUri' file) allMappings
+
+type IdeRule k v =
+ ( Shake.RuleResult k ~ v
+ , Show k
+ , Typeable k
+ , NFData k
+ , Hashable k
+ , Eq k
+ , Show v
+ , Typeable v
+ , NFData v
+ )
+
+-- | A Shake database plus persistent store. Can be thought of as storing
+-- mappings from @(FilePath, k)@ to @RuleResult k@.
+data IdeState = IdeState
+ {shakeDb :: ShakeDatabase
+ ,shakeAbort :: Var (IO ()) -- close whoever was running last
+ ,shakeClose :: IO ()
+ ,shakeExtras :: ShakeExtras
+ ,shakeProfileDir :: Maybe FilePath
+ }
+
+
+-- This is debugging code that generates a series of profiles whenever a profile directory is given
+shakeRunDatabaseProfile :: Maybe FilePath -> ShakeDatabase -> [Action a] -> IO [a]
+shakeRunDatabaseProfile mbProfileDir shakeDb acts = do
+ (time, (res,_)) <- duration $ shakeRunDatabase shakeDb acts
+ whenJust mbProfileDir $ \dir -> do
+ count <- modifyVar profileCounter $ \x -> let !y = x+1 in return (y,y)
+ let file = "ide-" ++ profileStartTime ++ "-" ++ takeEnd 5 ("0000" ++ show count) ++ "-" ++ showDP 2 time <.> "html"
+ shakeProfileDatabase shakeDb $ dir </> file
+ return res
+ where
+
+{-# NOINLINE profileStartTime #-}
+profileStartTime :: String
+profileStartTime = unsafePerformIO $ formatTime defaultTimeLocale "%Y%m%d-%H%M%S" <$> getCurrentTime
+
+{-# NOINLINE profileCounter #-}
+profileCounter :: Var Int
+profileCounter = unsafePerformIO $ newVar 0
+
+setValues :: IdeRule k v
+ => Var Values
+ -> k
+ -> NormalizedFilePath
+ -> Value v
+ -> IO ()
+setValues state key file val = modifyVar_ state $ \vals -> do
+ -- Force to make sure the old HashMap is not retained
+ evaluate $ HMap.insert (file, Key key) (fmap toDyn val) vals
+
+-- | We return Nothing if the rule has not run and Just Failed if it has failed to produce a value.
+getValues :: forall k v. IdeRule k v => Var Values -> k -> NormalizedFilePath -> IO (Maybe (Value v))
+getValues state key file = do
+ vs <- readVar state
+ case HMap.lookup (file, Key key) vs of
+ Nothing -> pure Nothing
+ Just v -> do
+ let r = fmap (fromJust . fromDynamic @v) v
+ -- Force to make sure we do not retain a reference to the HashMap
+ -- and we blow up immediately if the fromJust should fail
+ -- (which would be an internal error).
+ evaluate (r `seqValue` Just r)
+
+-- | Seq the result stored in the Shake value. This only
+-- evaluates the value to WHNF not NF. We take care of the latter
+-- elsewhere and doing it twice is expensive.
+seqValue :: Value v -> b -> b
+seqValue v b = case v of
+ Succeeded ver v -> rnf ver `seq` v `seq` b
+ Stale ver v -> rnf ver `seq` v `seq` b
+ Failed -> b
+
+-- | Open an 'IdeState', which should be shut using 'shakeShut'.
+shakeOpen :: (LSP.FromServerMessage -> IO ()) -- ^ diagnostic handler
+ -> Logger
+ -> Maybe FilePath
+ -> IdeReportProgress
+ -> ShakeOptions
+ -> Rules ()
+ -> IO IdeState
+shakeOpen eventer logger shakeProfileDir (IdeReportProgress reportProgress) opts rules = do
+ shakeExtras <- do
+ globals <- newVar HMap.empty
+ state <- newVar HMap.empty
+ diagnostics <- newVar mempty
+ publishedDiagnostics <- newVar mempty
+ debouncer <- newDebouncer
+ positionMapping <- newVar Map.empty
+ pure ShakeExtras{..}
+ (shakeDb, shakeClose) <-
+ shakeOpenDatabase
+ opts
+ { shakeExtra = addShakeExtra shakeExtras $ shakeExtra opts
+ , shakeProgress = if reportProgress then lspShakeProgress eventer else const (pure ())
+ }
+ rules
+ shakeAbort <- newVar $ return ()
+ shakeDb <- shakeDb
+ return IdeState{..}
+
+lspShakeProgress :: (LSP.FromServerMessage -> IO ()) -> IO Progress -> IO ()
+lspShakeProgress sendMsg prog = do
+ u <- T.pack . show . hashUnique <$> newUnique
+ bracket_ (start u) (stop u) (loop u)
+ where
+ start id = sendMsg $ LSP.NotProgressStart $ LSP.fmServerProgressStartNotification
+ ProgressStartParams
+ { _id = id
+ , _title = "Processing"
+ , _cancellable = Nothing
+ , _message = Nothing
+ , _percentage = Nothing
+ }
+ stop id = sendMsg $ LSP.NotProgressDone $ LSP.fmServerProgressDoneNotification
+ ProgressDoneParams
+ { _id = id
+ }
+ sample = 0.1
+ loop id = forever $ do
+ sleep sample
+ p <- prog
+ let done = countSkipped p + countBuilt p
+ let todo = done + countUnknown p + countTodo p
+ sendMsg $ LSP.NotProgressReport $ LSP.fmServerProgressReportNotification
+ ProgressReportParams
+ { _id = id
+ , _message = Just $ T.pack $ show done <> "/" <> show todo
+ , _percentage = Nothing
+ }
+
+shakeProfile :: IdeState -> FilePath -> IO ()
+shakeProfile IdeState{..} = shakeProfileDatabase shakeDb
+
+shakeShut :: IdeState -> IO ()
+shakeShut IdeState{..} = withVar shakeAbort $ \stop -> do
+ -- Shake gets unhappy if you try to close when there is a running
+ -- request so we first abort that.
+ stop
+ shakeClose
+
+-- | Spawn immediately. If there is already a running call to shakeRun, it will be aborted with an exception.
+shakeRun :: IdeState -> [Action a] -> IO (IO [a])
+-- FIXME: If there is already a shakeRun queued up and waiting to send me a kill, I should probably
+-- not even start, which would make issues with async exceptions less problematic.
+shakeRun IdeState{shakeExtras=ShakeExtras{..}, ..} acts = modifyVar shakeAbort $ \stop -> do
+ (stopTime,_) <- duration stop
+ logDebug logger $ T.pack $ "Starting shakeRun (aborting the previous one took " ++ showDuration stopTime ++ ")"
+ bar <- newBarrier
+ start <- offsetTime
+ thread <- forkFinally (shakeRunDatabaseProfile shakeProfileDir shakeDb acts) $ \res -> do
+ runTime <- start
+ let res' = case res of
+ Left e -> "exception: " <> displayException e
+ Right _ -> "completed"
+ logDebug logger $ T.pack $
+ "Finishing shakeRun (took " ++ showDuration runTime ++ ", " ++ res' ++ ")"
+ signalBarrier bar res
+ -- important: we send an async exception to the thread, then wait for it to die, before continuing
+ return (do killThread thread; void $ waitBarrier bar, either throwIO return =<< waitBarrier bar)
+
+getDiagnostics :: IdeState -> IO [FileDiagnostic]
+getDiagnostics IdeState{shakeExtras = ShakeExtras{diagnostics}} = do
+ val <- readVar diagnostics
+ return $ getAllDiagnostics val
+
+-- | FIXME: This function is temporary! Only required because the files-of-interest mechanism doesn't work yet.
+unsafeClearDiagnostics :: IdeState -> IO ()
+unsafeClearDiagnostics IdeState{shakeExtras = ShakeExtras{diagnostics}} =
+ writeVar diagnostics mempty
+
+-- | Clear the results for all files that do not match the given predicate.
+garbageCollect :: (NormalizedFilePath -> Bool) -> Action ()
+garbageCollect keep = do
+ ShakeExtras{state, diagnostics,publishedDiagnostics,positionMapping} <- getShakeExtras
+ liftIO $
+ do newState <- modifyVar state $ \values -> do
+ values <- evaluate $ HMap.filterWithKey (\(file, _) _ -> keep file) values
+ return $! dupe values
+ modifyVar_ diagnostics $ \diags -> return $! filterDiagnostics keep diags
+ modifyVar_ publishedDiagnostics $ \diags -> return $! Map.filterWithKey (\uri _ -> keep (fromUri uri)) diags
+ let versionsForFile =
+ Map.fromListWith Set.union $
+ mapMaybe (\((file, _key), v) -> (filePathToUri' file,) . Set.singleton <$> valueVersion v) $
+ HMap.toList newState
+ modifyVar_ positionMapping $ \mappings -> return $! filterVersionMap versionsForFile mappings
+define
+ :: IdeRule k v
+ => (k -> NormalizedFilePath -> Action (IdeResult v)) -> Rules ()
+define op = defineEarlyCutoff $ \k v -> (Nothing,) <$> op k v
+
+use :: IdeRule k v
+ => k -> NormalizedFilePath -> Action (Maybe v)
+use key file = head <$> uses key [file]
+
+useWithStale :: IdeRule k v
+ => k -> NormalizedFilePath -> Action (Maybe (v, PositionMapping))
+useWithStale key file = head <$> usesWithStale key [file]
+
+useNoFile :: IdeRule k v => k -> Action (Maybe v)
+useNoFile key = use key ""
+
+use_ :: IdeRule k v => k -> NormalizedFilePath -> Action v
+use_ key file = head <$> uses_ key [file]
+
+useNoFile_ :: IdeRule k v => k -> Action v
+useNoFile_ key = use_ key ""
+
+uses_ :: IdeRule k v => k -> [NormalizedFilePath] -> Action [v]
+uses_ key files = do
+ res <- uses key files
+ case sequence res of
+ Nothing -> liftIO $ throwIO BadDependency
+ Just v -> return v
+
+
+-- | When we depend on something that reported an error, and we fail as a direct result, throw BadDependency
+-- which short-circuits the rest of the action
+data BadDependency = BadDependency deriving Show
+instance Exception BadDependency
+
+isBadDependency :: SomeException -> Bool
+isBadDependency x
+ | Just (x :: ShakeException) <- fromException x = isBadDependency $ shakeExceptionInner x
+ | Just (_ :: BadDependency) <- fromException x = True
+ | otherwise = False
+
+newtype Q k = Q (k, NormalizedFilePath)
+ deriving (Eq,Hashable,NFData)
+
+-- Since we use the Shake Database API we don't need Binary instances for keys
+instance Binary (Q k) where
+ put _ = return ()
+ get = fail "Binary.get not defined for type Development.IDE.Core.Shake.Q"
+
+instance Show k => Show (Q k) where
+ show (Q (k, file)) = show k ++ "; " ++ fromNormalizedFilePath file
+
+-- | Invariant: the 'v' must be in normal form (fully evaluated).
+-- Otherwise we keep repeatedly 'rnf'ing values taken from the Shake database.
+-- Note (MK): I am not sure why we need the ShakeValue here; maybe we
+-- can just remove it?
+data A v = A (Value v) ShakeValue
+ deriving Show
+
+instance NFData (A v) where rnf (A v x) = v `seq` rnf x
+
+-- In the Shake database we only store one type of key/result pairs,
+-- namely Q (question) / A (answer).
+type instance RuleResult (Q k) = A (RuleResult k)
+
+
+-- | Return up-to-date results. Stale results will be ignored.
+uses :: IdeRule k v
+ => k -> [NormalizedFilePath] -> Action [Maybe v]
+uses key files = map (\(A value _) -> currentValue value) <$> apply (map (Q . (key,)) files)
+
+-- | Return the last computed result which might be stale.
+usesWithStale :: IdeRule k v
+ => k -> [NormalizedFilePath] -> Action [Maybe (v, PositionMapping)]
+usesWithStale key files = do
+ values <- map (\(A value _) -> value) <$> apply (map (Q . (key,)) files)
+ mapM (uncurry lastValue) (zip files values)
+
+defineEarlyCutoff
+ :: IdeRule k v
+ => (k -> NormalizedFilePath -> Action (Maybe BS.ByteString, IdeResult v))
+ -> Rules ()
+defineEarlyCutoff op = addBuiltinRule noLint noIdentity $ \(Q (key, file)) (old :: Maybe BS.ByteString) mode -> do
+ extras@ShakeExtras{state} <- getShakeExtras
+ val <- case old of
+ Just old | mode == RunDependenciesSame -> do
+ v <- liftIO $ getValues state key file
+ case v of
+ -- No changes in the dependencies and we have
+ -- an existing result.
+ Just v -> return $ Just $ RunResult ChangedNothing old $ A v (decodeShakeValue old)
+ _ -> return Nothing
+ _ -> return Nothing
+ case val of
+ Just res -> return res
+ Nothing -> do
+ (bs, (diags, res)) <- actionCatch
+ (do v <- op key file; liftIO $ evaluate $ force $ v) $
+ \(e :: SomeException) -> pure (Nothing, ([ideErrorText file $ T.pack $ show e | not $ isBadDependency e],Nothing))
+ modTime <- liftIO $ join . fmap currentValue <$> getValues state GetModificationTime file
+ (bs, res) <- case res of
+ Nothing -> do
+ staleV <- liftIO $ getValues state key file
+ pure $ case staleV of
+ Nothing -> (toShakeValue ShakeResult bs, Failed)
+ Just v -> case v of
+ Succeeded ver v -> (toShakeValue ShakeStale bs, Stale ver v)
+ Stale ver v -> (toShakeValue ShakeStale bs, Stale ver v)
+ Failed -> (toShakeValue ShakeResult bs, Failed)
+ Just v -> pure $ (maybe ShakeNoCutoff ShakeResult bs, Succeeded (vfsVersion =<< modTime) v)
+ liftIO $ setValues state key file res
+ updateFileDiagnostics file (Key key) extras $ map snd diags
+ let eq = case (bs, fmap decodeShakeValue old) of
+ (ShakeResult a, Just (ShakeResult b)) -> a == b
+ (ShakeStale a, Just (ShakeStale b)) -> a == b
+ -- If we do not have a previous result
+ -- or we got ShakeNoCutoff we always return False.
+ _ -> False
+ return $ RunResult
+ (if eq then ChangedRecomputeSame else ChangedRecomputeDiff)
+ (encodeShakeValue bs) $
+ A res bs
+
+toShakeValue :: (BS.ByteString -> ShakeValue) -> Maybe BS.ByteString -> ShakeValue
+toShakeValue = maybe ShakeNoCutoff
+
+data ShakeValue
+ = ShakeNoCutoff
+ -- ^ This is what we use when we get Nothing from
+ -- a rule.
+ | ShakeResult !BS.ByteString
+ -- ^ This is used both for `Failed`
+ -- as well as `Succeeded`.
+ | ShakeStale !BS.ByteString
+ deriving (Generic, Show)
+
+instance NFData ShakeValue
+
+encodeShakeValue :: ShakeValue -> BS.ByteString
+encodeShakeValue = \case
+ ShakeNoCutoff -> BS.empty
+ ShakeResult r -> BS.cons 'r' r
+ ShakeStale r -> BS.cons 's' r
+
+decodeShakeValue :: BS.ByteString -> ShakeValue
+decodeShakeValue bs = case BS.uncons bs of
+ Nothing -> ShakeNoCutoff
+ Just (x, xs)
+ | x == 'r' -> ShakeResult xs
+ | x == 's' -> ShakeStale xs
+ | otherwise -> error $ "Failed to parse shake value " <> show bs
+
+
+updateFileDiagnostics ::
+ NormalizedFilePath
+ -> Key
+ -> ShakeExtras
+ -> [Diagnostic] -- ^ current results
+ -> Action ()
+updateFileDiagnostics fp k ShakeExtras{diagnostics, publishedDiagnostics, state, debouncer, eventer} current = liftIO $ do
+ modTime <- join . fmap currentValue <$> getValues state GetModificationTime fp
+ mask_ $ do
+ -- Mask async exceptions to ensure that updated diagnostics are always
+ -- published. Otherwise, we might never publish certain diagnostics if
+ -- an exception strikes between modifyVar but before
+ -- publishDiagnosticsNotification.
+ newDiags <- modifyVar diagnostics $ \old -> do
+ let newDiagsStore = setStageDiagnostics fp (vfsVersion =<< modTime) (T.pack $ show k) current old
+ let newDiags = getFileDiagnostics fp newDiagsStore
+ _ <- evaluate newDiagsStore
+ _ <- evaluate newDiags
+ pure $! (newDiagsStore, newDiags)
+ let uri = filePathToUri' fp
+ let delay = if null newDiags then 0.1 else 0
+ registerEvent debouncer delay uri $ do
+ mask_ $ modifyVar_ publishedDiagnostics $ \published -> do
+ let lastPublish = Map.findWithDefault [] uri published
+ when (lastPublish /= newDiags) $
+ eventer $ publishDiagnosticsNotification (fromNormalizedUri uri) newDiags
+ pure $! Map.insert uri newDiags published
+
+publishDiagnosticsNotification :: Uri -> [Diagnostic] -> LSP.FromServerMessage
+publishDiagnosticsNotification uri diags =
+ LSP.NotPublishDiagnostics $
+ LSP.NotificationMessage "2.0" LSP.TextDocumentPublishDiagnostics $
+ LSP.PublishDiagnosticsParams uri (List diags)
+
+newtype Priority = Priority Double
+
+setPriority :: Priority -> Action ()
+setPriority (Priority p) = deprioritize p
+
+sendEvent :: LSP.FromServerMessage -> Action ()
+sendEvent e = do
+ ShakeExtras{eventer} <- getShakeExtras
+ liftIO $ eventer e
+
+ideLogger :: IdeState -> Logger
+ideLogger IdeState{shakeExtras=ShakeExtras{logger}} = logger
+
+actionLogger :: Action Logger
+actionLogger = do
+ ShakeExtras{logger} <- getShakeExtras
+ return logger
+
+
+data GetModificationTime = GetModificationTime
+ deriving (Eq, Show, Generic)
+instance Hashable GetModificationTime
+instance NFData GetModificationTime
+
+-- | Get the modification time of a file.
+type instance RuleResult GetModificationTime = FileVersion
+
+-- | We store the modification time as a ByteString since we need
+-- a ByteString anyway for Shake and we do not care about how times
+-- are represented.
+data FileVersion = VFSVersion Int | ModificationTime BS.ByteString
+ deriving (Show, Generic)
+
+instance NFData FileVersion
+
+vfsVersion :: FileVersion -> Maybe Int
+vfsVersion (VFSVersion i) = Just i
+vfsVersion (ModificationTime _) = Nothing
+
+
+
+getDiagnosticsFromStore :: StoreItem -> [Diagnostic]
+getDiagnosticsFromStore (StoreItem _ diags) = concatMap SL.fromSortedList $ Map.elems diags
+
+
+-- | Sets the diagnostics for a file and compilation step.
+-- If you want to clear the diagnostics, call this with an empty list.
+setStageDiagnostics
+ :: NormalizedFilePath
+ -> TextDocumentVersion -- ^ the time that the file these diagnostics originate from was last edited
+ -> T.Text
+ -> [LSP.Diagnostic]
+ -> DiagnosticStore
+ -> DiagnosticStore
+setStageDiagnostics fp timeM stage diags ds =
+ updateDiagnostics ds uri timeM diagsBySource
+ where
+ diagsBySource = Map.singleton (Just stage) (SL.toSortedList diags)
+ uri = filePathToUri' fp
+
+getAllDiagnostics ::
+ DiagnosticStore ->
+ [FileDiagnostic]
+getAllDiagnostics =
+ concatMap (\(k,v) -> map (fromUri k,) $ getDiagnosticsFromStore v) . Map.toList
+
+getFileDiagnostics ::
+ NormalizedFilePath ->
+ DiagnosticStore ->
+ [LSP.Diagnostic]
+getFileDiagnostics fp ds =
+ maybe [] getDiagnosticsFromStore $
+ Map.lookup (filePathToUri' fp) ds
+
+filterDiagnostics ::
+ (NormalizedFilePath -> Bool) ->
+ DiagnosticStore ->
+ DiagnosticStore
+filterDiagnostics keep =
+ Map.filterWithKey (\uri _ -> maybe True (keep . toNormalizedFilePath) $ uriToFilePath' $ fromNormalizedUri uri)
+
+filterVersionMap
+ :: Map NormalizedUri (Set.Set TextDocumentVersion)
+ -> Map NormalizedUri (Map TextDocumentVersion a)
+ -> Map NormalizedUri (Map TextDocumentVersion a)
+filterVersionMap =
+ Map.merge Map.dropMissing Map.dropMissing $
+ Map.zipWithMatched $ \_ versionsToKeep versionMap -> Map.restrictKeys versionMap versionsToKeep
+
+updatePositionMapping :: IdeState -> VersionedTextDocumentIdentifier -> List TextDocumentContentChangeEvent -> IO ()
+updatePositionMapping IdeState{shakeExtras = ShakeExtras{positionMapping}} VersionedTextDocumentIdentifier{..} changes = do
+ modifyVar_ positionMapping $ \allMappings -> do
+ let uri = toNormalizedUri _uri
+ let mappingForUri = Map.findWithDefault Map.empty uri allMappings
+ let updatedMapping =
+ Map.insert _version idMapping $
+ Map.map (\oldMapping -> foldl' applyChange oldMapping changes) mappingForUri
+ pure $! Map.insert uri updatedMapping allMappings
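A sketch of a rule that opts into early cutoff via defineEarlyCutoff above: returning a fingerprint ByteString alongside the result lets dependants be skipped when the fingerprint is unchanged between runs. The GetFileContentsHashExample key and the use of the raw file contents as a stand-in fingerprint are assumptions, not part of ghcide.

-- Hypothetical early-cutoff rule: the returned ByteString is compared against the
-- previous run, so dependants only rerun when the file contents actually changed.
data GetFileContentsHashExample = GetFileContentsHashExample
    deriving (Eq, Show, Generic)
instance Hashable GetFileContentsHashExample
instance NFData GetFileContentsHashExample
type instance RuleResult GetFileContentsHashExample = BS.ByteString

getFileContentsHashExampleRule :: Rules ()
getFileContentsHashExampleRule =
    defineEarlyCutoff $ \GetFileContentsHashExample file -> do
        contents <- liftIO $ BS.readFile $ fromNormalizedFilePath file
        -- A real rule would hash the contents; the raw bytes also work as a fingerprint.
        return (Just contents, ([], Just contents))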
diff --git a/src/Development/IDE/GHC/CPP.hs b/src/Development/IDE/GHC/CPP.hs
new file mode 100644
index 0000000..0e2eeca
--- /dev/null
+++ b/src/Development/IDE/GHC/CPP.hs
@@ -0,0 +1,197 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+-- Copied from https://github.com/ghc/ghc/blob/master/compiler/main/DriverPipeline.hs on 14 May 2019
+-- Requested to be exposed at https://gitlab.haskell.org/ghc/ghc/merge_requests/944.
+-- Update: the above MR was merged to master on 31 May 2019. When it becomes available to ghc-lib, this file can be removed.
+
+{- HLINT ignore -} -- since copied from upstream
+
+{-# LANGUAGE CPP, NamedFieldPuns, NondecreasingIndentation, BangPatterns, MultiWayIf #-}
+{-# OPTIONS_GHC -Wno-incomplete-uni-patterns #-}
+
+-----------------------------------------------------------------------------
+--
+-- GHC Driver
+--
+-- (c) The University of Glasgow 2005
+--
+-----------------------------------------------------------------------------
+
+module Development.IDE.GHC.CPP(doCpp) where
+
+import Development.IDE.GHC.Compat
+import Packages
+import SysTools
+import Module
+import DynFlags
+import Panic
+import FileCleanup
+#ifndef GHC_STABLE
+import LlvmCodeGen (LlvmVersion (..))
+#endif
+
+import System.Directory
+import System.FilePath
+import Control.Monad
+import System.Info
+import Data.List ( intercalate )
+import Data.Maybe
+import Data.Version
+
+
+
+doCpp :: DynFlags -> Bool -> FilePath -> FilePath -> IO ()
+doCpp dflags raw input_fn output_fn = do
+ let hscpp_opts = picPOpts dflags
+ let cmdline_include_paths = includePaths dflags
+
+ pkg_include_dirs <- getPackageIncludePath dflags []
+ let include_paths_global = foldr (\ x xs -> ("-I" ++ x) : xs) []
+ (includePathsGlobal cmdline_include_paths ++ pkg_include_dirs)
+ let include_paths_quote = foldr (\ x xs -> ("-iquote" ++ x) : xs) []
+ (includePathsQuote cmdline_include_paths)
+ let include_paths = include_paths_quote ++ include_paths_global
+
+ let verbFlags = getVerbFlags dflags
+
+ let cpp_prog args | raw = SysTools.runCpp dflags args
+ | otherwise = SysTools.runCc
+ dflags (SysTools.Option "-E" : args)
+
+ let target_defs =
+ -- NEIL: Patched to use System.Info instead of constants from CPP
+ [ "-D" ++ os ++ "_BUILD_OS",
+ "-D" ++ arch ++ "_BUILD_ARCH",
+ "-D" ++ os ++ "_HOST_OS",
+ "-D" ++ arch ++ "_HOST_ARCH" ]
+ -- remember, in code we *compile*, the HOST is the same our TARGET,
+ -- and BUILD is the same as our HOST.
+
+ let sse_defs =
+ [ "-D__SSE__" | isSseEnabled dflags ] ++
+ [ "-D__SSE2__" | isSse2Enabled dflags ] ++
+ [ "-D__SSE4_2__" | isSse4_2Enabled dflags ]
+
+ let avx_defs =
+ [ "-D__AVX__" | isAvxEnabled dflags ] ++
+ [ "-D__AVX2__" | isAvx2Enabled dflags ] ++
+ [ "-D__AVX512CD__" | isAvx512cdEnabled dflags ] ++
+ [ "-D__AVX512ER__" | isAvx512erEnabled dflags ] ++
+ [ "-D__AVX512F__" | isAvx512fEnabled dflags ] ++
+ [ "-D__AVX512PF__" | isAvx512pfEnabled dflags ]
+
+ backend_defs <- getBackendDefs dflags
+
+ let th_defs = [ "-D__GLASGOW_HASKELL_TH__" ]
+ -- Default CPP defines in Haskell source
+ ghcVersionH <- getGhcVersionPathName dflags
+ let hsSourceCppOpts = [ "-include", ghcVersionH ]
+
+ -- MIN_VERSION macros
+ let uids = explicitPackages (pkgState dflags)
+ pkgs = catMaybes (map (lookupPackage dflags) uids)
+ mb_macro_include <-
+ if not (null pkgs) && gopt Opt_VersionMacros dflags
+ then do macro_stub <- newTempName dflags TFL_CurrentModule "h"
+ writeFile macro_stub (generatePackageVersionMacros pkgs)
+ -- Include version macros for every *exposed* package.
+ -- Without -hide-all-packages and with a package database
+ -- size of 1000 packages, it takes cpp an estimated 2
+ -- milliseconds to process this file. See #10970
+ -- comment 8.
+ return [SysTools.FileOption "-include" macro_stub]
+ else return []
+
+ cpp_prog ( map SysTools.Option verbFlags
+ ++ map SysTools.Option include_paths
+ ++ map SysTools.Option hsSourceCppOpts
+ ++ map SysTools.Option target_defs
+ ++ map SysTools.Option backend_defs
+ ++ map SysTools.Option th_defs
+ ++ map SysTools.Option hscpp_opts
+ ++ map SysTools.Option sse_defs
+ ++ map SysTools.Option avx_defs
+ ++ mb_macro_include
+ -- Set the language mode to assembler-with-cpp when preprocessing. This
+ -- alleviates some of the C99 macro rules relating to whitespace and the hash
+ -- operator, which we tend to abuse. Clang in particular is not very happy
+ -- about this.
+ ++ [ SysTools.Option "-x"
+ , SysTools.Option "assembler-with-cpp"
+ , SysTools.Option input_fn
+ -- We hackily use Option instead of FileOption here, so that the file
+ -- name is not back-slashed on Windows. cpp is capable of
+ -- dealing with / in filenames, so it works fine. Furthermore
+ -- if we put in backslashes, cpp outputs #line directives
+ -- with *double* backslashes. And that in turn means that
+ -- our error messages get double backslashes in them.
+ -- In due course we should arrange that the lexer deals
+ -- with these \\ escapes properly.
+ , SysTools.Option "-o"
+ , SysTools.FileOption "" output_fn
+ ])
+
+getBackendDefs :: DynFlags -> IO [String]
+getBackendDefs dflags | hscTarget dflags == HscLlvm = do
+ llvmVer <- figureLlvmVersion dflags
+ return $ case llvmVer of
+#ifdef GHC_STABLE
+ Just n -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format n ]
+#else
+ Just (LlvmVersion n) -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (n,0) ]
+ Just (LlvmVersionOld m n) -> [ "-D__GLASGOW_HASKELL_LLVM__=" ++ format (m,n) ]
+#endif
+ _ -> []
+ where
+ format (major, minor)
+ | minor >= 100 = error "getBackendDefs: Unsupported minor version"
+ | otherwise = show $ (100 * major + minor :: Int) -- Contract is Int
+
+getBackendDefs _ =
+ return []
+
+-- ---------------------------------------------------------------------------
+-- Macros (cribbed from Cabal)
+
+generatePackageVersionMacros :: [PackageConfig] -> String
+generatePackageVersionMacros pkgs = concat
+ -- Do not add any C-style comments. See #3389.
+ [ generateMacros "" pkgname version
+ | pkg <- pkgs
+ , let version = packageVersion pkg
+ pkgname = map fixchar (packageNameString pkg)
+ ]
+
+fixchar :: Char -> Char
+fixchar '-' = '_'
+fixchar c = c
+
+generateMacros :: String -> String -> Version -> String
+generateMacros prefix name version =
+ concat
+ ["#define ", prefix, "VERSION_",name," ",show (showVersion version),"\n"
+ ,"#define MIN_", prefix, "VERSION_",name,"(major1,major2,minor) (\\\n"
+ ," (major1) < ",major1," || \\\n"
+ ," (major1) == ",major1," && (major2) < ",major2," || \\\n"
+ ," (major1) == ",major1," && (major2) == ",major2," && (minor) <= ",minor,")"
+ ,"\n\n"
+ ]
+ where
+ (major1:major2:minor:_) = map show (versionBranch version ++ repeat 0)
+
+
+-- | Find out path to @ghcversion.h@ file
+getGhcVersionPathName :: DynFlags -> IO FilePath
+getGhcVersionPathName dflags = do
+ candidates <- case ghcVersionFile dflags of
+ Just path -> return [path]
+ Nothing -> (map (</> "ghcversion.h")) <$>
+ (getPackageIncludePath dflags [toInstalledUnitId rtsUnitId])
+
+ found <- filterM doesFileExist candidates
+ case found of
+ [] -> throwGhcExceptionIO (InstallationError
+ ("ghcversion.h missing; tried: "
+ ++ intercalate ", " candidates))
+ (x:_) -> return x
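A sketch of driving doCpp above on a single file; the DynFlags are assumed to come from an already-initialised GHC session, and passing True for the raw flag selects the standalone cpp program rather than the C compiler front end.

-- Sketch only: run CPP over a single file and return the path of the output.
preprocessOneFile :: DynFlags -> FilePath -> IO FilePath
preprocessOneFile dflags input_fn = do
    let output_fn = input_fn <.> "cppout"
    doCpp dflags True input_fn output_fn
    return output_fn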
diff --git a/src/Development/IDE/GHC/Compat.hs b/src/Development/IDE/GHC/Compat.hs
new file mode 100644
index 0000000..249d09d
--- /dev/null
+++ b/src/Development/IDE/GHC/Compat.hs
@@ -0,0 +1,78 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE CPP #-}
+
+-- | An attempt to hide the GHC version differences where we can.
+module Development.IDE.GHC.Compat(
+ HieFileResult(..),
+ HieFile(..),
+ mkHieFile,
+ writeHieFile,
+ readHieFile,
+ hPutStringBuffer,
+ includePathsGlobal,
+ includePathsQuote,
+ addIncludePathsQuote,
+ ghcEnumerateExtensions
+ ) where
+
+import StringBuffer
+import DynFlags
+import GHC.LanguageExtensions.Type
+
+#ifndef GHC_STABLE
+import HieAst
+import HieBin
+import HieTypes
+#else
+import GHC
+import GhcPlugins
+import NameCache
+import Avail
+import TcRnTypes
+import System.IO
+import Foreign.ForeignPtr
+
+
+hPutStringBuffer :: Handle -> StringBuffer -> IO ()
+hPutStringBuffer hdl (StringBuffer buf len cur)
+ = withForeignPtr (plusForeignPtr buf cur) $ \ptr ->
+ hPutBuf hdl ptr len
+
+mkHieFile :: ModSummary -> TcGblEnv -> RenamedSource -> Hsc HieFile
+mkHieFile _ _ _ = return (HieFile () [])
+
+writeHieFile :: FilePath -> HieFile -> IO ()
+writeHieFile _ _ = return ()
+
+readHieFile :: NameCache -> FilePath -> IO (HieFileResult, ())
+readHieFile _ _ = return (HieFileResult (HieFile () []), ())
+
+data HieFile = HieFile {hie_module :: (), hie_exports :: [AvailInfo]}
+data HieFileResult = HieFileResult { hie_file_result :: HieFile }
+#endif
+
+#if __GLASGOW_HASKELL__ < 806
+includePathsGlobal, includePathsQuote :: [String] -> [String]
+includePathsGlobal = id
+includePathsQuote = const []
+#endif
+
+
+addIncludePathsQuote :: FilePath -> DynFlags -> DynFlags
+#if __GLASGOW_HASKELL__ >= 806
+addIncludePathsQuote path x = x{includePaths = f $ includePaths x}
+ where f i = i{includePathsQuote = path : includePathsQuote i}
+#else
+addIncludePathsQuote path x = x{includePaths = path : includePaths x}
+#endif
+
+ghcEnumerateExtensions :: [Extension]
+#if __GLASGOW_HASKELL__ >= 808
+ghcEnumerateExtensions = enumerate
+#elif __GLASGOW_HASKELL__ >= 806
+ghcEnumerateExtensions = [Cpp .. StarIsType]
+#else
+ghcEnumerateExtensions = [Cpp .. EmptyDataDeriving]
+#endif
diff --git a/src/Development/IDE/GHC/Error.hs b/src/Development/IDE/GHC/Error.hs
new file mode 100644
index 0000000..9008469
--- /dev/null
+++ b/src/Development/IDE/GHC/Error.hs
@@ -0,0 +1,152 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+module Development.IDE.GHC.Error
+ (
+ -- * Producing Diagnostic values
+ diagFromErrMsgs
+ , diagFromErrMsg
+ , diagFromString
+ , diagFromStrings
+ , diagFromGhcException
+
+ -- * utilities working with spans
+ , srcSpanToLocation
+ , srcSpanToFilename
+ , zeroSpan
+ , realSpan
+ ) where
+
+import Development.IDE.Types.Diagnostics as D
+import qualified Data.Text as T
+import Development.IDE.Types.Location
+import Development.IDE.GHC.Orphans()
+import qualified FastString as FS
+import GHC
+import Bag
+import ErrUtils
+import SrcLoc
+import qualified Outputable as Out
+
+
+
+diagFromText :: T.Text -> D.DiagnosticSeverity -> SrcSpan -> T.Text -> FileDiagnostic
+diagFromText diagSource sev loc msg = (toNormalizedFilePath $ srcSpanToFilename loc,)
+ Diagnostic
+ { _range = srcSpanToRange loc
+ , _severity = Just sev
+ , _source = Just diagSource -- not shown in the IDE, but useful for ghcide developers
+ , _message = msg
+ , _code = Nothing
+ , _relatedInformation = Nothing
+ }
+
+-- | Produce diagnostics from a GHC error message.
+diagFromErrMsg :: T.Text -> DynFlags -> ErrMsg -> [FileDiagnostic]
+diagFromErrMsg diagSource dflags e =
+ [ diagFromText diagSource sev (errMsgSpan e) $ T.pack $ Out.showSDoc dflags $ ErrUtils.pprLocErrMsg e
+ | Just sev <- [toDSeverity $ errMsgSeverity e]]
+
+
+diagFromErrMsgs :: T.Text -> DynFlags -> Bag ErrMsg -> [FileDiagnostic]
+diagFromErrMsgs diagSource dflags = concatMap (diagFromErrMsg diagSource dflags) . bagToList
+
+
+-- | Convert a GHC SrcSpan to an LSP Range
+srcSpanToRange :: SrcSpan -> Range
+srcSpanToRange (UnhelpfulSpan _) = noRange
+srcSpanToRange (RealSrcSpan real) = realSrcSpanToRange real
+
+realSrcSpanToRange :: RealSrcSpan -> Range
+realSrcSpanToRange real =
+ Range (Position (srcSpanStartLine real - 1) (srcSpanStartCol real - 1))
+ (Position (srcSpanEndLine real - 1) (srcSpanEndCol real - 1))
+
+-- | Extract a file name from a GHC SrcSpan (use message for unhelpful ones)
+-- FIXME This may not be an _absolute_ file name, needs fixing.
+srcSpanToFilename :: SrcSpan -> FilePath
+srcSpanToFilename (UnhelpfulSpan fs) = FS.unpackFS fs
+srcSpanToFilename (RealSrcSpan real) = FS.unpackFS $ srcSpanFile real
+
+srcSpanToLocation :: SrcSpan -> Location
+srcSpanToLocation src =
+ -- important that the URI's we produce have been properly normalized, otherwise they point at weird places in VS Code
+ Location (fromNormalizedUri $ filePathToUri' $ toNormalizedFilePath $ srcSpanToFilename src) (srcSpanToRange src)
+
+-- | Convert a GHC severity to a DAML compiler Severity. Output, interactive
+-- and dump severities are dropped (returning Nothing).
+toDSeverity :: GHC.Severity -> Maybe D.DiagnosticSeverity
+toDSeverity SevOutput = Nothing
+toDSeverity SevInteractive = Nothing
+toDSeverity SevDump = Nothing
+toDSeverity SevInfo = Just DsInfo
+toDSeverity SevWarning = Just DsWarning
+toDSeverity SevError = Just DsError
+toDSeverity SevFatal = Just DsError
+
+
+-- | Produce file diagnostics from the given (possibly unhelpful) source spans
+-- and message strings.
+diagFromStrings :: T.Text -> [(SrcSpan, String)] -> [FileDiagnostic]
+diagFromStrings diagSource = concatMap (uncurry (diagFromString diagSource))
+
+-- | Produce an error-severity file diagnostic from a source span and a message.
+diagFromString :: T.Text -> SrcSpan -> String -> [FileDiagnostic]
+diagFromString diagSource sp x = [diagFromText diagSource DsError sp $ T.pack x]
+
+
+-- | Produces an "unhelpful" source span with the given string.
+noSpan :: String -> SrcSpan
+noSpan = UnhelpfulSpan . FS.fsLit
+
+
+-- | Creates a zero-length span in the given file.
+zeroSpan :: FS.FastString -- ^ file path of span
+ -> RealSrcSpan
+zeroSpan file = realSrcLocSpan (mkRealSrcLoc file 1 1)
+
+realSpan :: SrcSpan
+ -> Maybe RealSrcSpan
+realSpan = \case
+ RealSrcSpan r -> Just r
+ UnhelpfulSpan _ -> Nothing
+
+
+diagFromGhcException :: T.Text -> DynFlags -> GhcException -> [FileDiagnostic]
+diagFromGhcException diagSource dflags exc = diagFromString diagSource (noSpan "<Internal>") (showGHCE dflags exc)
+
+showGHCE :: DynFlags -> GhcException -> String
+showGHCE dflags exc = case exc of
+ Signal n
+ -> "Signal: " <> show n
+
+ Panic s
+ -> unwords ["Compilation Issue:", s, "\n", requestReport]
+ PprPanic s sdoc
+ -> unlines ["Compilation Issue", s,""
+ , Out.showSDoc dflags sdoc
+ , requestReport ]
+
+ Sorry s
+ -> "Unsupported feature: " <> s
+ PprSorry s sdoc
+ -> unlines ["Unsupported feature: ", s,""
+ , Out.showSDoc dflags sdoc]
+
+
+ ---------- errors below should not happen at all --------
+ InstallationError str
+ -> "Installation error: " <> str
+
+ UsageError str -- should never happen
+ -> unlines ["Unexpected usage error", str]
+
+ CmdLineError str
+ -> unlines ["Unexpected usage error", str]
+
+ ProgramError str
+ -> "Program error: " <> str
+ PprProgramError str sdoc ->
+ unlines ["Program error:", str,""
+ , Out.showSDoc dflags sdoc]
+ where
+ requestReport = "Please report this bug to the compiler authors."
diff --git a/src/Development/IDE/GHC/Orphans.hs b/src/Development/IDE/GHC/Orphans.hs
new file mode 100644
index 0000000..5b4084e
--- /dev/null
+++ b/src/Development/IDE/GHC/Orphans.hs
@@ -0,0 +1,59 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE FlexibleInstances #-}
+{-# OPTIONS_GHC -Wno-orphans #-}
+
+-- | Orphan instances for GHC.
+-- Note that the 'NFData' instances may not be law abiding.
+module Development.IDE.GHC.Orphans() where
+
+import GHC
+import GhcPlugins
+import Development.IDE.GHC.Compat
+import qualified StringBuffer as SB
+import Control.DeepSeq
+import Data.Hashable
+import Development.IDE.GHC.Util
+
+
+-- Orphan instances for types from the GHC API.
+instance Show CoreModule where show = prettyPrint
+instance NFData CoreModule where rnf = rwhnf
+
+
+instance Show InstalledUnitId where
+ show = installedUnitIdString
+
+instance NFData InstalledUnitId where rnf = rwhnf
+
+instance NFData SB.StringBuffer where rnf = rwhnf
+
+instance Show Module where
+ show = moduleNameString . moduleName
+
+instance Show (GenLocated SrcSpan ModuleName) where show = prettyPrint
+
+instance NFData (GenLocated SrcSpan ModuleName) where
+ rnf = rwhnf
+
+instance Show ModSummary where
+ show = show . ms_mod
+
+instance Show ParsedModule where
+ show = show . pm_mod_summary
+
+instance NFData ModSummary where
+ rnf = rwhnf
+
+instance NFData ParsedModule where
+ rnf = rwhnf
+
+instance Hashable InstalledUnitId where
+ hashWithSalt salt = hashWithSalt salt . installedUnitIdString
+
+instance Show HieFile where
+ show = show . hie_module
+
+instance NFData HieFile where
+ rnf = rwhnf
diff --git a/src/Development/IDE/GHC/Util.hs b/src/Development/IDE/GHC/Util.hs
new file mode 100644
index 0000000..a38ea1a
--- /dev/null
+++ b/src/Development/IDE/GHC/Util.hs
@@ -0,0 +1,133 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# OPTIONS_GHC -Wno-missing-fields #-} -- to enable prettyPrint
+{-# LANGUAGE CPP #-}
+
+-- | GHC utility functions. Importantly, code using our GHC should never:
+--
+-- * Call runGhc, use 'runGhcEnv' instead. It's faster and doesn't require config we don't have.
+--
+-- * Call setSessionDynFlags, use modifyDynFlags instead. It's faster and avoids loading packages.
+module Development.IDE.GHC.Util(
+ lookupPackageConfig,
+ modifyDynFlags,
+ fakeDynFlags,
+ prettyPrint,
+ runGhcEnv,
+ textToStringBuffer,
+ moduleImportPaths,
+ HscEnvEq, hscEnv, newHscEnvEq
+ ) where
+
+import Config
+import Data.List.Extra
+#if __GLASGOW_HASKELL__ >= 806
+import Fingerprint
+#endif
+import GHC
+import GhcMonad
+import GhcPlugins hiding (Unique)
+import Data.IORef
+import Control.Exception
+import FileCleanup
+import Platform
+import Data.Unique
+import Development.Shake.Classes
+import qualified Data.Text as T
+import StringBuffer
+import System.FilePath
+
+
+----------------------------------------------------------------------
+-- GHC setup
+
+modifyDynFlags :: GhcMonad m => (DynFlags -> DynFlags) -> m ()
+modifyDynFlags f = do
+ newFlags <- f <$> getSessionDynFlags
+ -- We do not use setSessionDynFlags here since we handle package
+ -- initialization separately.
+ modifySession $ \h ->
+ h { hsc_dflags = newFlags, hsc_IC = (hsc_IC h) {ic_dflags = newFlags} }
+
+lookupPackageConfig :: UnitId -> HscEnv -> Maybe PackageConfig
+lookupPackageConfig unitId env =
+ lookupPackage' False pkgConfigMap unitId
+ where
+ pkgConfigMap =
+ -- For some weird reason, the GHC API does not provide a way to get the PackageConfigMap
+ -- from PackageState so we have to wrap it in DynFlags first.
+ getPackageConfigMap $ hsc_dflags env
+
+
+-- would be nice to do this more efficiently...
+textToStringBuffer :: T.Text -> StringBuffer
+textToStringBuffer = stringToStringBuffer . T.unpack
+
+
+prettyPrint :: Outputable a => a -> String
+prettyPrint = showSDoc fakeDynFlags . ppr
+
+runGhcEnv :: HscEnv -> Ghc a -> IO a
+runGhcEnv env act = do
+ filesToClean <- newIORef emptyFilesToClean
+ dirsToClean <- newIORef mempty
+ let dflags = (hsc_dflags env){filesToClean=filesToClean, dirsToClean=dirsToClean}
+ ref <- newIORef env{hsc_dflags=dflags}
+ unGhc act (Session ref) `finally` do
+ cleanTempFiles dflags
+ cleanTempDirs dflags
+
+-- Fake DynFlags which are mostly undefined, but define enough to do a
+-- little bit.
+fakeDynFlags :: DynFlags
+fakeDynFlags = defaultDynFlags settings mempty
+ where
+ settings = Settings
+ { sTargetPlatform = platform
+ , sPlatformConstants = platformConstants
+ , sProgramName = "ghc"
+ , sProjectVersion = cProjectVersion
+#if __GLASGOW_HASKELL__ >= 806
+ , sOpt_P_fingerprint = fingerprint0
+#endif
+ }
+ platform = Platform
+ { platformWordSize=8
+ , platformOS=OSUnknown
+ , platformUnregisterised=True
+ }
+ platformConstants = PlatformConstants
+ { pc_DYNAMIC_BY_DEFAULT=False
+ , pc_WORD_SIZE=8
+ }
+
+moduleImportPaths :: GHC.ParsedModule -> Maybe FilePath
+moduleImportPaths pm
+ | rootModDir == "." = Just rootPathDir
+ | otherwise =
+ dropTrailingPathSeparator <$> stripSuffix (normalise rootModDir) (normalise rootPathDir)
+ where
+ ms = GHC.pm_mod_summary pm
+ file = GHC.ms_hspp_file ms
+ mod' = GHC.ms_mod ms
+ rootPathDir = takeDirectory file
+ rootModDir = takeDirectory . moduleNameSlashes . GHC.moduleName $ mod'
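+-- A worked example (hypothetical paths): for a module Foo.Bar whose source file
+-- is "src/Foo/Bar.hs", rootPathDir is "src/Foo" and rootModDir is "Foo", so the
+-- result is Just "src". For a top-level module in "src/Main.hs", rootModDir is
+-- "." and the result is again Just "src".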
+
+-- | An HscEnv with equality.
+data HscEnvEq = HscEnvEq Unique HscEnv
+
+hscEnv :: HscEnvEq -> HscEnv
+hscEnv (HscEnvEq _ x) = x
+
+newHscEnvEq :: HscEnv -> IO HscEnvEq
+newHscEnvEq e = do u <- newUnique; return $ HscEnvEq u e
+
+instance Show HscEnvEq where
+ show (HscEnvEq a _) = "HscEnvEq " ++ show (hashUnique a)
+
+instance Eq HscEnvEq where
+ HscEnvEq a _ == HscEnvEq b _ = a == b
+
+instance NFData HscEnvEq where
+ rnf (HscEnvEq a b) = rnf (hashUnique a) `seq` b `seq` ()
diff --git a/src/Development/IDE/GHC/Warnings.hs b/src/Development/IDE/GHC/Warnings.hs
new file mode 100644
index 0000000..7b85deb
--- /dev/null
+++ b/src/Development/IDE/GHC/Warnings.hs
@@ -0,0 +1,42 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.GHC.Warnings(withWarnings) where
+
+import GhcMonad
+import ErrUtils
+import GhcPlugins as GHC hiding (Var)
+
+import Control.Concurrent.Extra
+import Control.Monad.Extra
+import qualified Data.Text as T
+
+import Development.IDE.Types.Diagnostics
+import Development.IDE.GHC.Util
+import Development.IDE.GHC.Error
+
+
+-- | Take a GHC monadic action (e.g. @typecheckModule pm@ for some
+-- parsed module @pm@) and produce a "decorated" action that
+-- harvests any warnings encountered while executing it. The first
+-- argument classifies the context (e.g. "Parser", "Typechecker").
+--
+-- The ModSummary function is required because of
+-- https://github.com/ghc/ghc/blob/5f1d949ab9e09b8d95319633854b7959df06eb58/compiler/main/GHC.hs#L623-L640
+-- which means that log_action may be taken from the ModSummary rather than the session DynFlags.
+-- The function passed to the action lets you refresh a ModSummary's log_action.
+withWarnings :: GhcMonad m => T.Text -> ((ModSummary -> ModSummary) -> m a) -> m ([FileDiagnostic], a)
+withWarnings diagSource action = do
+ warnings <- liftIO $ newVar []
+ oldFlags <- getDynFlags
+ let newAction dynFlags _ _ loc _ msg = do
+ let d = diagFromErrMsg diagSource dynFlags $ mkPlainWarnMsg dynFlags loc msg
+ modifyVar_ warnings $ return . (d:)
+ setLogAction newAction
+ res <- action $ \x -> x{ms_hspp_opts = (ms_hspp_opts x){log_action = newAction}}
+ setLogAction $ log_action oldFlags
+ warns <- liftIO $ readVar warnings
+ return (reverse $ concat warns, res)
+
+setLogAction :: GhcMonad m => LogAction -> m ()
+setLogAction act = void $ modifyDynFlags $ \dyn -> dyn{log_action = act}
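+-- A usage sketch (illustrative only; it assumes the GHC module is imported for
+-- 'typecheckModule', 'ParsedModule' and 'TypecheckedModule'): the supplied
+-- function is passed on to the ModSummary the action uses so that warnings
+-- emitted via its log_action are not lost.
+--
+-- > typecheckWithWarnings :: GhcMonad m => ParsedModule -> m ([FileDiagnostic], TypecheckedModule)
+-- > typecheckWithWarnings pm =
+-- >   withWarnings (T.pack "typecheck") $ \tweak ->
+-- >     typecheckModule pm { pm_mod_summary = tweak (pm_mod_summary pm) }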
diff --git a/src/Development/IDE/Import/DependencyInformation.hs b/src/Development/IDE/Import/DependencyInformation.hs
new file mode 100644
index 0000000..a631192
--- /dev/null
+++ b/src/Development/IDE/Import/DependencyInformation.hs
@@ -0,0 +1,289 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Import.DependencyInformation
+ ( DependencyInformation(..)
+ , ModuleImports(..)
+ , RawDependencyInformation(..)
+ , NodeError(..)
+ , ModuleParseError(..)
+ , TransitiveDependencies(..)
+ , FilePathId(..)
+
+ , PathIdMap
+ , emptyPathIdMap
+ , getPathId
+ , insertImport
+ , pathToId
+ , idToPath
+ , reachableModules
+
+ , processDependencyInformation
+ , transitiveDeps
+ ) where
+
+import Control.DeepSeq
+import Data.Bifunctor
+import Data.Coerce
+import Data.List
+import Development.IDE.GHC.Orphans()
+import Data.Either
+import Data.Graph
+import Data.List.NonEmpty (NonEmpty(..), nonEmpty)
+import qualified Data.List.NonEmpty as NonEmpty
+import Data.IntMap (IntMap)
+import qualified Data.IntMap.Strict as IntMap
+import qualified Data.IntMap.Lazy as IntMapLazy
+import Data.IntSet (IntSet)
+import qualified Data.IntSet as IntSet
+import Data.Map (Map)
+import qualified Data.Map.Strict as MS
+import Data.Maybe
+import Data.Set (Set)
+import qualified Data.Set as Set
+import Data.Tuple.Extra (fst3)
+import GHC.Generics (Generic)
+
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+
+import GHC
+import Module
+
+-- | The imports for a given module.
+data ModuleImports = ModuleImports
+ { moduleImports :: ![(Located ModuleName, Maybe FilePathId)]
+ -- ^ Imports of a module in the current package and the file path of
+ -- that module on disk (if we found it)
+ , packageImports :: !(Set InstalledUnitId)
+ -- ^ Transitive package dependencies unioned for all imports.
+ }
+
+-- | For processing dependency information, we need lots of maps and sets
+-- of filepaths. Comparing Strings is really slow, so we work with IntMap/IntSet
+-- instead and only convert to filepaths at the edges.
+newtype FilePathId = FilePathId { getFilePathId :: Int }
+ deriving (Show, NFData, Eq, Ord)
+
+data PathIdMap = PathIdMap
+ { idToPathMap :: !(IntMap NormalizedFilePath)
+ , pathToIdMap :: !(Map NormalizedFilePath FilePathId)
+ }
+ deriving (Show, Generic)
+
+instance NFData PathIdMap
+
+emptyPathIdMap :: PathIdMap
+emptyPathIdMap = PathIdMap IntMap.empty MS.empty
+
+getPathId :: NormalizedFilePath -> PathIdMap -> (FilePathId, PathIdMap)
+getPathId path m@PathIdMap{..} =
+ case MS.lookup path pathToIdMap of
+ Nothing ->
+ let !newId = FilePathId $ MS.size pathToIdMap
+ in (newId, insertPathId path newId m)
+ Just id -> (id, m)
+
+insertPathId :: NormalizedFilePath -> FilePathId -> PathIdMap -> PathIdMap
+insertPathId path id PathIdMap{..} =
+ PathIdMap (IntMap.insert (getFilePathId id) path idToPathMap) (MS.insert path id pathToIdMap)
+
+insertImport :: FilePathId -> Either ModuleParseError ModuleImports -> RawDependencyInformation -> RawDependencyInformation
+insertImport (FilePathId k) v rawDepInfo = rawDepInfo { rawImports = IntMap.insert k v (rawImports rawDepInfo) }
+
+pathToId :: PathIdMap -> NormalizedFilePath -> FilePathId
+pathToId PathIdMap{pathToIdMap} path = pathToIdMap MS.! path
+
+idToPath :: PathIdMap -> FilePathId -> NormalizedFilePath
+idToPath PathIdMap{idToPathMap} (FilePathId id) = idToPathMap IntMap.! id
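+-- A small interning sketch (hypothetical file names): new paths are assigned
+-- consecutive ids starting from 0, and looking up a known path returns its
+-- existing id without growing the map.
+--
+-- > internExample :: (FilePathId, FilePathId, PathIdMap)
+-- > internExample =
+-- >   let (a, m1) = getPathId (toNormalizedFilePath "A.hs") emptyPathIdMap  -- FilePathId 0
+-- >       (b, m2) = getPathId (toNormalizedFilePath "B.hs") m1              -- FilePathId 1
+-- >   in (a, b, m2)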
+
+-- | Unprocessed results that we find by following imports recursively.
+data RawDependencyInformation = RawDependencyInformation
+ { rawImports :: !(IntMap (Either ModuleParseError ModuleImports))
+ , rawPathIdMap :: !PathIdMap
+ }
+
+pkgDependencies :: RawDependencyInformation -> IntMap (Set InstalledUnitId)
+pkgDependencies RawDependencyInformation{..} =
+ IntMap.map (either (const Set.empty) packageImports) rawImports
+
+data DependencyInformation =
+ DependencyInformation
+ { depErrorNodes :: !(IntMap (NonEmpty NodeError))
+ -- ^ Nodes that cannot be processed correctly.
+ , depModuleDeps :: !(IntMap IntSet)
+ -- ^ For a non-error node, this contains the set of module immediate dependencies
+ -- in the same package.
+ , depPkgDeps :: !(IntMap (Set InstalledUnitId))
+ -- ^ For a non-error node, this contains the set of immediate pkg deps.
+ , depPathIdMap :: !PathIdMap
+ } deriving (Show, Generic)
+
+reachableModules :: DependencyInformation -> [NormalizedFilePath]
+reachableModules DependencyInformation{..} =
+ map (idToPath depPathIdMap . FilePathId) $ IntMap.keys depErrorNodes <> IntMap.keys depModuleDeps
+
+instance NFData DependencyInformation
+
+-- | This does not contain the actual parse error as that is already reported by GetParsedModule.
+data ModuleParseError = ModuleParseError
+ deriving (Show, Generic)
+
+instance NFData ModuleParseError
+
+-- | Error when trying to locate a module.
+data LocateError = LocateError [Diagnostic]
+ deriving (Eq, Show, Generic)
+
+instance NFData LocateError
+
+-- | An error attached to a node in the dependency graph.
+data NodeError
+ = PartOfCycle (Located ModuleName) [FilePathId]
+ -- ^ This module is part of an import cycle. The module name corresponds
+ -- to the import that enters the cycle starting from this module.
+ -- The list of filepaths represents the elements
+ -- in the cycle in unspecified order.
+ | FailedToLocateImport (Located ModuleName)
+ -- ^ This module has an import that couldn’t be located.
+ | ParseError ModuleParseError
+ | ParentOfErrorNode (Located ModuleName)
+ -- ^ This module is the parent of a module that cannot be
+ -- processed (either it cannot be parsed, is part of a cycle
+ -- or the parent of another error node).
+ deriving (Show, Generic)
+
+instance NFData NodeError where
+ rnf (PartOfCycle m fs) = m `seq` rnf fs
+ rnf (FailedToLocateImport m) = m `seq` ()
+ rnf (ParseError e) = rnf e
+ rnf (ParentOfErrorNode m) = m `seq` ()
+
+-- | A processed node in the dependency graph. If there was any error
+-- during processing the node or any of its dependencies, this is an
+-- `ErrorNode`. Otherwise it is a `SuccessNode`.
+data NodeResult
+ = ErrorNode (NonEmpty NodeError)
+ | SuccessNode [(Located ModuleName, FilePathId)]
+ deriving Show
+
+partitionNodeResults
+ :: [(a, NodeResult)]
+ -> ([(a, NonEmpty NodeError)], [(a, [(Located ModuleName, FilePathId)])])
+partitionNodeResults = partitionEithers . map f
+ where f (a, ErrorNode errs) = Left (a, errs)
+ f (a, SuccessNode imps) = Right (a, imps)
+
+instance Semigroup NodeResult where
+ ErrorNode errs <> ErrorNode errs' = ErrorNode (errs <> errs')
+ ErrorNode errs <> SuccessNode _ = ErrorNode errs
+ SuccessNode _ <> ErrorNode errs = ErrorNode errs
+ SuccessNode a <> SuccessNode _ = SuccessNode a
+
+processDependencyInformation :: RawDependencyInformation -> DependencyInformation
+processDependencyInformation rawDepInfo@RawDependencyInformation{..} =
+ DependencyInformation
+ { depErrorNodes = IntMap.fromList errorNodes
+ , depModuleDeps = moduleDeps
+ , depPkgDeps = pkgDependencies rawDepInfo
+ , depPathIdMap = rawPathIdMap
+ }
+ where resultGraph = buildResultGraph rawImports
+ (errorNodes, successNodes) = partitionNodeResults $ IntMap.toList resultGraph
+ successEdges :: [(FilePathId, FilePathId, [FilePathId])]
+ successEdges =
+ map (\(file, imports) -> (FilePathId file, FilePathId file, map snd imports)) successNodes
+ moduleDeps =
+ IntMap.fromList $ map (\(_, FilePathId v, vs) -> (v, IntSet.fromList $ coerce vs)) successEdges
+
+-- | Given a dependency graph, buildResultGraph detects and propagates errors in that graph as follows:
+-- 1. Mark each node that is part of an import cycle as an error node.
+-- 2. Mark each node that has a parse error as an error node.
+-- 3. Mark each node whose immediate children could not be located as an error.
+-- 4. Recursively propagate errors to parents if they are not already error nodes.
+buildResultGraph :: IntMap (Either ModuleParseError ModuleImports) -> IntMap NodeResult
+buildResultGraph g = propagatedErrors
+ where
+ sccs = stronglyConnComp (graphEdges g)
+ (_, cycles) = partitionSCC sccs
+ cycleErrors :: IntMap NodeResult
+ cycleErrors = IntMap.unionsWith (<>) $ map errorsForCycle cycles
+ errorsForCycle :: [FilePathId] -> IntMap NodeResult
+ errorsForCycle files =
+ IntMap.fromListWith (<>) $ coerce $ concatMap (cycleErrorsForFile files) files
+ cycleErrorsForFile :: [FilePathId] -> FilePathId -> [(FilePathId,NodeResult)]
+ cycleErrorsForFile cycle f =
+ let entryPoints = mapMaybe (findImport f) cycle
+ in map (\imp -> (f, ErrorNode (PartOfCycle imp cycle :| []))) entryPoints
+ otherErrors = IntMap.map otherErrorsForFile g
+ otherErrorsForFile :: Either ModuleParseError ModuleImports -> NodeResult
+ otherErrorsForFile (Left err) = ErrorNode (ParseError err :| [])
+ otherErrorsForFile (Right ModuleImports{moduleImports}) =
+ let toEither (imp, Nothing) = Left imp
+ toEither (imp, Just path) = Right (imp, path)
+ (errs, imports') = partitionEithers (map toEither moduleImports)
+ in case nonEmpty errs of
+ Nothing -> SuccessNode imports'
+ Just errs' -> ErrorNode (NonEmpty.map FailedToLocateImport errs')
+
+ unpropagatedErrors = IntMap.unionWith (<>) cycleErrors otherErrors
+ -- The recursion here is fine since we use a lazy map and
+ -- we only recurse on SuccessNodes. In particular, we do not recurse
+ -- on nodes that are part of a cycle as they are already marked as
+ -- error nodes.
+ propagatedErrors =
+ IntMapLazy.map propagate unpropagatedErrors
+ propagate :: NodeResult -> NodeResult
+ propagate n@(ErrorNode _) = n
+ propagate n@(SuccessNode imps) =
+ let results = map (\(imp, FilePathId dep) -> (imp, propagatedErrors IntMap.! dep)) imps
+ (errs, _) = partitionNodeResults results
+ in case nonEmpty errs of
+ Nothing -> n
+ Just errs' -> ErrorNode (NonEmpty.map (ParentOfErrorNode . fst) errs')
+ findImport :: FilePathId -> FilePathId -> Maybe (Located ModuleName)
+ findImport (FilePathId file) importedFile =
+ case g IntMap.! file of
+ Left _ -> error "Tried to call findImport on a module with a parse error"
+ Right ModuleImports{moduleImports} ->
+ fmap fst $ find (\(_, resolvedImp) -> resolvedImp == Just importedFile) moduleImports
+
+graphEdges :: IntMap (Either ModuleParseError ModuleImports) -> [(FilePathId, FilePathId, [FilePathId])]
+graphEdges g =
+ map (\(k, v) -> (FilePathId k, FilePathId k, deps v)) $ IntMap.toList g
+ where deps :: Either e ModuleImports -> [FilePathId]
+ deps (Left _) = []
+ deps (Right ModuleImports{moduleImports}) = mapMaybe snd moduleImports
+
+partitionSCC :: [SCC a] -> ([a], [[a]])
+partitionSCC (CyclicSCC xs:rest) = second (xs:) $ partitionSCC rest
+partitionSCC (AcyclicSCC x:rest) = first (x:) $ partitionSCC rest
+partitionSCC [] = ([], [])
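+-- A small example of how the strongly connected components are split
+-- (purely illustrative):
+--
+-- > sccExample :: Bool
+-- > sccExample =
+-- >   partitionSCC [AcyclicSCC 1, CyclicSCC [2, 3], AcyclicSCC 4]
+-- >     == ([1, 4] :: [Int], [[2, 3]])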
+
+transitiveDeps :: DependencyInformation -> NormalizedFilePath -> Maybe TransitiveDependencies
+transitiveDeps DependencyInformation{..} file = do
+ let !fileId = pathToId depPathIdMap file
+ reachableVs <-
+ IntSet.delete (getFilePathId fileId) .
+ IntSet.fromList . map (fst3 . fromVertex) .
+ reachable g <$> toVertex (getFilePathId fileId)
+ let transitiveModuleDepIds = filter (\v -> v `IntSet.member` reachableVs) $ map (fst3 . fromVertex) vs
+ let transitivePkgDeps =
+ Set.toList $ Set.unions $
+ map (\f -> IntMap.findWithDefault Set.empty f depPkgDeps) $
+ getFilePathId fileId : transitiveModuleDepIds
+ let transitiveModuleDeps = map (idToPath depPathIdMap . FilePathId) transitiveModuleDepIds
+ pure TransitiveDependencies {..}
+ where (g, fromVertex, toVertex) = graphFromEdges (map (\(f, fs) -> (f, f, IntSet.toList fs)) $ IntMap.toList depModuleDeps)
+ vs = topSort g
+
+data TransitiveDependencies = TransitiveDependencies
+ { transitiveModuleDeps :: [NormalizedFilePath]
+ -- ^ Transitive module dependencies in topological order.
+ -- The module itself is not included.
+ , transitivePkgDeps :: [InstalledUnitId]
+ -- ^ Transitive pkg dependencies in unspecified order.
+ } deriving (Eq, Show, Generic)
+
+instance NFData TransitiveDependencies
diff --git a/src/Development/IDE/Import/FindImports.hs b/src/Development/IDE/Import/FindImports.hs
new file mode 100644
index 0000000..ed307ed
--- /dev/null
+++ b/src/Development/IDE/Import/FindImports.hs
@@ -0,0 +1,126 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE CPP #-}
+
+module Development.IDE.Import.FindImports
+ ( locateModule
+ , Import(..)
+ ) where
+
+import Development.IDE.GHC.Error as ErrUtils
+import Development.IDE.GHC.Orphans()
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+-- GHC imports
+import DynFlags
+import FastString
+import GHC
+import qualified Module as M
+import Packages
+import Outputable (showSDoc, ppr, pprPanic)
+import Finder
+import Control.DeepSeq
+
+-- standard imports
+import Control.Monad.Extra
+import Control.Monad.IO.Class
+import System.FilePath
+
+data Import
+ = FileImport !NormalizedFilePath
+ | PackageImport !M.InstalledUnitId
+ deriving (Show)
+
+instance NFData Import where
+ rnf (FileImport x) = rnf x
+ rnf (PackageImport x) = rnf x
+
+
+-- | Locate a module in the file system. Where we go from *daml to Haskell.
+locateModuleFile :: MonadIO m
+ => DynFlags
+ -> [String]
+ -> (NormalizedFilePath -> m Bool)
+ -> Bool
+ -> ModuleName
+ -> m (Maybe NormalizedFilePath)
+locateModuleFile dflags exts doesExist isSource modName = do
+ let candidates =
+ [ toNormalizedFilePath (prefix </> M.moduleNameSlashes modName <.> maybeBoot ext)
+ | prefix <- importPaths dflags, ext <- exts]
+ findM doesExist candidates
+ where
+ maybeBoot ext
+ | isSource = ext ++ "-boot"
+ | otherwise = ext
+
+-- | Locate a module in either the file system or the package database. Where we go from *daml to
+-- Haskell.
+locateModule
+ :: MonadIO m
+ => DynFlags
+ -> [String]
+ -> (NormalizedFilePath -> m Bool)
+ -> Located ModuleName
+ -> Maybe FastString
+ -> Bool
+ -> m (Either [FileDiagnostic] Import)
+locateModule dflags exts doesExist modName mbPkgName isSource = do
+ case mbPkgName of
+ -- if a package name is given we only go look for a package
+ Just _pkgName -> lookupInPackageDB dflags
+ Nothing -> do
+ -- first try to find the module as a file. If we can't find it try to find it in the package
+ -- database.
+ mbFile <- locateModuleFile dflags exts doesExist isSource $ unLoc modName
+ case mbFile of
+ Nothing -> lookupInPackageDB dflags
+ Just file -> return $ Right $ FileImport file
+ where
+ lookupInPackageDB dfs =
+ case lookupModuleWithSuggestions dfs (unLoc modName) mbPkgName of
+ LookupFound _m pkgConfig -> return $ Right $ PackageImport $ unitId pkgConfig
+ reason -> return $ Left $ notFoundErr dfs modName reason
+
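+-- A usage sketch (illustrative only; it assumes an extra
+-- @import qualified System.Directory as Dir@ for the existence check and that
+-- plain ".hs" sources are being searched):
+--
+-- > findHsImport :: MonadIO m => DynFlags -> Located ModuleName -> m (Either [FileDiagnostic] Import)
+-- > findHsImport dflags modName =
+-- >     locateModule dflags ["hs"] doesExist modName Nothing False
+-- >   where
+-- >     doesExist = liftIO . Dir.doesFileExist . fromNormalizedFilePath
+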
+-- | Don't call this on a found module.
+notFoundErr :: DynFlags -> Located M.ModuleName -> LookupResult -> [FileDiagnostic]
+notFoundErr dfs modName reason =
+ mkError' $ ppr' $ cannotFindModule dfs modName0 $ lookupToFindResult reason
+ where
+ mkError' = diagFromString "not found" (getLoc modName)
+ modName0 = unLoc modName
+ ppr' = showSDoc dfs
+ -- We convert the lookup result to a find result to reuse GHC's cannotFindModule pretty printer.
+ lookupToFindResult =
+ \case
+ LookupFound _m _pkgConfig ->
+ pprPanic "Impossible: called lookupToFind on found module." (ppr modName0)
+ LookupMultiple rs -> FoundMultiple rs
+ LookupHidden pkg_hiddens mod_hiddens ->
+ notFound
+ { fr_pkgs_hidden = map (moduleUnitId . fst) pkg_hiddens
+ , fr_mods_hidden = map (moduleUnitId . fst) mod_hiddens
+ }
+#if __GLASGOW_HASKELL__ >= 806
+ LookupUnusable unusable ->
+ let unusables' = map get_unusable unusable
+ get_unusable (m, ModUnusable r) = (moduleUnitId m, r)
+ get_unusable (_, r) =
+ pprPanic "findLookupResult: unexpected origin" (ppr r)
+ in notFound {fr_unusables = unusables'}
+#endif
+ LookupNotFound suggest ->
+ notFound {fr_suggestions = suggest}
+
+notFound :: FindResult
+notFound = NotFound
+ { fr_paths = []
+ , fr_pkg = Nothing
+ , fr_pkgs_hidden = []
+ , fr_mods_hidden = []
+#if __GLASGOW_HASKELL__ >= 806
+ , fr_unusables = []
+#endif
+ , fr_suggestions = []
+ }
diff --git a/src/Development/IDE/LSP/CodeAction.hs b/src/Development/IDE/LSP/CodeAction.hs
new file mode 100644
index 0000000..0bdb066
--- /dev/null
+++ b/src/Development/IDE/LSP/CodeAction.hs
@@ -0,0 +1,97 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE DuplicateRecordFields #-}
+
+-- | Go to the definition of a variable.
+module Development.IDE.LSP.CodeAction
+ ( setHandlersCodeAction
+ ) where
+
+import Language.Haskell.LSP.Types
+import Development.IDE.GHC.Compat
+import Development.IDE.Core.Rules
+import Development.IDE.LSP.Server
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashSet as Set
+import qualified Language.Haskell.LSP.Core as LSP
+import Language.Haskell.LSP.VFS
+import Language.Haskell.LSP.Messages
+import qualified Data.Rope.UTF16 as Rope
+import Data.Char
+import qualified Data.Text as T
+
+-- | Generate code actions.
+codeAction
+ :: LSP.LspFuncs ()
+ -> IdeState
+ -> CodeActionParams
+ -> IO (List CAResult)
+codeAction lsp _ CodeActionParams{_textDocument=TextDocumentIdentifier uri,_context=CodeActionContext{_diagnostics=List xs}} = do
+ -- disable logging as it's quite verbose
+ -- logInfo (ideLogger ide) $ T.pack $ "Code action req: " ++ show arg
+ contents <- LSP.getVirtualFileFunc lsp $ toNormalizedUri uri
+ let text = Rope.toText . (_text :: VirtualFile -> Rope.Rope) <$> contents
+ pure $ List
+ [ CACodeAction $ CodeAction title (Just CodeActionQuickFix) (Just $ List [x]) (Just edit) Nothing
+ | x <- xs, (title, tedit) <- suggestAction text x
+ , let edit = WorkspaceEdit (Just $ Map.singleton uri $ List tedit) Nothing
+ ]
+
+
+suggestAction :: Maybe T.Text -> Diagnostic -> [(T.Text, [TextEdit])]
+suggestAction contents Diagnostic{_range=_range@Range{..},..}
+
+-- File.hs:16:1: warning:
+-- The import of `Data.List' is redundant
+-- except perhaps to import instances from `Data.List'
+-- To import instances alone, use: import Data.List()
+ | "The import of " `T.isInfixOf` _message
+ , " is redundant" `T.isInfixOf` _message
+ , let newlineAfter = maybe False (T.isPrefixOf "\n" . T.dropWhile (\x -> isSpace x && x /= '\n') . snd . textAtPosition _end) contents
+ , let extend = newlineAfter && _character _start == 0 -- takes up an entire line, so remove the whole line
+ = [("Remove import", [TextEdit (if extend then Range _start (Position (_line _end + 1) 0) else _range) ""])]
+
+-- File.hs:22:8: error:
+-- Illegal lambda-case (use -XLambdaCase)
+-- File.hs:22:6: error:
+-- Illegal view pattern: x -> foo
+-- Use ViewPatterns to enable view patterns
+-- File.hs:26:8: error:
+-- Illegal `..' in record pattern
+-- Use RecordWildCards to permit this
+-- File.hs:53:28: error:
+-- Illegal tuple section: use TupleSections
+-- File.hs:238:29: error:
+-- * Can't make a derived instance of `Data FSATrace':
+-- You need DeriveDataTypeable to derive an instance for this class
+-- * In the data declaration for `FSATrace'
+-- C:\Neil\shake\src\Development\Shake\Command.hs:515:31: error:
+-- * Illegal equational constraint a ~ ()
+-- (Use GADTs or TypeFamilies to permit this)
+-- * In the context: a ~ ()
+-- While checking an instance declaration
+-- In the instance declaration for `Unit (m a)'
+ | exts@(_:_) <- filter (`Set.member` ghcExtensions) $ T.split (not . isAlpha) $ T.replace "-X" "" _message
+ = [("Add " <> x <> " extension", [TextEdit (Range (Position 0 0) (Position 0 0)) $ "{-# LANGUAGE " <> x <> " #-}\n"]) | x <- exts]
+
+suggestAction _ _ = []
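+-- As a worked example (a sketch of the intended behaviour): for a diagnostic
+-- whose message contains "Illegal lambda-case (use -XLambdaCase)", stripping
+-- "-X" and splitting on non-alphabetic characters leaves "LambdaCase", which is
+-- a known extension, so the suggested fix inserts "{-# LANGUAGE LambdaCase #-}\n"
+-- at the very top of the file (Position 0 0).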
+
+
+-- | All the GHC extensions
+ghcExtensions :: Set.HashSet T.Text
+ghcExtensions = Set.fromList $ map (T.pack . show) ghcEnumerateExtensions
+
+
+textAtPosition :: Position -> T.Text -> (T.Text, T.Text)
+textAtPosition (Position row col) x
+ | (preRow, mid:postRow) <- splitAt row $ T.splitOn "\n" x
+ , (preCol, postCol) <- T.splitAt col mid
+ = (T.intercalate "\n" $ preRow ++ [preCol], T.intercalate "\n" $ postCol : postRow)
+ | otherwise = (x, T.empty)
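+-- A worked example (illustrative): splitting "abc\ndefg\nhi" at Position 1 2
+-- yields everything before the point and everything after it, with line
+-- boundaries preserved.
+--
+-- > splitExample :: Bool
+-- > splitExample =
+-- >   textAtPosition (Position 1 2) (T.pack "abc\ndefg\nhi")
+-- >     == (T.pack "abc\nde", T.pack "fg\nhi")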
+
+
+setHandlersCodeAction :: PartialHandlers
+setHandlersCodeAction = PartialHandlers $ \WithMessage{..} x -> return x{
+ LSP.codeActionHandler = withResponse RspCodeAction codeAction
+ }
diff --git a/src/Development/IDE/LSP/Definition.hs b/src/Development/IDE/LSP/Definition.hs
new file mode 100644
index 0000000..3ddaa43
--- /dev/null
+++ b/src/Development/IDE/LSP/Definition.hs
@@ -0,0 +1,43 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+
+-- | Go to the definition of a variable.
+module Development.IDE.LSP.Definition
+ ( setHandlersDefinition
+ ) where
+
+import Language.Haskell.LSP.Types
+import Development.IDE.Types.Location
+
+import Development.IDE.Types.Logger
+import Development.IDE.Core.Rules
+import Development.IDE.Core.Service
+import Development.IDE.LSP.Server
+import qualified Language.Haskell.LSP.Core as LSP
+import Language.Haskell.LSP.Messages
+
+import qualified Data.Text as T
+
+-- | Go to the definition of a variable.
+gotoDefinition
+ :: IdeState
+ -> TextDocumentPositionParams
+ -> IO LocationResponseParams
+gotoDefinition ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos) = do
+ mbResult <- case uriToFilePath' uri of
+ Just path -> do
+ logInfo (ideLogger ide) $
+ "Definition request at position " <> T.pack (showPosition pos) <>
+ " in file: " <> T.pack path
+ runAction ide $ getDefinition (toNormalizedFilePath path) pos
+ Nothing -> pure Nothing
+ pure $ case mbResult of
+ Nothing -> MultiLoc []
+ Just loc -> SingleLoc loc
+
+
+setHandlersDefinition :: PartialHandlers
+setHandlersDefinition = PartialHandlers $ \WithMessage{..} x -> return x{
+ LSP.definitionHandler = withResponse RspDefinition $ const gotoDefinition
+ }
diff --git a/src/Development/IDE/LSP/Hover.hs b/src/Development/IDE/LSP/Hover.hs
new file mode 100644
index 0000000..7de2add
--- /dev/null
+++ b/src/Development/IDE/LSP/Hover.hs
@@ -0,0 +1,47 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+
+-- | Display information on hover.
+module Development.IDE.LSP.Hover
+ ( setHandlersHover
+ ) where
+
+import Language.Haskell.LSP.Types
+import Development.IDE.Types.Location
+import Development.IDE.Core.Service
+import Development.IDE.LSP.Server
+import Development.IDE.Types.Logger
+import qualified Language.Haskell.LSP.Core as LSP
+import Language.Haskell.LSP.Messages
+
+import qualified Data.Text as T
+
+import Development.IDE.Core.Rules
+
+-- | Display information on hover.
+onHover
+ :: IdeState
+ -> TextDocumentPositionParams
+ -> IO (Maybe Hover)
+onHover ide (TextDocumentPositionParams (TextDocumentIdentifier uri) pos) = do
+ mbResult <- case uriToFilePath' uri of
+ Just (toNormalizedFilePath -> filePath) -> do
+ logInfo (ideLogger ide) $
+ "Hover request at position " <> T.pack (showPosition pos) <>
+ " in file: " <> T.pack (fromNormalizedFilePath filePath)
+ runAction ide $ getAtPoint filePath pos
+ Nothing -> pure Nothing
+
+ case mbResult of
+ Just (mbRange, contents) ->
+ pure $ Just $ Hover
+ (HoverContents $ MarkupContent MkMarkdown $ T.intercalate sectionSeparator contents)
+ mbRange
+
+ Nothing -> pure Nothing
+
+setHandlersHover :: PartialHandlers
+setHandlersHover = PartialHandlers $ \WithMessage{..} x -> return x{
+ LSP.hoverHandler = withResponse RspHover $ const onHover
+ }
diff --git a/src/Development/IDE/LSP/LanguageServer.hs b/src/Development/IDE/LSP/LanguageServer.hs
new file mode 100644
index 0000000..5300c10
--- /dev/null
+++ b/src/Development/IDE/LSP/LanguageServer.hs
@@ -0,0 +1,187 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE ExistentialQuantification #-}
+
+-- WARNING: A copy of DA.Daml.LanguageServer, try to keep them in sync
+-- This version removes the daml: handling
+module Development.IDE.LSP.LanguageServer
+ ( runLanguageServer
+ ) where
+
+import Language.Haskell.LSP.Types
+import Language.Haskell.LSP.Types.Capabilities
+import Development.IDE.LSP.Server
+import qualified Language.Haskell.LSP.Control as LSP
+import qualified Language.Haskell.LSP.Core as LSP
+import Control.Concurrent.Chan
+import Control.Concurrent.Extra
+import Control.Concurrent.Async
+import Control.Concurrent.STM
+import Control.Exception.Safe
+import Data.Default
+import Data.Maybe
+import qualified Data.Set as Set
+import qualified Data.Text as T
+import GHC.IO.Handle (hDuplicate, hDuplicateTo)
+import System.IO
+import Control.Monad.Extra
+
+import Development.IDE.LSP.Definition
+import Development.IDE.LSP.Hover
+import Development.IDE.LSP.CodeAction
+import Development.IDE.LSP.Notifications
+import Development.IDE.Core.Service
+import Development.IDE.Types.Logger
+import Development.IDE.Core.FileStore
+import Language.Haskell.LSP.Core (LspFuncs(..))
+import Language.Haskell.LSP.Messages
+
+
+runLanguageServer
+ :: LSP.Options
+ -> PartialHandlers
+ -> ((FromServerMessage -> IO ()) -> VFSHandle -> ClientCapabilities -> IO IdeState)
+ -> IO ()
+runLanguageServer options userHandlers getIdeState = do
+ -- Move stdout to another file descriptor and duplicate stderr
+ -- to stdout. This guards against stray prints from corrupting the JSON-RPC
+ -- message stream.
+ newStdout <- hDuplicate stdout
+ stderr `hDuplicateTo` stdout
+ hSetBuffering stderr NoBuffering
+ hSetBuffering stdout NoBuffering
+
+ -- Print out a single space to check that the above redirection works.
+ -- This is interleaved with the logger, hence we just print a space here in
+ -- order not to mess up the output too much. We verified that, without the
+ -- redirection, this breaks the language server tests.
+ putStr " " >> hFlush stdout
+
+ -- Send everything over a channel, since you need to wait until after initialise before
+ -- LspFuncs is available
+ clientMsgChan :: Chan Message <- newChan
+
+ -- These barriers are signaled when the threads reading from these chans exit.
+ -- This should not happen but if it does, we will make sure that the whole server
+ -- dies and can be restarted instead of losing threads silently.
+ clientMsgBarrier <- newBarrier
+
+ -- The set of requests ids that we have received but not finished processing
+ pendingRequests <- newTVarIO Set.empty
+ -- The set of requests that have been cancelled and are also in pendingRequests
+ cancelledRequests <- newTVarIO Set.empty
+
+ let withResponse wrap f = Just $ \r@RequestMessage{_id} -> do
+ atomically $ modifyTVar pendingRequests (Set.insert _id)
+ writeChan clientMsgChan $ Response r wrap f
+ let withNotification old f = Just $ \r -> writeChan clientMsgChan $ Notification r (\lsp ide x -> f lsp ide x >> whenJust old ($ r))
+ let cancelRequest reqId = atomically $ do
+ queued <- readTVar pendingRequests
+ -- We want to avoid that the list of cancelled requests
+ -- keeps growing if we receive cancellations for requests
+ -- that do not exist or have already been processed.
+ when (reqId `elem` queued) $
+ modifyTVar cancelledRequests (Set.insert reqId)
+ let clearReqId reqId = atomically $ do
+ modifyTVar pendingRequests (Set.delete reqId)
+ modifyTVar cancelledRequests (Set.delete reqId)
+ -- We implement request cancellation by racing waitForCancel against
+ -- the actual request handler.
+ let waitForCancel reqId = atomically $ do
+ cancelled <- readTVar cancelledRequests
+ unless (reqId `Set.member` cancelled) retry
+ let PartialHandlers parts =
+ setHandlersIgnore <> -- least important
+ setHandlersDefinition <> setHandlersHover <> setHandlersCodeAction <> -- useful features someone may override
+ userHandlers <>
+ setHandlersNotifications <> -- absolutely critical, join them with user notifications
+ cancelHandler cancelRequest
+ -- Cancel requests are special since they need to be handled
+ -- out of order to be useful. Existing handlers are run afterwards.
+ handlers <- parts WithMessage{withResponse, withNotification} def
+
+ let initializeCallbacks = LSP.InitializeCallbacks
+ { LSP.onInitialConfiguration = const $ Right ()
+ , LSP.onConfigurationChange = const $ Right ()
+ , LSP.onStartup = handleInit (signalBarrier clientMsgBarrier ()) clearReqId waitForCancel clientMsgChan
+ }
+
+ void $ waitAnyCancel =<< traverse async
+ [ void $ LSP.runWithHandles
+ stdin
+ newStdout
+ initializeCallbacks
+ handlers
+ (modifyOptions options)
+ Nothing
+ , void $ waitBarrier clientMsgBarrier
+ ]
+ where
+ handleInit :: IO () -> (LspId -> IO ()) -> (LspId -> IO ()) -> Chan Message -> LSP.LspFuncs () -> IO (Maybe err)
+ handleInit exitClientMsg clearReqId waitForCancel clientMsgChan lspFuncs@LSP.LspFuncs{..} = do
+ ide <- getIdeState sendFunc (makeLSPVFSHandle lspFuncs) clientCapabilities
+ _ <- flip forkFinally (const exitClientMsg) $ forever $ do
+ msg <- readChan clientMsgChan
+ case msg of
+ Notification x@NotificationMessage{_params} act -> do
+ catch (act lspFuncs ide _params) $ \(e :: SomeException) ->
+ logError (ideLogger ide) $ T.pack $
+ "Unexpected exception on notification, please report!\n" ++
+ "Message: " ++ show x ++ "\n" ++
+ "Exception: " ++ show e
+ Response x@RequestMessage{_id, _params} wrap act ->
+ flip finally (clearReqId _id) $
+ catch (do
+ -- We could optimize this by first checking if the id
+ -- is in the cancelled set. However, this is unlikely to be a
+ -- bottleneck and the additional check might hide
+ -- issues with async exceptions that need to be fixed.
+ cancelOrRes <- race (waitForCancel _id) $ act lspFuncs ide _params
+ case cancelOrRes of
+ Left () -> do
+ logDebug (ideLogger ide) $ T.pack $
+ "Cancelled request " <> show _id
+ sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) Nothing $
+ Just $ ResponseError RequestCancelled "" Nothing
+ Right res ->
+ sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) (Just res) Nothing
+ ) $ \(e :: SomeException) -> do
+ logError (ideLogger ide) $ T.pack $
+ "Unexpected exception on request, please report!\n" ++
+ "Message: " ++ show x ++ "\n" ++
+ "Exception: " ++ show e
+ sendFunc $ wrap $ ResponseMessage "2.0" (responseId _id) Nothing $
+ Just $ ResponseError InternalError (T.pack $ show e) Nothing
+ pure Nothing
+
+
+-- | Things that get sent to us, but we don't deal with.
+-- Set them to avoid a warning in VS Code output.
+setHandlersIgnore :: PartialHandlers
+setHandlersIgnore = PartialHandlers $ \_ x -> return x
+ {LSP.initializedHandler = none
+ }
+ where none = Just $ const $ return ()
+
+cancelHandler :: (LspId -> IO ()) -> PartialHandlers
+cancelHandler cancelRequest = PartialHandlers $ \_ x -> return x
+ {LSP.cancelNotificationHandler = Just $ \msg@NotificationMessage {_params = CancelParams {_id}} -> do
+ cancelRequest _id
+ whenJust (LSP.cancelNotificationHandler x) ($ msg)
+ }
+
+
+-- | A message that we need to deal with. The pieces are split up with existentials to gain additional type safety
+-- and to defer precise processing until later (this lets us stay at a slightly higher level of abstraction for longer).
+data Message
+ = forall m req resp . (Show m, Show req) => Response (RequestMessage m req resp) (ResponseMessage resp -> FromServerMessage) (LSP.LspFuncs () -> IdeState -> req -> IO resp)
+ | forall m req . (Show m, Show req) => Notification (NotificationMessage m req) (LSP.LspFuncs () -> IdeState -> req -> IO ())
+
+
+modifyOptions :: LSP.Options -> LSP.Options
+modifyOptions x = x{LSP.textDocumentSync = Just $ tweak orig}
+ where
+ tweak x = x{_openClose=Just True, _change=Just TdSyncIncremental, _save=Just $ SaveOptions Nothing}
+ orig = fromMaybe tdsDefault $ LSP.textDocumentSync x
+ tdsDefault = TextDocumentSyncOptions Nothing Nothing Nothing Nothing Nothing
diff --git a/src/Development/IDE/LSP/Notifications.hs b/src/Development/IDE/LSP/Notifications.hs
new file mode 100644
index 0000000..9a16b43
--- /dev/null
+++ b/src/Development/IDE/LSP/Notifications.hs
@@ -0,0 +1,55 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE RankNTypes #-}
+
+module Development.IDE.LSP.Notifications
+ ( setHandlersNotifications
+ ) where
+
+import Language.Haskell.LSP.Types
+import Development.IDE.LSP.Server
+import qualified Language.Haskell.LSP.Core as LSP
+import qualified Language.Haskell.LSP.Types as LSP
+
+import Development.IDE.Types.Logger
+import Development.IDE.Core.Service
+import Development.IDE.Types.Location
+
+import Control.Monad.Extra
+import qualified Data.Set as S
+
+import Development.IDE.Core.FileStore
+import Development.IDE.Core.OfInterest
+
+
+whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO ()
+whenUriFile uri act = whenJust (LSP.uriToFilePath uri) $ act . toNormalizedFilePath
+
+setHandlersNotifications :: PartialHandlers
+setHandlersNotifications = PartialHandlers $ \WithMessage{..} x -> return x
+ {LSP.didOpenTextDocumentNotificationHandler = withNotification (LSP.didOpenTextDocumentNotificationHandler x) $
+ \_ ide (DidOpenTextDocumentParams TextDocumentItem{_uri,_version}) -> do
+ updatePositionMapping ide (VersionedTextDocumentIdentifier _uri (Just _version)) (List [])
+ whenUriFile _uri $ \file -> do
+ modifyFilesOfInterest ide (S.insert file)
+ logInfo (ideLogger ide) $ "Opened text document: " <> getUri _uri
+
+ ,LSP.didChangeTextDocumentNotificationHandler = withNotification (LSP.didChangeTextDocumentNotificationHandler x) $
+ \_ ide (DidChangeTextDocumentParams identifier@VersionedTextDocumentIdentifier{_uri} changes) -> do
+ updatePositionMapping ide identifier changes
+ setSomethingModified ide
+ logInfo (ideLogger ide) $ "Modified text document: " <> getUri _uri
+
+ ,LSP.didSaveTextDocumentNotificationHandler = withNotification (LSP.didSaveTextDocumentNotificationHandler x) $
+ \_ ide (DidSaveTextDocumentParams TextDocumentIdentifier{_uri}) -> do
+ setSomethingModified ide
+ logInfo (ideLogger ide) $ "Saved text document: " <> getUri _uri
+
+ ,LSP.didCloseTextDocumentNotificationHandler = withNotification (LSP.didCloseTextDocumentNotificationHandler x) $
+ \_ ide (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> do
+ whenUriFile _uri $ \file -> do
+ modifyFilesOfInterest ide (S.delete file)
+ logInfo (ideLogger ide) $ "Closed text document: " <> getUri _uri
+ }
diff --git a/src/Development/IDE/LSP/Protocol.hs b/src/Development/IDE/LSP/Protocol.hs
new file mode 100644
index 0000000..1c1870e
--- /dev/null
+++ b/src/Development/IDE/LSP/Protocol.hs
@@ -0,0 +1,23 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+{-# LANGUAGE PatternSynonyms #-}
+
+module Development.IDE.LSP.Protocol
+ ( pattern EventFileDiagnostics
+ ) where
+
+import Development.IDE.Types.Diagnostics
+import Development.IDE.Types.Location
+import Language.Haskell.LSP.Messages
+import Language.Haskell.LSP.Types
+
+----------------------------------------------------------------------------------------------------
+-- Pretty printing
+----------------------------------------------------------------------------------------------------
+
+-- | Pattern synonym to make it a bit more convenient to match on diagnostics
+-- in things like damlc test.
+pattern EventFileDiagnostics :: FilePath -> [Diagnostic] -> FromServerMessage
+pattern EventFileDiagnostics fp diags <-
+ NotPublishDiagnostics
+ (NotificationMessage _ _ (PublishDiagnosticsParams (uriToFilePath' -> Just fp) (List diags)))
diff --git a/src/Development/IDE/LSP/Server.hs b/src/Development/IDE/LSP/Server.hs
new file mode 100644
index 0000000..180392e
--- /dev/null
+++ b/src/Development/IDE/LSP/Server.hs
@@ -0,0 +1,40 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE RankNTypes #-}
+module Development.IDE.LSP.Server
+ ( WithMessage(..)
+ , PartialHandlers(..)
+ ) where
+
+
+import Data.Default
+
+import Language.Haskell.LSP.Types
+import qualified Language.Haskell.LSP.Core as LSP
+import qualified Language.Haskell.LSP.Messages as LSP
+import Development.IDE.Core.Service
+
+
+data WithMessage = WithMessage
+ {withResponse :: forall m req resp . (Show m, Show req) =>
+ (ResponseMessage resp -> LSP.FromServerMessage) -> -- how to wrap a response
+ (LSP.LspFuncs () -> IdeState -> req -> IO resp) -> -- actual work
+ Maybe (LSP.Handler (RequestMessage m req resp))
+ ,withNotification :: forall m req . (Show m, Show req) =>
+ Maybe (LSP.Handler (NotificationMessage m req)) -> -- old notification handler
+ (LSP.LspFuncs () -> IdeState -> req -> IO ()) -> -- actual work
+ Maybe (LSP.Handler (NotificationMessage m req))
+ }
+
+newtype PartialHandlers = PartialHandlers (WithMessage -> LSP.Handlers -> IO LSP.Handlers)
+
+instance Default PartialHandlers where
+ def = PartialHandlers $ \_ x -> pure x
+
+instance Semigroup PartialHandlers where
+ PartialHandlers a <> PartialHandlers b = PartialHandlers $ \w x -> a w x >>= b w
+
+instance Monoid PartialHandlers where
+ mempty = def
diff --git a/src/Development/IDE/Spans/AtPoint.hs b/src/Development/IDE/Spans/AtPoint.hs
new file mode 100644
index 0000000..649238e
--- /dev/null
+++ b/src/Development/IDE/Spans/AtPoint.hs
@@ -0,0 +1,144 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+-- | Gives information about symbols at a given point in DAML files.
+-- These are all pure functions that should execute quickly.
+module Development.IDE.Spans.AtPoint (
+ atPoint
+ , gotoDefinition
+ ) where
+
+import Development.IDE.Spans.Documentation
+import Development.IDE.GHC.Error
+import Development.IDE.GHC.Orphans()
+import Development.IDE.Types.Location
+
+-- DAML compiler and infrastructure
+import Development.Shake
+import Development.IDE.GHC.Util
+import Development.IDE.GHC.Compat
+import Development.IDE.Types.Options
+import Development.IDE.Spans.Type as SpanInfo
+
+-- GHC API imports
+import Avail
+import GHC
+import DynFlags
+import FastString
+import Name
+import Outputable hiding ((<>))
+import SrcLoc
+
+import Control.Monad.Extra
+import Control.Monad.Trans.Maybe
+import Control.Monad.IO.Class
+import Data.Maybe
+import Data.List
+import qualified Data.Text as T
+
+-- | Locate the definition of the name at a given position.
+gotoDefinition
+ :: MonadIO m
+ => (FilePath -> m (Maybe HieFile))
+ -> IdeOptions
+ -> HscEnv
+ -> [SpanInfo]
+ -> Position
+ -> m (Maybe Location)
+gotoDefinition getHieFile ideOpts pkgState srcSpans pos =
+ listToMaybe <$> locationsAtPoint getHieFile ideOpts pkgState pos srcSpans
+
+-- | Synopsis for the name at a given position.
+atPoint
+ :: IdeOptions
+ -> [TypecheckedModule]
+ -> [SpanInfo]
+ -> Position
+ -> Maybe (Maybe Range, [T.Text])
+atPoint IdeOptions{..} tcs srcSpans pos = do
+ SpanInfo{..} <- listToMaybe $ orderSpans $ spansAtPoint pos srcSpans
+ ty <- spaninfoType
+ let mbName = getNameM spaninfoSource
+ mbDefinedAt = fmap (\name -> "**Defined " <> T.pack (showSDocUnsafe $ pprNameDefnLoc name) <> "**\n") mbName
+ docInfo = maybe [] (\name -> getDocumentation name tcs) mbName
+ range = Range
+ (Position spaninfoStartLine spaninfoStartCol)
+ (Position spaninfoEndLine spaninfoEndCol)
+ colon = if optNewColonConvention then ":" else "::"
+ wrapLanguageSyntax x = T.unlines [ "```" <> T.pack optLanguageSyntax, x, "```"]
+ typeSig = wrapLanguageSyntax $ case mbName of
+ Nothing -> colon <> " " <> showName ty
+ Just name ->
+ let modulePrefix = maybe "" (<> ".") (getModuleNameAsText name)
+ in modulePrefix <> showName name <> "\n " <> colon <> " " <> showName ty
+ hoverInfo = docInfo <> [typeSig] <> maybeToList mbDefinedAt
+ return (Just range, hoverInfo)
+ where
+ -- NOTE(RJR): This is a bit hacky.
+ -- We don't want to show the user type signatures generated from Eq and Show
+ -- instances, as they do not appear in the source program.
+ -- However the user could have written an `==` or `show` function directly,
+ -- in which case we still want to show information for that.
+ -- Hence we just move such information later in the list of spans.
+ orderSpans :: [SpanInfo] -> [SpanInfo]
+ orderSpans = uncurry (++) . partition (not . isTypeclassDeclSpan)
+ isTypeclassDeclSpan :: SpanInfo -> Bool
+ isTypeclassDeclSpan spanInfo =
+ case getNameM (spaninfoSource spanInfo) of
+ Just name -> any (`isInfixOf` getOccString name) ["==", "showsPrec"]
+ Nothing -> False
+
+locationsAtPoint :: forall m . MonadIO m => (FilePath -> m (Maybe HieFile)) -> IdeOptions -> HscEnv -> Position -> [SpanInfo] -> m [Location]
+locationsAtPoint getHieFile IdeOptions{..} pkgState pos =
+ fmap (map srcSpanToLocation) .
+ mapMaybeM (getSpan . spaninfoSource) .
+ spansAtPoint pos
+ where getSpan :: SpanSource -> m (Maybe SrcSpan)
+ getSpan NoSource = pure Nothing
+ getSpan (SpanS sp) = pure $ Just sp
+ getSpan (Named name) = case nameSrcSpan name of
+ sp@(RealSrcSpan _) -> pure $ Just sp
+ sp@(UnhelpfulSpan _) -> runMaybeT $ do
+ guard (sp /= wiredInSrcSpan)
+ -- This case usually arises when the definition is in an external package.
+ -- In this case the interface files contain garbage source spans
+ -- so we instead read the .hie files to get useful source spans.
+ let mod = nameModule name
+ let unitId = moduleUnitId mod
+ pkgConfig <- MaybeT $ pure $ lookupPackageConfig unitId pkgState
+ hiePath <- MaybeT $ liftIO $ optLocateHieFile optPkgLocationOpts pkgConfig mod
+ hieFile <- MaybeT $ getHieFile hiePath
+ avail <- MaybeT $ pure $ listToMaybe (filterAvails (eqName name) $ hie_exports hieFile)
+ srcPath <- MaybeT $ liftIO $ optLocateSrcFile optPkgLocationOpts pkgConfig mod
+ -- The location will point to the source file used during compilation.
+ -- This file might no longer exist and, even if it does, the path will be relative
+ -- to the compilation directory, which we don’t know.
+ let span = setFileName srcPath $ nameSrcSpan $ availName avail
+ pure span
+ -- We ignore uniques and source spans and only compare the name and the module.
+ eqName :: Name -> Name -> Bool
+ eqName n n' = nameOccName n == nameOccName n' && nameModule n == nameModule n'
+ setFileName f (RealSrcSpan span) = RealSrcSpan (span { srcSpanFile = mkFastString f })
+ setFileName _ span@(UnhelpfulSpan _) = span
+
+spansAtPoint :: Position -> [SpanInfo] -> [SpanInfo]
+spansAtPoint pos = filter atp where
+ line = _line pos
+ cha = _character pos
+ atp SpanInfo{..} = spaninfoStartLine <= line
+ && spaninfoEndLine >= line
+ && spaninfoStartCol <= cha
+ -- The end col points to the column after the
+ -- last character so we use > instead of >=
+ && spaninfoEndCol > cha
+
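+-- For example (illustrative): a span with start line 2, start column 4, end
+-- line 2 and end column 8 contains Position 2 4 up to Position 2 7, but not
+-- Position 2 8, since the end column points one past the last character.
+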
+showName :: Outputable a => a -> T.Text
+showName = T.pack . prettyprint
+ where
+ prettyprint x = renderWithStyle unsafeGlobalDynFlags (ppr x) style
+ style = mkUserStyle unsafeGlobalDynFlags neverQualify AllTheWay
+
+getModuleNameAsText :: Name -> Maybe T.Text
+getModuleNameAsText n = do
+ m <- nameModule_maybe n
+ return . T.pack . moduleNameString $ moduleName m
diff --git a/src/Development/IDE/Spans/Calculate.hs b/src/Development/IDE/Spans/Calculate.hs
new file mode 100644
index 0000000..7f27d70
--- /dev/null
+++ b/src/Development/IDE/Spans/Calculate.hs
@@ -0,0 +1,187 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+-- ORIGINALLY COPIED FROM https://github.com/commercialhaskell/intero
+
+{-# LANGUAGE CPP #-}
+{-# LANGUAGE RankNTypes #-}
+
+-- | Get information on modules, identifiers, etc.
+
+module Development.IDE.Spans.Calculate(getSrcSpanInfos,listifyAllSpans) where
+
+import ConLike
+import Control.Monad
+import qualified CoreUtils
+import Data.Data
+import qualified Data.Generics
+import Data.List
+import Data.Maybe
+import DataCon
+import Desugar
+import GHC
+import GhcMonad
+import FastString (mkFastString)
+import Development.IDE.Types.Location
+import Development.IDE.Spans.Type
+import Development.IDE.GHC.Error (zeroSpan)
+import Prelude hiding (mod)
+import TcHsSyn
+import Var
+import Development.IDE.Core.Compile
+import Development.IDE.GHC.Util
+
+
+-- A lot of things gained an extra X argument in GHC 8.6, which we mostly ignore.
+-- The U macro below stands in for that argument on 8.6 and expands to nothing on 8.4.
+#if __GLASGOW_HASKELL__ >= 806
+#define U _
+#else
+#define U
+#endif
+
+-- | Get source span info, used for e.g. AtPoint and Goto Definition.
+getSrcSpanInfos
+ :: HscEnv
+ -> [(Located ModuleName, Maybe NormalizedFilePath)]
+ -> TcModuleResult
+ -> IO [SpanInfo]
+getSrcSpanInfos env imports tc =
+ runGhcEnv env
+ . getSpanInfo imports
+ $ tmrModule tc
+
+-- | Get ALL source spans in the module.
+getSpanInfo :: GhcMonad m
+ => [(Located ModuleName, Maybe NormalizedFilePath)] -- ^ imports
+ -> TypecheckedModule
+ -> m [SpanInfo]
+getSpanInfo mods tcm =
+ do let tcs = tm_typechecked_source tcm
+ bs = listifyAllSpans tcs :: [LHsBind GhcTc]
+ es = listifyAllSpans tcs :: [LHsExpr GhcTc]
+ ps = listifyAllSpans' tcs :: [Pat GhcTc]
+ ts = listifyAllSpans $ tm_renamed_source tcm :: [LHsType GhcRn]
+ bts <- mapM (getTypeLHsBind tcm) bs -- binds
+ ets <- mapM (getTypeLHsExpr tcm) es -- expressions
+ pts <- mapM (getTypeLPat tcm) ps -- patterns
+ tts <- mapM (getLHsType tcm) ts -- types
+ let imports = importInfo mods
+ let exports = getExports tcm
+ let exprs = exports ++ imports ++ concat bts ++ concat tts ++ catMaybes (ets ++ pts)
+ return (mapMaybe toSpanInfo (sortBy cmp exprs))
+ where cmp (_,a,_) (_,b,_)
+ | a `isSubspanOf` b = LT
+ | b `isSubspanOf` a = GT
+ | otherwise = EQ
+
+getExports :: TypecheckedModule -> [(SpanSource, SrcSpan, Maybe Type)]
+getExports m
+ | Just (_, _, Just exports, _) <- renamedSource m =
+ [ (Named $ unLoc n, getLoc n, Nothing)
+ | (e, _) <- exports
+ , n <- ieLNames $ unLoc e
+ ]
+getExports _ = []
+
+-- | Variant of GHC's ieNames that produces LIdP instead of IdP
+ieLNames :: IE pass -> [Located (IdP pass)]
+ieLNames (IEVar U n ) = [ieLWrappedName n]
+ieLNames (IEThingAbs U n ) = [ieLWrappedName n]
+ieLNames (IEThingAll U n ) = [ieLWrappedName n]
+ieLNames (IEThingWith U n _ ns _) = ieLWrappedName n : map ieLWrappedName ns
+ieLNames _ = []
+
+-- | Get the name and type of a binding.
+getTypeLHsBind :: (GhcMonad m)
+ => TypecheckedModule
+ -> LHsBind GhcTc
+ -> m [(SpanSource, SrcSpan, Maybe Type)]
+getTypeLHsBind _ (L _spn FunBind{fun_id = pid,fun_matches = MG{}}) =
+ return [(Named $ getName (unLoc pid), getLoc pid, Just (varType (unLoc pid)))]
+getTypeLHsBind _ _ = return []
+
+-- | Get the name and type of an expression.
+getTypeLHsExpr :: (GhcMonad m)
+ => TypecheckedModule
+ -> LHsExpr GhcTc
+ -> m (Maybe (SpanSource, SrcSpan, Maybe Type))
+getTypeLHsExpr _ e = do
+ hs_env <- getSession
+ (_, mbe) <- liftIO (deSugarExpr hs_env e)
+ return $
+ case mbe of
+ Just expr ->
+ Just (getSpanSource (unLoc e), getLoc e, Just (CoreUtils.exprType expr))
+ Nothing -> Nothing
+ where
+ getSpanSource :: HsExpr GhcTc -> SpanSource
+ getSpanSource (HsVar U (L _ i)) = Named (getName i)
+ getSpanSource (HsConLikeOut U (RealDataCon dc)) = Named (dataConName dc)
+ getSpanSource RecordCon {rcon_con_name} = Named (getName rcon_con_name)
+ getSpanSource (HsWrap U _ xpr) = getSpanSource xpr
+ getSpanSource (HsPar U xpr) = getSpanSource (unLoc xpr)
+ getSpanSource _ = NoSource
+
+-- | Get the name and type of a pattern.
+getTypeLPat :: (GhcMonad m)
+ => TypecheckedModule
+ -> Pat GhcTc
+ -> m (Maybe (SpanSource, SrcSpan, Maybe Type))
+getTypeLPat _ pat =
+ let (src, spn) = getSpanSource pat in
+ return $ Just (src, spn, Just (hsPatType pat))
+ where
+ getSpanSource :: Pat GhcTc -> (SpanSource, SrcSpan)
+ getSpanSource (VarPat U (L spn vid)) = (Named (getName vid), spn)
+ getSpanSource (ConPatOut (L spn (RealDataCon dc)) _ _ _ _ _ _) =
+ (Named (dataConName dc), spn)
+ getSpanSource _ = (NoSource, noSrcSpan)
+
+getLHsType
+ :: GhcMonad m
+ => TypecheckedModule
+ -> LHsType GhcRn
+ -> m [(SpanSource, SrcSpan, Maybe Type)]
+getLHsType _ (L spn (HsTyVar U _ v)) = pure [(Named $ unLoc v, spn, Nothing)]
+getLHsType _ _ = pure []
+
+importInfo :: [(Located ModuleName, Maybe NormalizedFilePath)]
+ -> [(SpanSource, SrcSpan, Maybe Type)]
+importInfo = mapMaybe (uncurry wrk) where
+ wrk :: Located ModuleName -> Maybe NormalizedFilePath -> Maybe (SpanSource, SrcSpan, Maybe Type)
+ wrk modName = \case
+ Nothing -> Nothing
+ Just fp -> Just (fpToSpanSource $ fromNormalizedFilePath fp, getLoc modName, Nothing)
+
+ -- TODO make this point to the module name
+ fpToSpanSource :: FilePath -> SpanSource
+ fpToSpanSource fp = SpanS $ RealSrcSpan $ zeroSpan $ mkFastString fp
+
+-- | Get ALL source spans in the source.
+listifyAllSpans :: (Typeable a, Data m) => m -> [Located a]
+listifyAllSpans tcs =
+ Data.Generics.listify p tcs
+ where p (L spn _) = isGoodSrcSpan spn
+
+-- This is a version of `listifyAllSpans` specialized to picking out
+-- patterns. It exists because GHC now defines `type LPat p = Pat p`
+-- (i.e. patterns no longer carry a top-level location).
+listifyAllSpans' :: Typeable a
+ => TypecheckedSource -> [Pat a]
+listifyAllSpans' tcs = Data.Generics.listify (const True) tcs
+
+
+-- | Pretty print the types into a 'SpanInfo'.
+toSpanInfo :: (SpanSource, SrcSpan, Maybe Type) -> Maybe SpanInfo
+toSpanInfo (name,mspan,typ) =
+ case mspan of
+ RealSrcSpan spn ->
+ -- GHC’s line and column numbers are 1-based while LSP’s line and column
+ -- numbers are 0-based.
+ Just (SpanInfo (srcSpanStartLine spn - 1)
+ (srcSpanStartCol spn - 1)
+ (srcSpanEndLine spn - 1)
+ (srcSpanEndCol spn - 1)
+ typ
+ name)
+ _ -> Nothing
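+
+-- Not part of the original patch: an illustrative sketch of the same
+-- 1-based-to-0-based shift for a bare LSP 'Range', using only names already
+-- imported above. For example, a GHC span starting at line 3, column 5 and
+-- ending at line 3, column 8 maps to @Range (Position 2 4) (Position 2 7)@.
+realSrcSpanToRangeExample :: RealSrcSpan -> Range
+realSrcSpanToRangeExample spn =
+  Range (Position (srcSpanStartLine spn - 1) (srcSpanStartCol spn - 1))
+        (Position (srcSpanEndLine spn - 1) (srcSpanEndCol spn - 1))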
diff --git a/src/Development/IDE/Spans/Documentation.hs b/src/Development/IDE/Spans/Documentation.hs
new file mode 100644
index 0000000..5530fb8
--- /dev/null
+++ b/src/Development/IDE/Spans/Documentation.hs
@@ -0,0 +1,92 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Spans.Documentation (
+ getDocumentation
+ ) where
+
+import Control.Monad
+import Data.List.Extra
+import qualified Data.Map as M
+import Data.Maybe
+import qualified Data.Text as T
+import Development.IDE.GHC.Error
+import Development.IDE.Spans.Calculate
+import FastString
+import GHC
+import SrcLoc
+
+
+getDocumentation
+ :: Name -- ^ The name you want documentation for.
+ -> [TypecheckedModule] -- ^ All of the possible modules it could be defined in.
+ -> [T.Text]
+-- This finds any documentation that lies between the name you want
+-- documentation for and the name before it. The algorithm is only
+-- approximately correct: it is easy to construct cases where it is wrong
+-- (usually only slightly, though there may be edge cases where it is badly
+-- wrong).
+-- TODO: Build on ghc-exactprint to extract this information
+-- more accurately.
+getDocumentation targetName tcs = fromMaybe [] $ do
+ -- Find the module the target is defined in.
+ targetNameSpan <- realSpan $ nameSrcSpan targetName
+ tc <-
+ listToMaybe
+ $ filter ((==) (Just $ srcSpanFile targetNameSpan) . annotationFileName)
+ $ reverse tcs -- TODO: Is reversing the list here really necessary?
+ -- Names bound by the module (we want to exclude non-"top-level"
+ -- bindings but unfortunately we get all here).
+ let bs = mapMaybe name_of_bind
+ (listifyAllSpans (tm_typechecked_source tc) :: [LHsBind GhcTc])
+ -- Sort the names' source spans.
+ let sortedSpans = sortedNameSpans bs
+ -- Now go ahead and extract the docs.
+ let docs = ann tc
+ nameInd <- elemIndex targetNameSpan sortedSpans
+ let prevNameSpan =
+ if nameInd >= 1
+ then sortedSpans !! (nameInd - 1)
+ else zeroSpan $ srcSpanFile targetNameSpan
+ -- Annoyingly "-- |" documentation isn't annotated with a location,
+ -- so you have to pull it out from the elements.
+ pure
+ $ docHeaders
+ $ filter (\(L target _) -> isBetween target prevNameSpan targetNameSpan)
+ $ mapMaybe (\(L l v) -> L <$> realSpan l <*> pure v)
+ $ join
+ $ M.elems
+ docs
+ where
+ -- Get the name bound by a binding. We only concern ourselves with
+ -- @FunBind@ (which covers functions and variables).
+ name_of_bind :: LHsBind GhcTc -> Maybe Name
+ name_of_bind (L _ FunBind {fun_id}) = Just (getName (unLoc fun_id))
+ name_of_bind _ = Nothing
+ -- Get source spans from names, discard unhelpful spans, remove
+ -- duplicates and sort.
+ sortedNameSpans :: [Name] -> [RealSrcSpan]
+ sortedNameSpans ls = nubSort (mapMaybe (realSpan . nameSrcSpan) ls)
+ isBetween target before after = before <= target && target <= after
+ ann = snd . pm_annotations . tm_parsed_module
+ annotationFileName :: TypecheckedModule -> Maybe FastString
+ annotationFileName = fmap srcSpanFile . listToMaybe . realSpans . ann
+ realSpans :: M.Map SrcSpan [Located a] -> [RealSrcSpan]
+ realSpans =
+ mapMaybe (realSpan . getLoc)
+ . join
+ . M.elems
+
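+-- A worked example of the heuristic above (illustrative only, not part of the
+-- original patch). Given a module containing
+--
+-- > bar :: Int
+-- > bar = 2
+-- >
+-- > -- | Adds one to its argument.
+-- > inc :: Int -> Int
+-- > inc x = x + 1
+--
+-- asking for the documentation of 'inc' picks up the "-- | Adds one ..." comment,
+-- because that comment lies between the span of the previous binding ('bar')
+-- and the span of 'inc' itself.
+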
+-- | Extract the text of the comments that count as documentation headers.
+docHeaders :: [RealLocated AnnotationComment]
+ -> [T.Text]
+docHeaders = mapMaybe (\(L _ x) -> wrk x)
+ where
+ wrk = \case
+ -- When `Opt_Haddock` is enabled.
+ AnnDocCommentNext s -> Just $ T.pack s
+ -- When `Opt_KeepRawTokenStream` is enabled.
+ AnnLineComment s -> if "-- |" `isPrefixOf` s
+ then Just $ T.pack s
+ else Nothing
+ _ -> Nothing
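+
+-- For instance (illustrative only): with 'Opt_KeepRawTokenStream' enabled,
+-- the raw comment "-- | Frobnicates." is kept, while "-- internal note" is
+-- dropped, since only line comments starting with "-- |" count as
+-- documentation headers here.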
diff --git a/src/Development/IDE/Spans/Type.hs b/src/Development/IDE/Spans/Type.hs
new file mode 100644
index 0000000..5591c20
--- /dev/null
+++ b/src/Development/IDE/Spans/Type.hs
@@ -0,0 +1,61 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+-- ORIGINALLY COPIED FROM https://github.com/commercialhaskell/intero
+
+-- | Types used separately from vanilla GHCi.
+
+module Development.IDE.Spans.Type(
+ SpanInfo(..)
+ , SpanSource(..)
+ , getNameM
+ ) where
+
+import GHC
+import Control.DeepSeq
+import Data.Maybe
+import OccName
+
+
+-- | Information about a span of source code. Most of these fields are
+-- unboxed, but Haddock doesn't show that.
+data SpanInfo =
+ SpanInfo {spaninfoStartLine :: {-# UNPACK #-} !Int
+ -- ^ Start line of the span, zero-based.
+ ,spaninfoStartCol :: {-# UNPACK #-} !Int
+ -- ^ Start column of the span, zero-based.
+ ,spaninfoEndLine :: {-# UNPACK #-} !Int
+ -- ^ End line of the span (absolute), zero-based.
+ ,spaninfoEndCol :: {-# UNPACK #-} !Int
+ -- ^ End column of the span (absolute), zero-based.
+ ,spaninfoType :: !(Maybe Type)
+ -- ^ The type associated with the span, if known.
+ ,spaninfoSource :: !SpanSource
+ -- ^ The actual 'Name' associated with the span, if
+ -- any. This can be useful for accessing a variety of
+ -- information about the identifier such as module,
+ -- locality, definition location, etc.
+ }
+instance Show SpanInfo where
+ show (SpanInfo sl sc el ec t n) = show [show sl, show sc, show el, show ec, show $ isJust t, show n]
+
+instance NFData SpanInfo where
+ rnf = rwhnf
+
+
+-- We don't always get a 'Name' out, so sometimes manually annotating the source span is more appropriate.
+data SpanSource = Named Name
+ | SpanS SrcSpan
+ | NoSource
+ deriving (Eq)
+
+instance Show SpanSource where
+ show = \case
+ Named n -> "Named " ++ occNameString (occName n)
+ SpanS sp -> "Span " ++ show sp
+ NoSource -> "NoSource"
+
+getNameM :: SpanSource -> Maybe Name
+getNameM = \case
+ Named name -> Just name
+ _ -> Nothing
diff --git a/src/Development/IDE/Types/Diagnostics.hs b/src/Development/IDE/Types/Diagnostics.hs
new file mode 100644
index 0000000..ccfb0ca
--- /dev/null
+++ b/src/Development/IDE/Types/Diagnostics.hs
@@ -0,0 +1,112 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+
+module Development.IDE.Types.Diagnostics (
+ LSP.Diagnostic(..),
+ FileDiagnostic,
+ LSP.DiagnosticSeverity(..),
+ DiagnosticStore,
+ List(..),
+ ideErrorText,
+ showDiagnostics,
+ showDiagnosticsColored,
+ ) where
+
+import Data.Maybe as Maybe
+import qualified Data.Text as T
+import Data.Text.Prettyprint.Doc
+import qualified Language.Haskell.LSP.Types as LSP
+import Language.Haskell.LSP.Types as LSP (
+ DiagnosticSeverity(..)
+ , Diagnostic(..)
+ , List(..)
+ )
+import Language.Haskell.LSP.Diagnostics
+import Data.Text.Prettyprint.Doc.Render.Text
+import qualified Data.Text.Prettyprint.Doc.Render.Terminal as Terminal
+import Data.Text.Prettyprint.Doc.Render.Terminal (Color(..), color)
+
+import Development.IDE.Types.Location
+
+
+ideErrorText :: NormalizedFilePath -> T.Text -> FileDiagnostic
+ideErrorText fp msg = (fp, LSP.Diagnostic {
+ _range = noRange,
+ _severity = Just LSP.DsError,
+ _code = Nothing,
+ _source = Just "compiler",
+ _message = msg,
+ _relatedInformation = Nothing
+ })
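+
+-- Illustrative usage (not part of the original patch):
+--
+-- > ideErrorText (toNormalizedFilePath "src/Foo.hs") "could not compile"
+--
+-- yields a 'FileDiagnostic' located at 'noRange' with severity 'LSP.DsError',
+-- which 'showDiagnostics' then renders with its file, range, source and
+-- message fields.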
+
+
+-- | Human-readable diagnostics for a specific file.
+--
+-- This type packages a pretty-printed, human-readable error message
+-- along with the related source location, so that we can display the error
+-- at the right place either on the console or in the IDE.
+--
+type FileDiagnostic = (NormalizedFilePath, Diagnostic)
+
+prettyRange :: Range -> Doc Terminal.AnsiStyle
+prettyRange Range{..} = f _start <> "-" <> f _end
+ where f Position{..} = pretty (_line+1) <> colon <> pretty _character
+
+stringParagraphs :: T.Text -> Doc a
+stringParagraphs = vcat . map (fillSep . map pretty . T.words) . T.lines
+
+showDiagnostics :: [FileDiagnostic] -> T.Text
+showDiagnostics = srenderPlain . prettyDiagnostics
+
+showDiagnosticsColored :: [FileDiagnostic] -> T.Text
+showDiagnosticsColored = srenderColored . prettyDiagnostics
+
+
+prettyDiagnostics :: [FileDiagnostic] -> Doc Terminal.AnsiStyle
+prettyDiagnostics = vcat . map prettyDiagnostic
+
+prettyDiagnostic :: FileDiagnostic -> Doc Terminal.AnsiStyle
+prettyDiagnostic (fp, LSP.Diagnostic{..}) =
+ vcat
+ [ slabel_ "File: " $ pretty (fromNormalizedFilePath fp)
+ , slabel_ "Range: " $ prettyRange _range
+ , slabel_ "Source: " $ pretty _source
+ , slabel_ "Severity:" $ pretty $ show sev
+ , slabel_ "Message: "
+ $ case sev of
+ LSP.DsError -> annotate $ color Red
+ LSP.DsWarning -> annotate $ color Yellow
+ LSP.DsInfo -> annotate $ color Blue
+ LSP.DsHint -> annotate $ color Magenta
+ $ stringParagraphs _message
+ ]
+ where
+ sev = fromMaybe LSP.DsError _severity
+
+
+-- | Label a document.
+slabel_ :: String -> Doc a -> Doc a
+slabel_ t d = nest 2 $ sep [pretty t, d]
+
+-- | The layout options used when rendering to a terminal of the given width.
+cliLayout ::
+ Int
+ -- ^ Rendering width of the pretty printer.
+ -> LayoutOptions
+cliLayout renderWidth = LayoutOptions
+ { layoutPageWidth = AvailablePerLine renderWidth 0.9
+ }
+
+-- | Render without any syntax annotations
+srenderPlain :: Doc ann -> T.Text
+srenderPlain = renderStrict . layoutSmart (cliLayout defaultTermWidth)
+
+-- | Render a 'Doc' as an ANSI-colored string.
+srenderColored :: Doc Terminal.AnsiStyle -> T.Text
+srenderColored =
+ Terminal.renderStrict .
+ layoutSmart defaultLayoutOptions { layoutPageWidth = AvailablePerLine 100 1.0 }
+
+defaultTermWidth :: Int
+defaultTermWidth = 80
diff --git a/src/Development/IDE/Types/Location.hs b/src/Development/IDE/Types/Location.hs
new file mode 100644
index 0000000..a837199
--- /dev/null
+++ b/src/Development/IDE/Types/Location.hs
@@ -0,0 +1,100 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+
+-- | Types and functions for working with source code locations.
+module Development.IDE.Types.Location
+ ( Location(..)
+ , noFilePath
+ , noRange
+ , Position(..)
+ , showPosition
+ , Range(..)
+ , Uri(..)
+ , NormalizedUri
+ , LSP.toNormalizedUri
+ , LSP.fromNormalizedUri
+ , NormalizedFilePath
+ , fromUri
+ , toNormalizedFilePath
+ , fromNormalizedFilePath
+ , filePathToUri
+ , filePathToUri'
+ , uriToFilePath'
+ ) where
+
+import Language.Haskell.LSP.Types (Location(..), Range(..), Position(..))
+import Control.DeepSeq
+import Data.Maybe as Maybe
+import Data.Hashable
+import Data.String
+import System.FilePath
+import System.Info.Extra
+import qualified Language.Haskell.LSP.Types as LSP
+import Language.Haskell.LSP.Types as LSP (
+ filePathToUri
+ , NormalizedUri(..)
+ , Uri(..)
+ , toNormalizedUri
+ , fromNormalizedUri
+ )
+
+
+-- | Newtype wrapper around FilePath that always has normalized slashes.
+newtype NormalizedFilePath = NormalizedFilePath FilePath
+ deriving (Eq, Ord, Show, Hashable, NFData)
+
+instance IsString NormalizedFilePath where
+ fromString = toNormalizedFilePath
+
+toNormalizedFilePath :: FilePath -> NormalizedFilePath
+toNormalizedFilePath "" = NormalizedFilePath ""
+toNormalizedFilePath fp = NormalizedFilePath $ normalise' fp
+ where
+ -- We do not use System.FilePath’s normalise here since it
+ -- also normalises things like the case of the drive letter,
+ -- which NormalizedUri does not normalise, so we would get VFS lookup failures.
+ normalise' :: FilePath -> FilePath
+ normalise' = oneSlash . map (\c -> if isPathSeparator c then pathSeparator else c)
+
+ -- Allow a double slash as the very first element of the path (for UNC paths on Windows);
+ -- otherwise collapse adjacent slashes into one. Such duplicate slashes often arise from dodgy CPP.
+ oneSlash :: FilePath -> FilePath
+ oneSlash (x:xs) | isWindows = x : f xs
+ oneSlash xs = f xs
+
+ f (x:y:xs) | isPathSeparator x, isPathSeparator y = f (x:xs)
+ f (x:xs) = x : f xs
+ f [] = []
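+
+-- Some illustrative results of the normalisation above (not part of the
+-- original patch; the behaviour depends on the platform via 'pathSeparator'
+-- and 'isWindows'):
+--
+-- > "src//Foo.hs" normalises to "src/Foo.hs" on POSIX
+-- > "src/Foo.hs" normalises to "src\Foo.hs" on Windows
+-- > "\\server\share" keeps its leading double separator on Windows (UNC path)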
+
+
+fromNormalizedFilePath :: NormalizedFilePath -> FilePath
+fromNormalizedFilePath (NormalizedFilePath fp) = fp
+
+-- | We use an empty string as a filepath when we don’t have a file.
+-- However, haskell-lsp doesn’t support that in uriToFilePath, and since an
+-- empty string is not a valid filepath it does not make sense to upstream a fix.
+-- So we have our own wrapper here that supports empty filepaths.
+uriToFilePath' :: Uri -> Maybe FilePath
+uriToFilePath' uri
+ | uri == filePathToUri "" = Just ""
+ | otherwise = LSP.uriToFilePath uri
+
+filePathToUri' :: NormalizedFilePath -> NormalizedUri
+filePathToUri' = toNormalizedUri . filePathToUri . fromNormalizedFilePath
+
+
+fromUri :: LSP.NormalizedUri -> NormalizedFilePath
+fromUri = toNormalizedFilePath . fromMaybe noFilePath . uriToFilePath' . fromNormalizedUri
+
+
+noFilePath :: FilePath
+noFilePath = "<unknown>"
+
+-- | A dummy range to use when the real range is unknown.
+noRange :: Range
+noRange = Range (Position 0 0) (Position 100000 0)
+
+
+showPosition :: Position -> String
+showPosition Position{..} = show (_line + 1) ++ ":" ++ show (_character + 1)
diff --git a/src/Development/IDE/Types/Logger.hs b/src/Development/IDE/Types/Logger.hs
new file mode 100644
index 0000000..a4e5ec3
--- /dev/null
+++ b/src/Development/IDE/Types/Logger.hs
@@ -0,0 +1,50 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE RankNTypes #-}
+-- | This is a compatibility module that abstracts over the
+-- concrete choice of logging framework so users can plug in whatever
+-- framework they want to.
+module Development.IDE.Types.Logger
+ ( Priority(..)
+ , Logger(..)
+ , logError, logWarning, logInfo, logDebug
+ , noLogging
+ ) where
+
+import qualified Data.Text as T
+
+
+data Priority
+-- Don't change the ordering of this type or you will mess up the Ord
+-- instance
+ = Debug -- ^ Verbose debug logging.
+ | Info -- ^ Useful information in case an error has to be understood.
+ | Warning
+ -- ^ These messages should not occur under expected usage and
+ -- should be investigated.
+ | Error -- ^ Such log messages must never occur in expected usage.
+ deriving (Eq, Show, Ord, Enum, Bounded)
+
+
+-- | Note that this is logging actions _of the program_, not of the user.
+-- You shouldn't call warning/error if the user has caused an error, only
+-- if our code has gone wrong and is itself erroneous (e.g. we threw an exception).
+data Logger = Logger {logPriority :: Priority -> T.Text -> IO ()}
+
+
+logError :: Logger -> T.Text -> IO ()
+logError x = logPriority x Error
+
+logWarning :: Logger -> T.Text -> IO ()
+logWarning x = logPriority x Warning
+
+logInfo :: Logger -> T.Text -> IO ()
+logInfo x = logPriority x Info
+
+logDebug :: Logger -> T.Text -> IO ()
+logDebug x = logPriority x Debug
+
+
+noLogging :: Logger
+noLogging = Logger $ \_ _ -> return ()
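+
+-- A minimal example of plugging in a concrete logger (illustrative only, not
+-- part of the original patch). It assumes @System.IO (hPutStrLn, stderr)@ and
+-- @Control.Monad (when)@ are imported, which this module does not currently do:
+--
+-- > stderrLogger :: Priority -> Logger
+-- > stderrLogger minPriority = Logger $ \p msg ->
+-- >     when (p >= minPriority) $
+-- >         hPutStrLn stderr ("[" ++ show p ++ "] " ++ T.unpack msg)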
diff --git a/src/Development/IDE/Types/Options.hs b/src/Development/IDE/Types/Options.hs
new file mode 100644
index 0000000..b2e39eb
--- /dev/null
+++ b/src/Development/IDE/Types/Options.hs
@@ -0,0 +1,82 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE RankNTypes #-}
+
+-- | Options
+module Development.IDE.Types.Options
+ ( IdeOptions(..)
+ , IdeReportProgress(..)
+ , clientSupportsProgress
+ , IdePkgLocationOptions(..)
+ , defaultIdeOptions
+ ) where
+
+import Data.Maybe
+import Development.Shake
+import Development.IDE.GHC.Util
+import GHC hiding (parseModule, typecheckModule)
+import GhcPlugins as GHC hiding (fst3, (<>))
+import qualified Language.Haskell.LSP.Types.Capabilities as LSP
+
+data IdeOptions = IdeOptions
+ { optPreprocessor :: GHC.ParsedSource -> ([(GHC.SrcSpan, String)], GHC.ParsedSource)
+ -- ^ Preprocessor to run over all parsed source trees, generating a list of warnings
+ -- along with a new parse tree.
+ , optGhcSession :: IO (FilePath -> Action HscEnvEq)
+ -- ^ Set up a GHC session for a given file, e.g. @Foo.hs@.
+ -- The 'IO' will be called once, then the resulting function will be applied once per file.
+ -- It is desirable that many files get the same 'HscEnvEq', so that more IDE features work.
+ -- You should not use 'newCacheIO' to get that caching, because of
+ -- https://github.com/ndmitchell/shake/issues/725.
+ , optPkgLocationOpts :: IdePkgLocationOptions
+ -- ^ How to locate source and @.hie@ files given a module name.
+ , optExtensions :: [String]
+ -- ^ File extensions to search for code, defaults to Haskell sources (including @.hs@)
+
+ , optThreads :: Int
+ -- ^ Number of threads to use. Use 0 for the number of threads on the machine.
+ , optShakeProfiling :: Maybe FilePath
+ -- ^ Set to 'Just' to create a directory of profiling reports.
+ , optReportProgress :: IdeReportProgress
+ -- ^ Whether to report progress during long operations.
+ , optLanguageSyntax :: String
+ -- ^ the language to use in markdown code fences (e.g. @```haskell@)
+ , optNewColonConvention :: Bool
+ -- ^ whether to use new colon convention
+ }
+
+newtype IdeReportProgress = IdeReportProgress Bool
+
+clientSupportsProgress :: LSP.ClientCapabilities -> IdeReportProgress
+clientSupportsProgress caps = IdeReportProgress $ fromMaybe False $
+ LSP._progress =<< LSP._window (caps :: LSP.ClientCapabilities)
+
+defaultIdeOptions :: IO (FilePath -> Action HscEnvEq) -> IdeOptions
+defaultIdeOptions session = IdeOptions
+ {optPreprocessor = (,) []
+ ,optGhcSession = session
+ ,optExtensions = ["hs", "lhs"]
+ ,optPkgLocationOpts = defaultIdePkgLocationOptions
+ ,optThreads = 0
+ ,optShakeProfiling = Nothing
+ ,optReportProgress = IdeReportProgress False
+ ,optLanguageSyntax = "haskell"
+ ,optNewColonConvention = False
+ }
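+
+-- An illustrative way to customise these defaults (not part of the original
+-- patch), assuming the embedding application supplies a
+-- @loadSession :: IO (FilePath -> Action HscEnvEq)@:
+--
+-- > myOptions :: IO (FilePath -> Action HscEnvEq) -> IdeOptions
+-- > myOptions loadSession = (defaultIdeOptions loadSession)
+-- >     { optThreads = 4
+-- >     , optShakeProfiling = Just "shake-profile"
+-- >     }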
+
+
+-- | The set of options used to locate files belonging to external packages.
+data IdePkgLocationOptions = IdePkgLocationOptions
+ { optLocateHieFile :: PackageConfig -> Module -> IO (Maybe FilePath)
+ -- ^ Locate the HIE file for the given module. The PackageConfig can be
+ -- used to look up settings like importDirs.
+ , optLocateSrcFile :: PackageConfig -> Module -> IO (Maybe FilePath)
+ -- ^ Locate the source file for the given module. The PackageConfig can be
+ -- used to look up settings like importDirs. For DAML, we place them in the package DB.
+ -- For cabal this could point somewhere in ~/.cabal/packages.
+ }
+
+defaultIdePkgLocationOptions :: IdePkgLocationOptions
+defaultIdePkgLocationOptions = IdePkgLocationOptions f f
+ where f _ _ = return Nothing
diff --git a/test/cabal/Development/IDE/Test/Runfiles.hs b/test/cabal/Development/IDE/Test/Runfiles.hs
new file mode 100644
index 0000000..ef9d176
--- /dev/null
+++ b/test/cabal/Development/IDE/Test/Runfiles.hs
@@ -0,0 +1,12 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Test.Runfiles
+ ( locateGhcideExecutable
+ ) where
+
+import System.FilePath (FilePath)
+
+
+locateGhcideExecutable :: IO FilePath
+locateGhcideExecutable = pure "ghcide"
diff --git a/test/exe/Main.hs b/test/exe/Main.hs
new file mode 100644
index 0000000..fd0edb3
--- /dev/null
+++ b/test/exe/Main.hs
@@ -0,0 +1,214 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+{-# LANGUAGE DuplicateRecordFields #-}
+
+module Main (main) where
+
+import Control.Monad (void)
+import qualified Data.Text as T
+import Development.IDE.Test
+import Development.IDE.Test.Runfiles
+import Language.Haskell.LSP.Test
+import Language.Haskell.LSP.Types
+import Language.Haskell.LSP.Types.Capabilities
+import System.Environment.Blank (setEnv)
+import System.IO.Extra
+import Test.Tasty
+import Test.Tasty.HUnit
+
+
+main :: IO ()
+main = defaultMain $ testGroup "HIE"
+ [ testSession "open close" $ do
+ doc <- openDoc' "Testing.hs" "haskell" ""
+ void (message :: Session ProgressStartNotification)
+ closeDoc doc
+ void (message :: Session ProgressDoneNotification)
+ , diagnosticTests
+ ]
+
+
+diagnosticTests :: TestTree
+diagnosticTests = testGroup "diagnostics"
+ [ testSession "fix syntax error" $ do
+ let content = T.unlines [ "module Testing wher" ]
+ doc <- openDoc' "Testing.hs" "haskell" content
+ expectDiagnostics [("Testing.hs", [(DsError, (0, 15), "parse error")])]
+ let change = TextDocumentContentChangeEvent
+ { _range = Just (Range (Position 0 15) (Position 0 19))
+ , _rangeLength = Nothing
+ , _text = "where"
+ }
+ changeDoc doc [change]
+ expectDiagnostics [("Testing.hs", [])]
+ , testSession "introduce syntax error" $ do
+ let content = T.unlines [ "module Testing where" ]
+ doc <- openDoc' "Testing.hs" "haskell" content
+ void (message :: Session ProgressStartNotification)
+ let change = TextDocumentContentChangeEvent
+ { _range = Just (Range (Position 0 15) (Position 0 18))
+ , _rangeLength = Nothing
+ , _text = "wher"
+ }
+ changeDoc doc [change]
+ expectDiagnostics [("Testing.hs", [(DsError, (0, 15), "parse error")])]
+ , testSession "variable not in scope" $ do
+ let content = T.unlines
+ [ "module Testing where"
+ , "foo :: Int -> Int -> Int"
+ , "foo a b = a + ab"
+ , "bar :: Int -> Int -> Int"
+ , "bar a b = cd + b"
+ ]
+ _ <- openDoc' "Testing.hs" "haskell" content
+ expectDiagnostics
+ [ ( "Testing.hs"
+ , [ (DsError, (2, 14), "Variable not in scope: ab")
+ , (DsError, (4, 10), "Variable not in scope: cd")
+ ]
+ )
+ ]
+ , testSession "type error" $ do
+ let content = T.unlines
+ [ "module Testing where"
+ , "foo :: Int -> String -> Int"
+ , "foo a b = a + b"
+ ]
+ _ <- openDoc' "Testing.hs" "haskell" content
+ expectDiagnostics
+ [ ( "Testing.hs"
+ , [(DsError, (2, 14), "Couldn't match type '[Char]' with 'Int'")]
+ )
+ ]
+ , testSession "remove required module" $ do
+ let contentA = T.unlines [ "module ModuleA where" ]
+ docA <- openDoc' "ModuleA.hs" "haskell" contentA
+ let contentB = T.unlines
+ [ "module ModuleB where"
+ , "import ModuleA"
+ ]
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ let change = TextDocumentContentChangeEvent
+ { _range = Just (Range (Position 0 0) (Position 0 20))
+ , _rangeLength = Nothing
+ , _text = ""
+ }
+ changeDoc docA [change]
+ expectDiagnostics [("ModuleB.hs", [(DsError, (1, 0), "Could not find module")])]
+ , testSession "add missing module" $ do
+ let contentB = T.unlines
+ [ "module ModuleB where"
+ , "import ModuleA"
+ ]
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ expectDiagnostics [("ModuleB.hs", [(DsError, (1, 7), "Could not find module")])]
+ let contentA = T.unlines [ "module ModuleA where" ]
+ _ <- openDoc' "ModuleA.hs" "haskell" contentA
+ expectDiagnostics [("ModuleB.hs", [])]
+ , testSession "cyclic module dependency" $ do
+ let contentA = T.unlines
+ [ "module ModuleA where"
+ , "import ModuleB"
+ ]
+ let contentB = T.unlines
+ [ "module ModuleB where"
+ , "import ModuleA"
+ ]
+ _ <- openDoc' "ModuleA.hs" "haskell" contentA
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ expectDiagnostics
+ [ ( "ModuleA.hs"
+ , [(DsError, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")]
+ )
+ , ( "ModuleB.hs"
+ , [(DsError, (1, 7), "Cyclic module dependency between ModuleA, ModuleB")]
+ )
+ ]
+ , testSession "cyclic module dependency with hs-boot" $ do
+ let contentA = T.unlines
+ [ "module ModuleA where"
+ , "import {-# SOURCE #-} ModuleB"
+ ]
+ let contentB = T.unlines
+ [ "module ModuleB where"
+ , "import ModuleA"
+ ]
+ let contentBboot = T.unlines
+ [ "module ModuleB where"
+ ]
+ _ <- openDoc' "ModuleA.hs" "haskell" contentA
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ _ <- openDoc' "ModuleB.hs-boot" "haskell" contentBboot
+ expectDiagnostics []
+ , testSession "correct reference used with hs-boot" $ do
+ let contentB = T.unlines
+ [ "module ModuleB where"
+ , "import {-# SOURCE #-} ModuleA"
+ ]
+ let contentA = T.unlines
+ [ "module ModuleA where"
+ , "import ModuleB"
+ , "x = 5"
+ ]
+ let contentAboot = T.unlines
+ [ "module ModuleA where"
+ ]
+ let contentC = T.unlines
+ [ "module ModuleC where"
+ , "import ModuleA"
+ -- this reference will fail if it gets incorrectly
+ -- resolved to the hs-boot file
+ , "y = x"
+ ]
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ _ <- openDoc' "ModuleA.hs" "haskell" contentA
+ _ <- openDoc' "ModuleA.hs-boot" "haskell" contentAboot
+ _ <- openDoc' "ModuleC.hs" "haskell" contentC
+ expectDiagnostics []
+ , testSession "redundant import" $ do
+ let contentA = T.unlines ["module ModuleA where"]
+ let contentB = T.unlines
+ [ "{-# OPTIONS_GHC -Wunused-imports #-}"
+ , "module ModuleB where"
+ , "import ModuleA"
+ ]
+ _ <- openDoc' "ModuleA.hs" "haskell" contentA
+ _ <- openDoc' "ModuleB.hs" "haskell" contentB
+ expectDiagnostics
+ [ ( "ModuleB.hs"
+ , [(DsWarning, (2, 0), "The import of 'ModuleA' is redundant")]
+ )
+ ]
+ ]
+
+
+----------------------------------------------------------------------
+-- Utils
+
+
+testSession :: String -> Session () -> TestTree
+testSession name =
+ testCase name . run .
+ -- Check that any diagnostics produced were already consumed by the test case.
+ --
+ -- If in future we add test cases where we don't care about checking the diagnostics,
+ -- this could move elsewhere.
+ --
+ -- Experimentally, 0.5s seems to be long enough to wait for any final diagnostics to appear.
+ ( >> expectNoMoreDiagnostics 0.5)
+
+
+run :: Session a -> IO a
+run s = withTempDir $ \dir -> do
+ ghcideExe <- locateGhcideExecutable
+ let cmd = unwords [ghcideExe, "--lsp", "--cwd", dir]
+ -- HIE calls getXdgDirectory, which assumes that HOME is set.
+ -- The 'False' below means HOME is only set if it wasn't already set.
+ setEnv "HOME" "/homeless-shelter" False
+ runSessionWithConfig conf cmd fullCaps { _window = Just $ WindowClientCapabilities $ Just True } dir s
+ where
+ conf = defaultConfig
+ -- If you uncomment this you can see all messages
+ -- which can be quite useful for debugging.
+ -- { logMessages = True, logColor = False, logStdErr = True }
diff --git a/test/src/Development/IDE/Test.hs b/test/src/Development/IDE/Test.hs
new file mode 100644
index 0000000..8af6ddf
--- /dev/null
+++ b/test/src/Development/IDE/Test.hs
@@ -0,0 +1,105 @@
+-- Copyright (c) 2019 The DAML Authors. All rights reserved.
+-- SPDX-License-Identifier: Apache-2.0
+
+module Development.IDE.Test
+ ( Cursor
+ , cursorPosition
+ , requireDiagnostic
+ , expectDiagnostics
+ , expectNoMoreDiagnostics
+ ) where
+
+import Control.Applicative.Combinators
+import Control.Lens hiding (List)
+import Control.Monad
+import Control.Monad.IO.Class
+import qualified Data.Map.Strict as Map
+import qualified Data.Text as T
+import Language.Haskell.LSP.Test hiding (message, openDoc')
+import qualified Language.Haskell.LSP.Test as LspTest
+import Language.Haskell.LSP.Types
+import Language.Haskell.LSP.Types.Lens as Lsp
+import System.Time.Extra
+import Test.Tasty.HUnit
+
+
+-- | (0-based line number, 0-based column number)
+type Cursor = (Int, Int)
+
+cursorPosition :: Cursor -> Position
+cursorPosition (line, col) = Position line col
+
+requireDiagnostic :: List Diagnostic -> (DiagnosticSeverity, Cursor, T.Text) -> Assertion
+requireDiagnostic actuals expected@(severity, cursor, expectedMsg) = do
+ unless (any match actuals) $
+ assertFailure $
+ "Could not find " <> show expected <>
+ " in " <> show actuals
+ where
+ match :: Diagnostic -> Bool
+ match d =
+ Just severity == _severity d
+ && cursorPosition cursor == d ^. range . start
+ && standardizeQuotes (T.toLower expectedMsg) `T.isInfixOf`
+ standardizeQuotes (T.toLower $ d ^. message)
+
+-- | Wait for @timeout@ seconds and report an assertion failure
+-- if any diagnostic messages arrive in that period.
+expectNoMoreDiagnostics :: Seconds -> Session ()
+expectNoMoreDiagnostics timeout = do
+ -- Give any further diagnostic messages time to arrive.
+ liftIO $ sleep timeout
+ -- Send a dummy message to provoke a response from the server.
+ -- This guarantees that we have at least one message to
+-- process, so 'message' won't block or time out.
+ void $ sendRequest (CustomClientMethod "non-existent-method") ()
+ handleMessages
+ where
+ handleMessages = handleDiagnostic <|> handleCustomMethodResponse <|> ignoreOthers
+ handleDiagnostic = do
+ diagsNot <- LspTest.message :: Session PublishDiagnosticsNotification
+ let fileUri = diagsNot ^. params . uri
+ actual = diagsNot ^. params . diagnostics
+ liftIO $ assertFailure $
+ "Got unexpected diagnostics for " <> show fileUri <>
+ " got " <> show actual
+ handleCustomMethodResponse =
+ -- the CustomClientMethod triggers a log message about ignoring it
+ -- handle that and then exit
+ void (LspTest.message :: Session LogMessageNotification)
+ ignoreOthers = void anyMessage >> handleMessages
+
+expectDiagnostics :: [(FilePath, [(DiagnosticSeverity, Cursor, T.Text)])] -> Session ()
+expectDiagnostics expected = do
+ expected' <- Map.fromListWith (<>) <$> traverseOf (traverse . _1) (fmap toNormalizedUri . getDocUri) expected
+ go expected'
+ where
+ go m
+ | Map.null m = pure ()
+ | otherwise = do
+ diagsNot <- skipManyTill anyMessage LspTest.message :: Session PublishDiagnosticsNotification
+ let fileUri = diagsNot ^. params . uri
+ case Map.lookup (diagsNot ^. params . uri . to toNormalizedUri) m of
+ Nothing -> do
+ let actual = diagsNot ^. params . diagnostics
+ liftIO $ assertFailure $
+ "Got diagnostics for " <> show fileUri <>
+ " but only expected diagnostics for " <> show (Map.keys m) <>
+ " got " <> show actual
+ Just expected -> do
+ let actual = diagsNot ^. params . diagnostics
+ liftIO $ mapM_ (requireDiagnostic actual) expected
+ liftIO $ unless (length expected == length actual) $
+ assertFailure $
+ "Incorrect number of diagnostics for " <> show fileUri <>
+ ", expected " <> show expected <>
+ " but got " <> show actual
+ go $ Map.delete (diagsNot ^. params . uri . to toNormalizedUri) m
+
+standardizeQuotes :: T.Text -> T.Text
+standardizeQuotes msg = let
+ repl '‘' = '\''
+ repl '’' = '\''
+ repl '`' = '\''
+ repl c = c
+ in T.map repl msg
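+
+-- For example (illustrative): standardizeQuotes "Couldn't match ‘Int’" is
+-- "Couldn't match 'Int'", so GHC's curly quotes never cause a mismatch
+-- against the plain quotes used in the expected messages.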