author     borsboom <>                     2017-12-07 04:38:00 (GMT)
committer  hdiff <hdiff@hdiff.luite.com>   2017-12-07 04:38:00 (GMT)
commit     d72d72546aede58850ad1a9caf908d16faa0d20e
tree       90b73b9ea9dab1d85cd6ea0912e001bde7c2763c
parent     a2780d42db29c696cb2f56b39ad3f8a546f3296b
version 1.6.1 (HEAD, 1.6.1, master)
-rw-r--r--  CONTRIBUTING.md | 13
-rw-r--r--  ChangeLog.md | 215
-rw-r--r--  Setup.hs | 21
-rw-r--r--  doc/CONTRIBUTING.md | 13
-rw-r--r--  doc/ChangeLog.md | 215
-rw-r--r--  doc/GUIDE.md | 127
-rw-r--r--  doc/MAINTAINER_GUIDE.md | 2
-rw-r--r--  doc/README.md | 11
-rw-r--r--  doc/SIGNING_KEY.md | 2
-rw-r--r--  doc/architecture.md | 2
-rw-r--r--  doc/build_command.md | 10
-rw-r--r--  doc/coverage.md | 2
-rw-r--r--  doc/custom_snapshot.md | 124
-rw-r--r--  doc/dependency_visualization.md | 2
-rw-r--r--  doc/docker_integration.md | 2
-rw-r--r--  doc/faq.md | 15
-rw-r--r--  doc/ghci.md | 21
-rw-r--r--  doc/ghcjs.md | 2
-rw-r--r--  doc/install_and_upgrade.md | 2
-rw-r--r--  doc/nix_integration.md | 2
-rw-r--r--  doc/nonstandard_project_init.md | 6
-rw-r--r--  doc/shell_autocompletion.md | 2
-rw-r--r--  doc/travis_ci.md | 19
-rw-r--r--  doc/yaml_configuration.md | 463
-rw-r--r--  package.yaml | 340
-rw-r--r--  src/Control/Concurrent/Execute.hs | 12
-rw-r--r--  src/Data/Aeson/Extended.hs | 26
-rw-r--r--  src/Data/Attoparsec/Args.hs | 10
-rw-r--r--  src/Data/Attoparsec/Combinators.hs | 4
-rw-r--r--  src/Data/Attoparsec/Interpreter.hs | 14
-rw-r--r--  src/Data/IORef/RunOnce.hs | 18
-rw-r--r--  src/Data/Maybe/Extra.hs | 25
-rw-r--r--  src/Data/Monoid/Extra.hs | 12
-rw-r--r--  src/Data/Store/VersionTagged.hs | 40
-rw-r--r--  src/Data/Text/Extra.hs | 9
-rw-r--r--  src/Distribution/Version/Extra.hs | 30
-rw-r--r--  src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs | 1
-rw-r--r--  src/Network/HTTP/Download.hs | 37
-rw-r--r--  src/Network/HTTP/Download/Verified.hs | 76
-rw-r--r--  src/Options/Applicative/Args.hs | 17
-rw-r--r--  src/Options/Applicative/Builder/Extra.hs | 16
-rw-r--r--  src/Options/Applicative/Complicated.hs | 20
-rw-r--r--  src/Path/CheckInstall.hs | 61
-rw-r--r--  src/Path/Extra.hs | 21
-rw-r--r--  src/Path/Find.hs | 7
-rw-r--r--  src/Stack/Build.hs | 120
-rw-r--r--  src/Stack/Build/Cache.hs | 192
-rw-r--r--  src/Stack/Build/ConstructPlan.hs | 367
-rw-r--r--  src/Stack/Build/Execute.hs | 672
-rw-r--r--  src/Stack/Build/Haddock.hs | 57
-rw-r--r--  src/Stack/Build/Installed.hs | 42
-rw-r--r--  src/Stack/Build/Source.hs | 470
-rw-r--r--  src/Stack/Build/Target.hs | 678
-rw-r--r--  src/Stack/BuildPlan.hs | 726
-rw-r--r--  src/Stack/Clean.hs | 46
-rw-r--r--  src/Stack/Config.hs | 525
-rw-r--r--  src/Stack/Config/Build.hs | 8
-rw-r--r--  src/Stack/Config/Docker.hs | 17
-rw-r--r--  src/Stack/Config/Nix.hs | 23
-rw-r--r--  src/Stack/Config/Urls.hs | 3
-rw-r--r--  src/Stack/ConfigCmd.hs | 61
-rw-r--r--  src/Stack/Constants.hs | 176
-rw-r--r--  src/Stack/Constants.hs-boot | 3
-rw-r--r--  src/Stack/Constants/Config.hs | 147
-rw-r--r--  src/Stack/Coverage.hs | 147
-rw-r--r--  src/Stack/Docker.hs | 142
-rw-r--r--  src/Stack/Docker/GlobalDB.hs | 15
-rw-r--r--  src/Stack/Dot.hs | 63
-rw-r--r--  src/Stack/Exec.hs | 30
-rw-r--r--  src/Stack/Fetch.hs | 481
-rw-r--r--  src/Stack/FileWatch.hs | 12
-rw-r--r--  src/Stack/GhcPkg.hs | 60
-rw-r--r--  src/Stack/Ghci.hs | 470
-rw-r--r--  src/Stack/Ghci/Script.hs | 26
-rw-r--r--  src/Stack/Hoogle.hs | 129
-rw-r--r--  src/Stack/IDE.hs | 27
-rw-r--r--  src/Stack/Image.hs | 67
-rw-r--r--  src/Stack/Init.hs | 230
-rw-r--r--  src/Stack/New.hs | 55
-rw-r--r--  src/Stack/Nix.hs | 42
-rw-r--r--  src/Stack/Options/BenchParser.hs | 3
-rw-r--r--  src/Stack/Options/BuildMonoidParser.hs | 24
-rw-r--r--  src/Stack/Options/BuildParser.hs | 4
-rw-r--r--  src/Stack/Options/CleanParser.hs | 3
-rw-r--r--  src/Stack/Options/Completion.hs | 44
-rw-r--r--  src/Stack/Options/ConfigParser.hs | 13
-rw-r--r--  src/Stack/Options/DockerParser.hs | 5
-rw-r--r--  src/Stack/Options/DotParser.hs | 3
-rw-r--r--  src/Stack/Options/ExecParser.hs | 22
-rw-r--r--  src/Stack/Options/GhcBuildParser.hs | 3
-rw-r--r--  src/Stack/Options/GhcVariantParser.hs | 3
-rw-r--r--  src/Stack/Options/GhciParser.hs | 6
-rw-r--r--  src/Stack/Options/GlobalParser.hs | 11
-rw-r--r--  src/Stack/Options/HaddockParser.hs | 4
-rw-r--r--  src/Stack/Options/HpcReportParser.hs | 3
-rw-r--r--  src/Stack/Options/LogLevelParser.hs | 4
-rw-r--r--  src/Stack/Options/NewParser.hs | 3
-rw-r--r--  src/Stack/Options/NixParser.hs | 3
-rw-r--r--  src/Stack/Options/PackageParser.hs | 3
-rw-r--r--  src/Stack/Options/ResolverParser.hs | 8
-rw-r--r--  src/Stack/Options/SDistParser.hs | 3
-rw-r--r--  src/Stack/Options/ScriptParser.hs | 12
-rw-r--r--  src/Stack/Options/SolverParser.hs | 2
-rw-r--r--  src/Stack/Options/TestParser.hs | 4
-rw-r--r--  src/Stack/Options/Utils.hs | 3
-rw-r--r--  src/Stack/Package.hs | 689
-rw-r--r--  src/Stack/PackageDump.hs | 37
-rw-r--r--  src/Stack/PackageIndex.hs | 332
-rw-r--r--  src/Stack/PackageLocation.hs | 289
-rw-r--r--  src/Stack/Path.hs | 25
-rw-r--r--  src/Stack/Prelude.hs | 216
-rw-r--r--  src/Stack/PrettyPrint.hs | 251
-rw-r--r--  src/Stack/Runners.hs | 124
-rw-r--r--  src/Stack/SDist.hs | 235
-rw-r--r--  src/Stack/Script.hs | 121
-rw-r--r--  src/Stack/Setup.hs | 676
-rw-r--r--  src/Stack/Setup/Installed.hs | 45
-rw-r--r--  src/Stack/SetupCmd.hs | 27
-rw-r--r--  src/Stack/Sig.hs | 1
-rw-r--r--  src/Stack/Sig/GPG.hs | 17
-rw-r--r--  src/Stack/Sig/Sign.hs | 25
-rw-r--r--  src/Stack/Snapshot.hs | 784
-rw-r--r--  src/Stack/Solver.hs | 323
-rw-r--r--  src/Stack/StaticBytes.hs | 235
-rw-r--r--  src/Stack/Types/Build.hs | 144
-rw-r--r--  src/Stack/Types/BuildPlan.hs | 747
-rw-r--r--  src/Stack/Types/Compiler.hs | 47
-rw-r--r--  src/Stack/Types/CompilerBuild.hs | 3
-rw-r--r--  src/Stack/Types/Config.hs | 680
-rw-r--r--  src/Stack/Types/Config.hs-boot | 37
-rw-r--r--  src/Stack/Types/Config/Build.hs | 24
-rw-r--r--  src/Stack/Types/Docker.hs | 17
-rw-r--r--  src/Stack/Types/FlagName.hs | 34
-rw-r--r--  src/Stack/Types/GhcPkgId.hs | 31
-rw-r--r--  src/Stack/Types/Image.hs | 8
-rw-r--r--  src/Stack/Types/Internal.hs | 80
-rw-r--r--  src/Stack/Types/Nix.hs | 7
-rw-r--r--  src/Stack/Types/Package.hs | 113
-rw-r--r--  src/Stack/Types/PackageDump.hs | 6
-rw-r--r--  src/Stack/Types/PackageIdentifier.hs | 203
-rw-r--r--  src/Stack/Types/PackageIndex.hs | 75
-rw-r--r--  src/Stack/Types/PackageName.hs | 21
-rw-r--r--  src/Stack/Types/Resolver.hs | 289
-rw-r--r--  src/Stack/Types/Resolver.hs-boot | 21
-rw-r--r--  src/Stack/Types/Runner.hs | 307
-rw-r--r--  src/Stack/Types/Sig.hs | 11
-rw-r--r--  src/Stack/Types/StackT.hs | 337
-rw-r--r--  src/Stack/Types/StringError.hs | 20
-rw-r--r--  src/Stack/Types/TemplateName.hs | 13
-rw-r--r--  src/Stack/Types/Urls.hs | 7
-rw-r--r--  src/Stack/Types/Version.hs | 25
-rw-r--r--  src/Stack/Types/VersionIntervals.hs | 86
-rw-r--r--  src/Stack/Upgrade.hs | 138
-rw-r--r--  src/Stack/Upload.hs | 43
-rw-r--r--  src/System/Process/Log.hs | 74
-rw-r--r--  src/System/Process/PagerEditor.hs | 7
-rw-r--r--  src/System/Process/Read.hs | 53
-rw-r--r--  src/System/Process/Run.hs | 32
-rw-r--r--  src/Text/PrettyPrint/Leijen/Extended.hs | 33
-rw-r--r--  src/main/Main.hs | 239
-rw-r--r--  src/test/Network/HTTP/Download/VerifiedSpec.hs | 24
-rw-r--r--  src/test/Spec.hs | 1
-rw-r--r--  src/test/Stack/ArgsSpec.hs | 9
-rw-r--r--  src/test/Stack/Build/ExecuteSpec.hs | 2
-rw-r--r--  src/test/Stack/Build/TargetSpec.hs | 4
-rw-r--r--  src/test/Stack/BuildPlanSpec.hs | 118
-rw-r--r--  src/test/Stack/ConfigSpec.hs | 69
-rw-r--r--  src/test/Stack/DotSpec.hs | 11
-rw-r--r--  src/test/Stack/Ghci/ScriptSpec.hs | 10
-rw-r--r--  src/test/Stack/GhciSpec.hs | 17
-rw-r--r--  src/test/Stack/NixSpec.hs | 19
-rw-r--r--  src/test/Stack/PackageDumpSpec.hs | 19
-rw-r--r--  src/test/Stack/SolverSpec.hs | 2
-rw-r--r--  src/test/Stack/StaticBytesSpec.hs | 73
-rw-r--r--  src/test/Stack/StoreSpec.hs | 27
-rw-r--r--  src/test/Stack/Untar/UntarSpec.hs | 2
-rw-r--r--  src/unix/System/Terminal.hsc | 35
-rw-r--r--  src/windows/System/Terminal.hs | 6
-rw-r--r--  stack.cabal | 1049
-rw-r--r--  stack.yaml | 14
-rw-r--r--  test/integration/IntegrationSpec.hs | 19
-rw-r--r--  test/integration/lib/StackTest.hs | 61
182 files changed, 10728 insertions, 7941 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 6c1fb60..b59bd46 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -44,9 +44,9 @@ discuss the change before plowing into writing code.
If you'd like to help out but aren't sure what to work on, look for issues with
the
-[awaiting pr](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pr%22)
+[awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22)
label. Issues that are suitable for newcomers to the codebase have the
-[newcomer](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pr%22+label%3Anewcomer)
+[newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22+label%3a%22newcomer+friendly%22)
label. Best to post a comment to the issue before you start work, in case anyone
has already started.
@@ -64,8 +64,8 @@ quality tool.
Note that stack contributors need not dogmatically follow the suggested hints
but are encouraged to debate their usefulness. If you find a hint is not useful
and detracts from readability, consider marking it in the [configuration
-file](https://github.com/commercialhaskell/stack/blob/master/HLint.hs) to
-be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#ignoring-hints)
+file](https://github.com/commercialhaskell/stack/blob/master/.hlint.yaml) to
+be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#readme)
for configuration syntax.
Quoting [@mgsloan](https://github.com/commercialhaskell/stack/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Amgsloan):
@@ -85,8 +85,7 @@ stack install hlint
Once installed, you can check your changes with:
```
-hlint src/ test/ --cpp-simple --hint=HLint.hs
+hlint src/ test/ --cpp-simple
```
-Where `--cpp-simple` strips `#` lines and `--hint` explicitly specifies the
-configuration file.
+Where `--cpp-simple` strips `#` lines.
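The contributing guide above now points hint suppression at `.hlint.yaml` rather than the old `HLint.hs`. As a purely illustrative sketch of what such an entry can look like (the hint names below are placeholders, not rules the Stack project actually ignores), HLint's YAML configuration accepts `ignore` items:

```yaml
# Hypothetical .hlint.yaml entries; swap in the hint you actually want to silence.
- ignore: {name: "Use camelCase"}               # ignore this hint everywhere
- ignore: {name: "Redundant do", within: Main}  # ignore it only in the Main module
```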
diff --git a/ChangeLog.md b/ChangeLog.md
index d5ef7e5..fe58562 100644
--- a/ChangeLog.md
+++ b/ChangeLog.md
@@ -1,5 +1,214 @@
# Changelog
+## v1.6.1
+
+Major changes:
+
+* Complete overhaul of how snapshots are defined, the `packages` and
+ `extra-deps` fields, and a number of related items. For full
+ details, please see
+ [the writeup on these changes](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots). [PR #3249](https://github.com/commercialhaskell/stack/pull/3249),
+ see the PR description for a number of related issues.
+* Upgraded to version 2.0 of the Cabal library.
+
+Behavior changes:
+
+* The `--install-ghc` flag is now on by default. For example, if you
+ run `stack build` in a directory requiring a GHC that you do not
+ currently have, Stack will automatically download and install that
+ GHC. You can explicitly set `install-ghc: false` or pass the flag
+ `--no-install-ghc` to regain the previous behavior.
+* `stack ghci` no longer loads modules grouped by package. This is
+ always an improvement for plain ghci - it makes loading faster and
+ less noisy. For intero, this has the side-effect that it will no
+ longer load multiple packages that depend on TH loading relative
+ paths. TH relative paths will still work when loading a single
+ package into intero. See
+ [#3309](https://github.com/commercialhaskell/stack/issues/3309)
+* Setting GHC options for a package via `ghc-options:` in your
+ `stack.yaml` will promote it to a local package, providing for more
+ consistency with flags and better reproducibility. See:
+ [#849](https://github.com/commercialhaskell/stack/issues/849)
+* The `package-indices` setting with Hackage no longer works with the
+ `00-index.tar.gz` tarball, but must use the `01-index.tar.gz` file
+ to allow revised packages to be found.
+* Options passed via `--ghci-options` are now passed to the end of the
+ invocation of ghci, instead of the middle. This allows using `+RTS`
+ without an accompanying `-RTS`.
+* When auto-detecting `--ghc-build`, `tinfo6` is now preferred over
+ `standard` if both versions of libtinfo are installed
+* Addition of `stack build --copy-compiler-tool`, to allow tools like
+ intero to be installed globally for a particular compiler.
+ [#2643](https://github.com/commercialhaskell/stack/issues/2643)
+* Stack will ask before saving hackage credentials to file. This new
+ prompt can be avoided by using the `save-hackage-creds` setting. Please
+ see [#2159](https://github.com/commercialhaskell/stack/issues/2159).
+* The `GHCRTS` environment variable will no longer be passed through to
+ every program stack runs. Instead, it will only be passed through
+ commands like `exec`, `runghc`, `script`, `ghci`, etc.
+ See [#3444](https://github.com/commercialhaskell/stack/issues/3444).
+* `ghc-options:` for specific packages will now come after the options
+ specified for all packages / particular sets of packages. See
+ [#3573](https://github.com/commercialhaskell/stack/issues/3573).
+* The `pvp-bounds` feature is no longer fully functional, due to some
+ issues with the Cabal library's printer. See
+ [#3550](https://github.com/commercialhaskell/stack/issues/3550).
+
+Other enhancements:
+
+* The `with-hpack` configuration option specifies an Hpack executable to use
+ instead of the Hpack bundled with Stack. Please
+ see [#3179](https://github.com/commercialhaskell/stack/issues/3179).
+* It's now possible to skip tests and benchmarks using `--skip`
+ flag
+* `GitSHA1` is now `StaticSHA256` and is implemented using the `StaticSize 64 ByteString` for improved performance.
+ See [#3006](https://github.com/commercialhaskell/stack/issues/3006)
+* Dependencies via HTTP(S) archives have been generalized to allow
+ local file path archives, as well as to support setting a
+ cryptographic hash (SHA256) of the contents for better
+ reproducibility.
+* Allow specifying `--git-branch` when upgrading
+* When running `stack upgrade` from a file which is different from the
+ default executable path (e.g., on POSIX systems,
+ `~/.local/bin/stack`), it will now additionally copy the new
+ executable over the currently running `stack` executable. If
+ permission is denied (such as in `/usr/local/bin/stack`), the user
+ will be prompted to try again using `sudo`. This is intended to
+ assist with the user experience when the `PATH` environment variable
+ has not been properly configured, see
+ [#3232](https://github.com/commercialhaskell/stack/issues/3232).
+* `stack setup` for ghcjs will now install `alex` and `happy` if
+ they are not present. See
+ [#3109](https://github.com/commercialhaskell/stack/issues/3232).
+* Added `stack ghci --only-main` flag, to skip loading / importing
+ all but main modules. See the ghci documentation page
+ for further info.
+* Allow GHC's colored output to show through. GHC colors output
+ starting with version 8.2.1, for older GHC this does nothing.
+ Sometimes GHC's heuristics would work fine even before this change,
+ for example in `stack ghci`, but this overrides GHC's heuristics
+ when they're broken by our collecting and processing GHC's output.
+* Extended the `ghc-options` field to support `$locals`, `$targets`,
+ and `$everything`. See:
+ [#3329](https://github.com/commercialhaskell/stack/issues/3329)
+* Better error message for case that `stack ghci` file targets are
+ combined with invalid package targets. See:
+ [#3342](https://github.com/commercialhaskell/stack/issues/3342)
+* For profiling now uses `-fprof-auto -fprof-cafs` instead of
+ the deprecated `-auto-all -caf-all`. See:
+ [#3360](https://github.com/commercialhaskell/stack/issues/3360)
+* Better descriptions are now available for `stack upgrade --help`. See:
+ [#3070](https://github.com/commercialhaskell/stack/issues/3070)
+* When using Nix, nix-shell now depends always on gcc to prevent build errors
+ when using the FFI. As ghc depends on gcc anyway, this doesn't increase the
+ dependency footprint.
+* `--cwd DIR` can now be passed to `stack exec` in order to execute the
+ program in a different directory. See:
+ [#3264](https://github.com/commercialhaskell/stack/issues/3264)
+* Plan construction will detect if you add an executable-only package
+ as a library dependency, resulting in much clearer error
+ messages. See:
+ [#2195](https://github.com/commercialhaskell/stack/issues/2195).
+* Addition of `--ghc-options` to `stack script` to pass options directly
+ to GHC. See:
+ [#3454](https://github.com/commercialhaskell/stack/issues/3454)
+* Add hpack `package.yaml` to build Stack itself
+* Add `ignore-revision-mismatch` setting. See:
+ [#3520](https://github.com/commercialhaskell/stack/issues/3520).
+* Log when each individual test suite finishes. See:
+ [#3552](https://github.com/commercialhaskell/stack/issues/3552).
+* Avoid spurious rebuilds when using `--file-watch` by not watching files for
+ executable, test and benchmark components that aren't a target. See:
+ [#3483](https://github.com/commercialhaskell/stack/issues/3483).
+* Stack will now try to detect the width of the running terminal
+ (only on POSIX for the moment) and use that to better display
+ output messages. Work is ongoing, so some messages will not
+ be optimal yet. The terminal width can be overridden with the
+ new `--terminal-width` command-line option (this works even on
+ non-POSIX).
+* Passing non local packages as targets to `stack ghci` will now
+ cause them to be used as `-package` args along with package
+ hiding.
+* Detect when user changed .cabal file instead of package.yaml. This
+ was implemented upstream in hpack. See
+ [#3383](https://github.com/commercialhaskell/stack/issues/3383).
+* Automatically run `autoreconf -i` as necessary when a `configure`
+ script is missing. See
+ [#3534](https://github.com/commercialhaskell/stack/issues/3534)
+* GHC bindists can now be identified by their SHA256 checksum in addition to
+ their SHA1 checksum, allowing for more security in download.
+* For filesystem setup-info paths, it's no longer assumed that the
+ directory is writable, instead a temp dir is used. See
+ [#3188](https://github.com/commercialhaskell/stack/issues/3188).
+
+Bug fixes:
+
+* `stack hoogle` correctly generates Hoogle databases. See:
+ [#3362](https://github.com/commercialhaskell/stack/issues/3362)
+* `stack --docker-help` is now clearer about --docker implying
+ system-ghc: true, rather than both --docker and --no-docker.
+* `stack haddock` now includes package names for all modules in the
+ Haddock index page. See:
+ [#2886](https://github.com/commercialhaskell/stack/issues/2886)
+* Fixed an issue where Stack wouldn't detect missing Docker images
+ properly with newer Docker versions.
+ [#3171](https://github.com/commercialhaskell/stack/pull/3171)
+* Previously, cabal files with just test-suite could cause build to fail
+ ([#2862](https://github.com/commercialhaskell/stack/issues/2862))
+* If an invalid snapshot file has been detected (usually due to
+ mismatched hashes), Stack will delete the downloaded file and
+ recommend either retrying or filing an issue upstream. See
+ [#3319](https://github.com/commercialhaskell/stack/issues/3319).
+* Modified the flag parser within Stack to match the behavior of
+ Cabal's flag parser, which allows multiple sequential dashes. See
+ [#3345](https://github.com/commercialhaskell/stack/issues/3345)
+* Now clears the hackage index cache if it is older than the
+ downloaded index. Fixes potential issue if stack was interrupted when
+ updating index.
+ See [#3033](https://github.com/commercialhaskell/stack/issues/3033)
+* The Stack install script now respects the `-d` option.
+ See [#3366](https://github.com/commercialhaskell/stack/pull/3366).
+* `stack script` can now handle relative paths to source files.
+ See [#3372](https://github.com/commercialhaskell/stack/issues/3372).
+* Fixes explanation of why a target is needed by the build plan, when the
+ target is an extra dependency from the commandline.
+ See [#3378](https://github.com/commercialhaskell/stack/issues/3378).
+* Previously, if you delete a yaml file from ~/.stack/build-plan, it would
+ trust the etag and not re-download. Fixed in this version.
+* Invoking `stack --docker` in parallel now correctly locks the sqlite database.
+ See [#3400](https://github.com/commercialhaskell/stack/issues/3400).
+* docs.haskellstack.org RTD documentation search is replaced by the mkdocs
+ search. Please see
+ [#3376](https://github.com/commercialhaskell/stack/issues/3376).
+* `stack clean` now works with nix. See
+ [#3468](https://github.com/commercialhaskell/stack/issues/3376).
+* `stack build --only-dependencies` no longer builds local project packages
+ that are depended on. See
+ [#3476](https://github.com/commercialhaskell/stack/issues/3476).
+* Properly handle relative paths stored in the precompiled cache files. See
+ [#3431](https://github.com/commercialhaskell/stack/issues/3431).
+* In some cases, Cabal does not realize that it needs to reconfigure, and must
+ be told to do so automatically. This would manifest as a "shadowed
+ dependency" error message. We now force a reconfigure whenever a dependency is
+ built, even if the package ID remained the same. See
+ [#2781](https://github.com/commercialhaskell/stack/issues/2781).
+* When `--pvp-bounds` is enabled for sdist or upload, internal
+ dependencies could cause errors when uploaded to hackage. This is
+ fixed, see [#3290](https://github.com/commercialhaskell/stack/issues/3290)
+* Fixes a bug where nonexistent hackage versions would cause stack to
+ suggest the same package name, without giving version info. See
+ [#3562](https://github.com/commercialhaskell/stack/issues/3562)
+* Fixes a bug that has existed since 1.5.0, where
+ `stack setup --upgrade-cabal` would say that Cabal is already the latest
+ version, when it wasn't.
+* Ensure that an `extra-dep` from a local directory is not treated as
+ a `$locals` for GHC options purposes. See
+ [#3574](https://github.com/commercialhaskell/stack/issues/3574).
+* Building all executables only happens once instead of every
+ time. See
+ [#3229](https://github.com/commercialhaskell/stack/issues/3229) for
+ more info.
+
## 1.5.1
@@ -193,7 +402,7 @@ Other enhancements:
([#2986](https://github.com/commercialhaskell/stack/issues/2986))
* `stack exec` now takes `--rts-options` which passes the given arguments inside of
`+RTS ... args .. -RTS` to the executable. This works around stack itself consuming
- the RTS flags on Windows. ([#2986](https://github.com/commercialhaskell/stack/issues/2640))
+ the RTS flags on Windows. ([#2640](https://github.com/commercialhaskell/stack/issues/2640))
* Upgraded `http-client-tls` version, which now offers support for the
`socks5://` and `socks5h://` values in the `http_proxy` and `https_proxy`
environment variables.
@@ -1071,8 +1280,8 @@ Other enhancements:
* `stack ghci` now accepts all the flags accepted by `stack build`. See
[#1186](https://github.com/commercialhaskell/stack/issues/1186)
* `stack ghci` builds the project before launching GHCi. If the build fails,
- optimistically launch GHCi anyway. Use `stack ghci --no-build` option to
- disable [#1065](https://github.com/commercialhaskell/stack/issues/1065)
+ try to launch GHCi anyway. Use `stack ghci --no-build` option to disable
+ [#1065](https://github.com/commercialhaskell/stack/issues/1065)
* `stack ghci` now detects and warns about various circumstances where it is
liable to fail. See
[#1270](https://github.com/commercialhaskell/stack/issues/1270)
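Several of the knobs mentioned in the changelog above (the new `install-ghc` default, the `save-hackage-creds` prompt, and the `$locals`/`$targets`/`$everything` targets for `ghc-options`) are ordinary `stack.yaml` settings. A minimal, illustrative sketch of how they might be combined follows; the particular values are examples only, not recommendations:

```yaml
# Illustrative stack.yaml fragment for options discussed in the 1.6.1 changelog.
install-ghc: false          # opt back out of the new automatic GHC installation
save-hackage-creds: false   # never offer to store Hackage credentials on disk

ghc-options:
  "$everything": -haddock   # applies to every package in the build plan
  "$locals": -Wall          # applies to local project packages only
  "$targets": -Werror       # applies only to the current build targets
```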
diff --git a/Setup.hs b/Setup.hs
index 179845c..458817f 100644
--- a/Setup.hs
+++ b/Setup.hs
@@ -3,8 +3,7 @@ module Main (main) where
import Data.List ( nub, sortBy )
import Data.Ord ( comparing )
-import Data.Version ( showVersion )
-import Distribution.Package ( PackageName(PackageName), PackageId, InstalledPackageId, packageVersion, packageName )
+import Distribution.Package ( PackageId, InstalledPackageId, packageVersion, packageName )
import Distribution.PackageDescription ( PackageDescription(), Executable(..) )
import Distribution.InstalledPackageInfo (sourcePackageId, installedPackageId)
import Distribution.Simple ( defaultMainWithHooks, UserHooks(..), simpleUserHooks )
@@ -13,7 +12,10 @@ import Distribution.Simple.BuildPaths ( autogenModulesDir )
import Distribution.Simple.PackageIndex (allPackages, dependencyClosure)
import Distribution.Simple.Setup ( BuildFlags(buildVerbosity), fromFlag )
import Distribution.Simple.LocalBuildInfo ( installedPkgs, withLibLBI, withExeLBI, LocalBuildInfo(), ComponentLocalBuildInfo(componentPackageDeps) )
+import Distribution.Types.PackageName (PackageName, unPackageName)
+import Distribution.Types.UnqualComponentName (unUnqualComponentName)
import Distribution.Verbosity ( Verbosity )
+import Distribution.Version ( showVersion )
import System.FilePath ( (</>) )
main :: IO ()
@@ -29,27 +31,28 @@ generateBuildModule verbosity pkg lbi = do
createDirectoryIfMissingVerbose verbosity True dir
withLibLBI pkg lbi $ \_ libcfg -> do
withExeLBI pkg lbi $ \exe clbi ->
- rewriteFile (dir </> "Build_" ++ exeName exe ++ ".hs") $ unlines
- [ "module Build_" ++ exeName exe ++ " where"
+ rewriteFile (dir </> "Build_" ++ exeName' exe ++ ".hs") $ unlines
+ [ "module Build_" ++ exeName' exe ++ " where"
, ""
, "deps :: [String]"
, "deps = " ++ (show $ formatdeps (transDeps libcfg clbi))
]
where
+ exeName' = unUnqualComponentName . exeName
formatdeps = map formatone . sortBy (comparing unPackageName')
formatone p = unPackageName' p ++ "-" ++ showVersion (packageVersion p)
- unPackageName' p = case packageName p of PackageName n -> n
+ unPackageName' = unPackageName . packageName
transDeps xs ys =
either (map sourcePackageId . allPackages) handleDepClosureFailure $ dependencyClosure allInstPkgsIdx availInstPkgIds
where
allInstPkgsIdx = installedPkgs lbi
allInstPkgIds = map installedPackageId $ allPackages allInstPkgsIdx
- -- instPkgIds includes `stack-X.X.X`, which is not a depedency hence is missing from allInstPkgsIdx. Filter that out.
- availInstPkgIds = filter (`elem` allInstPkgIds) . map fst $ testDeps xs ys
+ -- instPkgIds includes `stack-X.X.X`, which is not a dependency hence is missing from allInstPkgsIdx. Filter that out.
+ availInstPkgIds = filter (`elem` allInstPkgIds) $ testDeps xs ys
handleDepClosureFailure unsatisfied =
error $
"Computation of transitive dependencies failed." ++
if null unsatisfied then "" else " Unresolved dependencies: " ++ show unsatisfied
-testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -> [(InstalledPackageId, PackageId)]
-testDeps xs ys = nub $ componentPackageDeps xs ++ componentPackageDeps ys
+testDeps :: ComponentLocalBuildInfo -> ComponentLocalBuildInfo -> [InstalledPackageId]
+testDeps xs ys = map fst $ nub $ componentPackageDeps xs ++ componentPackageDeps ys
diff --git a/doc/CONTRIBUTING.md b/doc/CONTRIBUTING.md
index 6c1fb60..b59bd46 100644
--- a/doc/CONTRIBUTING.md
+++ b/doc/CONTRIBUTING.md
@@ -44,9 +44,9 @@ discuss the change before plowing into writing code.
If you'd like to help out but aren't sure what to work on, look for issues with
the
-[awaiting pr](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pr%22)
+[awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22)
label. Issues that are suitable for newcomers to the codebase have the
-[newcomer](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pr%22+label%3Anewcomer)
+[newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22+label%3a%22newcomer+friendly%22)
label. Best to post a comment to the issue before you start work, in case anyone
has already started.
@@ -64,8 +64,8 @@ quality tool.
Note that stack contributors need not dogmatically follow the suggested hints
but are encouraged to debate their usefulness. If you find a hint is not useful
and detracts from readability, consider marking it in the [configuration
-file](https://github.com/commercialhaskell/stack/blob/master/HLint.hs) to
-be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#ignoring-hints)
+file](https://github.com/commercialhaskell/stack/blob/master/.hlint.yaml) to
+be ignored. Please refer to the [HLint manual](https://github.com/ndmitchell/hlint#readme)
for configuration syntax.
Quoting [@mgsloan](https://github.com/commercialhaskell/stack/pulls?utf8=%E2%9C%93&q=is%3Apr%20author%3Amgsloan):
@@ -85,8 +85,7 @@ stack install hlint
Once installed, you can check your changes with:
```
-hlint src/ test/ --cpp-simple --hint=HLint.hs
+hlint src/ test/ --cpp-simple
```
-Where `--cpp-simple` strips `#` lines and `--hint` explicitly specifies the
-configuration file.
+Where `--cpp-simple` strips `#` lines.
diff --git a/doc/ChangeLog.md b/doc/ChangeLog.md
index d5ef7e5..fe58562 100644
--- a/doc/ChangeLog.md
+++ b/doc/ChangeLog.md
@@ -1,5 +1,214 @@
# Changelog
+## v1.6.1
+
+Major changes:
+
+* Complete overhaul of how snapshots are defined, the `packages` and
+ `extra-deps` fields, and a number of related items. For full
+ details, please see
+ [the writeup on these changes](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots). [PR #3249](https://github.com/commercialhaskell/stack/pull/3249),
+ see the PR description for a number of related issues.
+* Upgraded to version 2.0 of the Cabal library.
+
+Behavior changes:
+
+* The `--install-ghc` flag is now on by default. For example, if you
+ run `stack build` in a directory requiring a GHC that you do not
+ currently have, Stack will automatically download and install that
+ GHC. You can explicitly set `install-ghc: false` or pass the flag
+ `--no-install-ghc` to regain the previous behavior.
+* `stack ghci` no longer loads modules grouped by package. This is
+ always an improvement for plain ghci - it makes loading faster and
+ less noisy. For intero, this has the side-effect that it will no
+ longer load multiple packages that depend on TH loading relative
+ paths. TH relative paths will still work when loading a single
+ package into intero. See
+ [#3309](https://github.com/commercialhaskell/stack/issues/3309)
+* Setting GHC options for a package via `ghc-options:` in your
+ `stack.yaml` will promote it to a local package, providing for more
+ consistency with flags and better reproducibility. See:
+ [#849](https://github.com/commercialhaskell/stack/issues/849)
+* The `package-indices` setting with Hackage no longer works with the
+ `00-index.tar.gz` tarball, but must use the `01-index.tar.gz` file
+ to allow revised packages to be found.
+* Options passed via `--ghci-options` are now passed to the end of the
+ invocation of ghci, instead of the middle. This allows using `+RTS`
+ without an accompanying `-RTS`.
+* When auto-detecting `--ghc-build`, `tinfo6` is now preferred over
+ `standard` if both versions of libtinfo are installed
+* Addition of `stack build --copy-compiler-tool`, to allow tools like
+ intero to be installed globally for a particular compiler.
+ [#2643](https://github.com/commercialhaskell/stack/issues/2643)
+* Stack will ask before saving hackage credentials to file. This new
+ prompt can be avoided by using the `save-hackage-creds` setting. Please
+ see [#2159](https://github.com/commercialhaskell/stack/issues/2159).
+* The `GHCRTS` environment variable will no longer be passed through to
+ every program stack runs. Instead, it will only be passed through
+ commands like `exec`, `runghc`, `script`, `ghci`, etc.
+ See [#3444](https://github.com/commercialhaskell/stack/issues/3444).
+* `ghc-options:` for specific packages will now come after the options
+ specified for all packages / particular sets of packages. See
+ [#3573](https://github.com/commercialhaskell/stack/issues/3573).
+* The `pvp-bounds` feature is no longer fully functional, due to some
+ issues with the Cabal library's printer. See
+ [#3550](https://github.com/commercialhaskell/stack/issues/3550).
+
+Other enhancements:
+
+* The `with-hpack` configuration option specifies an Hpack executable to use
+ instead of the Hpack bundled with Stack. Please
+ see [#3179](https://github.com/commercialhaskell/stack/issues/3179).
+* It's now possible to skip tests and benchmarks using `--skip`
+ flag
+* `GitSHA1` is now `StaticSHA256` and is implemented using the `StaticSize 64 ByteString` for improved performance.
+ See [#3006](https://github.com/commercialhaskell/stack/issues/3006)
+* Dependencies via HTTP(S) archives have been generalized to allow
+ local file path archives, as well as to support setting a
+ cryptographic hash (SHA256) of the contents for better
+ reproducibility.
+* Allow specifying `--git-branch` when upgrading
+* When running `stack upgrade` from a file which is different from the
+ default executable path (e.g., on POSIX systems,
+ `~/.local/bin/stack`), it will now additionally copy the new
+ executable over the currently running `stack` executable. If
+ permission is denied (such as in `/usr/local/bin/stack`), the user
+ will be prompted to try again using `sudo`. This is intended to
+ assist with the user experience when the `PATH` environment variable
+ has not been properly configured, see
+ [#3232](https://github.com/commercialhaskell/stack/issues/3232).
+* `stack setup` for ghcjs will now install `alex` and `happy` if
+ they are not present. See
+ [#3109](https://github.com/commercialhaskell/stack/issues/3232).
+* Added `stack ghci --only-main` flag, to skip loading / importing
+ all but main modules. See the ghci documentation page
+ for further info.
+* Allow GHC's colored output to show through. GHC colors output
+ starting with version 8.2.1, for older GHC this does nothing.
+ Sometimes GHC's heuristics would work fine even before this change,
+ for example in `stack ghci`, but this overrides GHC's heuristics
+ when they're broken by our collecting and processing GHC's output.
+* Extended the `ghc-options` field to support `$locals`, `$targets`,
+ and `$everything`. See:
+ [#3329](https://github.com/commercialhaskell/stack/issues/3329)
+* Better error message for case that `stack ghci` file targets are
+ combined with invalid package targets. See:
+ [#3342](https://github.com/commercialhaskell/stack/issues/3342)
+* For profiling now uses `-fprof-auto -fprof-cafs` instead of
+ the deprecated `-auto-all -caf-all`. See:
+ [#3360](https://github.com/commercialhaskell/stack/issues/3360)
+* Better descriptions are now available for `stack upgrade --help`. See:
+ [#3070](https://github.com/commercialhaskell/stack/issues/3070)
+* When using Nix, nix-shell now depends always on gcc to prevent build errors
+ when using the FFI. As ghc depends on gcc anyway, this doesn't increase the
+ dependency footprint.
+* `--cwd DIR` can now be passed to `stack exec` in order to execute the
+ program in a different directory. See:
+ [#3264](https://github.com/commercialhaskell/stack/issues/3264)
+* Plan construction will detect if you add an executable-only package
+ as a library dependency, resulting in much clearer error
+ messages. See:
+ [#2195](https://github.com/commercialhaskell/stack/issues/2195).
+* Addition of `--ghc-options` to `stack script` to pass options directly
+ to GHC. See:
+ [#3454](https://github.com/commercialhaskell/stack/issues/3454)
+* Add hpack `package.yaml` to build Stack itself
+* Add `ignore-revision-mismatch` setting. See:
+ [#3520](https://github.com/commercialhaskell/stack/issues/3520).
+* Log when each individual test suite finishes. See:
+ [#3552](https://github.com/commercialhaskell/stack/issues/3552).
+* Avoid spurious rebuilds when using `--file-watch` by not watching files for
+ executable, test and benchmark components that aren't a target. See:
+ [#3483](https://github.com/commercialhaskell/stack/issues/3483).
+* Stack will now try to detect the width of the running terminal
+ (only on POSIX for the moment) and use that to better display
+ output messages. Work is ongoing, so some messages will not
+ be optimal yet. The terminal width can be overridden with the
+ new `--terminal-width` command-line option (this works even on
+ non-POSIX).
+* Passing non local packages as targets to `stack ghci` will now
+ cause them to be used as `-package` args along with package
+ hiding.
+* Detect when user changed .cabal file instead of package.yaml. This
+ was implemented upstream in hpack. See
+ [#3383](https://github.com/commercialhaskell/stack/issues/3383).
+* Automatically run `autoreconf -i` as necessary when a `configure`
+ script is missing. See
+ [#3534](https://github.com/commercialhaskell/stack/issues/3534)
+* GHC bindists can now be identified by their SHA256 checksum in addition to
+ their SHA1 checksum, allowing for more security in download.
+* For filesystem setup-info paths, it's no longer assumed that the
+ directory is writable, instead a temp dir is used. See
+ [#3188](https://github.com/commercialhaskell/stack/issues/3188).
+
+Bug fixes:
+
+* `stack hoogle` correctly generates Hoogle databases. See:
+ [#3362](https://github.com/commercialhaskell/stack/issues/3362)
+* `stack --docker-help` is now clearer about --docker implying
+ system-ghc: true, rather than both --docker and --no-docker.
+* `stack haddock` now includes package names for all modules in the
+ Haddock index page. See:
+ [#2886](https://github.com/commercialhaskell/stack/issues/2886)
+* Fixed an issue where Stack wouldn't detect missing Docker images
+ properly with newer Docker versions.
+ [#3171](https://github.com/commercialhaskell/stack/pull/3171)
+* Previously, cabal files with just test-suite could cause build to fail
+ ([#2862](https://github.com/commercialhaskell/stack/issues/2862))
+* If an invalid snapshot file has been detected (usually due to
+ mismatched hashes), Stack will delete the downloaded file and
+ recommend either retrying or filing an issue upstream. See
+ [#3319](https://github.com/commercialhaskell/stack/issues/3319).
+* Modified the flag parser within Stack to match the behavior of
+ Cabal's flag parser, which allows multiple sequential dashes. See
+ [#3345](https://github.com/commercialhaskell/stack/issues/3345)
+* Now clears the hackage index cache if it is older than the
+ downloaded index. Fixes potential issue if stack was interrupted when
+ updating index.
+ See [#3033](https://github.com/commercialhaskell/stack/issues/3033)
+* The Stack install script now respects the `-d` option.
+ See [#3366](https://github.com/commercialhaskell/stack/pull/3366).
+* `stack script` can now handle relative paths to source files.
+ See [#3372](https://github.com/commercialhaskell/stack/issues/3372).
+* Fixes explanation of why a target is needed by the build plan, when the
+ target is an extra dependency from the commandline.
+ See [#3378](https://github.com/commercialhaskell/stack/issues/3378).
+* Previously, if you delete a yaml file from ~/.stack/build-plan, it would
+ trust the etag and not re-download. Fixed in this version.
+* Invoking `stack --docker` in parallel now correctly locks the sqlite database.
+ See [#3400](https://github.com/commercialhaskell/stack/issues/3400).
+* docs.haskellstack.org RTD documentation search is replaced by the mkdocs
+ search. Please see
+ [#3376](https://github.com/commercialhaskell/stack/issues/3376).
+* `stack clean` now works with nix. See
+ [#3468](https://github.com/commercialhaskell/stack/issues/3376).
+* `stack build --only-dependencies` no longer builds local project packages
+ that are depended on. See
+ [#3476](https://github.com/commercialhaskell/stack/issues/3476).
+* Properly handle relative paths stored in the precompiled cache files. See
+ [#3431](https://github.com/commercialhaskell/stack/issues/3431).
+* In some cases, Cabal does not realize that it needs to reconfigure, and must
+ be told to do so automatically. This would manifest as a "shadowed
+ dependency" error message. We now force a reconfigure whenever a dependency is
+ built, even if the package ID remained the same. See
+ [#2781](https://github.com/commercialhaskell/stack/issues/2781).
+* When `--pvp-bounds` is enabled for sdist or upload, internal
+ dependencies could cause errors when uploaded to hackage. This is
+ fixed, see [#3290](https://github.com/commercialhaskell/stack/issues/3290)
+* Fixes a bug where nonexistent hackage versions would cause stack to
+ suggest the same package name, without giving version info. See
+ [#3562](https://github.com/commercialhaskell/stack/issues/3562)
+* Fixes a bug that has existed since 1.5.0, where
+ `stack setup --upgrade-cabal` would say that Cabal is already the latest
+ version, when it wasn't.
+* Ensure that an `extra-dep` from a local directory is not treated as
+ a `$locals` for GHC options purposes. See
+ [#3574](https://github.com/commercialhaskell/stack/issues/3574).
+* Building all executables only happens once instead of every
+ time. See
+ [#3229](https://github.com/commercialhaskell/stack/issues/3229) for
+ more info.
+
## 1.5.1
@@ -193,7 +402,7 @@ Other enhancements:
([#2986](https://github.com/commercialhaskell/stack/issues/2986))
* `stack exec` now takes `--rts-options` which passes the given arguments inside of
`+RTS ... args .. -RTS` to the executable. This works around stack itself consuming
- the RTS flags on Windows. ([#2986](https://github.com/commercialhaskell/stack/issues/2640))
+ the RTS flags on Windows. ([#2640](https://github.com/commercialhaskell/stack/issues/2640))
* Upgraded `http-client-tls` version, which now offers support for the
`socks5://` and `socks5h://` values in the `http_proxy` and `https_proxy`
environment variables.
@@ -1071,8 +1280,8 @@ Other enhancements:
* `stack ghci` now accepts all the flags accepted by `stack build`. See
[#1186](https://github.com/commercialhaskell/stack/issues/1186)
* `stack ghci` builds the project before launching GHCi. If the build fails,
- optimistically launch GHCi anyway. Use `stack ghci --no-build` option to
- disable [#1065](https://github.com/commercialhaskell/stack/issues/1065)
+ try to launch GHCi anyway. Use `stack ghci --no-build` option to disable
+ [#1065](https://github.com/commercialhaskell/stack/issues/1065)
* `stack ghci` now detects and warns about various circumstances where it is
liable to fail. See
[#1270](https://github.com/commercialhaskell/stack/issues/1270)
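Two further settings listed in the changelog above, `with-hpack` and `ignore-revision-mismatch`, are likewise plain `stack.yaml` entries. A hedged sketch; the path to hpack shown here is hypothetical:

```yaml
# Illustrative stack.yaml fragment; the hpack path below is a placeholder.
with-hpack: /usr/local/bin/hpack   # use an external hpack instead of the bundled one
ignore-revision-mismatch: true     # warn rather than fail on Hackage revision mismatches
```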
diff --git a/doc/GUIDE.md b/doc/GUIDE.md
index e11b5d6..1179779 100644
--- a/doc/GUIDE.md
+++ b/doc/GUIDE.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# User guide
stack is a modern, cross-platform build tool for Haskell code.
@@ -1083,7 +1085,7 @@ The following changes will be made to stack.yaml:
- aeson-0.10.0.0
- aeson-compat-0.3.0.0
- attoparsec-0.13.0.1
- - conduit-extra-1.1.9.2
+ - conduit-extra-1.2.0
- email-validate-2.2.0
- hex-0.1.2
- http-api-data-0.2.2
@@ -1246,7 +1248,7 @@ executables to the local bin path. You may recognize the default value for that
path:
```
-michael@d30748af6d3d:~/helloworld$ stack path --local-bin-path
+michael@d30748af6d3d:~/helloworld$ stack path --local-bin
/home/michael/.local/bin
```
@@ -1577,8 +1579,9 @@ what needs to be removed:
We've already used `stack exec` used multiple times in this guide. As you've
likely already guessed, it allows you to run executables, but with a slightly
modified environment. In particular: `stack exec` looks for executables on
-stack's bin paths, and sets a few additional environment variables (like
-`GHC_PACKAGE_PATH`, which tells GHC which package databases to use).
+stack's bin paths, and sets a few additional environment variables (like adding
+those paths to `PATH`, and setting `GHC_PACKAGE_PATH`, which tells GHC which
+package databases to use).
If you want to see exactly what the modified environment looks like, try:
@@ -1787,7 +1790,7 @@ it. Here is an example:
-}
```
-## Finding project configs, and the implicit global
+## Finding project configs, and the implicit global project
Whenever you run something with stack, it needs a `stack.yaml` project file. The
algorithm stack uses to find this is:
@@ -1818,6 +1821,15 @@ configuration. It has no impact on projects at all. Every package you install
with it is put into isolated databases just like everywhere else. The only magic
is that it's the catch-all project whenever you're running stack somewhere else.
+## Setting stack root location
+
+`stack path --stack-root` will tell you the location of the "stack root". Among
+other things, this is where stack stores downloaded programs and snapshot
+packages. This location can be configured by setting the STACK_ROOT environment
+variable or passing the `--stack-root` commandline option. It is particularly
+useful to do this on Windows, where filepaths are limited (MAX_PATH), and things
+can break when this limit is exceeded.
+
## `stack.yaml` vs `.cabal` files
Now that we've covered a lot of stack use cases, this quick summary of
@@ -1897,21 +1909,6 @@ __Other tools for comparison (including active and historical)__
* [cabal-src](https://hackage.haskell.org/package/cabal-src) is mostly irrelevant in the presence of both stack and cabal sandboxes, both of which make it easier to add additional package sources easily. The mega-sdist executable that ships with cabal-src is, however, still relevant. Its functionality may some day be folded into stack
* [stackage-cli](https://hackage.haskell.org/package/stackage-cli) was an initial attempt to make cabal-install work more easily with curated snapshots, but due to a slight impedance mismatch between cabal.config constraints and snapshots, it did not work as well as hoped. It is deprecated in favor of stack.
-## More resources
-
-There are lots of resources available for learning more about stack:
-
-* `stack --help`
-* `stack --version` — identify the version and Git hash of the stack executable
-* `--verbose` (or `-v`) — much more info about internal operations (useful for bug reports)
-* The [home page](http://haskellstack.org)
-* The [stack mailing list](https://groups.google.com/d/forum/haskell-stack)
-* The [the FAQ](faq.md)
-* The [stack wiki](https://github.com/commercialhaskell/stack/wiki)
-* The [haskell-stack tag on Stack Overflow](http://stackoverflow.com/questions/tagged/haskell-stack)
-* [Another getting started with stack tutorial](http://seanhess.github.io/2015/08/04/practical-haskell-getting-started.html)
-* [Why is stack not cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal)
-
## Fun features
@@ -1960,11 +1957,11 @@ As a starting point you can use [the "simple" template](https://github.com/comme
An introduction into template-writing and a place for submitting official templates,
you will find at [the stack-templates repository](https://github.com/commercialhaskell/stack-templates#readme).
-### IDE
+### Editor integration
-stack has a work-in-progress suite of editor integrations, to do things like
-getting type information in Emacs. For more information, see
-[stack-ide](https://github.com/commercialhaskell/stack-ide#readme).
+For editor integration, stack has a related project called
+[intero](https://github.com/commercialhaskell/intero). It is particularly well
+supported by emacs, but some other editors have integration for it as well.
### Visualizing dependencies
@@ -1985,7 +1982,7 @@ the following (or add it to `.bashrc`):
eval "$(stack --bash-completion-script stack)"
For more information and other shells, see [the Shell auto-completion wiki
-page](https://github.com/commercialhaskell/stack/wiki/Shell-autocompletion)
+page](https://docs.haskellstack.org/en/stable/shell_autocompletion)
### Docker
@@ -2007,34 +2004,41 @@ to stack.yaml:
```yaml
image:
- # YOU NEED A `container` YAML SECTION FOR `stack image container`
- container:
- # YOU NEED A BASE IMAGE NAME. STACK LAYERS EXES ON TOP OF
- # THE BASE IMAGE. PREPARE YOUR PROJECT IMAGE IN ADVANCE. PUT
- # ALL YOUR RUNTIME DEPENDENCIES IN THE IMAGE.
- base: "fpco/ubuntu-with-libgmp:14.04"
- # YOU CAN OPTIONALY NAME THE IMAGE. STACK WILL USE THE PROJECT
- # DIRECTORY NAME IF YOU LEAVE OUT THIS OPTION.
- name: "fpco/hello-world"
- # OPTIONALLY ADD A HASH OF LOCAL PROJECT DIRECTORIES AND THEIR
- # DESTINATIONS INSIDE THE DOCKER IMAGE.
- add:
- man/: /usr/local/share/man/
- # OPTIONALLY SPECIFY A LIST OF EXECUTABLES. STACK WILL CREATE
- # A TAGGED IMAGE FOR EACH IN THE LIST. THESE IMAGES WILL HAVE
- # THEIR RESPECTIVE "ENTRYPOINT" SET.
- entrypoints:
- - stack
+
+ # You need a `containers` yaml section for `stack image container`.
+ # A `container` section that does not contain a list is also valid.
+ containers:
+
+ # This example just has one container.
+ -
+ # You need a base image name. Stack layers exes on top of
+ # the base image. Prepare your project image in advance by
+ # putting all your runtime dependencies in the image.
+ base: "fpco/ubuntu-with-libgmp:14.04"
+
+ # You can optionally name the image. Stack will use the project
+ # directory name if you leave out this option.
+ name: "fpco/hello-world"
+
+ # Optionally add a directory to a path inside the docker image.
+ add:
+ man/: /usr/local/share/man/
+
+ # Optionally specify a list of executables. Stack will create
+ # a tagged image for each in the list. These images will have
+ # their respective "ENTRYPOINT" set.
+ entrypoints:
+ - stack
```
and then run `stack image container` and then `docker images` to list
the images.
-Note that the executable will be built in the development environment
-and copied to the container, so the dev OS must match that of the
+Note that the executable will be built in the development environment
+and copied to the container, so the dev OS must match that of the
container OS. This is easily accomplished using [Docker integration](docker_integration.md),
-under which the exe emitted by `stack build` will be built on the
-Docker container, not the local OS.
+under which the exe emitted by `stack build` will be built on the
+Docker container, not the local OS.
The executable will be stored under `/usr/local/bin/<your-project>-exe`
in the running container.
@@ -2150,6 +2154,14 @@ build:
executable-profiling: true
```
+### Further reading
+
+For more commands and uses, see [the official GHC chapter on
+profiling](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html),
+[the Haskell wiki](https://wiki.haskell.org/How_to_profile_a_Haskell_program),
+and [the chapter on profiling in Real World
+Haskell](http://book.realworldhaskell.org/read/profiling-and-optimization.html).
+
### Tracing
To generate a backtrace in case of exceptions during a test or benchmarks run,
@@ -2160,14 +2172,21 @@ but adds the `+RTS -xc` runtime option.
`stack` now supports debugging and profiling with
[DWARF information](https://ghc.haskell.org/trac/ghc/wiki/DWARF),
-using the `--no-strip`, `--no-library-stripping`, and `--no-executable-shipping`
+using the `--no-strip`, `--no-library-stripping`, and `--no-executable-stripping`
flags to disable the default behavior of removing such information from compiled
libraries and executables.
-### Further reading
+## More resources
-For more commands and uses, see [the official GHC chapter on
-profiling](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/profiling.html),
-[the Haskell wiki](https://wiki.haskell.org/How_to_profile_a_Haskell_program),
-and [the chapter on profiling in Real World
-Haskell](http://book.realworldhaskell.org/read/profiling-and-optimization.html).
+There are lots of resources available for learning more about stack:
+
+* `stack --help`
+* `stack --version` — identify the version and Git hash of the stack executable
+* `--verbose` (or `-v`) — much more info about internal operations (useful for bug reports)
+* The [home page](http://haskellstack.org)
+* The [stack mailing list](https://groups.google.com/d/forum/haskell-stack)
+* The [FAQ](faq.md)
+* The [stack wiki](https://github.com/commercialhaskell/stack/wiki)
+* The [haskell-stack tag on Stack Overflow](http://stackoverflow.com/questions/tagged/haskell-stack)
+* [Another getting started with stack tutorial](http://seanhess.github.io/2015/08/04/practical-haskell-getting-started.html)
+* [Why is stack not cabal?](https://www.fpcomplete.com/blog/2015/06/why-is-stack-not-cabal)
diff --git a/doc/MAINTAINER_GUIDE.md b/doc/MAINTAINER_GUIDE.md
index e5eba5c..011bfb8 100644
--- a/doc/MAINTAINER_GUIDE.md
+++ b/doc/MAINTAINER_GUIDE.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Maintainer guide
## Next release:
diff --git a/doc/README.md b/doc/README.md
index 7877257..5cbac6e 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# The Haskell Tool Stack
Stack is a cross-platform program for developing Haskell
@@ -53,7 +55,7 @@ the needed files to start a project correctly.
- The `stack build` command will build the minimal project.
- `stack exec my-project-exe` will execute the command.
- If you just want to install an executable using stack, then all you have to do
-is`stack install <package-name>`.
+is `stack install <package-name>`.
If you want to launch a REPL:
@@ -116,7 +118,10 @@ installed.
4. Once `stack` finishes building, check the stack version with
`stack exec stack -- --version`. Make sure the version is the latest.
5. Look for issues tagged with
- [`newcomer` and `awaiting-pr` labels](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3Anewcomer+label%3A%22awaiting+pr%22).
+ [newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22)
+ and
+ [awaiting pull request](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3A%22awaiting+pull+request%22)
+ labels.
Build from source as a one-liner:
@@ -173,4 +178,4 @@ commercial Haskell users, and has since become a thriving open source
project meeting the needs of Haskell users of all stripes.
If you'd like to get involved with Stack, check out the
-[newcomers label on the Github issue tracker](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3Anewcomer).
+[newcomer friendly](https://github.com/commercialhaskell/stack/issues?q=is%3Aopen+is%3Aissue+label%3a%22newcomer+friendly%22) label on the Github issue tracker.
diff --git a/doc/SIGNING_KEY.md b/doc/SIGNING_KEY.md
index bce4a5b..d01645c 100644
--- a/doc/SIGNING_KEY.md
+++ b/doc/SIGNING_KEY.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Signing key
Releases are signed with this key:
diff --git a/doc/architecture.md b/doc/architecture.md
index f228515..cb60d6a 100644
--- a/doc/architecture.md
+++ b/doc/architecture.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Architecture
## Terminology
diff --git a/doc/build_command.md b/doc/build_command.md
index 6bec71e..fd671af 100644
--- a/doc/build_command.md
+++ b/doc/build_command.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Build command
## Overview
@@ -140,6 +142,14 @@ following flags:
* `--keep-going`, to continue building packages even after some build step
fails. The packages which depend upon the failed build won't get built.
+* `--skip`, to skip building components of a local package. It allows
+ you to skip test suites and benchmark without specifying other components
+ (e.g. `stack test --skip long-test-suite` will run the tests without the
+ `long-test-suite` test suite). Be aware that skipping executables won't work
+ the first time the package is built due to
+ [an issue in cabal](https://github.com/commercialhaskell/stack/issues/3229).
+ This option can be specified multiple times to skip multiple components.
+
## Flags
There are a number of other flags accepted by `stack build`. Instead of listing
diff --git a/doc/coverage.md b/doc/coverage.md
index 4c34829..9c55c48 100644
--- a/doc/coverage.md
+++ b/doc/coverage.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Code Coverage
Code coverage is enabled by passing the `--coverage` flag to `stack build`.
diff --git a/doc/custom_snapshot.md b/doc/custom_snapshot.md
index 38bed74..335ff90 100644
--- a/doc/custom_snapshot.md
+++ b/doc/custom_snapshot.md
@@ -1,29 +1,61 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Custom Snapshots
-Custom snapshots allow you to create your own snapshots, which provide a list of
-specific hackage packages to use, along with flags and ghc-options. The
-definition of a basic snapshot looks like the following:
+Custom snapshots were totally reworked with the extensible snapshots
+overhaul in Stack 1.6.0, see
+[the writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots)
+and
+[PR #3249](https://github.com/commercialhaskell/stack/pull/3249). This
+documentation covers the new syntax only.
+
+Custom snapshots allow you to create your own snapshots, which provide
+a list of packages to use, along with flags, ghc-options, and a few
+other settings. Custom snapshots may extend any other snapshot that
+can be specified in a `resolver` field. The packages specified follow
+the syntax of `extra-deps` in the `stack.yaml` file, with one
+exception: to ensure reproducibility of snapshots, local directories
+are not allowed for custom snapshots (as they are expected to change
+regularly).
```yaml
-resolver: ghc-8.0
+resolver: lts-8.21 # Inherits GHC version and package set
+compiler: ghc-8.0.1 # Overrides the GHC version in the resolver, optional
+
+name: my-snapshot # User-friendly name
+# Additional packages, follows extra-deps syntax
packages:
- - unordered-containers-0.2.7.1
- - hashable-1.2.4.0
- - text-1.2.2.1
+- unordered-containers-0.2.7.1
+- hashable-1.2.4.0
+- text-1.2.2.1
+# Override flags, can also override flags in the parent snapshot
flags:
unordered-containers:
debug: true
+
+# Packages from the parent snapshot to ignore
+drop-packages:
+- wai-extra
+
+# Packages which should be hidden (affects script command's import
+# parser)
+hidden:
+ wai: true
+ warp: false
+
+# Set GHC options for specific packages
+ghc-options:
+ warp:
+ - -O2
```
If you put this in a `snapshot.yaml` file in the same directory as your project,
you can now use the custom snapshot like this:
```yaml
-resolver:
- name: simple-snapshot # Human readable name for the snapshot
- location: simple-snapshot.yaml
+resolver: snapshot.yaml
```
This is an example of a custom snapshot stored in the filesystem. They are
@@ -38,24 +70,6 @@ For efficiency, URLs are treated differently. If I uploaded the snapshot to
`https://domain.org/snapshot-1.yaml`, it is expected to be immutable. If you
change that file, then you lose any reproducibility guarantees.
-## Extending snapshots
-
-The example custom snapshot above uses a compiler resolver, and so has few
-packages. We can also extend existing snapshots, by using the usual
-[resolver setting found in stack configurations](yaml_configuration.md#resolver).
-All possible resolver choices are valid, so this means that custom snapshots can
-even extend other custom snapshots.
-
-Lets say that we want to use `lts-7.1`, but use a different version of `text`
-than the one it comes with, `1.2.2.1`. To downgrade it to `1.2.2.0`, we need a
-custom snapshot file with the following:
-
-```yaml
-resolver: lts-7.1
-packages:
- - text-1.2.2.0
-```
-
### Overriding the compiler
The following snapshot specification will be identical to `lts-7.1`, but instead
@@ -117,57 +131,3 @@ flags:
text:
developer: true
```
-
-## YAML format
-
-In summary, the YAML format of custom snapshots has the following fields which
-are directly related to the same fields in the
-[build configuration format](yaml_configuration.md):
-
-* `resolver`, which specifies which snapshot to extend. It takes the same values
- as the [`resolver` field in stack.yaml](yaml_configuration.md#resolver).
-
-* `compiler`, which specifies or overrides the selection of compiler. If
- `resolver` is absent, then a specification of `compiler` is required. Its
- semantics are the same as the
- [`compiler` field in stack.yaml](yaml_configuration.md#compiler).
-
-Some fields look similar, but behave differently:
-
-* `flags` specifies which cabal flags to use with each package. In order to
- specify a flag for a package, it *must* be listed in the `packages` list.
-
-* `ghc-options`, which specifies which cabal flags to use with each package. In
- order to specify ghc-options for a package, it *must* be listed in the
- `packages` list. The `*` member of the map specifies flags that apply to every
- package in the `packages` list.
-
-There are two fields which work differently than in the build configuration
-format:
-
-* `packages`, which specifies a list of hackage package versions. Note that
- when a package version is overridden, no `flags` or `ghc-options` are taken
- from the snapshot that is being extended. If you want the same options as the
- snapshot being extended, they must be re-specified.
-
-* `drop-packages`, which specifies a list of packages to drop from the snapshot
- being overridden.
-
-## Future enhancements
-
-We plan to enhance extensible snapshots in several ways in the future. See
-[issue #1265, about "implicit snapshots"](https://github.com/commercialhaskell/stack/issues/1265).
-In summary, in the future:
-
-1) It will be possible to use a specific git repository + commit hash in the
-`packages` list, like in regular stack.yaml configuration. Currently, custom
-snapshots only work with packages on hackage.
-
-2) `stack.yaml` configurations will implicitly create a snapshot. This means
-that the non-local packages will get shared between your projects, so there is
-less redundant compilation!
-
-3) `flags` and `ghc-options` for packages which are not listed in `packages` are
-silently ignored. See
-[#2654](https://github.com/commercialhaskell/stack/issues/2654) for the current
-status of this.
diff --git a/doc/dependency_visualization.md b/doc/dependency_visualization.md
index 994b673..795b74a 100644
--- a/doc/dependency_visualization.md
+++ b/doc/dependency_visualization.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Dependency visualization
You can use stack to visualize the dependencies between your packages and
diff --git a/doc/docker_integration.md b/doc/docker_integration.md
index 22f80ca..7afac31 100644
--- a/doc/docker_integration.md
+++ b/doc/docker_integration.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
Docker integration
===============================================================================
diff --git a/doc/faq.md b/doc/faq.md
index f3d39fe..001be29 100644
--- a/doc/faq.md
+++ b/doc/faq.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# FAQ
So that this doesn't become repetitive: for the reasons behind the answers
@@ -434,11 +436,7 @@ such hardening flags by default which may be the cause of some instances of the
problem. Therefore, a possible workaround might be to turn off PIE related
flags.
-In Arch Linux, the support for this is provided by the `hardening-wrapper`
-package. Some possible workarounds:
-
-* Selectively disabling its PIE forcing by setting `HARDENING_PIE=0` in `/etc/hardening-wrapper.conf`.
-* Uninstalling the `hardening-wrapper` package and logging out then into your account again.
+On Arch Linux, installing the `ncurses5-compat-libs` package from AUR resolves [this issue](https://github.com/commercialhaskell/stack/issues/2712).
If you manage to work around this in other distributions, please include instructions here.
@@ -521,4 +519,9 @@ where you keep your SSL certificates.
Unfortunately `stack build` does not have an obvious equivalent to `cabal build -vN` which shows verbose output from GHC when building. The easiest workaround is to add `ghc-options: -vN` to the .cabal file or pass it via `stack build --ghc-options="-v"`.
## Does Stack support the Hpack specification?
-Yes. You can run `stack init` as usual and Stack will create a matching `stack.yaml`.
+
+Yes:
+
+* If a package directory contains an Hpack `package.yaml` file, then Stack will use it to generate a `.cabal` file when building the package.
+* You can run `stack init` to initialize a `stack.yaml` file regardless of whether your packages are declared with `.cabal` files or with Hpack `package.yaml` files.
+* You can use the `with-hpack` configuration option to specify an Hpack executable to use instead of the Hpack bundled with Stack.
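+
+A minimal sketch of that last option in `stack.yaml` or `config.yaml`,
+mirroring the example in the YAML configuration docs (the path is only an
+illustration):
+
+```yaml
+with-hpack: /usr/local/bin/hpack
+```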
diff --git a/doc/ghci.md b/doc/ghci.md
index ccfb3f5..ea423f4 100644
--- a/doc/ghci.md
+++ b/doc/ghci.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# GHCi
`stack ghci` allows you to load components and files of your project into
@@ -31,6 +33,25 @@ There are two ways to speed up the initial startup of ghci:
* `--no-load`, to skip loading all defined modules into ghci. You can then
directly use `:load MyModule` to load a specific module in your project.
+## Loading just the main module
+
+By default, `stack ghci` loads and imports all of the modules in the package.
+This allows you to easily use anything exported by your package. This is
+usually quite convenient, but in some cases it makes sense to only load one
+module, or no modules at all. The `--only-main` flag allows this. It specifies
+that only the main module will be loaded, if any. This is particularly useful
+in the following circumstances:
+
+1. You're loading the project in order to run it in ghci (e.g. via `main`), and
+ you intend to reload while developing. Without the `--only-main` flag, you
+ will need to quit and restart ghci whenever a module gets deleted. With the
+ flag, reloading should work fine in this case.
+
+2. If many of your modules have exports named the same thing, then you'll need to
+ refer to them using qualified names. To avoid this, it may be easier to use
+ `--only-main` to start with a blank slate and just import the modules you are
+ interested in.
+
## Loading a filepath directly
Instead of the `TARGET` syntax, it is also possible to directly run
diff --git a/doc/ghcjs.md b/doc/ghcjs.md
index 6be3b17..24754fb 100644
--- a/doc/ghcjs.md
+++ b/doc/ghcjs.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# GHCJS
To use GHCJS with stack, place a GHCJS version in the [`compiler`](yaml_configuration.md#compiler) field of `stack.yaml`. After this, all stack commands should work with GHCJS! In particular:
diff --git a/doc/install_and_upgrade.md b/doc/install_and_upgrade.md
index d184bc2..db2e59c 100644
--- a/doc/install_and_upgrade.md
+++ b/doc/install_and_upgrade.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Install/upgrade
For common Un*x operating systems (including macOS), all you need to do is run:
diff --git a/doc/nix_integration.md b/doc/nix_integration.md
index f20e3ed..d813638 100644
--- a/doc/nix_integration.md
+++ b/doc/nix_integration.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Nix integration
(since 0.1.10.0)
diff --git a/doc/nonstandard_project_init.md b/doc/nonstandard_project_init.md
index d54de4a..b51f596 100644
--- a/doc/nonstandard_project_init.md
+++ b/doc/nonstandard_project_init.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Non-standard project initialization
## Introduction
@@ -5,7 +7,9 @@ The purpose of this page is to collect information about issues that arise when
users either have an existing cabal project or another nonstandard setup such
as a private hackage database.
-## Using a Cabal File New users may be confused by the fact that you must add
+## Using a Cabal File
+
+New users may be confused by the fact that you must add
dependencies to the package's cabal file, even in the case when you have
already listed the package in the `stack.yaml`. In most cases, dependencies for
your package that are in the Stackage snapshot need *only* be added to the
diff --git a/doc/shell_autocompletion.md b/doc/shell_autocompletion.md
index 34d6788..7fdc1f4 100644
--- a/doc/shell_autocompletion.md
+++ b/doc/shell_autocompletion.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Shell Auto-completion
Note: if you installed a package for your Linux distribution, the bash
diff --git a/doc/travis_ci.md b/doc/travis_ci.md
index ba0b13a..44c0b9d 100644
--- a/doc/travis_ci.md
+++ b/doc/travis_ci.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# Travis CI
This page documents how to use Stack on
@@ -5,11 +7,11 @@ This page documents how to use Stack on
familiarity with Travis. We provide two fully baked example files
ready to be used on your projects:
-* [The simple Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/master/doc/travis-simple.yml)
+* [The simple Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-simple.yml)
is intended for applications that do not require multiple GHC
support or cross-platform support. It builds and tests your project
with just the settings present in your `stack.yaml` file.
-* [The complex Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/master/doc/travis-complex.yml)
+* [The complex Travis configuration](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/travis-complex.yml)
is intended for projects that need to support multiple GHC versions
and multiple OSes, such as open source libraries to be released to
Hackage. It tests against cabal-install, as well as Stack on Linux
@@ -28,7 +30,9 @@ repo, enable Travis on the repo, and you're good to go.
You may also be interested in using AppVeyor, which supports Windows
builds, for more cross-platform testing. There's a
-[short blog post available on how to do this](http://www.snoyman.com/blog/2016/08/appveyor-haskell-windows-ci).
+[short blog post available on how to do this](http://www.snoyman.com/blog/2016/08/appveyor-haskell-windows-ci),
+or just copy in
+[the appveyor.yml file](https://raw.githubusercontent.com/commercialhaskell/stack/stable/doc/appveyor.yml).
The rest of this document explains the details of common Travis
configurations for those of you who want to tweak the above
@@ -70,15 +74,6 @@ before_install:
- travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack'
```
-Once Travis whitelists the stack .deb files, we'll be able to simply include
-stack in the `addons` section, and automatically use the newest version of
-stack, avoiding that complicated `before_install` section This is being
-tracked in the
-[apt-source-whitelist](https://github.com/travis-ci/apt-source-whitelist/pull/7)
-and
-[apt-package-whitelist](https://github.com/travis-ci/apt-package-whitelist/issues/379)
-issue trackers.
-
## Installing GHC
There are two ways to install GHC:
diff --git a/doc/yaml_configuration.md b/doc/yaml_configuration.md
index 9a800ae..380f7b4 100644
--- a/doc/yaml_configuration.md
+++ b/doc/yaml_configuration.md
@@ -1,3 +1,5 @@
+<div class="hidden-warning"><a href="https://docs.haskellstack.org/"><img src="https://rawgit.com/commercialhaskell/stack/master/doc/img/hidden-warning.svg"></a></div>
+
# YAML Configuration
This page is intended to fully document all configuration options available in
@@ -27,7 +29,7 @@ project, not in the user or global config files.
> Note: We define **project** to mean a directory that contains a `stack.yaml`
> file, which specifies how to build a set of packages. We define **package** to
-> be a package with a `.cabal` file.
+> be a package with a `.cabal` file or Hpack `package.yaml` file.
In your project-specific options, you specify both **which local packages** to
build and **which dependencies to use** when building these packages. Unlike the
@@ -41,161 +43,215 @@ it will be used even if you're using a snapshot that specifies a particular
version. Similarly, `extra-deps` will shadow the version specified in the
resolver.
-### packages
+### resolver
+
+Specifies which snapshot is to be used for this project. A snapshot
+defines a GHC version, a number of packages available for
+installation, and various settings like build flags. It is called a
+resolver since a snapshot states how dependencies are resolved. There
+are currently four resolver types:
+
+* LTS Haskell snapshots, e.g. `resolver: lts-2.14`
+* Stackage Nightly snapshot, e.g. `resolver: nightly-2015-06-16`
+* No snapshot, just use packages shipped with the compiler
+ * For GHC this looks like `resolver: ghc-7.10.2`
+ * For GHCJS this looks like `resolver: ghcjs-0.1.0_ghc-7.10.2`.
+* [Custom snapshot](custom_snapshot.md)
+
+Each of these resolvers will also determine what constraints are placed on the
+compiler version. See the [compiler-check](#compiler-check) option for some
+additional control over compiler version.
+
+### packages and extra-deps
+
+_NOTE_ The contents of this section have changed significantly since
+extensible snapshots were implemented (see:
+[writeup](https://www.fpcomplete.com/blog/2017/07/stacks-new-extensible-snapshots)
+and
+[PR #3249](https://github.com/commercialhaskell/stack/pull/3249)). Most
+old syntax is still supported with newer versions of Stack, but will
+not be documented here. Instead, this section contains the recommended
+syntax as of Stack v1.6.0.
+
+There are two types of packages that can be defined in your
+`stack.yaml` file:
+
+* __Project packages__, those which you are actually working on in
+ your current project. These are local file paths in your project
+ directory.
+* __Extra dependencies__, which are packages provided locally on top
+ of the snapshot definition of available packages. These can come
+ from Hackage (or an alternative package index you've defined, see
+ [package-indices](#package-indices)), an HTTP(S) or local archive, a
+ Git or Mercurial repository, or a local file path.
-The `packages` section lists all local (project) packages. The term _local
-package_ should be differentiated from a _dependency package_. A local package
-is something that you are developing as part of the project. Whereas a
-dependency package is an external package that your project depends on.
+These two sets of packages are both installed into your local package
+database within your project. However, beyond that, they are
+completely different:
-In its simplest usage, it will be a list of directories or HTTP(S) URLs to a
-tarball or a zip. For example:
+* Project packages will be built by default with a `stack build`
+ without specific targets. Extra dependencies will only be built if
+ they are depended upon.
+* Test suites and benchmarks may be run for project packages. They are
+ never run for extra dependencies.
+
+The `packages` key is a simple list of file paths, which will be
+treated as relative to the directory containing your `stack.yaml`
+file. For example:
```yaml
packages:
- - .
- - dir1/dir2
- - https://example.com/foo/bar/baz-0.0.2.tar.gz
+- .
+- dir1/dir2
```
-Each package directory or location specified must have a valid cabal file
-present. Note that the subdirectories of the directory are not searched for
-cabal files. Subdirectories will have to be specified as independent items in
-the list of packages.
+Each package directory or location specified must have a valid cabal
+file or Hpack `package.yaml` file present. Note that the
+subdirectories of the directory are not searched for cabal
+files. Subdirectories will have to be specified as independent items
+in the list of packages.
When the `packages` field is not present, it defaults to looking for a package
in the project's root directory:
```yaml
packages:
- - .
+- .
+```
+
+The `extra-deps` key is given a list of all extra dependencies. If
+omitted, it is taken as the empty list, e.g.:
+
+```yaml
+extra-deps: []
```
-#### Complex package locations (`location`)
-More complex package locations can be specified in a key-value format with
-`location` as a mandatory key. In addition to `location` some optional
-key-value pairs can be specified to include specific subdirectories or to
-specify package attributes as descibed later in this section.
+It supports four different styles of values:
-In its simplest form a `location` key can have a single value in the same way
-as described above for single value items. Alternativel it can have key-value
-pairs as subfields to describe a git or mercurial repository location. For
-example:
+#### Package index
+
+Packages can be stated by a name/version combination, which will be
+looked up in the package index (by default, Hackage). The basic syntax
+for this is:
```yaml
-packages:
-- location: .
-- location: dir1/dir2
-- location: https://example.com/foo/bar/baz-0.0.2.tar.gz
-- location: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip
-- location:
- git: git@github.com:commercialhaskell/stack.git
- commit: 6a86ee32e5b869a877151f74064572225e1a0398
-- location:
- hg: https://example.com/hg/repo
- commit: da39a3ee5e6b4b0d3255bfef95601890afd80709
+extra-deps:
+- acme-missiles-0.3
```
-Note: it is highly recommended that you only use SHA1 values for a Git or
-Mercurial commit. Other values may work, but they are not officially supported,
-and may result in unexpected behavior (namely, stack will not automatically
-pull to update to new versions).
+Using this syntax, the most recent Cabal file revision available will
+be used. For more reproducible builds, it is recommended to
+state the SHA256 hash of the cabal file contents as well, like this:
-A `location` key can be accompanied by a `subdirs` key to look for cabal files
-in a list of subdirectories as well in addition to the top level directory.
+```yaml
+extra-deps:
+- acme-missiles-0.3@sha256:2ba66a092a32593880a87fb00f3213762d7bca65a687d45965778deb8694c5d1
+```
-This could be useful for mega-repos like
-[wai](https://github.com/yesodweb/wai/) or
-[digestive-functors](https://github.com/jaspervdj/digestive-functors).
+Or a specific revision number, with `0` being the original file:
-The `subdirs` key can have multiple nested series items specifying a list of
-subdirectories. For example:
```yaml
-packages:
-- location: .
- subdirs:
- - subdir1
- - subdir2
-- location:
- git: git@github.com:yesodweb/wai
- commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f
- subdirs:
- - auto-update
- - wai
-- location: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip
- subdirs:
- - auto-update
- - wai
+extra-deps:
+- acme-missiles-0.3@rev:0
```
+Note that specifying via SHA256 is slightly more resilient in that it
+does not rely on correct ordering in the package index, while revision
+number is likely simpler to use. In practice, both should guarantee
+equally reproducible build plans.
+
If unspecified, `subdirs` defaults to `['.']` (i.e. look only in the top-level
directory). Note that if you specify a value of `subdirs`, then `'.'` is _not_
included by default and needs to be explicitly specified if a required package
is found in the top-level directory of the repository.
-#### Local dependency packages (`extra-dep`)
-A `location` key can be accompanied by an `extra-dep` key. When the
-`extra-dep` key is set to `true` it indicates that the package should be
-treated in the same way as a dependency package and not as part of the project.
-This means the following:
-* A _dependency package_ is built only if a user package or its dependencies
- depend on it. Note that a regular _project package_ is built anyway even if
- no other package depends on it.
-* Its test suites and benchmarks will not be run.
-* It will not be directly loaded in ghci when `stack ghci` is run. This is
- important because if you specify huge dependencies as project packages then
- ghci will have a nightmare loading everything.
-
-This is especially useful when you are tweaking upstream packages or want to
-use latest versions of the upstream packages which are not yet on Hackage or
-Stackage.
-
-For example:
+#### Local file path
+
+Like `packages`, local file paths can be used in `extra-deps`, and
+will be relative to the directory containing the `stack.yaml` file.
+
```yaml
-packages:
-- location: .
-- location: vendor/binary
- extra-dep: true
-- location:
- git: git@github.com:yesodweb/wai
- commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f
- subdirs:
- - auto-update
- - wai
- extra-dep: true
+extra-deps:
+- vendor/somelib
```
-### extra-deps
+Note that if a local directory can be parsed as a package identifier,
+Stack will treat it as a package identifier. In other words, if you
+have a local directory named `foo-1.2.3`, instead of:
+
+```yaml
+extra-deps:
+- foo-1.2.3
+```
-This is a list of package identifiers for additional packages from upstream to
-be included. This is usually used to augment an LTS Haskell or Stackage Nightly
-snapshot with a package that is not present or is at an different version than you
-wish to use.
+You should use the following to be explicit:
```yaml
extra-deps:
-- acme-missiles-0.3
+- ./foo-1.2.3
```
-Note that the `extra-dep` attribute in the `packages` section as described in
-an earlier section is used for non-index local or remote packages while the
-`extra-deps` section is for packages to be automatically pulled from an index
-like Hackage.
+#### Git and Mercurial repos
-### resolver
+You can give a Git or Mercurial repo at a specific commit, and Stack
+will clone that repo.
-Specifies how dependencies are resolved. There are currently four resolver types:
+```yaml
+extra-deps:
+- git: git@github.com:commercialhaskell/stack.git
+ commit: 6a86ee32e5b869a877151f74064572225e1a0398
+- hg: https://example.com/hg/repo
+ commit: da39a3ee5e6b4b0d3255bfef95601890afd80709
+```
-* LTS Haskell snapshots, e.g. `resolver: lts-2.14`
-* Stackage Nightly snapshot, e.g. `resolver: nightly-2015-06-16`
-* No snapshot, just use packages shipped with the compiler
- * For GHC this looks like `resolver: ghc-7.10.2`
- * For GHCJS this looks like `resolver: ghcjs-0.1.0_ghc-7.10.2`.
-* [Custom snapshot](custom_snapshot.md)
+__NOTE__ It is highly recommended that you only use SHA1 values for a
+Git or Mercurial commit. Other values may work, but they are not
+officially supported, and may result in unexpected behavior (namely,
+Stack will not automatically pull to update to new versions).
+Another problem with this is that your build will not be deterministic,
+because when someone else tries to build the project they can get a
+different checkout of the package.
-Each of these resolvers will also determine what constraints are placed on the
-compiler version. See the [compiler-check](#compiler-check) option for some
-additional control over compiler version.
+A common practice in the Haskell world is to use "megarepos", or
+repositories with multiple packages in various subdirectories. Some
+common examples include [wai](https://github.com/yesodweb/wai/) and
+[digestive-functors](https://github.com/jaspervdj/digestive-functors). To
+support this, you may also specify `subdirs` for repositories, e.g.:
+
+```yaml
+extra-deps:
+- git: git@github.com:yesodweb/wai
+ commit: 2f8a8e1b771829f4a8a77c0111352ce45a14c30f
+ subdirs:
+ - auto-update
+ - wai
+```
+
+If unspecified, `subdirs` defaults to `subdirs: [.]`, or looking for a
+package in the root of the repo.
+
+#### Archives (HTTP(S) or local filepath)
+
+This one's pretty straightforward: you can use HTTP and HTTPS URLs and
+local filepaths referring to either tarballs or ZIP files.
+
+__NOTE__ Stack assumes that these files never change after downloading
+to avoid needing to make an HTTP request on each build.
+
+```yaml
+extra-deps:
+- https://example.com/foo/bar/baz-0.0.2.tar.gz
+- archive: http://github.com/yesodweb/wai/archive/2f8a8e1b771829f4a8a77c0111352ce45a14c30f.zip
+ subdirs:
+ - wai
+ - warp
+- archive: ../acme-missiles-0.3.tar.gz
+ sha256: e563d8b524017a06b32768c4db8eff1f822f3fb22a90320b7e414402647b735b
+```
+
+Note that HTTP(S) URLs also support `subdirs` like repos to allow for
+archives of megarepos. In order to leverage this, use `location:
+http://...`.
### flags
@@ -207,8 +263,8 @@ flags:
flag-name: true
```
-Flags will only affect packages in your `packages` and `extra-deps` settings.
-Packages that come from the snapshot global database are not affected.
+If a specified flag differs from the one specified for a snapshot package,
+then the snapshot package will automatically be promoted to an extra-dep.
### image
@@ -246,7 +302,7 @@ image:
- app-backend
```
-will build one container tagged `myproject:latest` which contains the project
+will build one container tagged `myproject:latest` which contains the project
including the `/etc/app-backend` configuration data.
Another container tagged `myproject-app-backend:latest` based on the `myproject:latest`
@@ -265,7 +321,7 @@ user-message: ! 'Warning: Some packages were found to be incompatible with the r
Warning: Specified resolver could not satisfy all dependencies. Some external packages
have been added as dependencies.
- You can suppress this message by removing it from stack.yaml
+ You can omit this message by removing it from stack.yaml
'
```
@@ -321,7 +377,7 @@ package-indices:
download-prefix: https://s3.amazonaws.com/hackage.fpcomplete.com/package/
# HTTP location of the package index
- http: https://s3.amazonaws.com/hackage.fpcomplete.com/00-index.tar.gz
+ http: https://s3.amazonaws.com/hackage.fpcomplete.com/01-index.tar.gz
# Or, if using Hackage Security below, give the root URL:
http: https://s3.amazonaws.com/hackage.fpcomplete.com/
@@ -348,6 +404,13 @@ gpg-verify: false
Will now be ignored.
+__IMPORTANT__ Hackage and its mirrors typically have two index files
+available: `00-index.tar.gz` and `01-index.tar.gz`. The former is a
+legacy file for backwards compatibility. It does not contain the cabal
+file revisions produced by Hackage, and therefore _will not work_ with
+most snapshots. Instead, you need to use `01-index.tar.gz` to ensure
+that exact revisions can be found, ensuring more reproducible builds.
+
### system-ghc
Enables or disables using the GHC available on the PATH.
@@ -362,8 +425,9 @@ system-ghc: true
### install-ghc
-Whether or not to automatically install GHC when necessary. Default is `false`,
-which means stack will prompt you to run `stack setup` as needed.
+Whether or not to automatically install GHC when necessary. Since
+Stack 1.5.0, the default is `true`, which means Stack will not ask you
+before downloading and installing GHC.
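+
+For example, to opt out of automatic installation and manage GHC yourself,
+a minimal sketch:
+
+```yaml
+install-ghc: false
+```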
### skip-ghc-check
@@ -398,6 +462,13 @@ extra-lib-dirs:
- /opt/foo/lib
```
+Since these are system-dependent absolute paths, it is recommended that you
+specify these in your `config.yaml` within the stack root (usually, `~/.stack`).
+If you control the build environment in your project's `stack.yaml`, perhaps
+through Docker or other means, then it may well make sense to include these
+there as well.
+
+
### with-gcc
Specify a path to gcc explicitly, rather than relying on the normal path resolution.
@@ -406,6 +477,14 @@ Specify a path to gcc explicitly, rather than relying on the normal path resolut
with-gcc: /usr/local/bin/gcc-5
```
+### with-hpack
+
+Use an Hpack executable, rather than using the bundled Hpack.
+
+```yaml
+with-hpack: /usr/local/bin/hpack
+```
+
### compiler-check
(Since 0.1.4)
@@ -447,14 +526,29 @@ Allows specifying per-package and global GHC options:
```yaml
ghc-options:
# All packages
- "*": -Wall
+ "$locals": -Wall
+ "$targets": -Werror
+ "$everything": -O2
some-package: -DSOME_CPP_FLAG
```
-Caveat emptor: setting options like this will affect your snapshot packages,
-which can lead to unpredictable behavior versus official Stackage snapshots.
-This is in contrast to the `ghc-options` command line flag, which will only
-affect the packages specified by the [`apply-ghc-options` option](yaml_configuration.md#apply-ghc-options).
+Since 1.6.0, setting GHC options for a specific package will
+automatically promote it to a local package (much like setting a
+custom package flag). However, setting options via `$everything` on
+all packages will not do so (see
+[Github discussion](https://github.com/commercialhaskell/stack/issues/849#issuecomment-320892095)
+for reasoning). This can lead to unpredictable behavior by affecting
+your snapshot packages.
+
+The behavior of the `$locals`, `$targets`, and `$everything` special
+keys mirrors the behavior for the
+[`apply-ghc-options` setting](#apply-ghc-options), which affects
+command line parameters.
+
+NOTE: Prior to version 1.6.0, the `$locals`, `$targets`, and
+`$everything` keys were not supported. Instead, you could use `"*"` for
+the behavior now represented by `$everything`. It is highly
+recommended to switch to the new, more expressive keys.
### apply-ghc-options
@@ -535,6 +629,13 @@ setup-info: "https://raw.githubusercontent.com/fpco/stackage-content/master/stac
(Since 0.1.5)
+__NOTE__ As of Stack 1.6.0, this feature does not reliably work, due
+to issues with the Cabal library's printer. Stack will generate a
+warning when a lossy conversion occurs, in which case you may need to
+disable this setting. See
+[#3550](https://github.com/commercialhaskell/stack/issues/3550) for
+more information.
+
When using the `sdist` and `upload` commands, this setting determines whether
the cabal file's dependencies should be modified to reflect PVP lower and upper
bounds. Values are `none` (unchanged), `upper` (add upper bounds), `lower` (add
@@ -713,7 +814,7 @@ The 5 parameters are: `author-email`, `author-name`, `category`, `copyright` and
set per project by passing `-p "category:value"` to the `stack new` command.
* _copyright_ - sets the `copyright` property in cabal. It is typically the
name of the holder of the copyright on the package and the year(s) from which
- copyright is claimed. For example: `Copyright: (c) 2006-2007 Joe Bloggs`
+ copyright is claimed. For example: `Copyright (c) 2006-2007 Joe Bloggs`
* _github-username_ - used to generate `homepage` and `source-repository` in
cabal. For instance `github-username: myusername` and `stack new my-project new-template`
would result:
@@ -733,7 +834,7 @@ templates:
author-name: Your Name
author-email: youremail@example.com
category: Your Projects Category
- copyright: 'Copyright: (c) 2017 Your Name'
+ copyright: 'Copyright (c) 2017 Your Name'
github-username: yourusername
```
@@ -756,8 +857,26 @@ save-hackage-creds: true
```
Since 1.5.0
-
-# urls
+
+### ignore-revision-mismatch
+
+Cabal files in packages can be specified via exact revisions to deal
+with Hackage revision metadata. The default behavior of Stack (since
+1.6.0) is to fail if an exact match is not found. In some cases
+(specifically, when using a legacy `00-index.tar.gz` file), users may
+wish to allow a mismatch. In such cases, you can change
+`ignore-revision-mismatch` from `false` to `true`.
+
+```yaml
+ignore-revision-mismatch: false
+```
+
+For more information, see
+[the Github issue #3520 discussion](https://github.com/commercialhaskell/stack/issues/3520).
+
+Since 1.6.0
+
+### urls
Customize the URLs where `stack` looks for snapshot build plans.
@@ -772,3 +891,91 @@ urls:
**Note:** The `latest-snapshot-url` field has been deprecated in favor of `latest-snapshot`
and will be removed in a future version of `stack`.
+
+### jobs
+
+Specifies how many build tasks should be run in parallel. This can be overridden
+on the commandline via `-jN`, for example `-j2`. The default is to use the
+number of processors reported by your CPU. One usage for this might be to avoid
+running out of memory by setting it to 1, like this:
+
+```yaml
+jobs: 1
+```
+
+### work-dir
+
+Specifies the relative path of the work directory (default is `.stack-work`).
+This can also be specified by environment variable or command-line flag; the
+earlier items in this list take precedence:
+
+1. `--work-dir DIR` passed on the commandline
+2. `work-dir` in stack.yaml
+3. `STACK_WORK` environment variable
+
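+A minimal sketch in `stack.yaml` (the directory name here is just an
+illustration):
+
+```yaml
+work-dir: .stack-work-ci
+```
+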
+Since 0.1.10.0
+
+### skip-msys
+
+Skips checking for and installing msys2 when stack is setting up the
+environment. This is only useful on Windows machines, and usually doesn't make
+sense in project configurations, just in `config.yaml`. Defaults to `false`, so
+if this is used, it only really makes sense to use it like this:
+
+```yaml
+skip-msys: true
+```
+
+Since 0.1.2.0
+
+### concurrent-tests
+
+This option specifies whether test-suites should be executed concurrently with
+each other. The default is `true`, since this is usually fine and it
+often means that tests can complete earlier. However, if some test-suites
+require exclusive access to some resource, or require a great deal of CPU or
+memory resources, then it makes sense to set this to `false`.
+
+```yaml
+concurrent-tests: false
+```
+
+Since 0.1.2.0
+
+### extra-path
+
+This option specifies additional directories to prepend to the PATH environment
+variable. These will be used when resolving the location of executables, and
+will also be visible in the `PATH` variable of processes run by stack.
+
+For example, to prepend `/path-to-some-dep/bin` to your PATH:
+
+```yaml
+extra-path:
+- /path-to-some-dep/bin
+```
+
+One thing to note is that other paths added by stack - things like the project's
+bin dir and the compiler's bin dir - will take precedence over those specified
+here (the automatic paths get prepended).
+
+Since 0.1.4.0
+
+### local-programs-path
+
+This overrides the location of the programs directory, where tools like ghc and
+msys get installed.
+
+On most systems, this defaults to a folder called `programs`
+within the stack root directory. On Windows, if the `LOCALAPPDATA` environment
+variable exists, then it defaults to `$LOCALAPPDATA/Programs/stack/`, which
+follows Windows conventions.
+
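+A minimal sketch (the path below is purely illustrative):
+
+```yaml
+local-programs-path: /home/alice/.stack-programs
+```
+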
+Since 1.3.0
+
+### default-template
+
+This option specifies which template to use with `stack new`, when none is
+specified. The default is called `new-template`. The other templates are listed
+in [the stack-templates repo](https://github.com/commercialhaskell/stack-templates/).
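+
+A minimal sketch, assuming a template named `simple` exists in that repo (the
+name is used here only as an illustration):
+
+```yaml
+default-template: simple
+```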
diff --git a/package.yaml b/package.yaml
new file mode 100644
index 0000000..3cf284b
--- /dev/null
+++ b/package.yaml
@@ -0,0 +1,340 @@
+name: stack
+version: '1.6.1'
+synopsis: The Haskell Tool Stack
+description: ! 'Please see the README.md for usage information, and
+ the wiki on Github for more details. Also, note that
+ the API for the library is not currently stable, and may
+ change significantly, even between minor releases. It is
+ currently only intended for use by the executable.'
+category: Development
+author: Commercial Haskell SIG
+maintainer: manny@fpcomplete.com
+license: BSD3
+github: commercialhaskell/stack.git
+homepage: http://haskellstack.org
+custom-setup:
+ dependencies:
+ - base
+ - Cabal
+ - filepath
+extra-source-files:
+- CONTRIBUTING.md
+- ChangeLog.md
+- README.md
+- doc/*.md
+- package.yaml
+- src/setup-shim/StackSetupShim.hs
+- stack.yaml
+- test/package-dump/ghc-7.10.txt
+- test/package-dump/ghc-7.8.4-osx.txt
+- test/package-dump/ghc-7.8.txt
+ghc-options:
+- -Wall
+- -fwarn-tabs
+- -fwarn-incomplete-uni-patterns
+- -fwarn-incomplete-record-updates
+dependencies:
+- Cabal
+- aeson
+- annotated-wl-pprint
+- ansi-terminal
+- async
+- attoparsec
+- base >=4.9 && < 5
+- base64-bytestring
+- blaze-builder
+- bytestring
+- clock
+- conduit
+- conduit-extra
+- containers
+- cryptonite
+- cryptonite-conduit
+- deepseq
+- directory
+- echo
+- exceptions
+- extra
+- fast-logger
+- file-embed
+- filelock
+- filepath
+- fsnotify
+- generic-deriving
+- hackage-security
+- hashable
+- hastache
+- hpack
+- hpc
+- http-client
+- http-client-tls
+- http-conduit
+- http-types
+- memory
+- microlens
+- microlens-mtl
+- mintty
+- monad-logger
+- mono-traversable
+- mtl
+- neat-interpolation
+- network-uri
+- open-browser
+- optparse-applicative
+- path
+- path-io
+- persistent
+- persistent-sqlite
+- persistent-template
+- pretty
+- primitive
+- process
+- project-template
+- regex-applicative-text
+- resourcet
+- retry
+- semigroups
+- split
+- stm
+- store
+- store-core
+- streaming-commons
+- tar
+- template-haskell
+- temporary
+- text
+- text-metrics
+- th-reify-many
+- time
+- tls
+- transformers
+- unicode-transforms
+- unix-compat
+- unliftio
+- unordered-containers
+- vector
+- yaml
+- zip-archive
+- zlib
+when:
+- condition: os(windows)
+ then:
+ cpp-options: -DWINDOWS
+ dependencies:
+ - Win32
+ else:
+ build-tools:
+ - hsc2hs
+ dependencies:
+ - bindings-uname
+ - pid1
+ - unix
+library:
+ source-dirs: src/
+ ghc-options:
+ - -fwarn-identities
+ exposed-modules:
+ - Control.Concurrent.Execute
+ - Data.Aeson.Extended
+ - Data.Attoparsec.Args
+ - Data.Attoparsec.Combinators
+ - Data.Attoparsec.Interpreter
+ - Data.IORef.RunOnce
+ - Data.Store.VersionTagged
+ - Network.HTTP.Download
+ - Network.HTTP.Download.Verified
+ - Options.Applicative.Args
+ - Options.Applicative.Builder.Extra
+ - Options.Applicative.Complicated
+ - Path.CheckInstall
+ - Path.Extra
+ - Path.Find
+ - Paths_stack
+ - Stack.Build
+ - Stack.Build.Cache
+ - Stack.Build.ConstructPlan
+ - Stack.Build.Execute
+ - Stack.Build.Haddock
+ - Stack.Build.Installed
+ - Stack.Build.Source
+ - Stack.Build.Target
+ - Stack.BuildPlan
+ - Stack.Clean
+ - Stack.Config
+ - Stack.Config.Build
+ - Stack.Config.Urls
+ - Stack.Config.Docker
+ - Stack.Config.Nix
+ - Stack.ConfigCmd
+ - Stack.Constants
+ - Stack.Constants.Config
+ - Stack.Coverage
+ - Stack.Docker
+ - Stack.Docker.GlobalDB
+ - Stack.Dot
+ - Stack.Exec
+ - Stack.Fetch
+ - Stack.FileWatch
+ - Stack.GhcPkg
+ - Stack.Ghci
+ - Stack.Ghci.Script
+ - Stack.Hoogle
+ - Stack.IDE
+ - Stack.Image
+ - Stack.Init
+ - Stack.New
+ - Stack.Nix
+ - Stack.Options.BenchParser
+ - Stack.Options.BuildMonoidParser
+ - Stack.Options.BuildParser
+ - Stack.Options.CleanParser
+ - Stack.Options.ConfigParser
+ - Stack.Options.Completion
+ - Stack.Options.DockerParser
+ - Stack.Options.DotParser
+ - Stack.Options.ExecParser
+ - Stack.Options.GhcBuildParser
+ - Stack.Options.GhciParser
+ - Stack.Options.GhcVariantParser
+ - Stack.Options.GlobalParser
+ - Stack.Options.HaddockParser
+ - Stack.Options.HpcReportParser
+ - Stack.Options.LogLevelParser
+ - Stack.Options.NewParser
+ - Stack.Options.NixParser
+ - Stack.Options.PackageParser
+ - Stack.Options.ResolverParser
+ - Stack.Options.ScriptParser
+ - Stack.Options.SDistParser
+ - Stack.Options.SolverParser
+ - Stack.Options.TestParser
+ - Stack.Options.Utils
+ - Stack.Package
+ - Stack.PackageDump
+ - Stack.PackageIndex
+ - Stack.PackageLocation
+ - Stack.Path
+ - Stack.Prelude
+ - Stack.PrettyPrint
+ - Stack.Runners
+ - Stack.Script
+ - Stack.SDist
+ - Stack.Setup
+ - Stack.Setup.Installed
+ - Stack.SetupCmd
+ - Stack.Sig
+ - Stack.Sig.GPG
+ - Stack.Sig.Sign
+ - Stack.Snapshot
+ - Stack.Solver
+ - Stack.StaticBytes
+ - Stack.Types.Build
+ - Stack.Types.BuildPlan
+ - Stack.Types.CompilerBuild
+ - Stack.Types.Urls
+ - Stack.Types.Compiler
+ - Stack.Types.Config
+ - Stack.Types.Config.Build
+ - Stack.Types.Docker
+ - Stack.Types.FlagName
+ - Stack.Types.GhcPkgId
+ - Stack.Types.Image
+ - Stack.Types.Nix
+ - Stack.Types.Package
+ - Stack.Types.PackageDump
+ - Stack.Types.PackageIdentifier
+ - Stack.Types.PackageIndex
+ - Stack.Types.PackageName
+ - Stack.Types.Resolver
+ - Stack.Types.Runner
+ - Stack.Types.Sig
+ - Stack.Types.TemplateName
+ - Stack.Types.Version
+ - Stack.Types.VersionIntervals
+ - Stack.Upgrade
+ - Stack.Upload
+ - Text.PrettyPrint.Leijen.Extended
+ - System.Process.Log
+ - System.Process.PagerEditor
+ - System.Process.Read
+ - System.Process.Run
+ - System.Terminal
+ other-modules:
+ - Hackage.Security.Client.Repository.HttpLib.HttpClient
+ when:
+ - condition: 'os(windows)'
+ then:
+ source-dirs: src/windows/
+ else:
+ source-dirs: src/unix/
+executables:
+ stack:
+ main: Main.hs
+ source-dirs: src/main
+ ghc-options:
+ - -threaded
+ dependencies:
+ - stack
+ other-modules:
+ - Paths_stack
+ when:
+ - condition: flag(static)
+ ld-options:
+ - -static
+ - -pthread
+ - condition: ! '!(flag(disable-git-info))'
+ cpp-options: -DUSE_GIT_INFO
+ dependencies:
+ - gitrev
+ - optparse-simple
+ - condition: flag(hide-dependency-versions)
+ cpp-options: -DHIDE_DEP_VERSIONS
+ - condition: flag(supported-build)
+ cpp-options: -DSUPPORTED_BUILD
+tests:
+ stack-test:
+ main: Test.hs
+ source-dirs: src/test
+ ghc-options:
+ - -threaded
+ dependencies:
+ - QuickCheck
+ - hspec
+ - stack
+ - smallcheck
+ stack-integration-test:
+ main: IntegrationSpec.hs
+ source-dirs:
+ - test/integration
+ - test/integration/lib
+ ghc-options:
+ - -threaded
+ - -rtsopts
+ - -with-rtsopts=-N
+ dependencies:
+ - hspec
+ when:
+ - condition: ! '!(flag(integration-tests))'
+ buildable: false
+flags:
+ static:
+ description: Pass -static/-pthread to ghc when linking the stack binary.
+ manual: true
+ default: false
+ disable-git-info:
+ description: Disable compile-time inclusion of current git info in stack
+ manual: true
+ default: false
+ hide-dependency-versions:
+ description: Hides dependency versions from "stack --version", used only by building
+ with stack.yaml
+ manual: true
+ default: false
+ integration-tests:
+ description: Run the integration test suite
+ manual: true
+ default: false
+ supported-build:
+ description: If false, causes "stack --version" to issue a warning about the build being unsupported. True only if building with stack.yaml
+ manual: true
+ default: false
diff --git a/src/Control/Concurrent/Execute.hs b/src/Control/Concurrent/Execute.hs
index 55d0828..ef0f49f 100644
--- a/src/Control/Concurrent/Execute.hs
+++ b/src/Control/Concurrent/Execute.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE RecordWildCards #-}
-- Concurrent execution with dependencies. Types currently hard-coded for needs
@@ -10,16 +11,9 @@ module Control.Concurrent.Execute
, runActions
) where
-import Control.Applicative
-import Control.Concurrent.Async (Concurrently (..), async)
import Control.Concurrent.STM
-import Control.Exception
-import Control.Monad (join, unless)
-import Data.Foldable (sequenceA_)
-import Data.Set (Set)
+import Stack.Prelude
import qualified Data.Set as Set
-import Data.Typeable (Typeable)
-import Prelude -- Fix AMP warning
import Stack.Types.PackageIdentifier
data ActionType
@@ -79,7 +73,7 @@ runActions threads keepGoing concurrentFinal actions0 withProgress = do
_ <- async $ withProgress $ esCompleted es
if threads <= 1
then runActions' es
- else runConcurrently $ sequenceA_ $ replicate threads $ Concurrently $ runActions' es
+ else replicateConcurrently_ threads $ runActions' es
readTVarIO $ esExceptions es
runActions' :: ExecuteState -> IO ()
diff --git a/src/Data/Aeson/Extended.hs b/src/Data/Aeson/Extended.hs
index dccb41c..3c9d925 100644
--- a/src/Data/Aeson/Extended.hs
+++ b/src/Data/Aeson/Extended.hs
@@ -1,7 +1,6 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE RecordWildCards #-}
-{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- | Extensions to Aeson parsing of objects.
@@ -27,23 +26,16 @@ module Data.Aeson.Extended (
, (..!=)
) where
-import Control.Monad.Logger (MonadLogger, logWarn)
-import Control.Monad.Trans (lift)
import Control.Monad.Trans.Writer.Strict (WriterT, mapWriterT, runWriterT, tell)
import Data.Aeson as Export hiding ((.:), (.:?))
import qualified Data.Aeson as A
import Data.Aeson.Types hiding ((.:), (.:?))
import qualified Data.HashMap.Strict as HashMap
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (unpack, Text)
+import Data.Text (unpack)
import qualified Data.Text as T
-import Data.Traversable
-import qualified Data.Traversable as Traversable
-import GHC.Generics (Generic)
import Generics.Deriving.Monoid (mappenddefault, memptydefault)
-import Prelude -- Fix redundant import warnings
+import Stack.Prelude
-- | Extends @.:@ warning to include field name.
(.:) :: FromJSON a => Object -> Text -> Parser a
@@ -111,7 +103,7 @@ logJSONWarnings
:: MonadLogger m
=> FilePath -> [JSONWarning] -> m ()
logJSONWarnings fp =
- mapM_ (\w -> $logWarn ("Warning: " <> T.pack fp <> ": " <> T.pack (show w)))
+ mapM_ (\w -> logWarn ("Warning: " <> T.pack fp <> ": " <> T.pack (show w)))
-- | Handle warnings in a sub-object.
jsonSubWarnings :: WarningParser (WithJSONWarnings a) -> WarningParser a
@@ -128,7 +120,7 @@ jsonSubWarningsT
:: Traversable t
=> WarningParser (t (WithJSONWarnings a)) -> WarningParser (t a)
jsonSubWarningsT f =
- Traversable.mapM (jsonSubWarnings . return) =<< f
+ mapM (jsonSubWarnings . return) =<< f
-- | Handle warnings in a @Maybe Traversable@ of sub-objects.
jsonSubWarningsTT
@@ -136,7 +128,7 @@ jsonSubWarningsTT
=> WarningParser (u (t (WithJSONWarnings a)))
-> WarningParser (u (t a))
jsonSubWarningsTT f =
- Traversable.mapM (jsonSubWarningsT . return) =<< f
+ mapM (jsonSubWarningsT . return) =<< f
-- Parsed JSON value without any warnings
noJSONWarnings :: a -> WithJSONWarnings a
@@ -153,6 +145,8 @@ data WarningParserMonoid = WarningParserMonoid
instance Monoid WarningParserMonoid where
mempty = memptydefault
mappend = mappenddefault
+instance IsString WarningParserMonoid where
+ fromString s = mempty { wpmWarnings = [fromString s] }
-- Parsed JSON value with its warnings
data WithJSONWarnings a = WithJSONWarnings a [JSONWarning]
@@ -165,8 +159,12 @@ instance Monoid a => Monoid (WithJSONWarnings a) where
-- | Warning output from 'WarningParser'.
data JSONWarning = JSONUnrecognizedFields String [Text]
+ | JSONGeneralWarning !Text
instance Show JSONWarning where
show (JSONUnrecognizedFields obj [field]) =
"Unrecognized field in " <> obj <> ": " <> T.unpack field
show (JSONUnrecognizedFields obj fields) =
"Unrecognized fields in " <> obj <> ": " <> T.unpack (T.intercalate ", " fields)
+ show (JSONGeneralWarning t) = T.unpack t
+instance IsString JSONWarning where
+ fromString = JSONGeneralWarning . T.pack
diff --git a/src/Data/Attoparsec/Args.hs b/src/Data/Attoparsec/Args.hs
index b695ec2..9fcdad5 100644
--- a/src/Data/Attoparsec/Args.hs
+++ b/src/Data/Attoparsec/Args.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Parsing of stack command line arguments
@@ -5,12 +6,13 @@ module Data.Attoparsec.Args
( EscapingMode(..)
, argsParser
, parseArgs
+ , parseArgsFromString
) where
-import Control.Applicative
import Data.Attoparsec.Text ((<?>))
import qualified Data.Attoparsec.Text as P
-import Data.Text (Text)
+import qualified Data.Text as T
+import Stack.Prelude
-- | Mode for parsing escape characters.
data EscapingMode
@@ -22,6 +24,10 @@ data EscapingMode
parseArgs :: EscapingMode -> Text -> Either String [String]
parseArgs mode = P.parseOnly (argsParser mode)
+-- | Parse using 'argsParser' from a string.
+parseArgsFromString :: EscapingMode -> String -> Either String [String]
+parseArgsFromString mode = P.parseOnly (argsParser mode) . T.pack
+
-- | A basic argument parser. It supports space-separated text, and
-- string quotation with identity escaping: \x -> x.
argsParser :: EscapingMode -> P.Parser [String]
diff --git a/src/Data/Attoparsec/Combinators.hs b/src/Data/Attoparsec/Combinators.hs
index f6b4ce1..205db35 100644
--- a/src/Data/Attoparsec/Combinators.hs
+++ b/src/Data/Attoparsec/Combinators.hs
@@ -1,9 +1,9 @@
+{-# LANGUAGE NoImplicitPrelude #-}
-- | More readable combinators for writing parsers.
module Data.Attoparsec.Combinators where
-import Control.Applicative
-import Data.Monoid
+import Stack.Prelude
-- | Concatenate two parsers.
appending :: (Applicative f,Monoid a)
diff --git a/src/Data/Attoparsec/Interpreter.hs b/src/Data/Attoparsec/Interpreter.hs
index d1a5e87..9c29a8a 100644
--- a/src/Data/Attoparsec/Interpreter.hs
+++ b/src/Data/Attoparsec/Interpreter.hs
@@ -1,4 +1,6 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE CPP #-}
{- | This module implements parsing of additional arguments embedded in a
comment when stack is invoked as a script interpreter
@@ -52,7 +54,6 @@ module Data.Attoparsec.Interpreter
, getInterpreterArgs
) where
-import Control.Applicative
import Data.Attoparsec.Args
import Data.Attoparsec.Text ((<?>))
import qualified Data.Attoparsec.Text as P
@@ -64,8 +65,9 @@ import Data.Conduit.Text (decodeUtf8)
import Data.List (intercalate)
import Data.Text (pack)
import Stack.Constants
+import Stack.Prelude
import System.FilePath (takeExtension)
-import System.IO (IOMode (ReadMode), withBinaryFile, stderr, hPutStrLn)
+import System.IO (stderr, hPutStrLn)
-- | Parser to extract the stack command line embedded inside a comment
-- after validating the placement and formatting rules for a valid
@@ -138,14 +140,18 @@ getInterpreterArgs file = do
parseArgStr str =
case P.parseOnly (argsParser Escaping) (pack str) of
- Left err -> handleFailure ("Error parsing command specified in the \
- \stack options comment: " ++ err)
+ Left err -> handleFailure ("Error parsing command specified in the "
+ ++ "stack options comment: " ++ err)
Right [] -> handleFailure "Empty argument list in stack options comment"
Right args -> return args
decodeError e =
case e of
+#if MIN_VERSION_conduit_extra(1,2,0)
+ ParseError ctxs _ (Position line col _) ->
+#else
ParseError ctxs _ (Position line col) ->
+#endif
if null ctxs
then "Parse error"
else ("Expecting " ++ intercalate " or " ctxs)
diff --git a/src/Data/IORef/RunOnce.hs b/src/Data/IORef/RunOnce.hs
index 4244d31..0cfe583 100644
--- a/src/Data/IORef/RunOnce.hs
+++ b/src/Data/IORef/RunOnce.hs
@@ -1,16 +1,16 @@
+{-# LANGUAGE NoImplicitPrelude #-}
module Data.IORef.RunOnce (runOnce) where
-import Control.Monad.IO.Class
-import Data.IORef
+import Stack.Prelude
-runOnce :: MonadIO m => m a -> m (m a)
-runOnce f = do
- ref <- liftIO $ newIORef Nothing
- return $ do
- mval <- liftIO $ readIORef ref
+runOnce :: (MonadUnliftIO m, MonadIO n) => m a -> m (n a)
+runOnce f = withRunInIO $ \run -> do
+ ref <- newIORef Nothing
+ return $ liftIO $ do
+ mval <- readIORef ref
case mval of
Just val -> return val
Nothing -> do
- val <- f
- liftIO $ writeIORef ref (Just val)
+ val <- run f
+ writeIORef ref (Just val)
return val
diff --git a/src/Data/Maybe/Extra.hs b/src/Data/Maybe/Extra.hs
deleted file mode 100644
index 2c83ff6..0000000
--- a/src/Data/Maybe/Extra.hs
+++ /dev/null
@@ -1,25 +0,0 @@
--- | Extra Maybe utilities.
-
-module Data.Maybe.Extra where
-
-import Control.Applicative
-import Control.Monad
-import Data.Traversable hiding (mapM)
-import Data.Maybe
-import Prelude -- Silence redundant import warnings
-
--- | Applicative 'mapMaybe'.
-mapMaybeA :: Applicative f => (a -> f (Maybe b)) -> [a] -> f [b]
-mapMaybeA f = fmap catMaybes . traverse f
-
--- | @'forMaybeA' '==' 'flip' 'mapMaybeA'@
-forMaybeA :: Applicative f => [a] -> (a -> f (Maybe b)) -> f [b]
-forMaybeA = flip mapMaybeA
-
--- | Monadic 'mapMaybe'.
-mapMaybeM :: Monad m => (a -> m (Maybe b)) -> [a] -> m [b]
-mapMaybeM f = liftM catMaybes . mapM f
-
--- | @'forMaybeM' '==' 'flip' 'mapMaybeM'@
-forMaybeM :: Monad m => [a] -> (a -> m (Maybe b)) -> m [b]
-forMaybeM = flip mapMaybeM
diff --git a/src/Data/Monoid/Extra.hs b/src/Data/Monoid/Extra.hs
deleted file mode 100644
index 2bd2de7..0000000
--- a/src/Data/Monoid/Extra.hs
+++ /dev/null
@@ -1,12 +0,0 @@
--- | Extra Monoid utilities.
-
-module Data.Monoid.Extra
- ( fromFirst
- , module Data.Monoid
- ) where
-
-import Data.Maybe
-import Data.Monoid
-
-fromFirst :: a -> First a -> a
-fromFirst x = fromMaybe x . getFirst
diff --git a/src/Data/Store/VersionTagged.hs b/src/Data/Store/VersionTagged.hs
index 5073b76..f375a6e 100644
--- a/src/Data/Store/VersionTagged.hs
+++ b/src/Data/Store/VersionTagged.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
@@ -14,15 +15,9 @@ module Data.Store.VersionTagged
, storeVersionConfig
) where
-import Control.Applicative
-import Control.Exception.Lifted (catch, IOException, assert)
-import Control.Monad.IO.Class (MonadIO, liftIO)
-import Control.Monad.Logger
-import Control.Monad.Trans.Control (MonadBaseControl)
+import Stack.Prelude
import qualified Data.ByteString as BS
-import Data.Data (Data)
import qualified Data.Map as M
-import Data.Monoid ((<>))
import qualified Data.Set as S
import Data.Store
import Data.Store.Core (unsafeEncodeWith)
@@ -31,7 +26,6 @@ import qualified Data.Text as T
import Language.Haskell.TH
import Path
import Path.IO (ensureDir)
-import Prelude
versionedEncodeFile :: Data a => VersionConfig a -> Q Exp
versionedEncodeFile vc = [e| storeEncodeFile $(encodeWithVersionQ vc) $(decodeWithVersionQ vc) |]
@@ -51,17 +45,17 @@ storeEncodeFile :: (Store a, MonadIO m, MonadLogger m, Eq a)
-> m ()
storeEncodeFile pokeFunc peekFunc fp x = do
let fpt = T.pack (toFilePath fp)
- $logDebug $ "Encoding " <> fpt
+ logDebug $ "Encoding " <> fpt
ensureDir (parent fp)
let (sz, poker) = pokeFunc x
encoded = unsafeEncodeWith poker sz
assert (decodeExWith peekFunc encoded == x) $ liftIO $ BS.writeFile (toFilePath fp) encoded
- $logDebug $ "Finished writing " <> fpt
+ logDebug $ "Finished writing " <> fpt
-- | Read from the given file. If the read fails, run the given action and
-- write that back to the file. Always starts the file off with the
-- version tag.
-versionedDecodeOrLoadImpl :: (Store a, Eq a, MonadIO m, MonadLogger m, MonadBaseControl IO m)
+versionedDecodeOrLoadImpl :: (Store a, Eq a, MonadUnliftIO m, MonadLogger m)
=> (a -> (Int, Poke ()))
-> Peek a
-> Path Abs File
@@ -69,32 +63,32 @@ versionedDecodeOrLoadImpl :: (Store a, Eq a, MonadIO m, MonadLogger m, MonadBase
-> m a
versionedDecodeOrLoadImpl pokeFunc peekFunc fp mx = do
let fpt = T.pack (toFilePath fp)
- $logDebug $ "Trying to decode " <> fpt
+ logDebug $ "Trying to decode " <> fpt
mres <- versionedDecodeFileImpl peekFunc fp
case mres of
Just x -> do
- $logDebug $ "Success decoding " <> fpt
+ logDebug $ "Success decoding " <> fpt
return x
_ -> do
- $logDebug $ "Failure decoding " <> fpt
+ logDebug $ "Failure decoding " <> fpt
x <- mx
storeEncodeFile pokeFunc peekFunc fp x
return x
-versionedDecodeFileImpl :: (Store a, MonadIO m, MonadLogger m, MonadBaseControl IO m)
+versionedDecodeFileImpl :: (Store a, MonadUnliftIO m, MonadLogger m)
=> Peek a
-> Path loc File
-> m (Maybe a)
versionedDecodeFileImpl peekFunc fp = do
mbs <- liftIO (Just <$> BS.readFile (toFilePath fp)) `catch` \(err :: IOException) -> do
- $logDebug ("Exception ignored when attempting to load " <> T.pack (toFilePath fp) <> ": " <> T.pack (show err))
+ logDebug ("Exception ignored when attempting to load " <> T.pack (toFilePath fp) <> ": " <> T.pack (show err))
return Nothing
case mbs of
Nothing -> return Nothing
Just bs ->
liftIO (Just <$> decodeIOWith peekFunc bs) `catch` \(err :: PeekException) -> do
let fpt = T.pack (toFilePath fp)
- $logDebug ("Error while decoding " <> fpt <> ": " <> T.pack (show err) <> " (this might not be an error, when switching between stack versions)")
+ logDebug ("Error while decoding " <> fpt <> ": " <> T.pack (show err) <> " (this might not be an error, when switching between stack versions)")
return Nothing
storeVersionConfig :: String -> String -> VersionConfig a
@@ -104,5 +98,15 @@ storeVersionConfig name hash = (namedVersionConfig name hash)
, "Data.ByteString.Internal.ByteString"
]
, vcRenames = M.fromList
- [ ( "Data.Maybe.Maybe", "GHC.Base.Maybe") ]
+ [ ( "Data.Maybe.Maybe", "GHC.Base.Maybe")
+ , ( "Stack.Types.Compiler.CVActual"
+ , "Stack.Types.Compiler.'CVActual"
+ )
+ , ( "Stack.Types.Compiler.CVWanted"
+ , "Stack.Types.Compiler.'CVWanted"
+ )
+ -- moved in containers 0.5.9.1
+ , ( "Data.Map.Internal.Map", "Data.Map.Base.Map")
+ , ( "Data.Set.Internal.Set", "Data.Set.Base.Set")
+ ]
}
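
The decode-or-rebuild behaviour of versionedDecodeOrLoadImpl reduces to a simple pattern: try to read and decode the cache file, and on any failure run the supplied action and write its result back. A self-contained sketch of that pattern (plain IO, no version tags; the encode/decode callbacks are illustrative):

    import Control.Exception (IOException, try)
    import qualified Data.ByteString as BS

    decodeOrLoad :: (BS.ByteString -> Maybe a)  -- decode a cached value
                 -> (a -> BS.ByteString)        -- encode a freshly built value
                 -> FilePath -> IO a -> IO a
    decodeOrLoad decode encode fp rebuild = do
      ebs <- try (BS.readFile fp) :: IO (Either IOException BS.ByteString)
      case either (const Nothing) decode ebs of
        Just x  -> return x           -- cache hit
        Nothing -> do                 -- unreadable or stale cache: rebuild
          x <- rebuild
          BS.writeFile fp (encode x)
          return x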
diff --git a/src/Data/Text/Extra.hs b/src/Data/Text/Extra.hs
deleted file mode 100644
index f4acea1..0000000
--- a/src/Data/Text/Extra.hs
+++ /dev/null
@@ -1,9 +0,0 @@
-{-# LANGUAGE OverloadedStrings #-}
-module Data.Text.Extra where
-
-import Data.Maybe (fromMaybe)
-import qualified Data.Text as T
-
--- | Strip trailing carriage return from Text
-stripCR :: T.Text -> T.Text
-stripCR t = fromMaybe t (T.stripSuffix "\r" t)
diff --git a/src/Distribution/Version/Extra.hs b/src/Distribution/Version/Extra.hs
deleted file mode 100644
index 03ccc9d..0000000
--- a/src/Distribution/Version/Extra.hs
+++ /dev/null
@@ -1,30 +0,0 @@
--- A separate module so that we can contain all usage of deprecated identifiers here
-{-# OPTIONS_GHC -fno-warn-warnings-deprecations #-}
-module Distribution.Version.Extra
- ( hasUpper
- , hasLower
- ) where
-
-import Distribution.Version (VersionRange (..))
-
--- | Does the version range have an upper bound?
-hasUpper :: VersionRange -> Bool
-hasUpper AnyVersion = False
-hasUpper (ThisVersion _) = True
-hasUpper (LaterVersion _) = False
-hasUpper (EarlierVersion _) = True
-hasUpper (WildcardVersion _) = True
-hasUpper (UnionVersionRanges x y) = hasUpper x && hasUpper y
-hasUpper (IntersectVersionRanges x y) = hasUpper x || hasUpper y
-hasUpper (VersionRangeParens x) = hasUpper x
-
--- | Does the version range have a lower bound?
-hasLower :: VersionRange -> Bool
-hasLower AnyVersion = False
-hasLower (ThisVersion _) = True
-hasLower (LaterVersion _) = True
-hasLower (EarlierVersion _) = False
-hasLower (WildcardVersion _) = True
-hasLower (UnionVersionRanges x y) = hasLower x && hasLower y
-hasLower (IntersectVersionRanges x y) = hasLower x || hasLower y
-hasLower (VersionRangeParens x) = hasLower x
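
The deleted hasUpper/hasLower helpers appear to be superseded by Cabal's own hasUpperBound and hasLowerBound from Distribution.Version (available since Cabal 2.0), so callers can presumably migrate along these lines:

    import Distribution.Version (VersionRange, hasLowerBound, hasUpperBound)

    isBoundedAbove, isBoundedBelow :: VersionRange -> Bool
    isBoundedAbove = hasUpperBound   -- replaces the old hasUpper
    isBoundedBelow = hasLowerBound   -- replaces the old hasLower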
diff --git a/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs b/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs
index 37554ca..7d95e1b 100644
--- a/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs
+++ b/src/Hackage/Security/Client/Repository/HttpLib/HttpClient.hs
@@ -1,3 +1,4 @@
+-- Explicitly disabling due to external code {-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
diff --git a/src/Network/HTTP/Download.hs b/src/Network/HTTP/Download.hs
index ca94ea3..02e66a8 100644
--- a/src/Network/HTTP/Download.hs
+++ b/src/Network/HTTP/Download.hs
@@ -1,7 +1,7 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
module Network.HTTP.Download
( verifiedDownload
, DownloadRequest(..)
@@ -20,32 +20,23 @@ module Network.HTTP.Download
, setGithubHeaders
) where
-import Control.Exception (Exception)
-import Control.Exception.Safe (handleIO)
-import Control.Monad (void)
-import Control.Monad.Catch (throwM)
-import Control.Monad.IO.Class (MonadIO, liftIO)
-import Control.Monad.Logger (MonadLogger, logDebug)
+import Stack.Prelude
+import Stack.Types.Runner
import qualified Data.ByteString.Lazy as L
-import Data.Conduit (runConduit, runConduitRes, (.|), yield)
+import Data.Conduit (yield)
import Data.Conduit.Binary (sourceHandle)
import qualified Data.Conduit.Binary as CB
-import Data.Foldable (forM_)
-import Data.Monoid ((<>))
import Data.Text.Encoding.Error (lenientDecode)
import Data.Text.Encoding (decodeUtf8With)
-import Data.Typeable (Typeable)
import Network.HTTP.Client (Request, Response, path, checkResponse, parseUrlThrow, parseRequest)
import Network.HTTP.Client.Conduit (requestHeaders)
import Network.HTTP.Download.Verified
import Network.HTTP.Simple (httpJSON, withResponse, getResponseBody, getResponseHeaders, getResponseStatusCode,
setRequestHeader)
-import Path (Abs, File, Path, toFilePath)
+import Path.IO (doesFileExist)
import System.Directory (createDirectoryIfMissing,
removeFile)
import System.FilePath (takeDirectory, (<.>))
-import System.IO (IOMode (ReadMode),
- withBinaryFile)
-- | Download the given URL to the given location. If the file already exists,
-- no download is performed. Otherwise, creates the parent directory, downloads
@@ -53,7 +44,7 @@ import System.IO (IOMode (ReadMode),
-- appropriate destination.
--
-- Throws an exception if things go wrong
-download :: (MonadIO m, MonadLogger m)
+download :: (MonadUnliftIO m, MonadLogger m, HasRunner env, MonadReader env m)
=> Request
-> Path Abs File -- ^ destination
-> m Bool -- ^ Was a download performed (True) or did the file already exist (False)?
@@ -70,18 +61,22 @@ download req destpath = do
-- | Same as 'download', but will download a file a second time if it is already present.
--
-- Returns 'True' if the file was downloaded, 'False' otherwise
-redownload :: (MonadIO m, MonadLogger m)
+redownload :: (MonadUnliftIO m, MonadLogger m, HasRunner env, MonadReader env m)
=> Request
-> Path Abs File -- ^ destination
-> m Bool
redownload req0 dest = do
- $logDebug $ "Downloading " <> decodeUtf8With lenientDecode (path req0)
+ logDebug $ "Downloading " <> decodeUtf8With lenientDecode (path req0)
let destFilePath = toFilePath dest
etagFilePath = destFilePath <.> "etag"
- metag <- liftIO $ handleIO (const $ return Nothing) $ fmap Just $
- withBinaryFile etagFilePath ReadMode $ \h ->
- runConduit $ sourceHandle h .| CB.take 512
+ metag <- do
+ exists <- doesFileExist dest
+ if not exists
+ then return Nothing
+ else liftIO $ handleIO (const $ return Nothing) $ fmap Just $
+ withBinaryFile etagFilePath ReadMode $ \h ->
+ runConduit $ sourceHandle h .| CB.take 512
let req1 =
case metag of
@@ -92,7 +87,7 @@ redownload req0 dest = do
[("If-None-Match", L.toStrict etag)]
}
req2 = req1 { checkResponse = \_ _ -> return () }
- liftIO $ recoveringHttp drRetryPolicyDefault $
+ recoveringHttp drRetryPolicyDefault $ liftIO $
withResponse req2 $ \res -> case getResponseStatusCode res of
200 -> do
createDirectoryIfMissing True $ takeDirectory destFilePath
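
The etag handling above amounts to: send a conditional request only when the destination file already exists and its .etag sidecar can be read. A standalone sketch of that logic (addETagHeader is an illustrative helper, not part of stack):

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.ByteString as B
    import Network.HTTP.Client (Request, requestHeaders)
    import System.Directory (doesFileExist)

    addETagHeader :: FilePath -> Request -> IO Request
    addETagHeader dest req = do
      let etagFile = dest ++ ".etag"
      haveBoth <- (&&) <$> doesFileExist dest <*> doesFileExist etagFile
      if haveBoth
        then do
          etag <- B.readFile etagFile
          -- lets the server answer 304 Not Modified instead of resending the body
          return req { requestHeaders = ("If-None-Match", etag) : requestHeaders req }
        else return req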
diff --git a/src/Network/HTTP/Download/Verified.hs b/src/Network/HTTP/Download/Verified.hs
index fad8236..e3a3026 100644
--- a/src/Network/HTTP/Download/Verified.hs
+++ b/src/Network/HTTP/Download/Verified.hs
@@ -1,14 +1,13 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE MultiParamTypeClasses #-}
-{-# LANGUAGE PackageImports #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE StandaloneDeriving #-}
-{-# LANGUAGE TemplateHaskell #-}
module Network.HTTP.Download.Verified
( verifiedDownload
, recoveringHttp
@@ -28,35 +27,29 @@ import qualified Data.Conduit.List as CL
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
-import Control.Applicative
import Control.Monad
-import Control.Monad.Catch
-import Control.Monad.IO.Class
-import Control.Monad.Logger (logDebug, MonadLogger)
-import Control.Retry (recovering,limitRetries,RetryPolicy,constantDelay)
+import Control.Monad.Catch (Handler (..)) -- would be nice if retry exported this itself
+import Stack.Prelude hiding (Handler (..))
+import Control.Retry (recovering,limitRetries,RetryPolicy,constantDelay,RetryStatus(..))
import Crypto.Hash
import Crypto.Hash.Conduit (sinkHash)
import Data.ByteArray as Mem (convert)
import Data.ByteArray.Encoding as Mem (convertToBase, Base(Base16))
-import Data.ByteString (ByteString)
import Data.ByteString.Char8 (readInteger)
import Data.Conduit
import Data.Conduit.Binary (sourceHandle, sinkHandle)
-import Data.Foldable (traverse_,for_)
-import Data.Monoid
-import Data.String
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
-import Data.Typeable (Typeable)
import GHC.IO.Exception (IOException(..),IOErrorType(..))
import Network.HTTP.Client (getUri, path)
import Network.HTTP.Simple (Request, HttpException, httpSink, getResponseHeaders)
import Network.HTTP.Types.Header (hContentLength, hContentMD5)
import Path
-import Prelude -- Fix AMP warning
+import Stack.Types.Runner
+import Stack.PrettyPrint
import System.Directory
-import System.FilePath ((<.>))
-import System.IO
+import qualified System.FilePath as FP ((<.>))
+import System.IO (hFileSize)
-- | A request together with some checks to perform.
data DownloadRequest = DownloadRequest
@@ -117,7 +110,7 @@ instance Show VerifiedDownloadException where
show (WrongDigest req algo expected actual) =
"Download expectation failure: content hash (" ++ algo ++ ")\n"
++ "Expected: " ++ displayCheckHexDigest expected ++ "\n"
- ++ "Actual: " ++ show actual ++ "\n"
+ ++ "Actual: " ++ actual ++ "\n"
++ "For: " ++ show (getUri req)
instance Exception VerifiedDownloadException
@@ -188,19 +181,39 @@ hashChecksToZipSink :: MonadThrow m => Request -> [HashCheck] -> ZipSink ByteStr
hashChecksToZipSink req = traverse_ (ZipSink . sinkCheckHash req)
-- 'Control.Retry.recovering' customized for HTTP failures
-recoveringHttp :: (MonadMask m, MonadIO m)
+recoveringHttp :: (MonadUnliftIO m, MonadLogger m, HasRunner env, MonadReader env m)
=> RetryPolicy -> m a -> m a
recoveringHttp retryPolicy =
#if MIN_VERSION_retry(0,7,0)
- recovering retryPolicy handlers . const
+ helper $ \run -> recovering retryPolicy (handlers run) . const
#else
- recovering retryPolicy handlers
+ helper $ \run -> recovering retryPolicy (handlers run)
#endif
where
- handlers = [const $ Handler alwaysRetryHttp,const $ Handler retrySomeIO]
+ helper :: (MonadUnliftIO m, HasRunner env, MonadReader env m) => (UnliftIO m -> IO a -> IO a) -> m a -> m a
+ helper wrapper action = withUnliftIO $ \run -> wrapper run (unliftIO run action)
+
+ handlers :: (MonadLogger m, HasRunner env, MonadReader env m) => UnliftIO m -> [RetryStatus -> Handler IO Bool]
+ handlers run = [Handler . alwaysRetryHttp (unliftIO run),const $ Handler retrySomeIO]
- alwaysRetryHttp :: Monad m => HttpException -> m Bool
- alwaysRetryHttp _ = return True
+ alwaysRetryHttp :: (MonadLogger m', Monad m, HasRunner env, MonadReader env m') => (m' () -> m ()) -> RetryStatus -> HttpException -> m Bool
+ alwaysRetryHttp run rs _ = do
+ run $
+ prettyWarn $ vcat
+ [ flow $ unwords
+ [ "Retry number"
+ , show (rsIterNumber rs)
+ , "after a total delay of"
+ , show (rsCumulativeDelay rs)
+ , "us"
+ ]
+ , flow $ unwords
+ [ "If you see this warning and stack fails to download,"
+ , "but running the command again solves the problem,"
+ , "please report here: https://github.com/commercialhaskell/stack/issues/3510"
+ ]
+ ]
+ return True
retrySomeIO :: Monad m => IOException -> m Bool
retrySomeIO e = return $ case ioe_type e of
@@ -222,7 +235,7 @@ recoveringHttp retryPolicy =
-- Throws VerifiedDownloadException.
-- Throws IOExceptions related to file system operations.
-- Throws HttpException.
-verifiedDownload :: (MonadIO m, MonadLogger m)
+verifiedDownload :: (MonadUnliftIO m, MonadLogger m, HasRunner env, MonadReader env m)
=> DownloadRequest
-> Path Abs File -- ^ destination
-> (Maybe Integer -> Sink ByteString IO ()) -- ^ custom hook to observe progress
@@ -230,20 +243,19 @@ verifiedDownload :: (MonadIO m, MonadLogger m)
verifiedDownload DownloadRequest{..} destpath progressSink = do
let req = drRequest
whenM' (liftIO getShouldDownload) $ do
- $logDebug $ "Downloading " <> decodeUtf8With lenientDecode (path req)
- liftIO $ do
- createDirectoryIfMissing True dir
- recoveringHttp drRetryPolicy $
- withBinaryFile fptmp WriteMode $ \h ->
- httpSink req (go h)
- renameFile fptmp fp
+ logDebug $ "Downloading " <> decodeUtf8With lenientDecode (path req)
+ liftIO $ createDirectoryIfMissing True dir
+ recoveringHttp drRetryPolicy $ liftIO $
+ withBinaryFile fptmp WriteMode $ \h ->
+ httpSink req (go h)
+ liftIO $ renameFile fptmp fp
where
whenM' mp m = do
p <- mp
if p then m >> return True else return False
fp = toFilePath destpath
- fptmp = fp <.> "tmp"
+ fptmp = fp FP.<.> "tmp"
dir = toFilePath $ parent destpath
getShouldDownload = do
@@ -261,7 +273,7 @@ verifiedDownload DownloadRequest{..} destpath progressSink = do
`catch` \(_ :: VerifyFileException) -> return False)
`catch` \(_ :: VerifiedDownloadException) -> return False
- checkExpectations = bracket (openFile fp ReadMode) hClose $ \h -> do
+ checkExpectations = withBinaryFile fp ReadMode $ \h -> do
for_ drLengthCheck $ checkFileSizeExpectations h
sourceHandle h $$ getZipSink (hashChecksToZipSink drRequest drHashChecks)
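
Stripped of the unlifting and the pretty-printed warning, recoveringHttp is the standard Control.Retry pattern: retry on any HttpException and on a few transient IOException types. A minimal IO-only sketch with an assumed policy:

    {-# LANGUAGE ScopedTypeVariables #-}
    import Control.Monad.Catch (Handler (..))
    import Control.Retry (constantDelay, limitRetries, recovering)
    import Data.Monoid ((<>))
    import Network.HTTP.Simple (HttpException)

    retryHttp :: IO a -> IO a
    retryHttp action =
      recovering
        (constantDelay 50000 <> limitRetries 3)                    -- 50 ms pause, up to 3 retries
        [const $ Handler $ \(_ :: HttpException) -> return True]   -- always retry HTTP failures
        (const action)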
diff --git a/src/Options/Applicative/Args.hs b/src/Options/Applicative/Args.hs
index bc7a025..6d076af 100644
--- a/src/Options/Applicative/Args.hs
+++ b/src/Options/Applicative/Args.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Accepting arguments to be passed through to a sub-process.
@@ -5,39 +6,33 @@
module Options.Applicative.Args
(argsArgument
,argsOption
- ,cmdOption
- ,parseArgsFromString)
+ ,cmdOption)
where
import Data.Attoparsec.Args
-import qualified Data.Attoparsec.Text as P
-import qualified Data.Text as T
import qualified Options.Applicative as O
+import Stack.Prelude
-- | An argument which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@.
argsArgument :: O.Mod O.ArgumentFields [String] -> O.Parser [String]
argsArgument =
O.argument
(do string <- O.str
- either O.readerError return (parseArgsFromString string))
+ either O.readerError return (parseArgsFromString Escaping string))
-- | An option which accepts a list of arguments e.g. @--ghc-options="-X P.hs \"this\""@.
argsOption :: O.Mod O.OptionFields [String] -> O.Parser [String]
argsOption =
O.option
(do string <- O.str
- either O.readerError return (parseArgsFromString string))
+ either O.readerError return (parseArgsFromString Escaping string))
-- | An option which accepts a command and a list of arguments e.g. @--exec "echo hello world"@
cmdOption :: O.Mod O.OptionFields (String, [String]) -> O.Parser (String, [String])
cmdOption =
O.option
(do string <- O.str
- xs <- either O.readerError return (parseArgsFromString string)
+ xs <- either O.readerError return (parseArgsFromString Escaping string)
case xs of
[] -> O.readerError "Must provide a command"
x:xs' -> return (x, xs'))
-
--- | Parse from a string.
-parseArgsFromString :: String -> Either String [String]
-parseArgsFromString = P.parseOnly (argsParser Escaping) . T.pack
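
parseArgsFromString is dropped from this module because the variant taking an explicit escaping mode (used above via Data.Attoparsec.Args) covers it; callers now pass the mode at each site, roughly:

    import Data.Attoparsec.Args

    splitGhcOptions :: String -> Either String [String]
    splitGhcOptions = parseArgsFromString Escaping
    -- e.g. splitGhcOptions "-O2 -threaded" is expected to yield Right ["-O2","-threaded"]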
diff --git a/src/Options/Applicative/Builder/Extra.hs b/src/Options/Applicative/Builder/Extra.hs
index 555369a..25b0614 100644
--- a/src/Options/Applicative/Builder/Extra.hs
+++ b/src/Options/Applicative/Builder/Extra.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
@@ -29,17 +30,14 @@ module Options.Applicative.Builder.Extra
,unescapeBashArg
) where
-import Control.Exception (IOException, catch)
-import Control.Monad (when, forM)
-import Data.Either.Combinators
import Data.List (isPrefixOf)
import Data.Maybe
import Data.Monoid
-import Data.Text (Text)
import qualified Data.Text as T
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import Path hiding ((</>))
+import Stack.Prelude
import System.Directory (getCurrentDirectory, getDirectoryContents, doesDirectoryExist)
import System.Environment (withArgs)
import System.FilePath (takeBaseName, (</>), splitFileName, isRelative, takeExtension)
@@ -110,6 +108,11 @@ enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods =
(long ("[no-]" ++ name) <>
help ("Enable/disable " ++ helpSuffix) <>
mods))
+ where
+ last xs =
+ case reverse xs of
+ [] -> impureThrow $ stringException "enableDisableFlagsNoDefault.last"
+ x:_ -> x
-- | Show an extra help option (e.g. @--docker-help@ shows help for all @--docker*@ args).
--
@@ -141,7 +144,7 @@ execExtraHelp args helpOpt parser pd =
_ <- execParser (info (hiddenHelper <*>
((,) <$>
parser <*>
- some (strArgument (metavar "OTHER ARGUMENTS"))))
+ some (strArgument (metavar "OTHER ARGUMENTS") :: Parser String)))
(fullDesc <> progDesc pd))
return ()
where hiddenHelper = abortOption ShowHelpText (long "help" <> hidden <> internal)
@@ -244,9 +247,10 @@ unescapeBashArg :: String -> String
unescapeBashArg ('\'' : rest) = rest
unescapeBashArg ('\"' : rest) = go rest
where
+ pattern = "$`\"\\\n" :: String
go [] = []
go ('\\' : x : xs)
- | x `elem` "$`\"\\\n" = x : xs
+ | x `elem` pattern = x : xs
| otherwise = '\\' : x : go xs
go (x : xs) = x : go xs
unescapeBashArg input = go input
diff --git a/src/Options/Applicative/Complicated.hs b/src/Options/Applicative/Complicated.hs
index dded341..8e2bdaa 100644
--- a/src/Options/Applicative/Complicated.hs
+++ b/src/Options/Applicative/Complicated.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
-- | Simple interface to complicated program arguments.
--
-- This is a "fork" of the @optparse-simple@ package that has some workarounds for
@@ -12,14 +13,13 @@ module Options.Applicative.Complicated
, complicatedParser
) where
-import Control.Monad.Trans.Class (lift)
-import Control.Monad.Trans.Either
+import Control.Monad.Trans.Except
import Control.Monad.Trans.Writer
-import Data.Monoid
import Data.Version
import Options.Applicative
import Options.Applicative.Types
import Options.Applicative.Builder.Internal
+import Stack.Prelude
import System.Environment
-- | Generate and execute a complicated options parser.
@@ -42,7 +42,7 @@ complicatedOptions
-> Maybe (ParserFailure ParserHelp -> [String] -> IO (a,(b,a)))
-- ^ optional handler for parser failure; 'handleParseResult' is called by
-- default
- -> EitherT b (Writer (Mod CommandFields (b,a))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,a))) ()
-- ^ commands (use 'addCommand')
-> IO (a,b)
complicatedOptions numericVersion versionString numericHpackVersion h pd footerStr commonParser mOnFailure commandParser =
@@ -82,7 +82,7 @@ addCommand :: String -- ^ command string
-> (a -> b) -- ^ constructor to wrap up command in common data type
-> Parser c -- ^ common parser
-> Parser a -- ^ command parser
- -> EitherT b (Writer (Mod CommandFields (b,c))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addCommand cmd title footerStr constr =
addCommand' cmd title footerStr (\a c -> (constr a,c))
@@ -97,9 +97,9 @@ addSubCommands
-- ^ footer of command help
-> Parser c
-- ^ common parser
- -> EitherT b (Writer (Mod CommandFields (b,c))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
-- ^ sub-commands (use 'addCommand')
- -> EitherT b (Writer (Mod CommandFields (b,c))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addSubCommands cmd title footerStr commonParser commandParser =
addCommand' cmd
title
@@ -115,7 +115,7 @@ addCommand' :: String -- ^ command string
-> (a -> c -> (b,c)) -- ^ constructor to wrap up command in common data type
-> Parser c -- ^ common parser
-> Parser a -- ^ command parser
- -> EitherT b (Writer (Mod CommandFields (b,c))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,c))) ()
addCommand' cmd title footerStr constr commonParser inner =
lift (tell (command cmd
(info (constr <$> inner <*> commonParser)
@@ -128,13 +128,13 @@ complicatedParser
-- ^ metavar for the sub-command
-> Parser a
-- ^ common settings
- -> EitherT b (Writer (Mod CommandFields (b,a))) ()
+ -> ExceptT b (Writer (Mod CommandFields (b,a))) ()
-- ^ commands (use 'addCommand')
-> Parser (a,(b,a))
complicatedParser commandMetavar commonParser commandParser =
(,) <$>
commonParser <*>
- case runWriter (runEitherT commandParser) of
+ case runWriter (runExceptT commandParser) of
(Right (),d) -> hsubparser' commandMetavar d
(Left b,_) -> pure (b,mempty)
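
The EitherT-to-ExceptT switch is mechanical for callers: the commands block is still written in do-notation, only the transformer in the signatures changed. A hypothetical caller sketch (Cmd, GlobalOpts, and the flags are illustrative, not stack's real types):

    import Control.Applicative (many)
    import Control.Monad.Trans.Except (ExceptT)
    import Control.Monad.Trans.Writer (Writer)
    import Options.Applicative
    import Options.Applicative.Complicated (addCommand)

    data Cmd = Build [String] | Clean
    newtype GlobalOpts = GlobalOpts { goVerbose :: Bool }

    globalParser :: Parser GlobalOpts
    globalParser = GlobalOpts <$> switch (long "verbose")

    commands :: ExceptT Cmd (Writer (Mod CommandFields (Cmd, GlobalOpts))) ()
    commands = do
      addCommand "build" "Build the project" "" Build
                 globalParser (many (strArgument (metavar "TARGET")))
      addCommand "clean" "Remove build artifacts" "" (const Clean)
                 globalParser (pure ())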
diff --git a/src/Path/CheckInstall.hs b/src/Path/CheckInstall.hs
index ab19d99..1feccfc 100644
--- a/src/Path/CheckInstall.hs
+++ b/src/Path/CheckInstall.hs
@@ -1,23 +1,21 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
-{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
module Path.CheckInstall where
-import Control.Monad (unless)
import Control.Monad.Extra (anyM, (&&^))
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Data.Foldable (forM_)
-import Data.Text (Text)
import qualified Data.Text as T
+import Stack.Prelude
+import Stack.PrettyPrint
+import Stack.Types.Config
import qualified System.Directory as D
import qualified System.FilePath as FP
-- | Checks if the installed executable will be available on the user's
-- PATH. This doesn't use @envSearchPath menv@ because it includes paths
-- only visible when running in the stack environment.
-warnInstallSearchPathIssues :: (MonadIO m, MonadLogger m) => FilePath -> [Text] -> m ()
+warnInstallSearchPathIssues :: HasConfig env => FilePath -> [Text] -> RIO env ()
warnInstallSearchPathIssues destDir installed = do
searchPath <- liftIO FP.getSearchPath
destDirIsInPATH <- liftIO $
@@ -29,32 +27,27 @@ warnInstallSearchPathIssues destDir installed = do
Just exePath -> do
exeDir <- (liftIO . fmap FP.takeDirectory . D.canonicalizePath) exePath
unless (exeDir `FP.equalFilePath` destDir) $ do
- $logWarn ""
- $logWarn $ T.concat
- [ "WARNING: The \""
- , exe
- , "\" executable found on the PATH environment variable is "
- , T.pack exePath
- , ", and not the version that was just installed."
- ]
- $logWarn $ T.concat
- [ "This means that \""
- , exe
- , "\" calls on the command line will not use this version."
- ]
+ prettyWarnL
+ [ flow "The"
+ , styleFile . fromString . T.unpack $ exe
+ , flow "executable found on the PATH environment variable is"
+ , styleFile . fromString $ exePath
+ , flow "and not the version that was just installed."
+ , flow "This means that"
+ , styleFile . fromString . T.unpack $ exe
+ , "calls on the command line will not use this version."
+ ]
Nothing -> do
- $logWarn ""
- $logWarn $ T.concat
- [ "WARNING: Installation path "
- , T.pack destDir
- , " is on the PATH but the \""
- , exe
- , "\" executable that was just installed could not be found on the PATH."
- ]
+ prettyWarnL
+ [ flow "Installation path"
+ , styleDir . fromString $ destDir
+ , flow "is on the PATH but the"
+ , styleFile . fromString . T.unpack $ exe
+ , flow "executable that was just installed could not be found on the PATH."
+ ]
else do
- $logWarn ""
- $logWarn $ T.concat
- [ "WARNING: Installation path "
- , T.pack destDir
- , " not found on the PATH environment variable"
- ]
+ prettyWarnL
+ [ flow "Installation path "
+ , styleDir . fromString $ destDir
+ , "not found on the PATH environment variable."
+ ]
diff --git a/src/Path/Extra.hs b/src/Path/Extra.hs
index b5f8631..ac82295 100644
--- a/src/Path/Extra.hs
+++ b/src/Path/Extra.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ViewPatterns #-}
-- | Extra Path utilities.
@@ -13,19 +14,20 @@ module Path.Extra
,pathToByteString
,pathToLazyByteString
,pathToText
+ ,tryGetModificationTime
) where
-import qualified Data.ByteString.Lazy.Char8 as BSL
-import qualified Data.ByteString.Char8 as BS
-import qualified Data.Text as T
-import qualified Data.Text.Encoding as T
-import Control.Monad (liftM)
-import Control.Monad.Catch
-import Control.Monad.IO.Class
import Data.Bool (bool)
+import Data.Time (UTCTime)
import Path
import Path.IO
import Path.Internal (Path(..))
+import Stack.Prelude
+import System.IO.Error (isDoesNotExistError)
+import qualified Data.ByteString.Char8 as BS
+import qualified Data.ByteString.Lazy.Char8 as BSL
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as T
import qualified System.FilePath as FP
-- | Convert to FilePath but don't add a trailing slash.
@@ -62,7 +64,7 @@ concatAndColapseAbsDir base rel = parseCollapsedAbsDir (toFilePath base FP.</> r
--
-- (adapted from @Text.Pandoc.Shared@)
collapseFilePath :: FilePath -> FilePath
-collapseFilePath = FP.joinPath . reverse . foldl go [] . FP.splitDirectories
+collapseFilePath = FP.joinPath . reverse . foldl' go [] . FP.splitDirectories
where
go rs "." = rs
go r@(p:rs) ".." = case p of
@@ -117,3 +119,6 @@ pathToByteString = T.encodeUtf8 . pathToText
pathToText :: Path b t -> T.Text
pathToText = T.pack . toFilePath
+
+tryGetModificationTime :: MonadIO m => Path Abs File -> m (Either () UTCTime)
+tryGetModificationTime = liftIO . tryJust (guard . isDoesNotExistError) . getModificationTime
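
tryGetModificationTime turns a missing file into Left () rather than an exception, which keeps freshness checks simple; an illustrative caller (needsRebuild is not part of stack):

    import Control.Monad.IO.Class (MonadIO)
    import Data.Time (UTCTime)
    import Path (Abs, File, Path)
    import Path.Extra (tryGetModificationTime)

    needsRebuild :: MonadIO m => Path Abs File -> UTCTime -> m Bool
    needsRebuild fp lastBuilt = do
      emod <- tryGetModificationTime fp
      return $ case emod of
        Left ()       -> True                  -- file missing: rebuild
        Right modTime -> modTime > lastBuilt   -- rebuild only if newer than last build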
diff --git a/src/Path/Find.hs b/src/Path/Find.hs
index a8f4599..7476528 100644
--- a/src/Path/Find.hs
+++ b/src/Path/Find.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
-- | Finding files.
@@ -9,11 +10,7 @@ module Path.Find
,findInParents)
where
-import Control.Exception (evaluate)
-import Control.DeepSeq (force)
-import Control.Monad
-import Control.Monad.Catch
-import Control.Monad.IO.Class
+import Stack.Prelude
import System.IO.Error (isPermissionError)
import Data.List
import Path
diff --git a/src/Stack/Build.hs b/src/Stack/Build.hs
index 7d4a5cd..1072f9f 100644
--- a/src/Stack/Build.hs
+++ b/src/Stack/Build.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
@@ -20,37 +21,21 @@ module Stack.Build
,CabalVersionException(..))
where
-import Control.Exception (Exception)
-import Control.Monad
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Reader (MonadReader)
-import Control.Monad.Trans.Resource
-import Control.Monad.Trans.Unlift (MonadBaseUnlift)
+import Stack.Prelude
import Data.Aeson (Value (Object, Array), (.=), object)
-import Data.Function
import qualified Data.HashMap.Strict as HM
import Data.List ((\\))
import Data.List.Extra (groupSort)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as Map
-import Data.Map.Strict (Map)
-import Data.Maybe (catMaybes)
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.String
-import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import qualified Data.Text.IO as TIO
import Data.Text.Read (decimal)
-import Data.Typeable (Typeable)
import qualified Data.Vector as V
import qualified Data.Yaml as Yaml
-import Path
-import Prelude hiding (FilePath, writeFile)
import Stack.Build.ConstructPlan
import Stack.Build.Execute
import Stack.Build.Haddock
@@ -59,16 +44,14 @@ import Stack.Build.Source
import Stack.Build.Target
import Stack.Fetch as Fetch
import Stack.Package
-import Stack.PackageIndex
-import Stack.PrettyPrint
+import Stack.PackageLocation (parseSingleCabalFileIndex)
import Stack.Types.Build
+import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
-import Stack.Types.StringError
import Stack.Types.Version
#ifdef WINDOWS
@@ -78,7 +61,6 @@ import System.FileLock (FileLock, unlockFile)
#ifdef WINDOWS
import System.Win32.Console (setConsoleCP, setConsoleOutputCP, getConsoleCP, getConsoleOutputCP)
-import qualified Control.Monad.Catch as Catch
#endif
-- | Build.
@@ -86,25 +68,31 @@ import qualified Control.Monad.Catch as Catch
-- If a buildLock is passed there is an important contract here. That lock must
-- protect the snapshot, and it must be safe to unlock it if there are no further
-- modifications to the snapshot to be performed by this build.
-build :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
+build :: HasEnvConfig env
=> (Set (Path Abs File) -> IO ()) -- ^ callback after discovering all local files
-> Maybe FileLock
-> BuildOptsCLI
- -> m ()
+ -> RIO env ()
build setLocalFiles mbuildLk boptsCli = fixCodePage $ do
bopts <- view buildOptsL
let profiling = boptsLibProfile bopts || boptsExeProfile bopts
let symbols = not (boptsLibStrip bopts || boptsExeStrip bopts)
menv <- getMinimalEnvOverride
- (targets, mbp, locals, extraToBuild, extraDeps, sourceMap) <- loadSourceMapFull NeedTargets boptsCli
+ (targets, mbp, locals, extraToBuild, sourceMap) <- loadSourceMapFull NeedTargets boptsCli
-- Set local files, necessary for file watching
stackYaml <- view stackYamlL
liftIO $ setLocalFiles
$ Set.insert stackYaml
$ Set.unions
- $ map lpFiles locals
+ -- The `locals` value above only contains local project
+ -- packages, not local dependencies. This will get _all_
+ -- of the local files we're interested in
+ -- watching. Arguably, we should not bother watching repo
+ -- and archive files, since those shouldn't
+ -- change. That's a possible optimization to consider.
+ [lpFiles lp | PSFiles lp _ <- Map.elems sourceMap]
(installedMap, globalDumpPkgs, snapshotDumpPkgs, localDumpPkgs) <-
getInstalled menv
@@ -114,8 +102,6 @@ build setLocalFiles mbuildLk boptsCli = fixCodePage $ do
, getInstalledSymbols = symbols }
sourceMap
- warnMissingExtraDeps installedMap extraDeps
-
baseConfigOpts <- mkBaseConfigOpts boptsCli
plan <- withLoadPackage $ \loadPackage ->
constructPlan mbp baseConfigOpts locals extraToBuild localDumpPkgs loadPackage sourceMap installedMap (boptsCLIInitialBuildSteps boptsCli)
@@ -131,7 +117,7 @@ build setLocalFiles mbuildLk boptsCli = fixCodePage $ do
-- NOTE: This policy is too conservative. In the future we should be able to
-- schedule unlocking as an Action that happens after all non-local actions are
-- complete.
- (Just lk,True) -> do $logDebug "All installs are local; releasing snapshot lock early."
+ (Just lk,True) -> do logDebug "All installs are local; releasing snapshot lock early."
liftIO $ unlockFile lk
_ -> return ()
@@ -167,7 +153,7 @@ justLocals =
Map.elems .
planTasks
-checkCabalVersion :: (StackM env m, HasEnvConfig env) => m ()
+checkCabalVersion :: HasEnvConfig env => RIO env ()
checkCabalVersion = do
allowNewer <- view $ configL.to configAllowNewer
cabalVer <- view cabalVersionL
@@ -184,30 +170,11 @@ newtype CabalVersionException = CabalVersionException { unCabalVersionException
instance Show CabalVersionException where show = unCabalVersionException
instance Exception CabalVersionException
-warnMissingExtraDeps
- :: (StackM env m, HasConfig env)
- => InstalledMap -> Map PackageName Version -> m ()
-warnMissingExtraDeps installed extraDeps = do
- missingExtraDeps <-
- fmap catMaybes $ forM (Map.toList extraDeps) $ \(n, v) ->
- if Map.member n installed
- then return Nothing
- else do
- vs <- getPackageVersions n
- if Set.null vs
- then return $ Just $
- fromString (packageNameString n ++ "-" ++ versionString v)
- else return Nothing
- unless (null missingExtraDeps) $
- $prettyWarn $
- "Some extra-deps are neither installed nor in the index:" <> line <>
- indent 4 (bulletedList missingExtraDeps)
-
-- | See https://github.com/commercialhaskell/stack/issues/1198.
warnIfExecutablesWithSameNameCouldBeOverwritten
:: MonadLogger m => [LocalPackage] -> Plan -> m ()
warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
- $logDebug "Checking if we are going to build multiple executables with the same name"
+ logDebug "Checking if we are going to build multiple executables with the same name"
forM_ (Map.toList warnings) $ \(exe,(toBuild,otherLocals)) -> do
let exe_s
| length toBuild > 1 = "several executables with the same name:"
@@ -216,7 +183,7 @@ warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
T.intercalate
", "
["'" <> packageNameText p <> ":" <> exe <> "'" | p <- pkgs]
- ($logWarn . T.unlines . concat)
+ (logWarn . T.unlines . concat)
[ [ "Building " <> exe_s <> " " <> exesText toBuild <> "." ]
, [ "Only one of them will be available via 'stack exec' or locally installed."
| length toBuild > 1
@@ -259,14 +226,9 @@ warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
collect
[ (exe,pkgName)
| (pkgName,task) <- Map.toList (planTasks plan)
- , isLocal task
- , exe <- (Set.toList . exeComponents . lpComponents . taskLP) task
+ , TTFiles lp _ <- [taskType task] -- FIXME analyze logic here, do we need to check for Local?
+ , exe <- (Set.toList . exeComponents . lpComponents) lp
]
- where
- isLocal Task{taskType = (TTLocal _)} = True
- isLocal _ = False
- taskLP Task{taskType = (TTLocal lp)} = lp
- taskLP _ = error "warnIfExecutablesWithSameNameCouldBeOverwritten/taskLP: task isn't local"
localExes :: Map Text (NonEmpty PackageName)
localExes =
collect
@@ -279,7 +241,7 @@ warnIfExecutablesWithSameNameCouldBeOverwritten locals plan = do
warnAboutSplitObjs :: MonadLogger m => BuildOpts -> m ()
warnAboutSplitObjs bopts | boptsSplitObjs bopts = do
- $logWarn $ "Building with --split-objs is enabled. " <> T.pack splitObjsWarning
+ logWarn $ "Building with --split-objs is enabled. " <> T.pack splitObjsWarning
warnAboutSplitObjs _ = return ()
splitObjsWarning :: String
@@ -311,23 +273,18 @@ mkBaseConfigOpts boptsCli = do
}
-- | Provide a function for loading package information from the package index
-withLoadPackage :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
- => ((PackageName -> Version -> Map FlagName Bool -> [Text] -> IO Package) -> m a)
- -> m a
+withLoadPackage :: HasEnvConfig env
+ => ((PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> IO Package) -> RIO env a)
+ -> RIO env a
withLoadPackage inner = do
econfig <- view envConfigL
- withCabalLoader $ \cabalLoader ->
- inner $ \name version flags ghcOptions -> do
- bs <- cabalLoader $ PackageIdentifier name version
-
- -- Intentionally ignore warnings, as it's not really
- -- appropriate to print a bunch of warnings out while
- -- resolving the package index.
- (_warnings,pkg) <- readPackageBS
+ root <- view projectRootL
+ run <- askRunInIO
+ withCabalLoader $ \loadFromIndex ->
+ inner $ \loc flags ghcOptions -> run $
+ resolvePackage
(depPackageConfig econfig flags ghcOptions)
- (PackageIdentifier name version)
- bs
- return pkg
+ <$> parseSingleCabalFileIndex loadFromIndex root loc
where
-- | Package config to be used for dependencies
depPackageConfig :: EnvConfig -> Map FlagName Bool -> [Text] -> PackageConfig
@@ -342,8 +299,8 @@ withLoadPackage inner = do
-- | Set the code page for this process as necessary. Only applies to Windows.
-- See: https://github.com/commercialhaskell/stack/issues/738
+fixCodePage :: HasEnvConfig env => RIO env a -> RIO env a
#ifdef WINDOWS
-fixCodePage :: (StackM env m, HasBuildConfig env, HasEnvConfig env) => m a -> m a
fixCodePage inner = do
mcp <- view $ configL.to configModifyCodePage
ghcVersion <- view $ actualCompilerVersionL.to getGhcVersion
@@ -359,13 +316,13 @@ fixCodePage inner = do
let setInput = origCPI /= expected
setOutput = origCPO /= expected
fixInput
- | setInput = Catch.bracket_
+ | setInput = bracket_
(liftIO $ do
setConsoleCP expected)
(liftIO $ setConsoleCP origCPI)
| otherwise = id
fixOutput
- | setOutput = Catch.bracket_
+ | setOutput = bracket_
(liftIO $ do
setConsoleOutputCP expected)
(liftIO $ setConsoleOutputCP origCPO)
@@ -379,20 +336,19 @@ fixCodePage inner = do
fixInput $ fixOutput inner
expected = 65001 -- UTF-8
- warn typ = $logInfo $ T.concat
+ warn typ = logInfo $ T.concat
[ "Setting"
, typ
, " codepage to UTF-8 (65001) to ensure correct output from GHC"
]
#else
-fixCodePage :: a -> a
fixCodePage = id
#endif
-- | Query information about the build and print the result to stdout in YAML format.
-queryBuildInfo :: (StackM env m, HasEnvConfig env)
+queryBuildInfo :: HasEnvConfig env
=> [Text] -- ^ selectors
- -> m ()
+ -> RIO env ()
queryBuildInfo selectors0 =
rawBuildInfo
>>= select id selectors0
@@ -414,10 +370,10 @@ queryBuildInfo selectors0 =
_ -> err $ "Cannot apply selector to " ++ show value
where
cont = select (front . (sel:)) sels
- err msg = errorString $ msg ++ ": " ++ show (front [sel])
+ err msg = throwString $ msg ++ ": " ++ show (front [sel])
-- | Get the raw build information object
-rawBuildInfo :: (StackM env m, HasEnvConfig env) => m Value
+rawBuildInfo :: HasEnvConfig env => RIO env Value
rawBuildInfo = do
(locals, _sourceMap) <- loadSourceMap NeedTargets defaultBuildOptsCLI
return $ object
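
On Windows, fixCodePage is essentially a bracket_ around switching the console code pages to UTF-8 (65001) and restoring the originals afterwards. A simplified, Windows-only sketch of that idea in plain IO (output code page only):

    import Control.Exception (bracket_)
    import System.Win32.Console (getConsoleOutputCP, setConsoleOutputCP)

    withUtf8OutputCP :: IO a -> IO a
    withUtf8OutputCP inner = do
      orig <- getConsoleOutputCP
      if orig == 65001   -- already UTF-8, nothing to change
        then inner
        else bracket_ (setConsoleOutputCP 65001) (setConsoleOutputCP orig) inner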
diff --git a/src/Stack/Build/Cache.hs b/src/Stack/Build/Cache.hs
index 4484ba9..81adab2 100644
--- a/src/Stack/Build/Cache.hs
+++ b/src/Stack/Build/Cache.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
@@ -31,48 +32,31 @@ module Stack.Build.Cache
, BuildCache(..)
) where
-import Control.Applicative
-import Control.DeepSeq (NFData)
-import Control.Exception.Safe (handleIO, tryAnyDeep)
-import Control.Monad (liftM)
-import Control.Monad.Catch (MonadThrow, MonadCatch)
-import Control.Monad.IO.Class
-import Control.Monad.Logger (MonadLogger)
-import Control.Monad.Reader (MonadReader)
-import Control.Monad.Trans.Control (MonadBaseControl)
+import Stack.Prelude
import Crypto.Hash (hashWith, SHA256(..))
-import Data.Binary (Binary (..))
-import qualified Data.Binary as Binary
-import Data.Binary.Tagged (HasStructuralInfo, HasSemanticVersion)
-import qualified Data.Binary.Tagged as BinaryTagged
+import Control.Monad.Trans.Maybe
import qualified Data.ByteArray as Mem (convert)
-import qualified Data.ByteArray.Encoding as Mem (convertToBase, Base(Base16))
import qualified Data.ByteString.Base64.URL as B64URL
+import qualified Data.ByteString as B
import qualified Data.ByteString.Char8 as S8
-import qualified Data.ByteString.Lazy as LBS
-import Data.Foldable (forM_)
-import Data.Map (Map)
import qualified Data.Map as M
-import Data.Maybe (fromMaybe, mapMaybe)
-import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Store as Store
import Data.Store.VersionTagged
-import Data.Text (Text)
import qualified Data.Text as T
-import Data.Traversable (forM)
+import qualified Data.Text.Encoding as TE
import Path
import Path.IO
-import Prelude -- Fix redundant import warnings
-import Stack.Constants
+import Stack.Constants.Config
import Stack.Types.Build
+import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.Version
-import qualified System.FilePath as FilePath
+import qualified System.FilePath as FP
-- | Directory containing files to mark an executable as installed
exeInstalledDir :: (MonadReader env m, HasEnvConfig env, MonadThrow m)
@@ -97,7 +81,7 @@ getInstalledExes loc = do
mapMaybe (parsePackageIdentifierFromString . toFilePath . filename) files
-- | Mark the given executable as installed
-markExeInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadCatch m)
+markExeInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m)
=> InstallLocation -> PackageIdentifier -> m ()
markExeInstalled loc ident = do
dir <- exeInstalledDir loc
@@ -112,28 +96,28 @@ markExeInstalled loc ident = do
-- TODO consideration for the future: list all of the executables
-- installed, and invalidate this file in getInstalledExes if they no
-- longer exist
- liftIO $ writeFile fp "Installed"
+ liftIO $ B.writeFile fp "Installed"
-- | Mark the given executable as not installed
-markExeNotInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadCatch m)
+markExeNotInstalled :: (MonadReader env m, HasEnvConfig env, MonadIO m, MonadThrow m)
=> InstallLocation -> PackageIdentifier -> m ()
markExeNotInstalled loc ident = do
dir <- exeInstalledDir loc
ident' <- parseRelFile $ packageIdentifierString ident
- ignoringAbsence (removeFile $ dir </> ident')
+ liftIO $ ignoringAbsence (removeFile $ dir </> ident')
-- | Try to read the dirtiness cache for the given package directory.
-tryGetBuildCache :: (MonadIO m, MonadReader env m, MonadThrow m, MonadLogger m, HasEnvConfig env, MonadBaseControl IO m)
+tryGetBuildCache :: (MonadUnliftIO m, MonadReader env m, MonadThrow m, MonadLogger m, HasEnvConfig env)
=> Path Abs Dir -> m (Maybe (Map FilePath FileCacheInfo))
tryGetBuildCache dir = liftM (fmap buildCacheTimes) . $(versionedDecodeFile buildCacheVC) =<< buildCacheFile dir
-- | Try to read the dirtiness cache for the given package directory.
-tryGetConfigCache :: (MonadIO m, MonadReader env m, MonadThrow m, HasEnvConfig env, MonadBaseControl IO m, MonadLogger m)
+tryGetConfigCache :: (MonadUnliftIO m, MonadReader env m, MonadThrow m, HasEnvConfig env, MonadLogger m)
=> Path Abs Dir -> m (Maybe ConfigCache)
tryGetConfigCache dir = $(versionedDecodeFile configCacheVC) =<< configCacheFile dir
-- | Try to read the mod time of the cabal file from the last build
-tryGetCabalMod :: (MonadIO m, MonadReader env m, MonadThrow m, HasEnvConfig env, MonadBaseControl IO m, MonadLogger m)
+tryGetCabalMod :: (MonadUnliftIO m, MonadReader env m, MonadThrow m, HasEnvConfig env, MonadLogger m)
=> Path Abs Dir -> m (Maybe ModTime)
tryGetCabalMod dir = $(versionedDecodeFile modTimeVC) =<< configCabalMod dir
@@ -165,7 +149,7 @@ writeCabalMod dir x = do
$(versionedEncodeFile modTimeVC) fp x
-- | Delete the caches for the project.
-deleteCaches :: (MonadIO m, MonadReader env m, MonadCatch m, HasEnvConfig env)
+deleteCaches :: (MonadIO m, MonadReader env m, HasEnvConfig env, MonadThrow m)
=> Path Abs Dir -> m ()
deleteCaches dir = do
{- FIXME confirm that this is acceptable to remove
@@ -173,7 +157,7 @@ deleteCaches dir = do
removeFileIfExists bfp
-}
cfp <- configCacheFile dir
- ignoringAbsence (removeFile cfp)
+ liftIO $ ignoringAbsence (removeFile cfp)
flagCacheFile :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env)
=> Installed
@@ -181,13 +165,13 @@ flagCacheFile :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env)
flagCacheFile installed = do
rel <- parseRelFile $
case installed of
- Library _ gid -> ghcPkgIdString gid
+ Library _ gid _ -> ghcPkgIdString gid
Executable ident -> packageIdentifierString ident
dir <- flagCacheLocal
return $ dir </> rel
-- | Loads the flag cache for the given installed extra-deps
-tryGetFlagCache :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env, MonadBaseControl IO m, MonadLogger m)
+tryGetFlagCache :: (MonadUnliftIO m, MonadThrow m, MonadReader env m, HasEnvConfig env, MonadLogger m)
=> Installed
-> m (Maybe ConfigCache)
tryGetFlagCache gid = do
@@ -220,7 +204,7 @@ unsetTestSuccess dir = do
$(versionedEncodeFile testSuccessVC) fp False
-- | Check if the test suite already passed
-checkTestSuccess :: (MonadIO m, MonadThrow m, MonadReader env m, HasEnvConfig env, MonadBaseControl IO m, MonadLogger m)
+checkTestSuccess :: (MonadUnliftIO m, MonadThrow m, MonadReader env m, HasEnvConfig env, MonadLogger m)
=> Path Abs Dir
-> m Bool
checkTestSuccess dir =
@@ -246,60 +230,77 @@ checkTestSuccess dir =
-- We only pay attention to non-directory options. We don't want to avoid a
-- cache hit just because it was installed in a different directory.
precompiledCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env, MonadLogger m)
- => PackageIdentifier
+ => PackageLocationIndex FilePath
-> ConfigureOpts
-> Set GhcPkgId -- ^ dependencies
- -> m (Path Abs File, m (Path Abs File))
-precompiledCacheFile pkgident copts installedPackageIDs = do
- ec <- view envConfigL
+ -> m (Maybe (Path Abs File))
+precompiledCacheFile loc copts installedPackageIDs = do
+ ec <- view envConfigL
- compiler <- view actualCompilerVersionL >>= parseRelDir . compilerVersionString
- cabal <- view cabalVersionL >>= parseRelDir . versionString
- pkg <- parseRelDir $ packageIdentifierString pkgident
- platformRelDir <- platformGhcRelDir
+ compiler <- view actualCompilerVersionL >>= parseRelDir . compilerVersionString
+ cabal <- view cabalVersionL >>= parseRelDir . versionString
+ let mpkgRaw =
+ -- The goal here is to come up with a string representing the
+ -- package location which is unique. For archives and repos,
+ -- we rely upon cryptographic hashes paired with
+ -- subdirectories to identify this specific package version.
+ case loc of
+ PLIndex pir -> Just $ packageIdentifierRevisionString pir
+ PLOther other -> case other of
+ PLFilePath _ -> assert False Nothing -- no PLFilePaths should end up in a snapshot
+ PLArchive a -> fmap
+ (\h -> T.unpack (staticSHA256ToText h) ++ archiveSubdirs a)
+ (archiveHash a)
+ PLRepo r -> Just $ T.unpack (repoCommit r) ++ repoSubdirs r
- let input = (coNoDirs copts, installedPackageIDs)
+ forM mpkgRaw $ \pkgRaw -> do
+ pkg <-
+ case parseRelDir pkgRaw of
+ Just x -> return x
+ Nothing -> parseRelDir
+ $ T.unpack
+ $ TE.decodeUtf8
+ $ B64URL.encode
+ $ TE.encodeUtf8
+ $ T.pack pkgRaw
+ platformRelDir <- platformGhcRelDir
-- In Cabal versions 1.22 and later, the configure options contain the
-- installed package IDs, which is what we need for a unique hash.
-- Unfortunately, earlier Cabals don't have the information, so we must
-- supplement it with the installed package IDs directly.
-- See issue: https://github.com/commercialhaskell/stack/issues/1103
- let oldHash = Mem.convertToBase Mem.Base16 $ hashWith SHA256 $ LBS.toStrict $
- if view cabalVersionL ec >= $(mkVersion "1.22")
- then Binary.encode (coNoDirs copts)
- else Binary.encode input
- hashToPath hash = do
- hashPath <- parseRelFile $ S8.unpack hash
- return $ view stackRootL ec
- </> $(mkRelDir "precompiled")
- </> platformRelDir
- </> compiler
- </> cabal
- </> pkg
- </> hashPath
+ let input = (coNoDirs copts, installedPackageIDs)
+ hashPath <- parseRelFile $ S8.unpack $ B64URL.encode
+ $ Mem.convert $ hashWith SHA256 $ Store.encode input
- newPath <- hashToPath $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ Store.encode input
- return (newPath, hashToPath oldHash)
+ return $ view stackRootL ec
+ </> $(mkRelDir "precompiled")
+ </> platformRelDir
+ </> compiler
+ </> cabal
+ </> pkg
+ </> hashPath
-- | Write out information about a newly built package
writePrecompiledCache :: (MonadThrow m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m)
=> BaseConfigOpts
- -> PackageIdentifier
+ -> PackageLocationIndex FilePath
-> ConfigureOpts
-> Set GhcPkgId -- ^ dependencies
-> Installed -- ^ library
-> Set Text -- ^ executables
-> m ()
-writePrecompiledCache baseConfigOpts pkgident copts depIDs mghcPkgId exes = do
- (file, _) <- precompiledCacheFile pkgident copts depIDs
+writePrecompiledCache baseConfigOpts loc copts depIDs mghcPkgId exes = do
+ mfile <- precompiledCacheFile loc copts depIDs
+ forM_ mfile $ \file -> do
ensureDir (parent file)
ec <- view envConfigL
let stackRootRelative = makeRelative (view stackRootL ec)
mlibpath <-
case mghcPkgId of
Executable _ -> return Nothing
- Library _ ipid -> liftM Just $ do
+ Library _ ipid _ -> liftM Just $ do
ipid' <- parseRelFile $ ghcPkgIdString ipid ++ ".conf"
relPath <- stackRootRelative $ bcoSnapDB baseConfigOpts </> ipid'
return $ toFilePath relPath
@@ -314,46 +315,27 @@ writePrecompiledCache baseConfigOpts pkgident copts depIDs mghcPkgId exes = do
-- | Check the cache for a precompiled package matching the given
-- configuration.
-readPrecompiledCache :: (MonadThrow m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m, MonadBaseControl IO m)
- => PackageIdentifier -- ^ target package
+readPrecompiledCache :: forall env. HasEnvConfig env
+ => PackageLocationIndex FilePath -- ^ target package
-> ConfigureOpts
-> Set GhcPkgId -- ^ dependencies
- -> m (Maybe PrecompiledCache)
-readPrecompiledCache pkgident copts depIDs = do
- ec <- view envConfigL
- let toAbsPath path = do
- if FilePath.isAbsolute path
- then path -- Only older version store absolute path
- else toFilePath (view stackRootL ec) FilePath.</> path
- let toAbsPC pc =
- PrecompiledCache
- { pcLibrary = fmap toAbsPath (pcLibrary pc)
- , pcExes = map toAbsPath (pcExes pc)
- }
-
- (file, getOldFile) <- precompiledCacheFile pkgident copts depIDs
- mres <- $(versionedDecodeFile precompiledCacheVC) file
- case mres of
- Just res -> return (Just $ toAbsPC res)
- Nothing -> do
- -- Fallback on trying the old binary format.
- oldFile <- getOldFile
- mpc <- fmap toAbsPC <$> binaryDecodeFileOrFailDeep oldFile
- -- Write out file in new format. Keep old file around for
- -- the benefit of older stack versions.
- forM_ mpc ($(versionedEncodeFile precompiledCacheVC) file)
- return mpc
-
--- | Ensure that there are no lurking exceptions deep inside the parsed
--- value... because that happens unfortunately. See
--- https://github.com/commercialhaskell/stack/issues/554
-binaryDecodeFileOrFailDeep :: (BinarySchema a, MonadIO m)
- => Path loc File
- -> m (Maybe a)
-binaryDecodeFileOrFailDeep fp = liftIO $ fmap (either (const Nothing) id) $ tryAnyDeep $ do
- eres <- BinaryTagged.taggedDecodeFileOrFail (toFilePath fp)
- case eres of
- Left _ -> return Nothing
- Right x -> return (Just x)
-
-type BinarySchema a = (Binary a, NFData a, HasStructuralInfo a, HasSemanticVersion a)
+ -> RIO env (Maybe PrecompiledCache)
+readPrecompiledCache loc copts depIDs = runMaybeT $
+ MaybeT (precompiledCacheFile loc copts depIDs) >>=
+ MaybeT . $(versionedDecodeFile precompiledCacheVC) >>=
+ lift . mkAbs
+ where
+ -- Since commit ed9ccc08f327bad68dd2d09a1851ce0d055c0422,
+ -- pcLibrary paths are stored as relative to the stack
+ -- root. Therefore, we need to prepend the stack root when
+ -- checking that the file exists. For the older cached paths, the
+ -- file will contain an absolute path, which will make `stackRoot
+ -- </>` a no-op.
+ mkAbs :: PrecompiledCache -> RIO env PrecompiledCache
+ mkAbs pc0 = do
+ stackRoot <- view stackRootL
+ let mkAbs' = (toFilePath stackRoot FP.</>)
+ return PrecompiledCache
+ { pcLibrary = mkAbs' <$> pcLibrary pc0
+ , pcExes = mkAbs' <$> pcExes pc0
+ }
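
The cache path component above is produced by hashing the serialized configure options and dependency IDs and base64url-encoding the digest so it is safe as a single directory name. A self-contained sketch of just that step:

    import Crypto.Hash (SHA256 (..), hashWith)
    import qualified Data.ByteArray as Mem (convert)
    import qualified Data.ByteString as B
    import qualified Data.ByteString.Base64.URL as B64URL
    import qualified Data.ByteString.Char8 as S8

    -- Turn an already-serialized input (e.g. the Store encoding of the configure
    -- options) into a short, path-safe directory name.
    cacheComponent :: B.ByteString -> FilePath
    cacheComponent bs = S8.unpack (B64URL.encode (Mem.convert (hashWith SHA256 bs)))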
diff --git a/src/Stack/Build/ConstructPlan.hs b/src/Stack/Build/ConstructPlan.hs
index 76996cc..4a4b994 100644
--- a/src/Stack/Build/ConstructPlan.hs
+++ b/src/Stack/Build/ConstructPlan.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
@@ -17,49 +18,36 @@ module Stack.Build.ConstructPlan
( constructPlan
) where
-import Control.Exception.Lifted
-import Control.Monad
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
import Control.Monad.RWS.Strict
import Control.Monad.State.Strict (execState)
-import Control.Monad.Trans.Resource
-import Data.Either
-import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
import Data.List.Extra (nubOrd)
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.String (fromString)
-import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
-import Data.Typeable
-import qualified Distribution.Package as Cabal
import qualified Distribution.Text as Cabal
import qualified Distribution.Version as Cabal
-import GHC.Generics (Generic)
+import Distribution.Types.BuildType (BuildType (Configure))
import Generics.Deriving.Monoid (memptydefault, mappenddefault)
import Lens.Micro (lens)
-import Path
-import Prelude hiding (pi, writeFile)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
import Stack.Build.Source
import Stack.BuildPlan
+import Stack.Config (getLocalPackages)
import Stack.Constants
import Stack.Package
import Stack.PackageDump
import Stack.PackageIndex
import Stack.PrettyPrint
import Stack.Types.Build
+import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.FlagName
@@ -67,8 +55,9 @@ import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT (StackM)
+import Stack.Types.Runner
import Stack.Types.Version
+import System.IO (putStrLn)
import System.Process.Read (findExecutable)
data PackageInfo
@@ -129,29 +118,32 @@ instance Monoid W where
mempty = memptydefault
mappend = mappenddefault
-type M = RWST
+type M = RWST -- TODO replace with more efficient WS stack on top of StackT
Ctx
W
(Map PackageName (Either ConstructPlanException AddDepRes))
IO
data Ctx = Ctx
- { mbp :: !MiniBuildPlan
+ { ls :: !LoadedSnapshot
, baseConfigOpts :: !BaseConfigOpts
- , loadPackage :: !(PackageName -> Version -> Map FlagName Bool -> [Text] -> IO Package)
+ , loadPackage :: !(PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> IO Package)
, combinedMap :: !CombinedMap
- , toolToPackages :: !(Cabal.Dependency -> Map PackageName VersionRange)
+ , toolToPackages :: !(ExeName -> Map PackageName VersionRange)
, ctxEnvConfig :: !EnvConfig
, callStack :: ![PackageName]
, extraToBuild :: !(Set PackageName)
, getVersions :: !(PackageName -> IO (Set Version))
, wanted :: !(Set PackageName)
, localNames :: !(Set PackageName)
- , logFunc :: Loc -> LogSource -> LogLevel -> LogStr -> IO ()
}
instance HasPlatform Ctx
instance HasGHCVariant Ctx
+instance HasLogFunc Ctx where
+ logFuncL = configL.logFuncL
+instance HasRunner Ctx where
+ runnerL = configL.runnerL
instance HasConfig Ctx
instance HasBuildConfig Ctx
instance HasEnvConfig Ctx where
@@ -173,30 +165,31 @@ instance HasEnvConfig Ctx where
--
-- 3) It will only rebuild a local package if its files are dirty or
-- some of its dependencies have changed.
-constructPlan :: forall env m. (StackM env m, HasEnvConfig env)
- => MiniBuildPlan
+constructPlan :: forall env. HasEnvConfig env
+ => LoadedSnapshot
-> BaseConfigOpts
-> [LocalPackage]
-> Set PackageName -- ^ additional packages that must be built
-> [DumpPackage () () ()] -- ^ locally registered
- -> (PackageName -> Version -> Map FlagName Bool -> [Text] -> IO Package) -- ^ load upstream package
+ -> (PackageLocationIndex FilePath -> Map FlagName Bool -> [Text] -> IO Package) -- ^ load upstream package
-> SourceMap
-> InstalledMap
-> Bool
- -> m Plan
-constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage0 sourceMap installedMap initialBuildSteps = do
- $logDebug "Constructing the build plan"
- getVersions0 <- getPackageVersionsIO
+ -> RIO env Plan
+constructPlan ls0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackage0 sourceMap installedMap initialBuildSteps = do
+ logDebug "Constructing the build plan"
+ u <- askUnliftIO
econfig <- view envConfigL
let onWanted = void . addDep False . packageName . lpPackage
let inner = do
mapM_ onWanted $ filter lpWanted locals
mapM_ (addDep False) $ Set.toList extraToBuild0
- lf <- askLoggerIO
+ lp <- getLocalPackages
+ let ctx = mkCtx econfig (unliftIO u . getPackageVersions) lp
((), m, W efinals installExes dirtyReason deps warnings parents) <-
- liftIO $ runRWST inner (ctx econfig getVersions0 lf) M.empty
- mapM_ $logWarn (warnings [])
+ liftIO $ runRWST inner ctx M.empty
+ mapM_ logWarn (warnings [])
let toEither (_, Left e) = Left e
toEither (k, Right v) = Right (k, v)
(errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
@@ -217,36 +210,34 @@ constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 localDumpPkgs loadPackag
, planFinals = M.fromList finals
, planUnregisterLocal = mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps
, planInstallExes =
- if boptsInstallExes $ bcoBuildOpts baseConfigOpts0
+ if boptsInstallExes (bcoBuildOpts baseConfigOpts0) ||
+ boptsInstallCompilerTool (bcoBuildOpts baseConfigOpts0)
then installExes
else Map.empty
}
else do
planDebug $ show errs
stackYaml <- view stackYamlL
- $prettyError $ pprintExceptions errs stackYaml parents (wantedLocalPackages locals)
+ prettyErrorNoIndent $ pprintExceptions errs stackYaml parents (wanted ctx)
throwM $ ConstructPlanFailed "Plan construction failed."
where
- ctx econfig getVersions0 lf = Ctx
- { mbp = mbp0
+ mkCtx econfig getVersions0 lp = Ctx
+ { ls = ls0
, baseConfigOpts = baseConfigOpts0
, loadPackage = loadPackage0
, combinedMap = combineMap sourceMap installedMap
- , toolToPackages = \(Cabal.Dependency name _) ->
+ , toolToPackages = \name ->
maybe Map.empty (Map.fromSet (const Cabal.anyVersion)) $
- Map.lookup (T.pack . packageNameString . fromCabalPackageName $ name) toolMap
+ Map.lookup name toolMap
, ctxEnvConfig = econfig
, callStack = []
, extraToBuild = extraToBuild0
, getVersions = getVersions0
, wanted = wantedLocalPackages locals <> extraToBuild0
, localNames = Set.fromList $ map (packageName . lpPackage) locals
- , logFunc = lf
}
- -- TODO Currently, this will only consider and install tools from the
- -- snapshot. It will not automatically install build tools from extra-deps
- -- or local packages.
- toolMap = getToolMap mbp0
+ where
+ toolMap = getToolMap ls0 lp
-- | State to be maintained during the calculation of local packages
-- to unregister.
@@ -319,7 +310,7 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps =
then Nothing
else Just $ fromMaybe "" $ Map.lookup name dirtyReason
-- Check if we're no longer using the local version
- | Just (PSUpstream _ Snap _ _ _) <- Map.lookup name sourceMap
+ | Just (piiLocation -> Snap) <- Map.lookup name sourceMap
= Just "Switching to snapshot installed package"
-- Check if a dependency is going to be unregistered
| (dep, _):_ <- mapMaybe (`Map.lookup` toUnregister) deps
@@ -342,7 +333,7 @@ mkUnregisterLocal tasks dirtyReason localDumpPkgs sourceMap initialBuildSteps =
-- step.
addFinal :: LocalPackage -> Package -> Bool -> M ()
addFinal lp package isAllInOne = do
- depsRes <- addPackageDeps package
+ depsRes <- addPackageDeps False package
res <- case depsRes of
Left e -> return $ Left e
Right (missing, present, _minLoc) -> do
@@ -361,12 +352,21 @@ addFinal lp package isAllInOne = do
Local
package
, taskPresent = present
- , taskType = TTLocal lp
+ , taskType = TTFiles lp Local -- FIXME we can rely on this being Local, right?
, taskAllInOne = isAllInOne
, taskCachePkgSrc = CacheSrcLocal (toFilePath (lpDir lp))
+ , taskAnyMissing = not $ Set.null missing
+ , taskBuildTypeConfig = packageBuildTypeConfig package
}
tell mempty { wFinals = Map.singleton (packageName package) res }
+-- | Is this package being used as a library, or just as a build tool?
+-- If the former, we need to ensure that a library actually
+-- exists. See
+-- <https://github.com/commercialhaskell/stack/issues/2195>
+data DepType = AsLibrary | AsBuildTool
+ deriving (Show, Eq)
+
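The DepType introduced above later drives the dependency-map merge in packageDepsWithTools: when a package is required both as a library and as a build tool, the library requirement wins, so the HasNoLibrary check still applies. A minimal sketch of that merge rule; combineDepType is an illustrative name, not a function added by this patch:

combineDepType :: DepType -> DepType -> DepType
combineDepType AsLibrary   _     = AsLibrary   -- a library requirement always dominates
combineDepType AsBuildTool other = other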
-- | Given a 'PackageName', adds all of the build tasks to build the
-- package, if needed.
--
@@ -402,32 +402,38 @@ addDep treatAsDep' name = do
-- recommendation available
Nothing -> return $ Left $ UnknownPackage name
Just (PIOnlyInstalled loc installed) -> do
- -- slightly hacky, no flags since they likely won't affect executable names
- tellExecutablesUpstream name (installedVersion installed) loc Map.empty
+ -- FIXME Slightly hacky, no flags since
+ -- they likely won't affect executable
+ -- names. This code does not feel right.
+ tellExecutablesUpstream
+ (PackageIdentifierRevision (PackageIdentifier name (installedVersion installed)) CFILatest)
+ loc
+ Map.empty
return $ Right $ ADRFound loc installed
Just (PIOnlySource ps) -> do
- tellExecutables name ps
- installPackage name ps Nothing
+ tellExecutables ps
+ installPackage treatAsDep name ps Nothing
Just (PIBoth ps installed) -> do
- tellExecutables name ps
- installPackage name ps (Just installed)
+ tellExecutables ps
+ installPackage treatAsDep name ps (Just installed)
updateLibMap name res
return res
-tellExecutables :: PackageName -> PackageSource -> M ()
-tellExecutables _ (PSLocal lp)
+-- FIXME what's the purpose of this? Add a Haddock!
+tellExecutables :: PackageSource -> M ()
+tellExecutables (PSFiles lp _)
| lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
| otherwise = return ()
-- Ignores ghcOptions because they don't matter for enumerating
-- executables.
-tellExecutables name (PSUpstream version loc flags _ghcOptions _gitSha) =
- tellExecutablesUpstream name version loc flags
+tellExecutables (PSIndex loc flags _ghcOptions pir) =
+ tellExecutablesUpstream pir loc flags
-tellExecutablesUpstream :: PackageName -> Version -> InstallLocation -> Map FlagName Bool -> M ()
-tellExecutablesUpstream name version loc flags = do
+tellExecutablesUpstream :: PackageIdentifierRevision -> InstallLocation -> Map FlagName Bool -> M ()
+tellExecutablesUpstream pir@(PackageIdentifierRevision (PackageIdentifier name _) _) loc flags = do
ctx <- ask
when (name `Set.member` extraToBuild ctx) $ do
- p <- liftIO $ loadPackage ctx name version flags []
+ p <- liftIO $ loadPackage ctx (PLIndex pir) flags []
tellExecutablesPackage loc p
tellExecutablesPackage :: InstallLocation -> Package -> M ()
@@ -441,10 +447,10 @@ tellExecutablesPackage loc p = do
Just (PIOnlySource ps) -> goSource ps
Just (PIBoth ps _) -> goSource ps
- goSource (PSLocal lp)
+ goSource (PSFiles lp _)
| lpWanted lp = exeComponents (lpComponents lp)
| otherwise = Set.empty
- goSource PSUpstream{} = Set.empty
+ goSource PSIndex{} = Set.empty
tell mempty { wInstall = Map.fromList $ map (, loc) $ Set.toList $ filterComps myComps $ packageExes p }
where
@@ -454,30 +460,30 @@ tellExecutablesPackage loc p = do
-- | Given a 'PackageSource' and perhaps an 'Installed' value, adds
-- build 'Task's for the package and its dependencies.
-installPackage
- :: PackageName
- -> PackageSource
- -> Maybe Installed
- -> M (Either ConstructPlanException AddDepRes)
-installPackage name ps minstalled = do
+installPackage :: Bool -- ^ is this being used by a dependency?
+ -> PackageName
+ -> PackageSource
+ -> Maybe Installed
+ -> M (Either ConstructPlanException AddDepRes)
+installPackage treatAsDep name ps minstalled = do
ctx <- ask
case ps of
- PSUpstream version _ flags ghcOptions _ -> do
+ PSIndex _ flags ghcOptions pkgLoc -> do
planDebug $ "installPackage: Doing all-in-one build for upstream package " ++ show name
- package <- liftIO $ loadPackage ctx name version flags ghcOptions
- resolveDepsAndInstall True ps package minstalled
- PSLocal lp ->
+ package <- liftIO $ loadPackage ctx (PLIndex pkgLoc) flags ghcOptions -- FIXME be more efficient! Get this from the LoadedPackageInfo!
+ resolveDepsAndInstall True treatAsDep ps package minstalled
+ PSFiles lp _ ->
case lpTestBench lp of
Nothing -> do
planDebug $ "installPackage: No test / bench component for " ++ show name ++ " so doing an all-in-one build."
- resolveDepsAndInstall True ps (lpPackage lp) minstalled
+ resolveDepsAndInstall True treatAsDep ps (lpPackage lp) minstalled
Just tb -> do
-- Attempt to find a plan which performs an all-in-one
-- build. Ignore the writer action + reset the state if
-- it fails.
s <- get
res <- pass $ do
- res <- addPackageDeps tb
+ res <- addPackageDeps treatAsDep tb
let writerFunc w = case res of
Left _ -> mempty
_ -> w
@@ -498,7 +504,7 @@ installPackage name ps minstalled = do
put s
-- Otherwise, fall back on building the
-- tests / benchmarks in a separate step.
- res' <- resolveDepsAndInstall False ps (lpPackage lp) minstalled
+ res' <- resolveDepsAndInstall False treatAsDep ps (lpPackage lp) minstalled
when (isRight res') $ do
-- Insert it into the map so that it's
-- available for addFinal.
@@ -507,12 +513,13 @@ installPackage name ps minstalled = do
return res'
resolveDepsAndInstall :: Bool
+ -> Bool
-> PackageSource
-> Package
-> Maybe Installed
-> M (Either ConstructPlanException AddDepRes)
-resolveDepsAndInstall isAllInOne ps package minstalled = do
- res <- addPackageDeps package
+resolveDepsAndInstall isAllInOne treatAsDep ps package minstalled = do
+ res <- addPackageDeps treatAsDep package
case res of
Left err -> return $ Left err
Right deps -> liftM Right $ installPackageGivenDeps isAllInOne ps package minstalled deps
@@ -561,12 +568,18 @@ installPackageGivenDeps isAllInOne ps package minstalled (missing, present, minL
, taskPresent = present
, taskType =
case ps of
- PSLocal lp -> TTLocal lp
- PSUpstream _ loc _ _ sha -> TTUpstream package (loc <> minLoc) sha
+ PSFiles lp loc -> TTFiles lp (loc <> minLoc)
+ PSIndex loc _ _ pkgLoc -> TTIndex package (loc <> minLoc) pkgLoc
, taskAllInOne = isAllInOne
, taskCachePkgSrc = toCachePkgSrc ps
+ , taskAnyMissing = not $ Set.null missing
+ , taskBuildTypeConfig = packageBuildTypeConfig package
}
+-- | Is the build type of the package Configure?
+packageBuildTypeConfig :: Package -> Bool
+packageBuildTypeConfig pkg = packageBuildType pkg == Just Configure
+
-- Update response in the lib map. If it is an error, and there's
-- already an error about cyclic dependencies, prefer the cyclic error.
updateLibMap :: PackageName -> Either ConstructPlanException AddDepRes -> M ()
@@ -590,12 +603,13 @@ addEllipsis t
-- then the parent package must be installed locally. Otherwise, if it
-- is 'Snap', then it can either be installed locally or in the
-- snapshot.
-addPackageDeps :: Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
-addPackageDeps package = do
+addPackageDeps :: Bool -- ^ is this being used by a dependency?
+ -> Package -> M (Either ConstructPlanException (Set PackageIdentifier, Map PackageIdentifier GhcPkgId, InstallLocation))
+addPackageDeps treatAsDep package = do
ctx <- ask
deps' <- packageDepsWithTools package
- deps <- forM (Map.toList deps') $ \(depname, range) -> do
- eres <- addDep True depname
+ deps <- forM (Map.toList deps') $ \(depname, (range, depType)) -> do
+ eres <- addDep treatAsDep depname
let getLatestApplicable = do
vs <- liftIO $ getVersions ctx depname
return (latestApplicableVersion range vs)
@@ -608,6 +622,8 @@ addPackageDeps package = do
_ -> Couldn'tResolveItsDependencies (packageVersion package)
mlatestApplicable <- getLatestApplicable
return $ Left (depname, (range, mlatestApplicable, bd))
+ Right adr | depType == AsLibrary && not (adrHasLibrary adr) ->
+ return $ Left (depname, (range, Nothing, HasNoLibrary))
Right adr -> do
addParent depname range Nothing
inRange <- if adrVersion adr `withinRange` range
@@ -645,7 +661,7 @@ addPackageDeps package = do
(Set.singleton $ taskProvides task, Map.empty, taskLocation task)
ADRFound loc (Executable _) -> return $ Right
(Set.empty, Map.empty, loc)
- ADRFound loc (Library ident gid) -> return $ Right
+ ADRFound loc (Library ident gid _) -> return $ Right
(Set.empty, Map.singleton ident gid, loc)
else do
mlatestApplicable <- getLatestApplicable
@@ -669,6 +685,23 @@ addPackageDeps package = do
where
val = (First mversion, [(packageIdentifier package, range)])
+ adrHasLibrary :: AddDepRes -> Bool
+ adrHasLibrary (ADRToInstall task) = taskHasLibrary task
+ adrHasLibrary (ADRFound _ Library{}) = True
+ adrHasLibrary (ADRFound _ Executable{}) = False
+
+ taskHasLibrary :: Task -> Bool
+ taskHasLibrary task =
+ case taskType task of
+ TTFiles lp _ -> packageHasLibrary $ lpPackage lp
+ TTIndex p _ _ -> packageHasLibrary p
+
+ packageHasLibrary :: Package -> Bool
+ packageHasLibrary p =
+ case packageLibraries p of
+ HasLibraries _ -> True
+ NoLibraries -> False
+
checkDirtiness :: PackageSource
-> Installed
-> Package
@@ -677,7 +710,7 @@ checkDirtiness :: PackageSource
-> M Bool
checkDirtiness ps installed package present wanted = do
ctx <- ask
- moldOpts <- flip runLoggingT (logFunc ctx) $ tryGetFlagCache installed
+ moldOpts <- runRIO ctx $ tryGetFlagCache installed
let configOpts = configureOpts
(view envConfigL ctx)
(baseConfigOpts ctx)
@@ -691,8 +724,8 @@ checkDirtiness ps installed package present wanted = do
, configCacheDeps = Set.fromList $ Map.elems present
, configCacheComponents =
case ps of
- PSLocal lp -> Set.map renderComponent $ lpComponents lp
- PSUpstream{} -> Set.empty
+ PSFiles lp _ -> Set.map renderComponent $ lpComponents lp
+ PSIndex{} -> Set.empty
, configCacheHaddock =
shouldHaddockPackage buildOpts wanted (packageName package) ||
-- Disabling haddocks when old config had haddocks doesn't make dirty.
@@ -782,76 +815,74 @@ describeConfigDiff config old new
pkgSrcName CacheSrcUpstream = "upstream source"
psForceDirty :: PackageSource -> Bool
-psForceDirty (PSLocal lp) = lpForceDirty lp
-psForceDirty PSUpstream{} = False
+psForceDirty (PSFiles lp _) = lpForceDirty lp
+psForceDirty PSIndex{} = False
psDirty :: PackageSource -> Maybe (Set FilePath)
-psDirty (PSLocal lp) = lpDirtyFiles lp
-psDirty PSUpstream{} = Nothing -- files never change in an upstream package
+psDirty (PSFiles lp _) = lpDirtyFiles lp
+psDirty PSIndex{} = Nothing -- files never change in an upstream package
psLocal :: PackageSource -> Bool
-psLocal (PSLocal _) = True
-psLocal PSUpstream{} = False
+psLocal (PSFiles _ loc) = loc == Local -- FIXME this is probably not the right logic, see configureOptsNoDir. We probably want to check if this appears in packages:
+psLocal PSIndex{} = False
-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
-packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
+packageDepsWithTools :: Package -> M (Map PackageName (VersionRange, DepType))
packageDepsWithTools p = do
ctx <- ask
-- TODO: it would be cool to defer these warnings until there's an
-- actual issue building the package.
- let toEither (Cabal.Dependency (Cabal.PackageName name) _) mp =
+ let toEither name mp =
case Map.toList mp of
- [] -> Left (NoToolFound name (packageName p))
+ [] -> Left (ToolWarning name (packageName p) Nothing)
[_] -> Right mp
- xs -> Left (AmbiguousToolsFound name (packageName p) (map fst xs))
+ ((x, _):(y, _):zs) ->
+ Left (ToolWarning name (packageName p) (Just (x, y, map fst zs)))
(warnings0, toolDeps) =
partitionEithers $
- map (\dep -> toEither dep (toolToPackages ctx dep)) (packageTools p)
+ map (\dep -> toEither dep (toolToPackages ctx dep)) (Map.keys (packageTools p))
-- Check whether the tool is on the PATH before warning about it.
- warnings <- fmap catMaybes $ forM warnings0 $ \warning -> do
- let toolName = case warning of
- NoToolFound tool _ -> tool
- AmbiguousToolsFound tool _ _ -> tool
+ warnings <- fmap catMaybes $ forM warnings0 $ \warning@(ToolWarning (ExeName toolName) _ _) -> do
config <- view configL
menv <- liftIO $ configEnvOverride config minimalEnvSettings { esIncludeLocals = True }
- mfound <- findExecutable menv toolName
+ mfound <- findExecutable menv $ T.unpack toolName
case mfound of
Nothing -> return (Just warning)
Just _ -> return Nothing
tell mempty { wWarnings = (map toolWarningText warnings ++) }
- when (any isNoToolFound warnings) $ do
- let msg = T.unlines
- [ "Missing build-tools may be caused by dependencies of the build-tool being overridden by extra-deps."
- , "This should be fixed soon - see this issue https://github.com/commercialhaskell/stack/issues/595"
- ]
- tell mempty { wWarnings = (msg:) }
- return $ Map.unionsWith intersectVersionRanges
- $ packageDeps p
- : toolDeps
-
-data ToolWarning
- = NoToolFound String PackageName
- | AmbiguousToolsFound String PackageName [PackageName]
-
-isNoToolFound :: ToolWarning -> Bool
-isNoToolFound NoToolFound{} = True
-isNoToolFound _ = False
+ return $ Map.unionsWith
+ (\(vr1, dt1) (vr2, dt2) ->
+ ( intersectVersionRanges vr1 vr2
+ , case dt1 of
+ AsLibrary -> AsLibrary
+ AsBuildTool -> dt2
+ )
+ )
+ $ ((, AsLibrary) <$> packageDeps p)
+ : (Map.map (, AsBuildTool) <$> toolDeps)
+
+-- | Warn about tools in the snapshot definition. States the tool name
+-- expected, the package name using it, and found packages. If the
+-- last value is Nothing, it means the tool was not found
+-- anywhere. For a Just value, it was found in at least two packages.
+data ToolWarning = ToolWarning ExeName PackageName (Maybe (PackageName, PackageName, [PackageName]))
+ deriving Show
toolWarningText :: ToolWarning -> Text
-toolWarningText (NoToolFound toolName pkgName) =
+toolWarningText (ToolWarning (ExeName toolName) pkgName Nothing) =
"No packages found in snapshot which provide a " <>
T.pack (show toolName) <>
" executable, which is a build-tool dependency of " <>
T.pack (show (packageNameString pkgName))
-toolWarningText (AmbiguousToolsFound toolName pkgName options) =
+toolWarningText (ToolWarning (ExeName toolName) pkgName (Just (option1, option2, options))) =
"Multiple packages found in snapshot which provide a " <>
T.pack (show toolName) <>
" exeuctable, which is a build-tool dependency of " <>
T.pack (show (packageNameString pkgName)) <>
", so none will be installed.\n" <>
"Here's the list of packages which provide it: " <>
- T.intercalate ", " (map packageNameText options) <>
+ T.intercalate ", " (map packageNameText (option1:option2:options)) <>
"\nSince there's no good way to choose, you may need to install it manually."
-- | Strip out anything from the @Plan@ intended for the local database
@@ -863,11 +894,7 @@ stripLocals plan = plan
, planInstallExes = Map.filter (/= Local) $ planInstallExes plan
}
where
- checkTask task =
- case taskType task of
- TTLocal _ -> False
- TTUpstream _ Local _ -> False
- TTUpstream _ Snap _ -> True
+ checkTask task = taskLocation task == Snap
stripNonDeps :: Set PackageName -> Plan -> Plan
stripNonDeps deps plan = plan
@@ -884,12 +911,12 @@ markAsDep name = tell mempty { wDeps = Set.singleton name }
-- | Is the given package/version combo defined in the snapshot?
inSnapshot :: PackageName -> Version -> M Bool
inSnapshot name version = do
- p <- asks mbp
+ p <- asks ls
ls <- asks localNames
return $ fromMaybe False $ do
guard $ not $ name `Set.member` ls
- mpi <- Map.lookup name (mbpPackages p)
- return $ mpiVersion mpi == version
+ lpi <- Map.lookup name (lsPackages p)
+ return $ lpiVersion lpi == version
data ConstructPlanException
= DependencyCycleDetected [PackageName]
@@ -908,6 +935,8 @@ data BadDependency
= NotInBuildPlan
| Couldn'tResolveItsDependencies Version
| DependencyMismatch Version
+ | HasNoLibrary
+ -- ^ See description of 'DepType'
deriving (Typeable, Eq, Ord, Show)
-- TODO: Consider intersecting version ranges for multiple deps on a
@@ -920,17 +949,28 @@ pprintExceptions
-> Set PackageName
-> AnsiDoc
pprintExceptions exceptions stackYaml parentMap wanted =
- "While constructing the build plan, the following exceptions were encountered:" <> line <> line <>
- mconcat (intersperse (line <> line) (mapMaybe pprintException exceptions')) <> line <>
- if Map.null extras then "" else
- line <>
- "Recommended action: try adding the following to your extra-deps in" <+>
- toAnsiDoc (display stackYaml) <> ":" <>
- line <>
- vsep (map pprintExtra (Map.toList extras)) <>
- line <>
- line <>
- "You may also want to try the 'stack solver' command"
+ mconcat $
+ [ flow "While constructing the build plan, the following exceptions were encountered:"
+ , line <> line
+ , mconcat (intersperse (line <> line) (mapMaybe pprintException exceptions'))
+ , line <> line
+ , flow "Some potential ways to resolve this:"
+ , line <> line
+ ] ++
+ (if Map.null extras then [] else
+ [ " *" <+> align
+ (flow "Recommended action: try adding the following to your extra-deps in" <+>
+ toAnsiDoc (display stackYaml) <> ":")
+ , line <> line
+ , vsep (map pprintExtra (Map.toList extras))
+ , line <> line
+ ]
+ ) ++
+ [ " *" <+> align (flow "Set 'allow-newer: true' to ignore all version constraints and build anyway.")
+ , line <> line
+ , " *" <+> align (flow "You may also want to try using the 'stack solver' command.")
+ , line
+ ]
where
exceptions' = nubOrd exceptions
@@ -940,8 +980,11 @@ pprintExceptions exceptions stackYaml parentMap wanted =
getExtras (DependencyPlanFailures _ m) =
Map.unions $ map go $ Map.toList m
where
+ -- TODO: Likely a good idea to distinguish these to the user. In particular, for DependencyMismatch
go (name, (_range, Just version, NotInBuildPlan)) =
Map.singleton name version
+ go (name, (_range, Just version, DependencyMismatch{})) =
+ Map.singleton name version
go _ = Map.empty
pprintExtra (name, version) =
fromString (concat ["- ", packageNameString name, "-", versionString version])
@@ -952,32 +995,33 @@ pprintExceptions exceptions stackYaml parentMap wanted =
toNotInBuildPlan _ = []
pprintException (DependencyCycleDetected pNames) = Just $
- "Dependency cycle detected in packages:" <> line <>
- indent 4 (encloseSep "[" "]" "," (map (errorRed . fromString . packageNameString) pNames))
+ flow "Dependency cycle detected in packages:" <> line <>
+ indent 4 (encloseSep "[" "]" "," (map (styleError . display) pNames))
pprintException (DependencyPlanFailures pkg pDeps) =
case mapMaybe pprintDep (Map.toList pDeps) of
[] -> Nothing
depErrors -> Just $
- "In the dependencies for" <+> pkgIdent <>
+ flow "In the dependencies for" <+> pkgIdent <>
pprintFlags (packageFlags pkg) <> ":" <> line <>
indent 4 (vsep depErrors) <>
case getShortestDepsPath parentMap wanted (packageName pkg) of
- Nothing -> line <> "needed for unknown reason - stack invariant violated."
- Just [] -> line <> "needed since" <+> pkgIdent <+> "is a build target."
- Just (target:path) -> line <> "needed due to " <> encloseSep "" "" " -> " pathElems
+ Nothing -> line <> flow "needed for unknown reason - stack invariant violated."
+ Just [] -> line <> flow "needed since" <+> pkgName <+> flow "is a build target."
+ Just (target:path) -> line <> flow "needed due to" <+> encloseSep "" "" " -> " pathElems
where
pathElems =
- [displayTargetPkgId target] ++
+ [styleTarget . display $ target] ++
map display path ++
[pkgIdent]
where
- pkgIdent = displayCurrentPkgId (packageIdentifier pkg)
+ pkgName = styleCurrent . display $ packageName pkg
+ pkgIdent = styleCurrent . display $ packageIdentifier pkg
-- Skip these when they are redundant with 'NotInBuildPlan' info.
pprintException (UnknownPackage name)
| name `Set.member` allNotInBuildPlan = Nothing
| name `HashSet.member` wiredInPackages =
- Just $ "Can't build a package with same name as a wired-in-package:" <+> displayCurrentPkgName name
- | otherwise = Just $ "Unknown package:" <+> displayCurrentPkgName name
+ Just $ flow "Can't build a package with same name as a wired-in-package:" <+> (styleCurrent . display $ name)
+ | otherwise = Just $ flow "Unknown package:" <+> (styleCurrent . display $ name)
pprintFlags flags
| Map.null flags = ""
@@ -987,29 +1031,32 @@ pprintExceptions exceptions stackYaml parentMap wanted =
pprintDep (name, (range, mlatestApplicable, badDep)) = case badDep of
NotInBuildPlan -> Just $
- errorRed (display name) <+>
- align ("must match" <+> goodRange <> "," <> softline <>
- "but the stack configuration has no specified version" <>
+ styleError (display name) <+>
+ align (flow "must match" <+> goodRange <> "," <> softline <>
+ flow "but the stack configuration has no specified version" <>
latestApplicable Nothing)
-- TODO: For local packages, suggest editing constraints
DependencyMismatch version -> Just $
- displayErrorPkgId (PackageIdentifier name version) <+>
- align ("must match" <+> goodRange <>
+ (styleError . display) (PackageIdentifier name version) <+>
+ align (flow "from stack configuration does not match" <+> goodRange <>
latestApplicable (Just version))
-- I think the main useful info is these explain why missing
-- packages are needed. Instead lets give the user the shortest
-- path from a target to the package.
Couldn'tResolveItsDependencies _version -> Nothing
+ HasNoLibrary -> Just $
+ styleError (display name) <+>
+ align (flow "is a library dependency, but the package provides no library")
where
- goodRange = goodGreen (fromString (Cabal.display range))
+ goodRange = styleGood (fromString (Cabal.display range))
latestApplicable mversion =
case mlatestApplicable of
Nothing -> ""
Just la
| mlatestApplicable == mversion -> softline <>
- "(latest applicable is specified)"
+ flow "(latest matching version is specified)"
| otherwise -> softline <>
- "(latest applicable is " <> goodGreen (display la) <> ")"
+ flow "(latest matching version is" <+> styleGood (display la) <> ")"
-- | Get the shortest reason for the package to be in the build plan. In
-- other words, trace the parent dependencies back to a 'wanted'
diff --git a/src/Stack/Build/Execute.hs b/src/Stack/Build/Execute.hs
index f685a2a..6b3181f 100644
--- a/src/Stack/Build/Execute.hs
+++ b/src/Stack/Build/Execute.hs
@@ -1,5 +1,7 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
@@ -8,6 +10,7 @@
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE RankNTypes #-}
-- | Perform a build
module Stack.Build.Execute
( printPlan
@@ -20,51 +23,29 @@ module Stack.Build.Execute
, ExcludeTHLoading(..)
) where
-import Control.Applicative
-import Control.Arrow ((&&&), second)
import Control.Concurrent.Execute
-import Control.Concurrent.MVar.Lifted
import Control.Concurrent.STM
-import Control.Exception.Safe (catchIO)
-import Control.Exception.Lifted
-import Control.Monad (liftM, when, unless, void)
-import Control.Monad.Catch (MonadCatch)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Trans.Control (liftBaseWith)
-import Control.Monad.Trans.Resource
+import Stack.Prelude
import Crypto.Hash
import Data.Attoparsec.Text hiding (try)
import qualified Data.ByteArray as Mem (convert)
import qualified Data.ByteString as S
import qualified Data.ByteString.Base64.URL as B64URL
import Data.Char (isSpace)
-import Data.Conduit
+import Data.Conduit hiding (runConduitRes)
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
import qualified Data.Conduit.Text as CT
-import Data.Either (isRight)
import Data.FileEmbed (embedFile, makeRelativeToProject)
-import Data.Foldable (forM_, any)
-import Data.Function
-import Data.IORef
import Data.IORef.RunOnce (runOnce)
import Data.List hiding (any)
-import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Maybe.Extra (forMaybeM)
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
import Data.Streaming.Process hiding (callProcess, env)
-import Data.String
-import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8, encodeUtf8)
-import Data.Text.Extra (stripCR)
import Data.Time.Clock (getCurrentTime)
-import Data.Traversable (forM)
import Data.Tuple
import qualified Distribution.PackageDescription as C
import qualified Distribution.Simple.Build.Macros as C
@@ -75,8 +56,7 @@ import Language.Haskell.TH as TH (location)
import Path
import Path.CheckInstall
import Path.Extra (toFilePathNoTrailingSep, rejectMissingFile)
-import Path.IO hiding (findExecutable, makeAbsolute)
-import Prelude hiding (FilePath, writeFile, any)
+import Path.IO hiding (findExecutable, makeAbsolute, withSystemTempDir)
import Stack.Build.Cache
import Stack.Build.Haddock
import Stack.Build.Installed
@@ -84,6 +64,7 @@ import Stack.Build.Source
import Stack.Build.Target
import Stack.Config
import Stack.Constants
+import Stack.Constants.Config
import Stack.Coverage
import Stack.Fetch as Fetch
import Stack.GhcPkg
@@ -94,11 +75,10 @@ import Stack.Types.Build
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.GhcPkgId
-import Stack.Types.Internal
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
+import Stack.Types.Runner
import Stack.Types.Version
import qualified System.Directory as D
import System.Environment (getExecutablePath)
@@ -114,35 +94,37 @@ import System.Process.Run
import System.Process.Internals (createProcess_)
#endif
+-- | Has an executable been built or not?
+data ExecutableBuildStatus
+ = ExecutableBuilt
+ | ExecutableNotBuilt
+ deriving (Show, Eq, Ord)
+
-- | Fetch the packages necessary for a build, for example in combination with a dry run.
-preFetch :: (StackM env m, HasEnvConfig env) => Plan -> m ()
+preFetch :: HasEnvConfig env => Plan -> RIO env ()
preFetch plan
- | Set.null idents = $logDebug "Nothing to fetch"
+ | Set.null idents = logDebug "Nothing to fetch"
| otherwise = do
- $logDebug $ T.pack $
+ logDebug $ T.pack $
"Prefetching: " ++
intercalate ", " (map packageIdentifierString $ Set.toList idents)
fetchPackages idents
where
- idents = Set.unions $ map toIdent $ Map.toList $ planTasks plan
+ idents = Set.unions $ map toIdent $ Map.elems $ planTasks plan
- toIdent (name, task) =
+ toIdent task =
case taskType task of
- TTLocal _ -> Set.empty
- TTUpstream package _ _ -> Set.singleton $ PackageIdentifier
- name
- (packageVersion package)
+ TTFiles{} -> Set.empty
+ TTIndex _ _ (PackageIdentifierRevision ident _) -> Set.singleton ident
-- | Print a description of build plan for human consumption.
-printPlan :: (StackM env m)
- => Plan
- -> m ()
+printPlan :: HasRunner env => Plan -> RIO env ()
printPlan plan = do
case Map.elems $ planUnregisterLocal plan of
- [] -> $logInfo "No packages would be unregistered."
+ [] -> logInfo "No packages would be unregistered."
xs -> do
- $logInfo "Would unregister locally:"
- forM_ xs $ \(ident, reason) -> $logInfo $ T.concat
+ logInfo "Would unregister locally:"
+ forM_ xs $ \(ident, reason) -> logInfo $ T.concat
[ T.pack $ packageIdentifierString ident
, if T.null reason
then ""
@@ -153,13 +135,13 @@ printPlan plan = do
]
]
- $logInfo ""
+ logInfo ""
case Map.elems $ planTasks plan of
- [] -> $logInfo "Nothing to build."
+ [] -> logInfo "Nothing to build."
xs -> do
- $logInfo "Would build:"
- mapM_ ($logInfo . displayTask) xs
+ logInfo "Would build:"
+ mapM_ (logInfo . displayTask) xs
let hasTests = not . Set.null . testComponents . taskComponents
hasBenches = not . Set.null . benchComponents . taskComponents
@@ -167,21 +149,21 @@ printPlan plan = do
benches = Map.elems $ Map.filter hasBenches $ planFinals plan
unless (null tests) $ do
- $logInfo ""
- $logInfo "Would test:"
- mapM_ ($logInfo . displayTask) tests
+ logInfo ""
+ logInfo "Would test:"
+ mapM_ (logInfo . displayTask) tests
unless (null benches) $ do
- $logInfo ""
- $logInfo "Would benchmark:"
- mapM_ ($logInfo . displayTask) benches
+ logInfo ""
+ logInfo "Would benchmark:"
+ mapM_ (logInfo . displayTask) benches
- $logInfo ""
+ logInfo ""
case Map.toList $ planInstallExes plan of
- [] -> $logInfo "No executables to be installed."
+ [] -> logInfo "No executables to be installed."
xs -> do
- $logInfo "Would install executables:"
- forM_ xs $ \(name, loc) -> $logInfo $ T.concat
+ logInfo "Would install executables:"
+ forM_ xs $ \(name, loc) -> logInfo $ T.concat
[ name
, " from "
, case loc of
@@ -200,8 +182,8 @@ displayTask task = T.pack $ concat
Local -> "local"
, ", source="
, case taskType task of
- TTLocal lp -> toFilePath $ lpDir lp
- TTUpstream{} -> "package index"
+ TTFiles lp _ -> toFilePath $ lpDir lp
+ TTIndex{} -> "package index"
, if Set.null missing
then ""
else ", after: " ++ intercalate "," (map packageIdentifierString $ Set.toList missing)
@@ -209,7 +191,7 @@ displayTask task = T.pack $ concat
where
missing = tcoMissing $ taskConfigOpts task
-data ExecuteEnv m = ExecuteEnv
+data ExecuteEnv = ExecuteEnv
{ eeEnvOverride :: !EnvOverride
, eeConfigureLock :: !(MVar ())
, eeInstallLock :: !(MVar ())
@@ -233,8 +215,8 @@ data ExecuteEnv m = ExecuteEnv
, eeSnapshotDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () () ())))
, eeLocalDumpPkgs :: !(TVar (Map GhcPkgId (DumpPackage () () ())))
, eeLogFiles :: !(TChan (Path Abs Dir, Path Abs File))
- , eeGetGhcPath :: !(m (Path Abs File))
- , eeGetGhcjsPath :: !(m (Path Abs File))
+ , eeGetGhcPath :: !(forall m. MonadIO m => m (Path Abs File))
+ , eeGetGhcjsPath :: !(forall m. MonadIO m => m (Path Abs File))
, eeCustomBuilt :: !(IORef (Set PackageName))
-- ^ Stores which packages with custom-setup have already had their
-- Setup.hs built.
@@ -267,11 +249,11 @@ simpleSetupHash =
encodeUtf8 (T.pack (unwords buildSetupArgs)) <> setupGhciShimCode <> simpleSetupCode
-- | Get a compiled Setup exe
-getSetupExe :: (StackM env m, HasEnvConfig env)
+getSetupExe :: HasEnvConfig env
=> Path Abs File -- ^ Setup.hs input file
-> Path Abs File -- ^ SetupShim.hs input file
-> Path Abs Dir -- ^ temporary directory
- -> m (Maybe (Path Abs File))
+ -> RIO env (Maybe (Path Abs File))
getSetupExe setupHs setupShimHs tmpdir = do
wc <- view $ actualCompilerVersionL.whichCompilerL
platformDir <- platformGhcRelDir
@@ -324,13 +306,16 @@ getSetupExe setupHs setupShimHs tmpdir = do
, toFilePath tmpOutputPath
] ++
["-build-runner" | wc == Ghcjs]
- runCmd' (\cp -> cp { std_out = UseHandle stderr }) (Cmd (Just tmpdir) (compilerExeName wc) menv args) Nothing
+ callProcess' (\cp -> cp { std_out = UseHandle stderr }) (Cmd (Just tmpdir) (compilerExeName wc) menv args)
+ `catch` \(ProcessExitedUnsuccessfully _ ec) -> do
+ compilerPath <- getCompilerPath wc
+ throwM $ SetupHsBuildFailure ec Nothing compilerPath args Nothing []
when (wc == Ghcjs) $ renameDir tmpJsExePath jsExePath
renameFile tmpExePath exePath
return $ Just exePath
-- | Execute a function that takes an 'ExecuteEnv'.
-withExecuteEnv :: forall env m a. (StackM env m, HasEnvConfig env)
+withExecuteEnv :: forall env a. HasEnvConfig env
=> EnvOverride
-> BuildOpts
-> BuildOptsCLI
@@ -339,18 +324,18 @@ withExecuteEnv :: forall env m a. (StackM env m, HasEnvConfig env)
-> [DumpPackage () () ()] -- ^ global packages
-> [DumpPackage () () ()] -- ^ snapshot packages
-> [DumpPackage () () ()] -- ^ local packages
- -> (ExecuteEnv m -> m a)
- -> m a
-withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages inner = do
+ -> (ExecuteEnv -> RIO env a)
+ -> RIO env a
+withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages inner =
withSystemTempDir stackProgName $ \tmpdir -> do
- configLock <- newMVar ()
- installLock <- newMVar ()
+ configLock <- liftIO $ newMVar ()
+ installLock <- liftIO $ newMVar ()
idMap <- liftIO $ newTVarIO Map.empty
config <- view configL
getGhcPath <- runOnce $ getCompilerPath Ghc
getGhcjsPath <- runOnce $ getCompilerPath Ghcjs
- customBuiltRef <- liftIO $ newIORef Set.empty
+ customBuiltRef <- newIORef Set.empty
-- Create files for simple setup and setup shim, if necessary
let setupSrcDir =
@@ -373,6 +358,7 @@ withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshot
localPackagesTVar <- liftIO $ newTVarIO (toDumpPackagesByGhcPkgId localPackages)
logFilesTChan <- liftIO $ atomically newTChan
let totalWanted = length $ filter lpWanted locals
+ env <- ask
inner ExecuteEnv
{ eeEnvOverride = menv
, eeBuildOpts = bopts
@@ -398,14 +384,14 @@ withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshot
, eeSnapshotDumpPkgs = snapshotPackagesTVar
, eeLocalDumpPkgs = localPackagesTVar
, eeLogFiles = logFilesTChan
- , eeGetGhcPath = getGhcPath
- , eeGetGhcjsPath = getGhcjsPath
+ , eeGetGhcPath = runRIO env getGhcPath
+ , eeGetGhcjsPath = runRIO env getGhcjsPath
, eeCustomBuilt = customBuiltRef
} `finally` dumpLogs logFilesTChan totalWanted
where
toDumpPackagesByGhcPkgId = Map.fromList . map (\dp -> (dpGhcPkgId dp, dp))
- dumpLogs :: TChan (Path Abs Dir, Path Abs File) -> Int -> m ()
+ dumpLogs :: TChan (Path Abs Dir, Path Abs File) -> Int -> RIO env ()
dumpLogs chan totalWanted = do
allLogs <- fmap reverse $ liftIO $ atomically drainChan
case allLogs of
@@ -418,13 +404,18 @@ withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshot
DumpWarningLogs -> mapM_ dumpLogIfWarning allLogs
DumpNoLogs
| totalWanted > 1 ->
- $logInfo $ T.concat
+ logInfo $ T.concat
[ "Build output has been captured to log files, use "
, "--dump-logs to see it on the console"
]
| otherwise -> return ()
- $logInfo $ T.pack $ "Log files have been written to: "
+ logInfo $ T.pack $ "Log files have been written to: "
++ toFilePath (parent (snd firstLog))
+
+ -- We only strip the colors /after/ we've dumped logs, so that
+ -- we get pretty colors in our dump output on the terminal.
+ colors <- shouldForceGhcColorFlag
+ when colors $ liftIO $ mapM_ (stripColors . snd) allLogs
where
drainChan :: STM [(Path Abs Dir, Path Abs File)]
drainChan = do
@@ -435,10 +426,10 @@ withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshot
xs <- drainChan
return $ x:xs
- dumpLogIfWarning :: (Path Abs Dir, Path Abs File) -> m ()
+ dumpLogIfWarning :: (Path Abs Dir, Path Abs File) -> RIO env ()
dumpLogIfWarning (pkgDir, filepath) = do
firstWarning <- runResourceT
- $ CB.sourceFile (toFilePath filepath)
+ $ transPipe liftResourceT (CB.sourceFile (toFilePath filepath))
$$ CT.decodeUtf8Lenient
=$ CT.lines
=$ CL.map stripCR
@@ -450,19 +441,40 @@ withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshot
isWarning t = ": Warning:" `T.isSuffixOf` t -- prior to GHC 8
|| ": warning:" `T.isInfixOf` t -- GHC 8 is slightly different
- dumpLog :: String -> (Path Abs Dir, Path Abs File) -> m ()
+ dumpLog :: String -> (Path Abs Dir, Path Abs File) -> RIO env ()
dumpLog msgSuffix (pkgDir, filepath) = do
- $logInfo $ T.pack $ concat ["\n-- Dumping log file", msgSuffix, ": ", toFilePath filepath, "\n"]
+ logInfo $ T.pack $ concat ["\n-- Dumping log file", msgSuffix, ": ", toFilePath filepath, "\n"]
compilerVer <- view actualCompilerVersionL
runResourceT
- $ CB.sourceFile (toFilePath filepath)
+ $ transPipe liftResourceT (CB.sourceFile (toFilePath filepath))
$$ CT.decodeUtf8Lenient
=$ mungeBuildOutput ExcludeTHLoading ConvertPathsToAbsolute pkgDir compilerVer
- =$ CL.mapM_ $logInfo
- $logInfo $ T.pack $ "\n-- End of log file: " ++ toFilePath filepath ++ "\n"
+ =$ CL.mapM_ logInfo
+ logInfo $ T.pack $ "\n-- End of log file: " ++ toFilePath filepath ++ "\n"
+
+ stripColors :: Path Abs File -> IO ()
+ stripColors fp = do
+ let colorfp = toFilePath fp ++ "-color"
+ runConduitRes $ CB.sourceFile (toFilePath fp) .| CB.sinkFile colorfp
+ runConduitRes
+ $ CB.sourceFile colorfp
+ .| noColors
+ .| CB.sinkFile (toFilePath fp)
+
+ where
+ noColors = do
+ CB.takeWhile (/= 27) -- ESC
+ mnext <- CB.head
+ case mnext of
+ Nothing -> return ()
+ Just x -> assert (x == 27) $ do
+ -- Color sequences always end with an m
+ CB.dropWhile (/= 109) -- m
+ CB.drop 1 -- drop the m itself
+ noColors
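The noColors loop above drops each ANSI color sequence, i.e. everything from the ESC byte (27) through the terminating 'm' (109). Roughly the same idea over a plain ByteString, as a sketch only; stripAnsiColors is an illustrative helper, not part of this patch:

import qualified Data.ByteString as S

stripAnsiColors :: S.ByteString -> S.ByteString
stripAnsiColors bs =
  case S.break (== 27) bs of                 -- keep everything before ESC
    (before, rest)
      | S.null rest -> before                -- no escape sequence left
      | otherwise   ->                       -- drop "ESC ... m", then continue
          before <> stripAnsiColors (S.drop 1 (S.dropWhile (/= 109) rest))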
-- | Perform the actual plan
-executePlan :: (StackM env m, HasEnvConfig env)
+executePlan :: HasEnvConfig env
=> EnvOverride
-> BuildOptsCLI
-> BaseConfigOpts
@@ -471,11 +483,11 @@ executePlan :: (StackM env m, HasEnvConfig env)
-> [DumpPackage () () ()] -- ^ snapshot packages
-> [DumpPackage () () ()] -- ^ local packages
-> InstalledMap
- -> Map PackageName SimpleTarget
+ -> Map PackageName Target
-> Plan
- -> m ()
+ -> RIO env ()
executePlan menv boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages installedMap targets plan = do
- $logDebug "Executing the build plan"
+ logDebug "Executing the build plan"
bopts <- view buildOptsL
withExecuteEnv menv bopts boptsCli baseConfigOpts locals globalPackages snapshotPackages localPackages (executePlan' installedMap targets plan)
@@ -487,20 +499,24 @@ executePlan menv boptsCli baseConfigOpts locals globalPackages snapshotPackages
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
+ , esKeepGhcRts = False
}
forM_ (boptsCLIExec boptsCli) $ \(cmd, args) ->
- $withProcessTimeLog cmd args $
+ withProcessTimeLog cmd args $
callProcess (Cmd Nothing cmd menv' args)
copyExecutables
- :: (StackM env m, HasEnvConfig env)
+ :: HasEnvConfig env
=> Map Text InstallLocation
- -> m ()
+ -> RIO env ()
copyExecutables exes | Map.null exes = return ()
copyExecutables exes = do
snapBin <- (</> bindirSuffix) `liftM` installationRootDeps
localBin <- (</> bindirSuffix) `liftM` installationRootLocal
- destDir <- view $ configL.to configLocalBin
+ compilerSpecific <- boptsInstallCompilerTool <$> view buildOptsL
+ destDir <- if compilerSpecific
+ then bindirCompilerTools
+ else view $ configL.to configLocalBin
ensureDir destDir
destDir' <- liftIO . D.canonicalizePath . toFilePath $ destDir
@@ -518,11 +534,11 @@ copyExecutables exes = do
case loc of
Snap -> snapBin
Local -> localBin
- mfp <- forgivingAbsence (resolveFile bindir $ T.unpack name ++ ext)
+ mfp <- liftIO $ forgivingAbsence (resolveFile bindir $ T.unpack name ++ ext)
>>= rejectMissingFile
case mfp of
Nothing -> do
- $logWarn $ T.concat
+ logWarn $ T.concat
[ "Couldn't find executable "
, name
, " in directory "
@@ -531,7 +547,7 @@ copyExecutables exes = do
return Nothing
Just file -> do
let destFile = destDir' FP.</> T.unpack name ++ ext
- $logInfo $ T.concat
+ logInfo $ T.concat
[ "Copying from "
, T.pack $ toFilePath file
, " to "
@@ -545,13 +561,13 @@ copyExecutables exes = do
return $ Just (name <> T.pack ext)
unless (null installed) $ do
- $logInfo ""
- $logInfo $ T.concat
+ logInfo ""
+ logInfo $ T.concat
[ "Copied executables to "
, T.pack destDir'
, ":"]
- forM_ installed $ \exe -> $logInfo ("- " <> exe)
- warnInstallSearchPathIssues destDir' installed
+ forM_ installed $ \exe -> logInfo ("- " <> exe)
+ unless compilerSpecific $ warnInstallSearchPathIssues destDir' installed
-- | Windows can't write over the current executable. Instead, we rename the
@@ -566,12 +582,12 @@ windowsRenameCopy src dest = do
old = dest ++ ".old"
-- | Perform the actual plan (internal)
-executePlan' :: (StackM env m, HasEnvConfig env)
+executePlan' :: HasEnvConfig env
=> InstalledMap
- -> Map PackageName SimpleTarget
+ -> Map PackageName Target
-> Plan
- -> ExecuteEnv m
- -> m ()
+ -> ExecuteEnv
+ -> RIO env ()
executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do
when (toCoverage $ boptsTestOpts eeBuildOpts) deleteHpcReports
cv <- view actualCompilerVersionL
@@ -581,7 +597,7 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do
ids -> do
localDB <- packageDatabaseLocal
forM_ ids $ \(id', (ident, reason)) -> do
- $logInfo $ T.concat
+ logInfo $ T.concat
[ T.pack $ packageIdentifierString ident
, ": unregistering"
, if T.null reason
@@ -597,13 +613,9 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do
liftIO $ atomically $ modifyTVar' eeLocalDumpPkgs $ \initMap ->
foldl' (flip Map.delete) initMap $ Map.keys (planUnregisterLocal plan)
- -- Yes, we're explicitly discarding result values, which in general would
- -- be bad. monad-unlift does this all properly at the type system level,
- -- but I don't want to pull it in for this one use case, when we know that
- -- stack always using transformer stacks that are safe for this use case.
- runInBase <- liftBaseWith $ \run -> return (void . run)
+ run <- askRunInIO
- let actions = concatMap (toActions installedMap' runInBase ee) $ Map.elems $ Map.mergeWithKey
+ let actions = concatMap (toActions installedMap' run ee) $ Map.elems $ Map.mergeWithKey
(\_ b f -> Just (Just b, Just f))
(fmap (\b -> (Just b, Nothing)))
(fmap (\f -> (Nothing, Just f)))
@@ -625,10 +637,10 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do
let total = length actions
loop prev
| prev == total =
- runInBase $ $logStickyDone ("Completed " <> T.pack (show total) <> " action(s).")
+ run $ logStickyDone ("Completed " <> T.pack (show total) <> " action(s).")
| otherwise = do
- when terminal $ runInBase $
- $logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
+ when terminal $ run $
+ logSticky ("Progress: " <> T.pack (show prev) <> "/" <> T.pack (show total))
done <- atomically $ do
done <- readTVar doneVar
check $ done /= prev
@@ -662,10 +674,10 @@ executePlan' installedMap0 targets plan ee@ExecuteEnv {..} = do
$ Map.elems
$ planUnregisterLocal plan
-toActions :: (StackM env m, HasEnvConfig env)
+toActions :: HasEnvConfig env
=> InstalledMap
- -> (m () -> IO ())
- -> ExecuteEnv m
+ -> (RIO env () -> IO ())
+ -> ExecuteEnv
-> (Maybe Task, Maybe Task) -- build and final
-> [Action]
toActions installedMap runInBase ee (mbuild, mfinal) =
@@ -719,9 +731,9 @@ toActions installedMap runInBase ee (mbuild, mfinal) =
beopts = boptsBenchmarkOpts bopts
-- | Generate the ConfigCache
-getConfigCache :: (StackM env m, HasEnvConfig env)
- => ExecuteEnv m -> Task -> InstalledMap -> Bool -> Bool
- -> m (Map PackageIdentifier GhcPkgId, ConfigCache)
+getConfigCache :: HasEnvConfig env
+ => ExecuteEnv -> Task -> InstalledMap -> Bool -> Bool
+ -> RIO env (Map PackageIdentifier GhcPkgId, ConfigCache)
getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBench = do
useExactConf <- view $ configL.to configAllowNewer
let extra =
@@ -731,7 +743,7 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc
-- 'stack test'. See:
-- https://github.com/commercialhaskell/stack/issues/805
case taskType of
- TTLocal lp ->
+ TTFiles lp _ ->
-- FIXME: make this work with exact-configuration.
-- Not sure how to plumb the info atm. See
-- https://github.com/commercialhaskell/stack/issues/2049
@@ -749,7 +761,7 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc
-> installedToGhcPkgId ident installed
Just installed -> installedToGhcPkgId ident installed
_ -> error "singleBuild: invariant violated, missing package ID missing"
- installedToGhcPkgId ident (Library ident' x) = assert (ident == ident') $ Just (ident, x)
+ installedToGhcPkgId ident (Library ident' x _) = assert (ident == ident') $ Just (ident, x)
installedToGhcPkgId _ (Executable _) = Nothing
missing' = Map.fromList $ mapMaybe getMissing $ Set.toList missing
TaskConfigOpts missing mkOpts = taskConfigOpts
@@ -762,8 +774,8 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc
, configCacheDeps = allDeps
, configCacheComponents =
case taskType of
- TTLocal lp -> Set.map renderComponent $ lpComponents lp
- TTUpstream{} -> Set.empty
+ TTFiles lp _ -> Set.map renderComponent $ lpComponents lp
+ TTIndex{} -> Set.empty
, configCacheHaddock =
shouldHaddockPackage eeBuildOpts eeWanted (packageIdentifierName taskProvides)
, configCachePkgSrc = taskCachePkgSrc
@@ -772,18 +784,19 @@ getConfigCache ExecuteEnv {..} task@Task {..} installedMap enableTest enableBenc
return (allDepsMap, cache)
-- | Ensure that the configuration for the package matches what is given
-ensureConfig :: (StackM env m, HasEnvConfig env)
+ensureConfig :: HasEnvConfig env
=> ConfigCache -- ^ newConfigCache
-> Path Abs Dir -- ^ package directory
- -> ExecuteEnv m
- -> m () -- ^ announce
- -> (ExcludeTHLoading -> [String] -> m ()) -- ^ cabal
+ -> ExecuteEnv
+ -> RIO env () -- ^ announce
+ -> (ExcludeTHLoading -> [String] -> RIO env ()) -- ^ cabal
-> Path Abs File -- ^ .cabal file
- -> m Bool
-ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
+ -> Task
+ -> RIO env Bool
+ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp task = do
newCabalMod <- liftIO (fmap modTime (D.getModificationTime (toFilePath cabalfp)))
needConfig <-
- if boptsReconfigure eeBuildOpts
+ if boptsReconfigure eeBuildOpts || taskAnyMissing task
then return True
else do
-- We can ignore the components portion of the config
@@ -801,6 +814,9 @@ ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
return $ fmap ignoreComponents mOldConfigCache /= Just (ignoreComponents newConfigCache)
|| mOldCabalMod /= Just newCabalMod
let ConfigureOpts dirs nodirs = configCacheOpts newConfigCache
+
+ when (taskBuildTypeConfig task) ensureConfigureScript
+
when needConfig $ withMVar eeConfigureLock $ \_ -> do
deleteCaches pkgDir
announce
@@ -825,9 +841,22 @@ ensureConfig newConfigCache pkgDir ExecuteEnv {..} announce cabal cabalfp = do
writeCabalMod pkgDir newCabalMod
return needConfig
+ where
+ -- When build-type is Configure, we need to have a configure
+ -- script in the local directory. If it doesn't exist, build it
+ -- with autoreconf -i. See:
+ -- https://github.com/commercialhaskell/stack/issues/3534
+ ensureConfigureScript = do
+ let fp = pkgDir </> $(mkRelFile "configure")
+ exists <- doesFileExist fp
+ unless exists $ do
+ logInfo $ "Trying to generate configure with autoreconf in " <> T.pack (toFilePath pkgDir)
+ menv <- getMinimalEnvOverride
+ readProcessNull (Just pkgDir) menv "autoreconf" ["-i"] `catchAny` \ex ->
+ logWarn $ "Unable to run autoreconf: " <> T.pack (show ex)
announceTask :: MonadLogger m => Task -> Text -> m ()
-announceTask task x = $logInfo $ T.concat
+announceTask task x = logInfo $ T.concat
[ T.pack $ packageIdentifierString $ taskProvides task
, ": "
, x
@@ -844,10 +873,10 @@ announceTask task x = $logInfo $ T.concat
-- custom setup is built.
--
-- * Provides the user a function with which run the Cabal process.
-withSingleContext :: forall env m a. (StackM env m, HasEnvConfig env)
- => (m () -> IO ())
+withSingleContext :: forall env a. HasEnvConfig env
+ => (RIO env () -> IO ())
-> ActionContext
- -> ExecuteEnv m
+ -> ExecuteEnv
-> Task
-> Maybe (Map PackageIdentifier GhcPkgId)
-- ^ All dependencies' package ids to provide to Setup.hs. If
@@ -857,12 +886,13 @@ withSingleContext :: forall env m a. (StackM env m, HasEnvConfig env)
-> ( Package -- Package info
-> Path Abs File -- Cabal file path
-> Path Abs Dir -- Package root directory file path
- -> (ExcludeTHLoading -> [String] -> m ()) -- Function to run Cabal with args
- -> (Text -> m ()) -- An 'announce' function, for different build phases
+ -> (ExcludeTHLoading -> [String] -> RIO env ())
+ -- Function to run Cabal with args
+ -> (Text -> RIO env ()) -- An 'announce' function, for different build phases
-> Bool -- Whether output should be directed to the console
-> Maybe (Path Abs File, Handle) -- Log file
- -> m a)
- -> m a
+ -> RIO env a)
+ -> RIO env a
withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} mdeps msuffix inner0 =
withPackage $ \package cabalfp pkgDir ->
withLogFile pkgDir package $ \mlogFile ->
@@ -873,8 +903,8 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
wanted =
case taskType of
- TTLocal lp -> lpWanted lp
- TTUpstream{} -> False
+ TTFiles lp _ -> lpWanted lp
+ TTIndex{} -> False
console = wanted
&& all (\(ActionId ident _) -> ident == taskProvides) (Set.toList acRemaining)
@@ -882,19 +912,15 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
withPackage inner =
case taskType of
- TTLocal lp -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
- TTUpstream package _ gitSHA1 -> do
- mdist <- liftM Just distRelativeDir
- m <- unpackPackageIdents eeTempDir mdist
- $ Map.singleton taskProvides gitSHA1
- case Map.toList m of
- [(ident, dir)]
- | ident == taskProvides -> do
- let name = packageIdentifierName taskProvides
- cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
- let cabalfp = dir </> cabalfpRel
- inner package cabalfp dir
- _ -> error $ "withPackage: invariant violated: " ++ show m
+ TTFiles lp _ -> inner (lpPackage lp) (lpCabalFile lp) (lpDir lp)
+ TTIndex package _ pir -> do
+ mdist <- distRelativeDir
+ dir <- unpackPackageIdent eeTempDir mdist pir
+
+ let name = packageIdentifierName taskProvides
+ cabalfpRel <- parseRelFile $ packageNameString name ++ ".cabal"
+ let cabalfp = dir </> cabalfpRel
+ inner package cabalfp dir
withLogFile pkgDir package inner
| console = inner Nothing
@@ -905,7 +931,7 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
-- We only want to dump logs for local non-dependency packages
case taskType of
- TTLocal lp | lpWanted lp ->
+ TTFiles lp _ | lpWanted lp ->
liftIO $ atomically $ writeTChan eeLogFiles (pkgDir, logPath)
_ -> return ()
@@ -918,8 +944,8 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
:: Package
-> Path Abs Dir
-> Maybe (Path Abs File, Handle)
- -> ((ExcludeTHLoading -> [String] -> m ()) -> m a)
- -> m a
+ -> ((ExcludeTHLoading -> [String] -> RIO env ()) -> RIO env a)
+ -> RIO env a
withCabal package pkgDir mlogFile inner = do
config <- view configL
@@ -931,6 +957,7 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = True
+ , esKeepGhcRts = False
}
menv <- liftIO $ configEnvOverride config envSettings
distRelativeDir' <- distRelativeDir
@@ -961,20 +988,18 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
: ["-hide-all-packages"]
)
- warnCustomNoDeps :: m ()
+ warnCustomNoDeps :: RIO env ()
warnCustomNoDeps =
case (taskType, packageBuildType package) of
- (TTLocal{}, Just C.Custom) -> do
- $logWarn $ T.pack $ concat
- [ "Package "
- , packageNameString $ packageName package
- , " uses a custom Cabal build, but does not use a custom-setup stanza"
+ (TTFiles lp Local, Just C.Custom) | lpWanted lp -> do
+ prettyWarnL
+ [ flow "Package"
+ , display $ packageName package
+ , flow "uses a custom Cabal build, but does not use a custom-setup stanza"
]
- $logWarn "Using the explicit setup deps approach based on configuration"
- $logWarn "Strongly recommend fixing the package's cabal file"
_ -> return ()
- getPackageArgs :: Path Abs Dir -> m [String]
+ getPackageArgs :: Path Abs Dir -> RIO env [String]
getPackageArgs setupDir =
case (packageSetupDeps package, mdeps) of
-- The package is using the Cabal custom-setup
@@ -983,11 +1008,16 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
-- explicit list of dependencies, and we
-- should simply use all of them.
(Just customSetupDeps, _) -> do
+ unless (Map.member $(mkPackageName "Cabal") customSetupDeps) $
+ prettyWarnL
+ [ display $ packageName package
+ , "has a setup-depends field, but it does not mention a Cabal dependency. This is likely to cause build errors."
+ ]
allDeps <-
case mdeps of
Just x -> return x
Nothing -> do
- $logWarn "In getPackageArgs: custom-setup in use, but no dependency map present"
+ prettyWarnS "In getPackageArgs: custom-setup in use, but no dependency map present"
return Map.empty
matchedDeps <- forM (Map.toList customSetupDeps) $ \(name, range) -> do
let matches (PackageIdentifier name' version) =
@@ -996,10 +1026,10 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
case filter (matches . fst) (Map.toList allDeps) of
x:xs -> do
unless (null xs)
- ($logWarn (T.pack ("Found multiple installed packages for custom-setup dep: " ++ packageNameString name)))
+ (logWarn (T.pack ("Found multiple installed packages for custom-setup dep: " ++ packageNameString name)))
return ("-package-id=" ++ ghcPkgIdString (snd x), Just (toCabalPackageIdentifier (fst x)))
[] -> do
- $logWarn (T.pack ("Could not find custom-setup dep: " ++ packageNameString name))
+ logWarn (T.pack ("Could not find custom-setup dep: " ++ packageNameString name))
return ("-package=" ++ packageNameString name, Nothing)
let depsArgs = map fst matchedDeps
-- Generate setup_macros.h and provide it to ghc
@@ -1055,7 +1085,7 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
setupArgs = ("--builddir=" ++ toFilePathNoTrailingSep distRelativeDir') : args
- runExe :: Path Abs File -> [String] -> m ()
+ runExe :: Path Abs File -> [String] -> RIO env ()
runExe exeName fullArgs = do
compilerVer <- view actualCompilerVersionL
runAndOutput compilerVer `catch` \(ProcessExitedUnsuccessfully _ ec) -> do
@@ -1065,19 +1095,19 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
Just (logFile, h) -> do
liftIO $ hClose h
runResourceT
- $ CB.sourceFile (toFilePath logFile)
+ $ transPipe liftResourceT (CB.sourceFile (toFilePath logFile))
=$= CT.decodeUtf8Lenient
$$ mungeBuildOutput stripTHLoading makeAbsolute pkgDir compilerVer
=$ CL.consume
- throwM $ CabalExitedUnsuccessfully
+ throwM $ SetupHsBuildFailure
ec
- taskProvides
+ (Just taskProvides)
exeName
fullArgs
(fmap fst mlogFile)
bss
where
- runAndOutput :: CompilerVersion -> m ()
+ runAndOutput :: CompilerVersion 'CVActual -> RIO env ()
runAndOutput compilerVer = case mlogFile of
Just (_, h) ->
sinkProcessStderrStdoutHandle (Just pkgDir) menv (toFilePath exeName) fullArgs h h
@@ -1088,7 +1118,7 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
outputSink
:: ExcludeTHLoading
-> LogLevel
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Sink S.ByteString IO ()
outputSink excludeTH level compilerVer =
CT.decodeUtf8Lenient
@@ -1100,10 +1130,9 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
ExcludeTHLoading -> ConvertPathsToAbsolute
KeepTHLoading -> KeepPathsAsIs
- wc <- view $ actualCompilerVersionL.whichCompilerL
- exeName <- case (esetupexehs, wc) of
- (Left setupExe, _) -> return setupExe
- (Right setuphs, compiler) -> do
+ exeName <- case esetupexehs of
+ Left setupExe -> return setupExe
+ Right setuphs -> do
distDir <- distDirFromDir pkgDir
let setupDir = distDir </> $(mkRelDir "setup")
outputFile = setupDir </> $(mkRelFile "setup")
@@ -1112,6 +1141,7 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
then return outputFile
else do
ensureDir setupDir
+ compiler <- view $ actualCompilerVersionL.whichCompilerL
compilerPath <-
case compiler of
Ghc -> eeGetGhcPath
@@ -1154,14 +1184,14 @@ withSingleContext runInBase ActionContext {..} ExecuteEnv {..} task@Task {..} md
-- local install directory. Note that this is literally invoking Cabal
-- with @copy@, and not the copying done by @stack install@ - that is
-- handled by 'copyExecutables'.
-singleBuild :: forall env m. (StackM env m, HasEnvConfig env)
- => (m () -> IO ())
+singleBuild :: forall env. (HasEnvConfig env, HasRunner env)
+ => (RIO env () -> IO ())
-> ActionContext
- -> ExecuteEnv m
+ -> ExecuteEnv
-> Task
-> InstalledMap
-> Bool -- ^ Is this a final build?
- -> m ()
+ -> RIO env ()
singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} installedMap isFinalBuild = do
(allDepsMap, cache) <- getConfigCache ee task installedMap enableTests enableBenchmarks
mprecompiled <- getPrecompiled cache
@@ -1187,7 +1217,7 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
enableTests = buildingFinals && any isCTest (taskComponents task)
enableBenchmarks = buildingFinals && any isCBench (taskComponents task)
- annSuffix = if result == "" then "" else " (" <> result <> ")"
+ annSuffix executableBuildStatuses = if result == "" then "" else " (" <> result <> ")"
where
result = T.intercalate " + " $ concat
[ ["lib" | taskAllInOne && hasLib]
@@ -1196,23 +1226,32 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
, ["bench" | enableBenchmarks]
]
(hasLib, hasExe) = case taskType of
- TTLocal lp -> (packageHasLibrary (lpPackage lp), not (Set.null (exesToBuild lp)))
+ TTFiles lp Local ->
+ let hasLibrary =
+ case packageLibraries (lpPackage lp) of
+ NoLibraries -> False
+ HasLibraries _ -> True
+ in (hasLibrary, not (Set.null (exesToBuild executableBuildStatuses lp)))
-- This isn't true, but we don't want to have this info for
-- upstream deps.
- TTUpstream{} -> (False, False)
+ _ -> (False, False)
getPrecompiled cache =
case taskLocation task of
Snap | not shouldHaddockPackage' -> do
- mpc <- readPrecompiledCache taskProvides
- (configCacheOpts cache)
- (configCacheDeps cache)
+ mpc <-
+ case taskLocation task of
+ Snap -> readPrecompiledCache
+ (ttPackageLocation taskType)
+ (configCacheOpts cache)
+ (configCacheDeps cache)
+ _ -> return Nothing
case mpc of
Nothing -> return Nothing
-- Only pay attention to precompiled caches that refer to packages within
-- the snapshot.
Just pc | maybe False
- (bcoSnapInstallRoot eeBaseConfigOpts `isParentOf`)
+ (bcoSnapInstallRoot eeBaseConfigOpts `isProperPrefixOf`)
(parseAbsFile =<< pcLibrary pc) ->
return Nothing
-- If old precompiled cache files are left around but snapshots are deleted,
@@ -1241,7 +1280,7 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
menv' <- modifyEnvOverride menv
$ Map.insert
- "GHC_PACKAGE_PATH"
+ (ghcPkgPathEnvVar wc)
(T.pack $ toFilePathNoTrailingSep $ bcoSnapDB eeBaseConfigOpts)
-- In case a build of the library with different flags already exists, unregister it
@@ -1281,18 +1320,24 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
return $ Just $
case mpkgid of
Nothing -> assert False $ Executable taskProvides
- Just pkgid -> Library taskProvides pkgid
+ Just pkgid -> Library taskProvides pkgid Nothing
where
bindir = toFilePath $ bcoSnapInstallRoot eeBaseConfigOpts </> bindirSuffix
realConfigAndBuild cache allDepsMap = withSingleContext runInBase ac ee task (Just allDepsMap) Nothing
$ \package cabalfp pkgDir cabal announce _console _mlogFile -> do
- _neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> annSuffix)) cabal cabalfp
+ executableBuildStatuses <- getExecutableBuildStatuses package pkgDir
+ when (not (cabalIsSatisfied executableBuildStatuses) && taskIsTarget task)
+ (logInfo
+ ("Building all executables for `" <> packageNameText (packageName package) <>
+ "' once. After a successful build of all of them, only specified executables will be rebuilt."))
+
+ _neededConfig <- ensureConfig cache pkgDir ee (announce ("configure" <> annSuffix executableBuildStatuses)) cabal cabalfp task
let installedMapHasThisPkg :: Bool
installedMapHasThisPkg =
case Map.lookup (packageName package) installedMap of
- Just (_, Library ident _) -> ident == taskProvides
+ Just (_, Library ident _ _) -> ident == taskProvides
Just (_, Executable _) -> True
_ -> False
@@ -1304,38 +1349,40 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
-- https://github.com/commercialhaskell/stack/issues/2787
(True, _) | null acDownstream -> return Nothing
(_, True) | null acDownstream || installedMapHasThisPkg -> do
- initialBuildSteps cabal announce
+ initialBuildSteps executableBuildStatuses cabal announce
return Nothing
- _ -> liftM Just $ realBuild cache package pkgDir cabal announce
+ _ -> liftM Just $ realBuild cache package pkgDir cabal announce executableBuildStatuses
- initialBuildSteps cabal announce = do
- () <- announce ("initial-build-steps" <> annSuffix)
+ initialBuildSteps executableBuildStatuses cabal announce = do
+ () <- announce ("initial-build-steps" <> annSuffix executableBuildStatuses)
cabal KeepTHLoading ["repl", "stack-initial-build-steps"]
realBuild
:: ConfigCache
-> Package
-> Path Abs Dir
- -> (ExcludeTHLoading -> [String] -> m ())
- -> (Text -> m ())
- -> m Installed
- realBuild cache package pkgDir cabal announce = do
+ -> (ExcludeTHLoading -> [String] -> RIO env ())
+ -> (Text -> RIO env ())
+ -> Map Text ExecutableBuildStatus
+ -> RIO env Installed
+ realBuild cache package pkgDir cabal announce executableBuildStatuses = do
wc <- view $ actualCompilerVersionL.whichCompilerL
markExeNotInstalled (taskLocation task) taskProvides
case taskType of
- TTLocal lp -> do
+ TTFiles lp _ -> do -- FIXME should this only be for local packages?
when enableTests $ unsetTestSuccess pkgDir
writeBuildCache pkgDir $ lpNewBuildCache lp
- TTUpstream{} -> return ()
+ TTIndex{} -> return ()
-- FIXME: only output these if they're in the build plan.
preBuildTime <- modTime <$> liftIO getCurrentTime
let postBuildCheck _succeeded = do
mlocalWarnings <- case taskType of
- TTLocal lp -> do
+ TTFiles lp Local -> do
warnings <- checkForUnlistedFiles taskType preBuildTime pkgDir
+ -- TODO: Perhaps only emit these warnings for non-extra-dep packages?
return (Just (lpCabalFile lp, warnings))
_ -> return Nothing
-- NOTE: once
@@ -1346,16 +1393,16 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
"- In" <+>
maybe "the library component" (\c -> fromString c <+> "component") mcomp <>
":" <> line <>
- indent 4 (mconcat $ intersperse line $ map (goodGreen . fromString . C.display) modules)
+ indent 4 (mconcat $ intersperse line $ map (styleGood . fromString . C.display) modules)
forM_ mlocalWarnings $ \(cabalfp, warnings) -> do
- unless (null warnings) $ $prettyWarn $
+ unless (null warnings) $ prettyWarn $
"The following modules should be added to exposed-modules or other-modules in" <+>
display cabalfp <> ":" <> line <>
indent 4 (mconcat $ map showModuleWarning warnings) <>
line <> line <>
"Missing modules in the cabal file are likely to cause undefined reference errors from the linker, along with other problems."
- () <- announce ("build" <> annSuffix)
+ () <- announce ("build" <> annSuffix executableBuildStatuses)
config <- view configL
extraOpts <- extraBuildOptions wc eeBuildOpts
let stripTHLoading
@@ -1364,10 +1411,10 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
cabal stripTHLoading (("build" :) $ (++ extraOpts) $
case (taskType, taskAllInOne, isFinalBuild) of
(_, True, True) -> error "Invariant violated: cannot have an all-in-one build that also has a final build step."
- (TTLocal lp, False, False) -> primaryComponentOptions lp
- (TTLocal lp, False, True) -> finalComponentOptions lp
- (TTLocal lp, True, False) -> primaryComponentOptions lp ++ finalComponentOptions lp
- (TTUpstream{}, _, _) -> [])
+ (TTFiles lp _, False, False) -> primaryComponentOptions executableBuildStatuses lp
+ (TTFiles lp _, False, True) -> finalComponentOptions lp
+ (TTFiles lp _, True, False) -> primaryComponentOptions executableBuildStatuses lp ++ finalComponentOptions lp
+ (TTIndex{}, _, _) -> [])
`catch` \ex -> case ex of
CabalExitedUnsuccessfully{} -> postBuildCheck False >> throwM ex
_ -> throwM ex
@@ -1385,19 +1432,23 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
-- Older hscolour colouring
Left _ -> do
hscolourExists <- doesExecutableExist eeEnvOverride "HsColour"
- unless hscolourExists $ $logWarn
+ unless hscolourExists $ logWarn
("Warning: haddock not generating hyperlinked sources because 'HsColour' not\n" <>
"found on PATH (use 'stack install hscolour' to install).")
return ["--hyperlink-source" | hscolourExists]
cabal KeepTHLoading $ concat
- [ ["haddock", "--html", "--html-location=../$pkg-$version/"]
+ [ ["haddock", "--html", "--hoogle", "--html-location=../$pkg-$version/"]
, sourceFlag
, ["--internal" | boptsHaddockInternal eeBuildOpts]
, [ "--haddock-option=" <> opt
| opt <- hoAdditionalArgs (boptsHaddockOpts eeBuildOpts) ]
]
- let shouldCopy = not isFinalBuild && (packageHasLibrary package || not (Set.null (packageExes package)))
+ let hasLibrary =
+ case packageLibraries package of
+ NoLibraries -> False
+ HasLibraries _ -> True
+ shouldCopy = not isFinalBuild && (hasLibrary || not (Set.null (packageExes package)))
when shouldCopy $ withMVar eeInstallLock $ \() -> do
announce "copy/register"
eres <- try $ cabal KeepTHLoading ["copy"]
@@ -1405,7 +1456,7 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
Left err@CabalExitedUnsuccessfully{} ->
throwM $ CabalCopyFailed (packageBuildType package == Just C.Simple) (show err)
_ -> return ()
- when (packageHasLibrary package) $ cabal KeepTHLoading ["register"]
+ when hasLibrary $ cabal KeepTHLoading ["register"]
let (installedPkgDb, installedDumpPkgsTVar) =
case taskLocation task of
@@ -1416,28 +1467,31 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
( bcoLocalDB eeBaseConfigOpts
, eeLocalDumpPkgs )
let ident = PackageIdentifier (packageName package) (packageVersion package)
- mpkgid <- if packageHasLibrary package
- then do
+ mpkgid <- case packageLibraries package of
+ HasLibraries _ -> do
mpkgid <- loadInstalledPkg eeEnvOverride wc [installedPkgDb] installedDumpPkgsTVar (packageName package)
case mpkgid of
Nothing -> throwM $ Couldn'tFindPkgId $ packageName package
- Just pkgid -> return $ Library ident pkgid
- else do
+ Just pkgid -> return $ Library ident pkgid Nothing
+ NoLibraries -> do
markExeInstalled (taskLocation task) taskProvides -- TODO unify somehow with writeFlagCache?
return $ Executable ident
case taskLocation task of
- Snap -> writePrecompiledCache eeBaseConfigOpts taskProvides
+ Snap ->
+ writePrecompiledCache
+ eeBaseConfigOpts
+ (ttPackageLocation taskType)
(configCacheOpts cache)
(configCacheDeps cache)
mpkgid (packageExes package)
- Local -> return ()
+ _ -> return ()
case taskType of
- -- For upstream packages, pkgDir is in the tmp directory. We
- -- eagerly delete it if no other tasks require it, to reduce
- -- space usage in tmp (#3018).
- TTUpstream{} -> do
+ -- For packages from a package index, pkgDir is in the tmp
+ -- directory. We eagerly delete it if no other tasks
+ -- require it, to reduce space usage in tmp (#3018).
+ TTIndex{} -> do
let remaining = filter (\(ActionId x _) -> x == taskProvides) (Set.toList acRemaining)
when (null remaining) $ removeDirRecur pkgDir
_ -> return ()
@@ -1453,20 +1507,71 @@ singleBuild runInBase ac@ActionContext {..} ee@ExecuteEnv {..} task@Task {..} in
return $ Just (dpGhcPkgId dp)
_ -> error "singleBuild: invariant violated: multiple results when describing installed package"
+-- | Get the build status of all the package executables. Do so by
+-- testing whether their expected output file exists, e.g.
+--
+-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha
+-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.exe
+-- .stack-work/dist/x86_64-osx/Cabal-1.22.4.0/build/alpha/alpha.jsexe/ (NOTE: a dir)
+getExecutableBuildStatuses
+ :: HasEnvConfig env
+ => Package -> Path Abs Dir -> RIO env (Map Text ExecutableBuildStatus)
+getExecutableBuildStatuses package pkgDir = do
+ compiler <- view $ actualCompilerVersionL.whichCompilerL
+ distDir <- distDirFromDir pkgDir
+ platform <- view platformL
+ fmap
+ M.fromList
+ (mapM (checkExeStatus compiler platform distDir) (Set.toList (packageExes package)))
+
+-- | Check whether the given executable is defined in the given dist directory.
+checkExeStatus
+ :: (MonadLogger m, MonadIO m, MonadThrow m)
+ => WhichCompiler
+ -> Platform
+ -> Path b Dir
+ -> Text
+ -> m (Text, ExecutableBuildStatus)
+checkExeStatus compiler platform distDir name = do
+ exename <- parseRelDir (T.unpack name)
+ exists <- checkPath (distDir </> $(mkRelDir "build") </> exename)
+ pure
+ ( name
+ , if exists
+ then ExecutableBuilt
+ else ExecutableNotBuilt)
+ where
+ checkPath base =
+ case compiler of
+ Ghcjs -> do
+ dir <- parseRelDir (file ++ ".jsexe")
+ doesDirExist (base </> dir)
+ _ ->
+ case platform of
+ Platform _ Windows -> do
+ fileandext <- parseRelFile (file ++ ".exe")
+ doesFileExist (base </> fileandext)
+ _ -> do
+ fileandext <- parseRelFile file
+ doesFileExist (base </> fileandext)
+ where
+ file = T.unpack name
+
-- | Check if any unlisted files have been found, and add them to the build cache.
-checkForUnlistedFiles :: (StackM env m, HasEnvConfig env) => TaskType -> ModTime -> Path Abs Dir -> m [PackageWarning]
-checkForUnlistedFiles (TTLocal lp) preBuildTime pkgDir = do
+checkForUnlistedFiles :: HasEnvConfig env => TaskType -> ModTime -> Path Abs Dir -> RIO env [PackageWarning]
+checkForUnlistedFiles (TTFiles lp _) preBuildTime pkgDir = do
(addBuildCache,warnings) <-
addUnlistedToBuildCache
preBuildTime
(lpPackage lp)
(lpCabalFile lp)
+ (lpComponents lp)
(lpNewBuildCache lp)
unless (null addBuildCache) $
writeBuildCache pkgDir $
Map.unions (lpNewBuildCache lp : addBuildCache)
return warnings
-checkForUnlistedFiles TTUpstream{} _ _ = return []
+checkForUnlistedFiles TTIndex{} _ _ = return []
-- | Determine if all of the dependencies given are installed
depsPresent :: InstalledMap -> Map PackageName VersionRange -> Bool
@@ -1479,15 +1584,15 @@ depsPresent installedMap deps = all
-- | Implements running a package's tests. Also handles producing
-- coverage reports if coverage is enabled.
-singleTest :: (StackM env m, HasEnvConfig env)
- => (m () -> IO ())
+singleTest :: HasEnvConfig env
+ => (RIO env () -> IO ())
-> TestOpts
-> [Text]
-> ActionContext
- -> ExecuteEnv m
+ -> ExecuteEnv
-> Task
-> InstalledMap
- -> m ()
+ -> RIO env ()
singleTest runInBase topts testsToRun ac ee task installedMap = do
-- FIXME: Since this doesn't use cabal, we should be able to avoid using a
-- full-blown 'withSingleContext'.
@@ -1543,6 +1648,7 @@ singleTest runInBase topts testsToRun ac ee task installedMap = do
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
+ , esKeepGhcRts = False
}
if exists
then do
@@ -1550,8 +1656,8 @@ singleTest runInBase topts testsToRun ac ee task installedMap = do
when needHpc $ do
tixexists <- doesFileExist tixPath
when tixexists $
- $logWarn ("Removing HPC file " <> T.pack (toFilePath tixPath))
- ignoringAbsence (removeFile tixPath)
+ logWarn ("Removing HPC file " <> T.pack (toFilePath tixPath))
+ liftIO $ ignoringAbsence (removeFile tixPath)
let args = toAdditionalArgs topts
argsDisplay = case args of
@@ -1562,7 +1668,7 @@ singleTest runInBase topts testsToRun ac ee task installedMap = do
-- Clear "Progress: ..." message before
-- redirecting output.
when (isNothing mlogFile) $ do
- $logStickyDone ""
+ logStickyDone ""
liftIO $ hFlush stdout
liftIO $ hFlush stderr
@@ -1584,17 +1690,22 @@ singleTest runInBase topts testsToRun ac ee task installedMap = do
ec <- liftIO $ waitForProcess ph
-- Add a trailing newline, in case the test
-- output didn't finish with a newline.
- when (isNothing mlogFile) ($logInfo "")
+ when (isNothing mlogFile) (logInfo "")
-- Move the .tix file out of the package
-- directory into the hpc work dir, for
-- tidiness.
when needHpc $
updateTixFile (packageName package) tixPath testName'
- return $ case ec of
- ExitSuccess -> Map.empty
- _ -> Map.singleton testName $ Just ec
+ let announceResult result = announce $ "Test suite " <> testName <> " " <> result
+ case ec of
+ ExitSuccess -> do
+ announceResult "passed"
+ return Map.empty
+ _ -> do
+ announceResult "failed"
+ return $ Map.singleton testName (Just ec)
else do
- $logError $ T.pack $ show $ TestSuiteExeMissing
+ logError $ T.pack $ show $ TestSuiteExeMissing
(packageBuildType package == Just C.Simple)
exeName
(packageNameString (packageName package))
@@ -1623,15 +1734,15 @@ singleTest runInBase topts testsToRun ac ee task installedMap = do
bs
-- | Implements running a package's benchmarks.
-singleBench :: (StackM env m, HasEnvConfig env)
- => (m () -> IO ())
+singleBench :: HasEnvConfig env
+ => (RIO env () -> IO ())
-> BenchmarkOpts
-> [Text]
-> ActionContext
- -> ExecuteEnv m
+ -> ExecuteEnv
-> Task
-> InstalledMap
- -> m ()
+ -> RIO env ()
singleBench runInBase beopts benchesToRun ac ee task installedMap = do
(allDepsMap, _cache) <- getConfigCache ee task installedMap False True
withSingleContext runInBase ac ee task (Just allDepsMap) (Just "bench") $ \_package _cabalfp _pkgDir cabal announce _console _mlogFile -> do
@@ -1655,11 +1766,11 @@ data ExcludeTHLoading = ExcludeTHLoading | KeepTHLoading
data ConvertPathsToAbsolute = ConvertPathsToAbsolute | KeepPathsAsIs
-- | Strip Template Haskell "Loading package" lines and make paths absolute.
-mungeBuildOutput :: forall m. (MonadIO m, MonadCatch m, MonadBaseControl IO m)
+mungeBuildOutput :: forall m. MonadIO m
=> ExcludeTHLoading -- ^ exclude TH loading?
-> ConvertPathsToAbsolute -- ^ convert paths to absolute?
-> Path Abs Dir -- ^ package's root directory
- -> CompilerVersion -- ^ compiler we're building with
+ -> CompilerVersion 'CVActual -- ^ compiler we're building with
-> ConduitM Text Text m ()
mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $
CT.lines
@@ -1700,9 +1811,9 @@ mungeBuildOutput excludeTHLoading makeAbsolute pkgDir compilerVer = void $
let (x, y) = T.break (== ':') bs
mabs <-
if isValidSuffix y
- then liftM (fmap ((T.takeWhile isSpace x <>) . T.pack . toFilePath)) $
+ then liftIO $ liftM (fmap ((T.takeWhile isSpace x <>) . T.pack . toFilePath)) $
forgivingAbsence (resolveFile pkgDir (T.unpack $ T.dropWhile isSpace x)) `catch`
- \(_ :: PathParseException) -> return Nothing
+ \(_ :: PathException) -> return Nothing
else return Nothing
case mabs of
Nothing -> return bs
@@ -1742,39 +1853,56 @@ getSetupHs dir = do
-- Do not pass `-hpcdir` as a GHC option if coverage is not enabled.
-- This helps when running stack-compiled programs with dynamic interpreters like `hint`.
-- Cf. https://github.com/commercialhaskell/stack/issues/997
-extraBuildOptions :: (StackM env m, HasEnvConfig env) => WhichCompiler -> BuildOpts -> m [String]
+extraBuildOptions :: (HasEnvConfig env, HasRunner env)
+ => WhichCompiler -> BuildOpts -> RIO env [String]
extraBuildOptions wc bopts = do
+ colorOpt <- appropriateGhcColorFlag
let ddumpOpts = " -ddump-hi -ddump-to-file"
optsFlag = compilerOptionsCabalFlag wc
+ baseOpts = ddumpOpts ++ maybe "" (" " ++) colorOpt
if toCoverage (boptsTestOpts bopts)
then do
hpcIndexDir <- toFilePathNoTrailingSep <$> hpcRelativeDir
- return [optsFlag, "-hpcdir " ++ hpcIndexDir ++ ddumpOpts]
+ return [optsFlag, "-hpcdir " ++ hpcIndexDir ++ baseOpts]
else
- return [optsFlag, ddumpOpts]
+ return [optsFlag, baseOpts]
-- Library and executable build components.
-primaryComponentOptions :: LocalPackage -> [String]
-primaryComponentOptions lp = ["lib:" ++ packageNameString (packageName (lpPackage lp))
+primaryComponentOptions :: Map Text ExecutableBuildStatus -> LocalPackage -> [String]
+primaryComponentOptions executableBuildStatuses lp =
-- TODO: get this information from target parsing instead,
-- which will allow users to turn off library building if
-- desired
- | packageHasLibrary (lpPackage lp)] ++
- map (T.unpack . T.append "exe:") (Set.toList $ exesToBuild lp)
-
-exesToBuild :: LocalPackage -> Set Text
-exesToBuild lp = packageExes (lpPackage lp)
- -- NOTE: Ideally we'd do something like the following code, allowing
- -- the user to control which executables get built. However, due to
- -- https://github.com/haskell/cabal/issues/2780 we must build all
- -- exes...
- --
- -- if lpWanted lp
- -- then exeComponents (lpComponents lp)
- -- -- Build all executables in the event that no
- -- -- specific list is provided (as happens with
- -- -- extra-deps).
- -- else packageExes (lpPackage lp)
+ (case packageLibraries (lpPackage lp) of
+ NoLibraries -> []
+ HasLibraries names ->
+ map T.unpack
+ $ T.append "lib:" (packageNameText (packageName (lpPackage lp)))
+ : map (T.append "flib:") (Set.toList names)) ++
+ map (T.unpack . T.append "exe:") (Set.toList $ exesToBuild executableBuildStatuses lp)
+
+-- | History of this function:
+--
+-- * Originally it built either all executables or, if the user
+-- requested specific components, just those. Later, because of
+-- this Cabal bug <https://github.com/haskell/cabal/issues/2780>,
+-- we had to make Stack build all executables every time.
+--
+-- * In <https://github.com/commercialhaskell/stack/issues/3229> this
+-- was flagged as very undesirable behavior on a large project,
+-- hence the behavior below: build all executables once (modulo
+-- success), and thereafter pay attention only to user-wanted
+-- components.
+--
+exesToBuild :: Map Text ExecutableBuildStatus -> LocalPackage -> Set Text
+exesToBuild executableBuildStatuses lp =
+ if cabalIsSatisfied executableBuildStatuses && lpWanted lp
+ then exeComponents (lpComponents lp)
+ else packageExes (lpPackage lp)
+
+-- | Do the current executables satisfy Cabal's bugged out requirements?
+cabalIsSatisfied :: Map k ExecutableBuildStatus -> Bool
+cabalIsSatisfied = all (== ExecutableBuilt) . M.elems
-- Test-suite and benchmark build components.
finalComponentOptions :: LocalPackage -> [String]
@@ -1786,8 +1914,8 @@ finalComponentOptions lp =
taskComponents :: Task -> Set NamedComponent
taskComponents task =
case taskType task of
- TTLocal lp -> lpComponents lp
- TTUpstream{} -> Set.empty
+ TTFiles lp _ -> lpComponents lp -- FIXME probably just want Local, maybe even just lpWanted
+ TTIndex{} -> Set.empty
-- | Take the given list of package dependencies and the contents of the global
-- package database, and construct a set of installed package IDs that:
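A rough sketch of the executable-selection rule the Execute.hs changes above introduce (illustrative only, not part of this commit: it paraphrases exesToBuild and cabalIsSatisfied from the diff, and ExecutableBuildStatus is assumed to mirror Stack's type):

    import qualified Data.Map.Strict as M
    import qualified Data.Set as Set
    import Data.Text (Text)

    data ExecutableBuildStatus = ExecutableBuilt | ExecutableNotBuilt
      deriving (Eq, Show)

    -- Once every executable has been built at least once, narrow rebuilds to the
    -- user-requested executables; until then build them all, working around
    -- https://github.com/haskell/cabal/issues/2780.
    selectExes :: M.Map Text ExecutableBuildStatus  -- from getExecutableBuildStatuses
               -> Bool                              -- is the package a wanted target?
               -> Set.Set Text                      -- requested exe components
               -> Set.Set Text                      -- all exes in the package
               -> Set.Set Text
    selectExes statuses wanted requested allExes
      | all (== ExecutableBuilt) (M.elems statuses) && wanted = requested
      | otherwise = allExes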
diff --git a/src/Stack/Build/Haddock.hs b/src/Stack/Build/Haddock.hs
index 39404c9..ce183a7 100644
--- a/src/Stack/Build/Haddock.hs
+++ b/src/Stack/Build/Haddock.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
@@ -16,31 +17,17 @@ module Stack.Build.Haddock
, shouldHaddockDeps
) where
-import Control.Exception (tryJust, onException)
-import Control.Monad
-import Control.Monad.Catch (MonadCatch)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Trans.Resource
+import Stack.Prelude
import qualified Data.Foldable as F
-import Data.Function
import qualified Data.HashSet as HS
-import Data.List
import Data.List.Extra (nubOrd)
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Maybe.Extra (mapMaybeM)
-import Data.Monoid ((<>))
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (Text)
import qualified Data.Text as T
import Data.Time (UTCTime)
import Path
import Path.Extra
import Path.IO
-import Prelude
import Stack.PackageDump
import Stack.PrettyPrint
import Stack.Types.Build
@@ -50,21 +37,19 @@ import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT (StackM)
-import Stack.Types.StringError
+import Stack.Types.Runner
import qualified System.FilePath as FP
-import System.IO.Error (isDoesNotExistError)
import System.Process.Read
import Web.Browser (openBrowser)
openHaddocksInBrowser
- :: StackM env m
+ :: HasRunner env
=> BaseConfigOpts
-> Map PackageName (PackageIdentifier, InstallLocation)
-- ^ Available packages and their locations for the current project
-> Set PackageName
-- ^ Build targets as determined by 'Stack.Build.Source.loadSourceMap'
- -> m ()
+ -> RIO env ()
openHaddocksInBrowser bco pkgLocations buildTargets = do
let cliTargets = (boptsCLITargets . bcoBuildOptsCLI) bco
getDocIndex = do
@@ -91,13 +76,13 @@ openHaddocksInBrowser bco pkgLocations buildTargets = do
if exists
then return docFile
else do
- $logWarn $
+ logWarn $
"Expected to find documentation at " <>
T.pack (toFilePath docFile) <>
", but that file is missing. Opening doc index instead."
getDocIndex
_ -> getDocIndex
- $prettyInfo $ "Opening" <+> display docFile <+> "in the browser."
+ prettyInfo $ "Opening" <+> display docFile <+> "in the browser."
_ <- liftIO $ openBrowser (toFilePath docFile)
return ()
@@ -119,7 +104,7 @@ shouldHaddockDeps bopts = fromMaybe (boptsHaddock bopts) (boptsHaddockDeps bopts
-- | Generate Haddock index and contents for local packages.
generateLocalHaddockIndex
- :: (MonadIO m, MonadCatch m, MonadLogger m, MonadBaseControl IO m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
@@ -138,14 +123,14 @@ generateLocalHaddockIndex envOverride wc bco localDumpPkgs locals = do
"local packages"
envOverride
wc
- (boptsHaddockOpts (bcoBuildOpts bco))
+ bco
dumpPackages
"."
(localDocDir bco)
-- | Generate Haddock index and contents for local packages and their dependencies.
generateDepsHaddockIndex
- :: (MonadIO m, MonadCatch m, MonadLogger m, MonadBaseControl IO m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
@@ -161,7 +146,7 @@ generateDepsHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs loca
"local packages and dependencies"
envOverride
wc
- (boptsHaddockOpts (bcoBuildOpts bco))
+ bco
deps
".."
depDocDir
@@ -190,7 +175,7 @@ generateDepsHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs loca
-- | Generate Haddock index and contents for all snapshot packages.
generateSnapHaddockIndex
- :: (MonadIO m, MonadCatch m, MonadLogger m, MonadBaseControl IO m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> BaseConfigOpts
@@ -202,23 +187,23 @@ generateSnapHaddockIndex envOverride wc bco globalDumpPkgs snapshotDumpPkgs =
"snapshot packages"
envOverride
wc
- (boptsHaddockOpts (bcoBuildOpts bco))
+ bco
(Map.elems snapshotDumpPkgs ++ Map.elems globalDumpPkgs)
"."
(snapDocDir bco)
-- | Generate Haddock index and contents for specified packages.
generateHaddockIndex
- :: (MonadIO m, MonadCatch m, MonadLogger m, MonadBaseControl IO m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> Text
-> EnvOverride
-> WhichCompiler
- -> HaddockOpts
+ -> BaseConfigOpts
-> [DumpPackage () () ()]
-> FilePath
-> Path Abs Dir
-> m ()
-generateHaddockIndex descr envOverride wc hdopts dumpPackages docRelFP destDir = do
+generateHaddockIndex descr envOverride wc bco dumpPackages docRelFP destDir = do
ensureDir destDir
interfaceOpts <- (liftIO . fmap nubOrd . mapMaybeM toInterfaceOpt) dumpPackages
unless (null interfaceOpts) $ do
@@ -231,7 +216,7 @@ generateHaddockIndex descr envOverride wc hdopts dumpPackages docRelFP destDir =
or [mt > indexModTime | (_,mt,_,_) <- interfaceOpts]
if needUpdate
then do
- $logInfo
+ logInfo
(T.concat ["Updating Haddock index for ", descr, " in\n",
T.pack (toFilePath destIndexFile)])
liftIO (mapM_ copyPkgDocs interfaceOpts)
@@ -239,11 +224,13 @@ generateHaddockIndex descr envOverride wc hdopts dumpPackages docRelFP destDir =
(Just destDir)
envOverride
(haddockExeName wc)
- (hoAdditionalArgs hdopts ++
+ (map (("--optghc=-package-db=" ++ ) . toFilePathNoTrailingSep)
+ [bcoSnapDB bco, bcoLocalDB bco] ++
+ hoAdditionalArgs (boptsHaddockOpts (bcoBuildOpts bco)) ++
["--gen-contents", "--gen-index"] ++
[x | (xs,_,_,_) <- interfaceOpts, x <- xs])
else
- $logInfo
+ logInfo
(T.concat ["Haddock index for ", descr, " already up to date at:\n",
T.pack (toFilePath destIndexFile)])
where
@@ -273,8 +260,6 @@ generateHaddockIndex descr envOverride wc hdopts dumpPackages docRelFP destDir =
, srcInterfaceModTime
, srcInterfaceAbsFile
, destInterfaceAbsFile )
- tryGetModificationTime :: Path Abs File -> IO (Either () UTCTime)
- tryGetModificationTime = tryJust (guard . isDoesNotExistError) . getModificationTime
copyPkgDocs :: (a, UTCTime, Path Abs File, Path Abs File) -> IO ()
copyPkgDocs (_,srcInterfaceModTime,srcInterfaceAbsFile,destInterfaceAbsFile) = do
-- Copy dependencies' haddocks to documentation directory. This way, relative @../$pkg-$ver@
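For orientation on the generateHaddockIndex change above: the function now receives BaseConfigOpts so the index run can see both package databases. A minimal sketch of how the new arguments line up with the existing ones (the paths are placeholders standing in for bcoSnapDB/bcoLocalDB, and the per-package interface options from the diff would follow at the end):

    import System.FilePath (dropTrailingPathSeparator)

    -- Placeholder package-database paths; the real values come from BaseConfigOpts.
    snapDB, localDB :: FilePath
    snapDB  = "/path/to/snapshot/pkgdb"
    localDB = "/path/to/local/pkgdb"

    -- New --optghc flags first, then the user's extra haddock options,
    -- then the usual index-generation switches.
    haddockIndexArgs :: [String] -> [String]
    haddockIndexArgs extraHaddockOpts =
      map (("--optghc=-package-db=" ++) . dropTrailingPathSeparator) [snapDB, localDB]
        ++ extraHaddockOpts
        ++ ["--gen-contents", "--gen-index"]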
diff --git a/src/Stack/Build/Installed.hs b/src/Stack/Build/Installed.hs
index 747dd45..38818df 100644
--- a/src/Stack/Build/Installed.hs
+++ b/src/Stack/Build/Installed.hs
@@ -1,8 +1,8 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
-- Determine which packages are already installed
module Stack.Build.Installed
( InstalledMap
@@ -11,29 +11,19 @@ module Stack.Build.Installed
, getInstalled
) where
-import Control.Applicative
-import Control.Arrow
-import Control.Monad
-import Control.Monad.Logger
import Data.Conduit
import qualified Data.Conduit.List as CL
import qualified Data.Foldable as F
-import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Maybe.Extra (mapMaybeM)
-import Data.Monoid
import qualified Data.Text as T
import Path
-import Prelude hiding (FilePath, writeFile)
import Stack.Build.Cache
import Stack.Constants
-import Stack.GhcPkg
import Stack.PackageDump
+import Stack.Prelude
import Stack.Types.Build
import Stack.Types.Compiler
import Stack.Types.Config
@@ -42,8 +32,8 @@ import Stack.Types.Package
import Stack.Types.PackageDump
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
import Stack.Types.Version
+import System.Process.Read (EnvOverride)
-- | Options for 'getInstalled'.
data GetInstalledOpts = GetInstalledOpts
@@ -56,17 +46,18 @@ data GetInstalledOpts = GetInstalledOpts
}
-- | Returns the new InstalledMap and all of the locally registered packages.
-getInstalled :: (StackM env m, HasEnvConfig env, PackageInstallInfo pii)
+getInstalled :: HasEnvConfig env
=> EnvOverride
-> GetInstalledOpts
- -> Map PackageName pii -- ^ does not contain any installed information
- -> m ( InstalledMap
+ -> Map PackageName PackageSource -- ^ does not contain any installed information
+ -> RIO env
+ ( InstalledMap
, [DumpPackage () () ()] -- globally installed
, [DumpPackage () () ()] -- snapshot installed
, [DumpPackage () () ()] -- locally installed
)
getInstalled menv opts sourceMap = do
- $logDebug "Finding out which packages are already installed"
+ logDebug "Finding out which packages are already installed"
snapDBPath <- packageDatabaseDeps
localDBPath <- packageDatabaseLocal
extraDBPaths <- packageDatabaseExtra
@@ -126,14 +117,14 @@ getInstalled menv opts sourceMap = do
-- The goal is to ascertain that the dependencies for a package are present,
-- that it has profiling if necessary, and that it matches the version and
-- location needed by the SourceMap
-loadDatabase :: (StackM env m, HasEnvConfig env, PackageInstallInfo pii)
+loadDatabase :: HasEnvConfig env
=> EnvOverride
-> GetInstalledOpts
-> Maybe InstalledCache -- ^ if Just, profiling or haddock is required
- -> Map PackageName pii -- ^ to determine which installed things we should include
+ -> Map PackageName PackageSource -- ^ to determine which installed things we should include
-> Maybe (InstalledPackageLocation, Path Abs Dir) -- ^ package database, Nothing for global
-> [LoadHelper] -- ^ from parent databases
- -> m ([LoadHelper], [DumpPackage () () ()])
+ -> RIO env ([LoadHelper], [DumpPackage () () ()])
loadDatabase menv opts mcache sourceMap mdb lhs0 = do
wc <- view $ actualCompilerVersionL.to whichCompiler
(lhs1', dps) <- ghcPkgDump menv wc (fmap snd (maybeToList mdb))
@@ -186,7 +177,7 @@ processLoadResult _ True (WrongVersion actual wanted, lh)
-- Allow some packages in the ghcjs global DB to have the wrong
-- versions. Treat them as wired-ins by setting deps to [].
| fst (lhPair lh) `HashSet.member` ghcjsBootPackages = do
- $logWarn $ T.concat
+ logWarn $ T.concat
[ "Ignoring that the GHCJS boot package \""
, packageNameText (fst (lhPair lh))
, "\" has a different version, "
@@ -196,7 +187,7 @@ processLoadResult _ True (WrongVersion actual wanted, lh)
]
return (Just lh)
processLoadResult mdb _ (reason, lh) = do
- $logDebug $ T.concat $
+ logDebug $ T.concat $
[ "Ignoring package "
, packageNameText (fst (lhPair lh))
] ++
@@ -231,10 +222,9 @@ data Allowed
-- | Check if a package can be included in the set of installed packages or not, based
-- on the package selections made by the user. This does not perform any
-- dirtiness or flag change checks.
-isAllowed :: PackageInstallInfo pii
- => GetInstalledOpts
+isAllowed :: GetInstalledOpts
-> Maybe InstalledCache
- -> Map PackageName pii
+ -> Map PackageName PackageSource
-> Maybe InstalledPackageLocation
-> DumpPackage Bool Bool Bool
-> Allowed
@@ -288,7 +278,7 @@ toLoadHelper mloc dp = LoadHelper
if name `HashSet.member` wiredInPackages
then []
else dpDepends dp
- , lhPair = (name, (toPackageLocation mloc, Library ident gid))
+ , lhPair = (name, (toPackageLocation mloc, Library ident gid (dpLicense dp)))
}
where
gid = dpGhcPkgId dp
diff --git a/src/Stack/Build/Source.hs b/src/Stack/Build/Source.hs
index 3b508f6..c92f128 100644
--- a/src/Stack/Build/Source.hs
+++ b/src/Stack/Build/Source.hs
@@ -1,9 +1,7 @@
-{-# LANGUAGE BangPatterns #-}
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE PackageImports #-}
-{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE ConstraintKinds #-}
@@ -12,25 +10,12 @@ module Stack.Build.Source
( loadSourceMap
, loadSourceMapFull
, SourceMap
- , PackageSource (..)
, getLocalFlags
, getGhcOptions
- , getLocalPackageViews
- , parseTargetsFromBuildOpts
- , parseTargetsFromBuildOptsWith
, addUnlistedToBuildCache
- , getDefaultPackageConfig
- , getPackageConfig
) where
-import Control.Applicative
-import Control.Arrow ((&&&))
-import Control.Exception (assert, catch)
-import Control.Monad hiding (sequence)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Reader (MonadReader)
-import Control.Monad.Trans.Resource
+import Stack.Prelude
import Crypto.Hash (Digest, SHA256(..))
import Crypto.Hash.Conduit (sinkHash)
import qualified Data.ByteArray as Mem (convert)
@@ -38,163 +23,96 @@ import qualified Data.ByteString as S
import Data.Conduit (($$), ZipSink (..))
import qualified Data.Conduit.Binary as CB
import qualified Data.Conduit.List as CL
-import Data.Either
-import Data.Function
import qualified Data.HashSet as HashSet
import Data.List
import qualified Data.Map as Map
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
-import Data.Maybe
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (Text)
-import qualified Data.Text as T
-import Data.Traversable (sequence)
-import Distribution.Package (pkgName, pkgVersion)
-import Distribution.PackageDescription (GenericPackageDescription, package, packageDescription)
-import qualified Distribution.PackageDescription as C
-import Path
-import Path.IO
-import Prelude hiding (sequence)
import Stack.Build.Cache
import Stack.Build.Target
-import Stack.BuildPlan (shadowMiniBuildPlan)
import Stack.Config (getLocalPackages)
import Stack.Constants (wiredInPackages)
import Stack.Package
-import Stack.PackageIndex (getPackageVersions)
+import Stack.PackageLocation
import Stack.Types.Build
import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.Package
import Stack.Types.PackageName
-import Stack.Types.Resolver
-import Stack.Types.StackT
-import Stack.Types.Version
import qualified System.Directory as D
import System.FilePath (takeFileName)
-import System.IO (withBinaryFile, IOMode (ReadMode))
import System.IO.Error (isDoesNotExistError)
-- | Like 'loadSourceMapFull', but doesn't return values that aren't as
-- commonly needed.
-loadSourceMap :: (StackM env m, HasEnvConfig env)
+loadSourceMap :: HasEnvConfig env
=> NeedTargets
-> BuildOptsCLI
- -> m ( [LocalPackage]
- , SourceMap
- )
+ -> RIO env ([LocalPackage], SourceMap)
loadSourceMap needTargets boptsCli = do
- (_, _, locals, _, _, sourceMap) <- loadSourceMapFull needTargets boptsCli
+ (_, _, locals, _, sourceMap) <- loadSourceMapFull needTargets boptsCli
return (locals, sourceMap)
-- | Given the build commandline options, does the following:
--
-- * Parses the build targets.
--
--- * Loads the 'MiniBuildPlan' from the resolver, with extra-deps
+-- * Loads the 'LoadedSnapshot' from the resolver, with extra-deps
-- shadowing any packages that should be built locally.
--
-- * Loads up the 'LocalPackage' info.
--
-- * Builds a 'SourceMap', which contains info for all the packages that
-- will be involved in the build.
-loadSourceMapFull :: (StackM env m, HasEnvConfig env)
+loadSourceMapFull :: HasEnvConfig env
=> NeedTargets
-> BuildOptsCLI
- -> m ( Map PackageName SimpleTarget
- , MiniBuildPlan
- , [LocalPackage]
- , Set PackageName -- non-local targets
- , Map PackageName Version -- extra-deps from configuration and cli
+ -> RIO env
+ ( Map PackageName Target
+ , LoadedSnapshot
+ , [LocalPackage] -- FIXME do we really want this? it's in the SourceMap
+ , Set PackageName -- non-project targets
, SourceMap
)
loadSourceMapFull needTargets boptsCli = do
bconfig <- view buildConfigL
- rawLocals <- getLocalPackageViews
- (mbp0, cliExtraDeps, targets) <- parseTargetsFromBuildOptsWith rawLocals needTargets boptsCli
-
- -- Extend extra-deps to encompass targets requested on the command line
- -- that are not in the snapshot.
- extraDeps0 <- extendExtraDeps
- (bcExtraDeps bconfig)
- cliExtraDeps
- (Map.keysSet $ Map.filter (== STUnknown) targets)
-
- locals <- mapM (loadLocalPackage boptsCli targets) $ Map.toList rawLocals
- checkFlagsUsed boptsCli locals extraDeps0 (mbpPackages mbp0)
+ (ls, localDeps, targets) <- parseTargets needTargets boptsCli
+ lp <- getLocalPackages
+ locals <- mapM (loadLocalPackage True boptsCli targets) $ Map.toList $ lpProject lp
+ checkFlagsUsed boptsCli locals localDeps (lsPackages ls)
checkComponentsBuildable locals
- let
- -- loadLocals returns PackageName (foo) and PackageIdentifier (bar-1.2.3) targets separately;
- -- here we combine them into nonLocalTargets. This is one of the
- -- return values of this function.
- nonLocalTargets :: Set PackageName
- nonLocalTargets =
- Map.keysSet $ Map.filter (not . isLocal) targets
- where
- isLocal (STLocalComps _) = True
- isLocal STLocalAll = True
- isLocal STUnknown = False
- isLocal STNonLocal = False
-
- shadowed = Map.keysSet rawLocals <> Map.keysSet extraDeps0
-
- -- Ignores all packages in the MiniBuildPlan that depend on any
- -- local packages or extra-deps. All packages that have
- -- transitive dependenceis on these packages are treated as
- -- extra-deps (extraDeps1).
- (mbp, extraDeps1) = shadowMiniBuildPlan mbp0 shadowed
-
- -- Combine the extra-deps with the ones implicitly shadowed.
- extraDeps2 = Map.union
- (Map.map (\v -> (v, Map.empty, [])) extraDeps0)
- (Map.map (\mpi -> (mpiVersion mpi, mpiFlags mpi, mpiGhcOptions mpi)) extraDeps1)
-
- -- Add flag and ghc-option settings from the config file / cli
- extraDeps3 = Map.mapWithKey
- (\n (v, flags0, ghcOptions0) ->
- let flags =
- case ( Map.lookup (Just n) $ boptsCLIFlags boptsCli
- , Map.lookup Nothing $ boptsCLIFlags boptsCli
- , Map.lookup n $ unPackageFlags $ bcFlags bconfig
- ) of
- -- Didn't have any flag overrides, fall back to the flags
- -- defined in the snapshot.
- (Nothing, Nothing, Nothing) -> flags0
- -- Either command line flag for this package, general
- -- command line flag, or flag in stack.yaml is defined.
- -- Take all of those and ignore the snapshot flags.
- (x, y, z) -> Map.unions
- [ fromMaybe Map.empty x
- , fromMaybe Map.empty y
- , fromMaybe Map.empty z
- ]
- ghcOptions =
- ghcOptions0 ++
- getGhcOptions bconfig boptsCli n False False
- -- currently have no ability for extra-deps to specify their
- -- cabal file hashes
- in PSUpstream v Local flags ghcOptions Nothing)
- extraDeps2
-
- -- Combine the local packages, extra-deps, and MiniBuildPlan into
+ -- TODO for extra sanity, confirm that the targets we threw away are all TargetAll
+ let nonProjectTargets = Map.keysSet targets `Set.difference` Map.keysSet (lpProject lp)
+
+ -- Combine the local packages, extra-deps, and LoadedSnapshot into
-- one unified source map.
- let sourceMap = Map.unions
- [ Map.fromList $ flip map locals $ \lp ->
- let p = lpPackage lp
- in (packageName p, PSLocal lp)
- , extraDeps3
- , flip Map.mapWithKey (mbpPackages mbp) $ \n mpi ->
- let configOpts = getGhcOptions bconfig boptsCli n False False
- in PSUpstream (mpiVersion mpi) Snap (mpiFlags mpi) (mpiGhcOptions mpi ++ configOpts) (mpiGitSHA1 mpi)
- ]
+ let goLPI loc n lpi = do
+ let configOpts = getGhcOptions bconfig boptsCli n False False
+ case lpiLocation lpi of
+ -- NOTE: configOpts includes lpiGhcOptions for now, this may get refactored soon
+ PLIndex pir -> return $ PSIndex loc (lpiFlags lpi) configOpts pir
+ PLOther pl -> do
+ root <- view projectRootL
+ lpv <- parseSingleCabalFile root True pl
+ lp' <- loadLocalPackage False boptsCli targets (n, lpv)
+ return $ PSFiles lp' loc
+ sourceMap' <- Map.unions <$> sequence
+ [ return $ Map.fromList $ map (\lp' -> (packageName $ lpPackage lp', PSFiles lp' Local)) locals
+ , sequence $ Map.mapWithKey (goLPI Local) localDeps
+ , sequence $ Map.mapWithKey (goLPI Snap) (lsPackages ls)
+ ]
+ let sourceMap = sourceMap'
`Map.difference` Map.fromList (map (, ()) (HashSet.toList wiredInPackages))
- return (targets, mbp, locals, nonLocalTargets, extraDeps0, sourceMap)
+ return
+ ( targets
+ , ls
+ , locals
+ , nonProjectTargets
+ , sourceMap
+ )
-- | All flags for a local package.
getLocalFlags
@@ -205,7 +123,7 @@ getLocalFlags
getLocalFlags bconfig boptsCli name = Map.unions
[ Map.findWithDefault Map.empty (Just name) cliFlags
, Map.findWithDefault Map.empty Nothing cliFlags
- , Map.findWithDefault Map.empty name (unPackageFlags (bcFlags bconfig))
+ , Map.findWithDefault Map.empty name (bcFlags bconfig)
]
where
cliFlags = boptsCLIFlags boptsCli
@@ -214,10 +132,17 @@ getLocalFlags bconfig boptsCli name = Map.unions
-- configuration and commandline.
getGhcOptions :: BuildConfig -> BuildOptsCLI -> PackageName -> Bool -> Bool -> [Text]
getGhcOptions bconfig boptsCli name isTarget isLocal = concat
- [ ghcOptionsFor name (configGhcOptions config)
+ [ Map.findWithDefault [] AGOEverything (configGhcOptionsByCat config)
+ , if isLocal
+ then Map.findWithDefault [] AGOLocals (configGhcOptionsByCat config)
+ else []
+ , if isTarget
+ then Map.findWithDefault [] AGOTargets (configGhcOptionsByCat config)
+ else []
+ , Map.findWithDefault [] name (configGhcOptionsByName config)
, concat [["-fhpc"] | isLocal && toCoverage (boptsTestOpts bopts)]
, if boptsLibProfile bopts || boptsExeProfile bopts
- then ["-auto-all","-caf-all"]
+ then ["-fprof-auto","-fprof-cafs"]
else []
, if not $ boptsLibStrip bopts || boptsExeStrip bopts
then ["-g"]
@@ -235,137 +160,6 @@ getGhcOptions bconfig boptsCli name isTarget isLocal = concat
AGOLocals -> isLocal
AGOEverything -> True
--- | Use the build options and environment to parse targets.
---
--- If the local packages views are already known, use 'parseTargetsFromBuildOptsWith'
--- instead.
---
--- Along with the 'Map' of targets, this yields the loaded
--- 'MiniBuildPlan' for the resolver, as well as a Map of extra-deps
--- derived from the commandline. These extra-deps targets come from when
--- the user specifies a particular package version on the commonadline,
--- or when a flag is specified for a snapshot package.
-parseTargetsFromBuildOpts
- :: (StackM env m, HasEnvConfig env)
- => NeedTargets
- -> BuildOptsCLI
- -> m (MiniBuildPlan, M.Map PackageName Version, M.Map PackageName SimpleTarget)
-parseTargetsFromBuildOpts needTargets boptscli = do
- rawLocals <- getLocalPackageViews
- parseTargetsFromBuildOptsWith rawLocals needTargets boptscli
-
-parseTargetsFromBuildOptsWith
- :: (StackM env m, HasEnvConfig env)
- => Map PackageName (LocalPackageView, GenericPackageDescription)
- -- ^ Local package views
- -> NeedTargets
- -> BuildOptsCLI
- -> m (MiniBuildPlan, M.Map PackageName Version, M.Map PackageName SimpleTarget)
-parseTargetsFromBuildOptsWith rawLocals needTargets boptscli = do
- $logDebug "Parsing the targets"
- bconfig <- view buildConfigL
- mbp0 <-
- case bcResolver bconfig of
- ResolverCompiler _ -> do
- -- We ignore the resolver version, as it might be
- -- GhcMajorVersion, and we want the exact version
- -- we're using.
- version <- view actualCompilerVersionL
- return MiniBuildPlan
- { mbpCompilerVersion = version
- , mbpPackages = Map.empty
- }
- _ -> return (bcWantedMiniBuildPlan bconfig)
- workingDir <- getCurrentDir
-
- let snapshot = mpiVersion <$> mbpPackages mbp0
- flagExtraDeps <- convertSnapshotToExtra
- snapshot
- (bcExtraDeps bconfig)
- rawLocals
- (catMaybes $ Map.keys $ boptsCLIFlags boptscli)
-
- (cliExtraDeps, targets) <-
- parseTargets
- needTargets
- (bcImplicitGlobal bconfig)
- snapshot
- (flagExtraDeps <> bcExtraDeps bconfig)
- (fst <$> rawLocals)
- workingDir
- (boptsCLITargets boptscli)
- return (mbp0, cliExtraDeps <> flagExtraDeps, targets)
-
--- | For every package in the snapshot which is referenced by a flag, give the
--- user a warning and then add it to extra-deps.
-convertSnapshotToExtra
- :: MonadLogger m
- => Map PackageName Version -- ^ snapshot
- -> Map PackageName Version -- ^ extra-deps
- -> Map PackageName a -- ^ locals
- -> [PackageName] -- ^ packages referenced by a flag
- -> m (Map PackageName Version)
-convertSnapshotToExtra snapshot extra0 locals = go Map.empty
- where
- go !extra [] = return extra
- go extra (flag:flags)
- | Just _ <- Map.lookup flag extra0 = go extra flags
- | flag `Map.member` locals = go extra flags
- | otherwise = case Map.lookup flag snapshot of
- Nothing -> go extra flags
- Just version -> do
- $logWarn $ T.concat
- [ "- Implicitly adding "
- , T.pack $ packageNameString flag
- , " to extra-deps based on command line flag"
- ]
- go (Map.insert flag version extra) flags
-
--- | Parse out the local package views for the current project
-getLocalPackageViews :: (StackM env m, HasEnvConfig env)
- => m (Map PackageName (LocalPackageView, GenericPackageDescription))
-getLocalPackageViews = do
- $logDebug "Parsing the cabal files of the local packages"
- packages <- getLocalPackages
- locals <- forM (Map.toList packages) $ \(dir, treatLikeExtraDep) -> do
- cabalfp <- findOrGenerateCabalFile dir
- (warnings,gpkg) <- readPackageUnresolved cabalfp
- mapM_ (printCabalFileWarning cabalfp) warnings
- let cabalID = package $ packageDescription gpkg
- name = fromCabalPackageName $ pkgName cabalID
- checkCabalFileName name cabalfp
- let lpv = LocalPackageView
- { lpvVersion = fromCabalVersion $ pkgVersion cabalID
- , lpvRoot = dir
- , lpvCabalFP = cabalfp
- , lpvExtraDep = treatLikeExtraDep
- , lpvComponents = getNamedComponents gpkg
- }
- return (name, (lpv, gpkg))
- checkDuplicateNames locals
- return $ Map.fromList locals
- where
- getNamedComponents gpkg = Set.fromList $ concat
- [ maybe [] (const [CLib]) (C.condLibrary gpkg)
- , go CExe C.condExecutables
- , go CTest C.condTestSuites
- , go CBench C.condBenchmarks
- ]
- where
- go wrapper f = map (wrapper . T.pack . fst) $ f gpkg
-
--- | Check if there are any duplicate package names and, if so, throw an
--- exception.
-checkDuplicateNames :: MonadThrow m => [(PackageName, (LocalPackageView, gpd))] -> m ()
-checkDuplicateNames locals =
- case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map toPair locals of
- [] -> return ()
- x -> throwM $ DuplicateLocalPackageNames x
- where
- toPair (pn, (lpv, _)) = (pn, [lpvRoot lpv])
- hasMultiples (_, _:_:_) = True
- hasMultiples _ = False
-
splitComponents :: [NamedComponent]
-> (Set Text, Set Text, Set Text)
splitComponents =
@@ -380,31 +174,55 @@ splitComponents =
-- | Upgrade the initial local package info to a full-blown @LocalPackage@
-- based on the selected components
loadLocalPackage
- :: forall m env. (StackM env m, HasEnvConfig env)
- => BuildOptsCLI
- -> Map PackageName SimpleTarget
- -> (PackageName, (LocalPackageView, GenericPackageDescription))
- -> m LocalPackage
-loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
+ :: forall env. HasEnvConfig env
+ => Bool
+ -- ^ Should this be treated as part of $locals? False for extra-deps.
+ --
+ -- See: https://github.com/commercialhaskell/stack/issues/3574#issuecomment-346512821
+ -> BuildOptsCLI
+ -> Map PackageName Target
+ -> (PackageName, LocalPackageView)
+ -> RIO env LocalPackage
+loadLocalPackage isLocal boptsCli targets (name, lpv) = do
let mtarget = Map.lookup name targets
- config <- getPackageConfig boptsCli name (isJust mtarget) True
+ config <- getPackageConfig boptsCli name (isJust mtarget) isLocal
bopts <- view buildOptsL
- let (exes, tests, benches) =
+ let (exeCandidates, testCandidates, benchCandidates) =
case mtarget of
- Just (STLocalComps comps) -> splitComponents $ Set.toList comps
- Just STLocalAll ->
+ Just (TargetComps comps) -> splitComponents $ Set.toList comps
+ Just (TargetAll _packageType) ->
( packageExes pkg
- , if boptsTests bopts && not (lpvExtraDep lpv)
+ , if boptsTests bopts
then Map.keysSet (packageTests pkg)
else Set.empty
- , if boptsBenchmarks bopts && not (lpvExtraDep lpv)
+ , if boptsBenchmarks bopts
then packageBenchmarks pkg
else Set.empty
)
- Just STNonLocal -> assert False mempty
- Just STUnknown -> assert False mempty
Nothing -> mempty
+ -- See https://github.com/commercialhaskell/stack/issues/2862
+ isWanted = case mtarget of
+ Nothing -> False
+ -- FIXME: When issue #1406 ("stack 0.1.8 lost ability to
+ -- build individual executables or library") is resolved,
+ -- 'hasLibrary' is only relevant if the library is
+ -- part of the target spec.
+ Just _ ->
+ let hasLibrary =
+ case packageLibraries pkg of
+ NoLibraries -> False
+ HasLibraries _ -> True
+ in hasLibrary || not (Set.null nonLibComponents)
+
+ filterSkippedComponents = Set.filter (not . (`elem` boptsSkipComponents bopts))
+
+ (exes, tests, benches) = (filterSkippedComponents exeCandidates,
+ filterSkippedComponents testCandidates,
+ filterSkippedComponents benchCandidates)
+
+ nonLibComponents = toComponents exes tests benches
+
toComponents e t b = Set.unions
[ Set.map CExe e
, Set.map CTest t
@@ -439,6 +257,7 @@ loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
-- This allows us to do an optimization where these are passed
-- if the deps are present. This can avoid doing later
-- unnecessary reconfigures.
+ gpkg = lpvGPD lpv
pkg = resolvePackage config gpkg
btpkg
| Set.null tests && Set.null benches = Nothing
@@ -447,7 +266,8 @@ loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
benchpkg = resolvePackage benchconfig gpkg
mbuildCache <- tryGetBuildCache $ lpvRoot lpv
- (files,_) <- getPackageFilesSimple pkg (lpvCabalFP lpv)
+
+ (files,_) <- getPackageFilesForTargets pkg (lpvCabalFP lpv) nonLibComponents
(dirtyFiles, newBuildCache) <- checkBuildCache
(fromMaybe Map.empty mbuildCache)
@@ -469,8 +289,8 @@ loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
, lpNewBuildCache = newBuildCache
, lpCabalFile = lpvCabalFP lpv
, lpDir = lpvRoot lpv
- , lpWanted = isJust mtarget
- , lpComponents = toComponents exes tests benches
+ , lpWanted = isWanted
+ , lpComponents = nonLibComponents
-- TODO: refactor this so that it's easier to be sure that these
-- components are indeed unbuildable.
--
@@ -481,6 +301,7 @@ loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
(exes `Set.difference` packageExes pkg)
(tests `Set.difference` Map.keysSet (packageTests pkg))
(benches `Set.difference` packageBenchmarks pkg)
+ , lpLocation = lpvLoc lpv
}
-- | Ensure that the flags specified in the stack.yaml file and on the command
@@ -488,7 +309,7 @@ loadLocalPackage boptsCli targets (name, (lpv, gpkg)) = do
checkFlagsUsed :: (MonadThrow m, MonadReader env m, HasBuildConfig env)
=> BuildOptsCLI
-> [LocalPackage]
- -> Map PackageName extraDeps -- ^ extra deps
+ -> Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- ^ local deps
-> Map PackageName snapshot -- ^ snapshot, for error messages
-> m ()
checkFlagsUsed boptsCli lps extraDeps snapshot = do
@@ -497,21 +318,21 @@ checkFlagsUsed boptsCli lps extraDeps snapshot = do
-- Check if flags specified in stack.yaml and the command line are
-- used, see https://github.com/commercialhaskell/stack/issues/617
let flags = map (, FSCommandLine) [(k, v) | (Just k, v) <- Map.toList $ boptsCLIFlags boptsCli]
- ++ map (, FSStackYaml) (Map.toList $ unPackageFlags $ bcFlags bconfig)
+ ++ map (, FSStackYaml) (Map.toList $ bcFlags bconfig)
localNameMap = Map.fromList $ map (packageName . lpPackage &&& lpPackage) lps
checkFlagUsed ((name, userFlags), source) =
case Map.lookup name localNameMap of
-- Package is not available locally
Nothing ->
- case Map.lookup name extraDeps of
+ if Map.member name extraDeps
+ -- We don't check for flag presence for extra deps
+ then Nothing
-- Also not in extra-deps, it's an error
- Nothing ->
+ else
case Map.lookup name snapshot of
Nothing -> Just $ UFNoPackage source name
Just _ -> Just $ UFSnapshot name
- -- We don't check for flag presence for extra deps
- Just _ -> Nothing
-- Package exists locally, let's check if the flags are defined
Just pkg ->
let unused = Set.difference (Map.keysSet userFlags) (packageDefinedFlags pkg)
@@ -528,41 +349,6 @@ checkFlagsUsed boptsCli lps extraDeps snapshot = do
$ InvalidFlagSpecification
$ Set.fromList unusedFlags
--- | Add in necessary packages to extra dependencies
---
--- Originally part of https://github.com/commercialhaskell/stack/issues/272,
--- this was then superseded by
--- https://github.com/commercialhaskell/stack/issues/651
-extendExtraDeps
- :: (StackM env m, HasBuildConfig env)
- => Map PackageName Version -- ^ original extra deps
- -> Map PackageName Version -- ^ package identifiers from the command line
- -> Set PackageName -- ^ all packages added on the command line
- -> m (Map PackageName Version) -- ^ new extradeps
-extendExtraDeps extraDeps0 cliExtraDeps unknowns = do
- (errs, unknowns') <- fmap partitionEithers $ mapM addUnknown $ Set.toList unknowns
- case errs of
- [] -> return $ Map.unions $ extraDeps1 : unknowns'
- _ -> do
- bconfig <- view buildConfigL
- throwM $ UnknownTargets
- (Set.fromList errs)
- Map.empty -- TODO check the cliExtraDeps for presence in index
- (bcStackYaml bconfig)
- where
- extraDeps1 = Map.union extraDeps0 cliExtraDeps
- addUnknown pn = do
- case Map.lookup pn extraDeps1 of
- Just _ -> return (Right Map.empty)
- Nothing -> do
- mlatestVersion <- getLatestVersion pn
- case mlatestVersion of
- Just v -> return (Right $ Map.singleton pn v)
- Nothing -> return (Left pn)
- getLatestVersion pn = do
- vs <- getPackageVersions pn
- return (fmap fst (Set.maxView vs))
-
-- | Compare the current filesystem state to the cached information, and
-- determine (1) if the files are dirty, and (2) the new cache values.
checkBuildCache :: forall m. (MonadIO m)
@@ -586,7 +372,7 @@ checkBuildCache oldCache files = do
go fp _ _ | takeFileName fp == "cabal_macros.h" = return (Set.empty, Map.empty)
-- Common case where it's in the cache and on the filesystem.
go fp (Just modTime') (Just fci)
- | fciModTime fci == modTime' = return (Set.empty, Map.empty)
+ | fciModTime fci == modTime' = return (Set.empty, Map.singleton fp fci)
| otherwise = do
newFci <- calcFci modTime' fp
let isDirty =
@@ -603,14 +389,15 @@ checkBuildCache oldCache files = do
-- | Returns entries to add to the build cache for any newly found unlisted modules
addUnlistedToBuildCache
- :: (StackM env m, HasEnvConfig env)
+ :: HasEnvConfig env
=> ModTime
-> Package
-> Path Abs File
+ -> Set NamedComponent
-> Map FilePath a
- -> m ([Map FilePath FileCacheInfo], [PackageWarning])
-addUnlistedToBuildCache preBuildTime pkg cabalFP buildCache = do
- (files,warnings) <- getPackageFilesSimple pkg cabalFP
+ -> RIO env ([Map FilePath FileCacheInfo], [PackageWarning])
+addUnlistedToBuildCache preBuildTime pkg cabalFP nonLibComponents buildCache = do
+ (files,warnings) <- getPackageFilesForTargets pkg cabalFP nonLibComponents
let newFiles =
Set.toList $
Set.map toFilePath files `Set.difference` Map.keysSet buildCache
@@ -628,16 +415,21 @@ addUnlistedToBuildCache preBuildTime pkg cabalFP buildCache = do
return (Map.singleton fp newFci)
else return Map.empty
--- | Gets list of Paths for files in a package
-getPackageFilesSimple
- :: (StackM env m, HasEnvConfig env)
- => Package -> Path Abs File -> m (Set (Path Abs File), [PackageWarning])
-getPackageFilesSimple pkg cabalFP = do
- (_,compFiles,cabalFiles,warnings) <-
+-- | Gets list of Paths for files relevant to a set of components in a package.
+-- Note that the library component, if any, is always automatically added to the
+-- set of components.
+getPackageFilesForTargets
+ :: HasEnvConfig env
+ => Package -> Path Abs File -> Set NamedComponent -> RIO env (Set (Path Abs File), [PackageWarning])
+getPackageFilesForTargets pkg cabalFP components = do
+ (_,compFiles,otherFiles,warnings) <-
getPackageFiles (packageFiles pkg) cabalFP
- return
- ( Set.map dotCabalGetPath (mconcat (M.elems compFiles)) <> cabalFiles
- , warnings)
+ let filesForComponent cn = Set.map dotCabalGetPath
+ $ M.findWithDefault mempty cn compFiles
+ files = Set.unions
+ $ otherFiles
+ : map filesForComponent (Set.toList $ Set.insert CLib components)
+ return (files, warnings)
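A sketch of the effect of the component filter above (component names are made up): for a package with a library and executables foo and bar,

-- getPackageFilesForTargets pkg cabalFP (Set.singleton (CExe "foo"))
-- returns the non-component files plus the files of CLib and of
-- CExe "foo"; files belonging only to CExe "bar" are left out.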
-- | Get file modification time, if it exists.
getModTimeMaybe :: MonadIO m => FilePath -> m (Maybe ModTime)
@@ -678,20 +470,6 @@ checkComponentsBuildable lps =
, c <- Set.toList (lpUnbuildable lp)
]
-getDefaultPackageConfig :: (MonadIO m, MonadReader env m, HasEnvConfig env)
- => m PackageConfig
-getDefaultPackageConfig = do
- platform <- view platformL
- compilerVersion <- view actualCompilerVersionL
- return PackageConfig
- { packageConfigEnableTests = False
- , packageConfigEnableBenchmarks = False
- , packageConfigFlags = M.empty
- , packageConfigGhcOptions = []
- , packageConfigCompilerVersion = compilerVersion
- , packageConfigPlatform = platform
- }
-
-- | Get 'PackageConfig' for package given its name.
getPackageConfig :: (MonadIO m, MonadReader env m, HasEnvConfig env)
=> BuildOptsCLI
diff --git a/src/Stack/Build/Target.hs b/src/Stack/Build/Target.hs
index 72a8718..e591f1e 100644
--- a/src/Stack/Build/Target.hs
+++ b/src/Stack/Build/Target.hs
@@ -1,56 +1,125 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
-{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE StandaloneDeriving #-}
+{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
-- | Parsing command line targets
+--
+-- There are two relevant data sources for performing this parsing:
+-- the project configuration, and command line arguments. The project
+-- configuration includes the resolver (defining a LoadedSnapshot of
+-- global and snapshot packages), local dependencies, and project
+-- packages. It also defines local flag overrides.
+--
+-- The command line arguments specify both additional local flag
+-- overrides and targets in their raw form.
+--
+-- Flags are simple: we just combine CLI flags with config flags and
+-- make one big map of flags, preferring CLI flags when present.
+--
+-- Raw targets can be a package name, a package name with component,
+-- just a component, or a package name and version number. We first
+-- must resolve these raw targets into both simple targets and
+-- additional dependencies. This works as follows:
+--
+-- * If a component is specified, find a unique project package which
+-- defines that component, and convert it into a name+component
+-- target.
+--
+-- * Ensure that all name+component values refer to valid components
+-- in the given project package.
+--
+-- * For names, check if the name is present in the snapshot, local
+-- deps, or project packages. If it is not, then look up the most
+-- recent version in the package index and convert to a
+-- name+version.
+--
+-- * For name+version, first ensure that the name is not used by a
+-- project package. Next, if that name+version is present in the
+-- snapshot or local deps _and_ its location is PLIndex, we have the
+-- package. Otherwise, add to local deps with the appropriate
+-- PLIndex.
+--
+-- If in either of the last two bullets we added a package to local
+-- deps, print a warning to the user recommending modifying the
+-- extra-deps.
+--
+-- Combine the various 'ResolveResult's together into 'Target'
+-- values, by combining various components for a single package and
+-- ensuring that no conflicting statements were made about targets.
+--
+-- At this point, we now have a Map from package name to SimpleTarget,
+-- and an updated Map of local dependencies. We still have the
+-- aggregated flags, and the snapshot and project packages.
+--
+-- Finally, we upgrade the snapshot by using
+-- calculatePackagePromotion.
module Stack.Build.Target
( -- * Types
- ComponentName
- , UnresolvedComponent (..)
- , RawTarget (..)
- , LocalPackageView (..)
- , SimpleTarget (..)
+ Target (..)
, NeedTargets (..)
- -- * Parsers
- , parseRawTarget
+ , PackageType (..)
, parseTargets
+ -- * Convenience helpers
+ , gpdVersion
+ -- * Test suite exports
+ , parseRawTarget
+ , RawTarget (..)
+ , UnresolvedComponent (..)
) where
-import Control.Applicative
-import Control.Arrow (second)
-import Control.Monad.Catch (MonadCatch, throwM)
-import Control.Monad.IO.Class
-import Data.Either (partitionEithers)
-import Data.Foldable
-import Data.List.Extra (groupSort)
-import Data.List.NonEmpty (NonEmpty((:|)))
-import qualified Data.List.NonEmpty as NonEmpty
-import Data.Map (Map)
+import Stack.Prelude
import qualified Data.Map as Map
-import Data.Maybe (mapMaybe)
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (Text)
import qualified Data.Text as T
+import Distribution.PackageDescription (GenericPackageDescription, package, packageDescription)
import Path
import Path.Extra (rejectMissingDir)
import Path.IO
-import Prelude hiding (concat, concatMap) -- Fix redundant import warnings
+import Stack.Config (getLocalPackages)
+import Stack.Fetch (withCabalLoader)
+import Stack.PackageIndex
+import Stack.PackageLocation
+import Stack.Snapshot (calculatePackagePromotion)
+import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
-import Stack.Types.Config
import Stack.Types.Build
-import Stack.Types.Package
+import Stack.Types.BuildPlan
+import Stack.Types.GhcPkgId
--- | The name of a component, which applies to executables, test suites, and benchmarks
-type ComponentName = Text
+-- | Do we need any targets? For example, `stack build` will fail if
+-- no targets are provided.
+data NeedTargets = NeedTargets | AllowNoTargets
+---------------------------------------------------------------------------------
+-- Get the RawInput
+---------------------------------------------------------------------------------
+
+-- | Raw target information passed on the command line.
newtype RawInput = RawInput { unRawInput :: Text }
+getRawInput :: BuildOptsCLI -> Map PackageName LocalPackageView -> ([Text], [RawInput])
+getRawInput boptscli locals =
+ let textTargets' = boptsCLITargets boptscli
+ textTargets =
+ -- Handle the no targets case, which means we pass in the names of all project packages
+ if null textTargets'
+ then map packageNameText (Map.keys locals)
+ else textTargets'
+ in (textTargets', map RawInput textTargets)
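A rough sketch of the two cases above (the cli and locals values are illustrative):

-- Explicit targets on the command line:
--   boptsCLITargets cli == ["foo", "bar:exe1"]
--   getRawInput cli locals
--     ~ (["foo", "bar:exe1"], [RawInput "foo", RawInput "bar:exe1"])
--
-- No targets given: every project package becomes a raw input, while
-- the first element stays empty so callers can still tell that no
-- explicit target was named:
--   boptsCLITargets cli == []
--   getRawInput cli locals
--     ~ ([], map (RawInput . packageNameText) (Map.keys locals))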
+
+---------------------------------------------------------------------------------
+-- Turn RawInput into RawTarget
+---------------------------------------------------------------------------------
+
+-- | The name of a component, which applies to executables, test
+-- suites, and benchmarks
+type ComponentName = Text
+
-- | Either a fully resolved component, or a component name that could be
-- either an executable, test, or benchmark
data UnresolvedComponent
@@ -60,23 +129,50 @@ data UnresolvedComponent
-- | Raw command line input, without checking against any databases or list of
-- locals. Does not deal with directories
-data RawTarget (a :: RawTargetType) where
- RTPackageComponent :: !PackageName -> !UnresolvedComponent -> RawTarget a
- RTComponent :: !ComponentName -> RawTarget a
- RTPackage :: !PackageName -> RawTarget a
- RTPackageIdentifier :: !PackageIdentifier -> RawTarget 'HasIdents
+data RawTarget
+ = RTPackageComponent !PackageName !UnresolvedComponent
+ | RTComponent !ComponentName
+ | RTPackage !PackageName
+ -- Explicitly _not_ supporting revisions on the command line. If
+ -- you want that, you should be modifying your stack.yaml! (In
+ -- fact, you should probably do that anyway, we're just letting
+ -- people be lazy, since we're Haskeletors.)
+ | RTPackageIdentifier !PackageIdentifier
+ deriving (Show, Eq)
-deriving instance Show (RawTarget a)
-deriving instance Eq (RawTarget a)
-deriving instance Ord (RawTarget a)
+-- | Same as @parseRawTarget@, but also takes directories into account.
+parseRawTargetDirs :: MonadIO m
+ => Path Abs Dir -- ^ current directory
+ -> Map PackageName LocalPackageView
+ -> RawInput -- ^ raw target information from the commandline
+ -> m (Either Text [(RawInput, RawTarget)])
+parseRawTargetDirs root locals ri =
+ case parseRawTarget t of
+ Just rt -> return $ Right [(ri, rt)]
+ Nothing -> do
+ mdir <- liftIO $ forgivingAbsence (resolveDir root (T.unpack t))
+ >>= rejectMissingDir
+ case mdir of
+ Nothing -> return $ Left $ "Directory not found: " `T.append` t
+ Just dir ->
+ case mapMaybe (childOf dir) $ Map.toList locals of
+ [] -> return $ Left $
+ "No local directories found as children of " `T.append`
+ t
+ names -> return $ Right $ map ((ri, ) . RTPackage) names
+ where
+ childOf dir (name, lpv) =
+ if dir == lpvRoot lpv || isProperPrefixOf dir (lpvRoot lpv)
+ then Just name
+ else Nothing
-data RawTargetType = HasIdents | NoIdents
+ RawInput t = ri
-- | If this function returns @Nothing@, the input should be treated as a
-- directory.
-parseRawTarget :: Text -> Maybe (RawTarget 'HasIdents)
+parseRawTarget :: Text -> Maybe RawTarget
parseRawTarget t =
- (RTPackageIdentifier <$> parsePackageIdentifierFromString s)
+ (RTPackageIdentifier <$> parsePackageIdentifier t)
<|> (RTPackage <$> parsePackageNameFromString s)
<|> (RTComponent <$> T.stripPrefix ":" t)
<|> parsePackageComponent
@@ -104,94 +200,75 @@ parseRawTarget t =
"bench" -> Just CBench
_ -> Nothing
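A few illustrative reductions for parseRawTarget (a sketch, not part of the patch; the target strings are made up):

-- parseRawTarget "stack"                 ~ Just (RTPackage ...)
-- parseRawTarget "stack-1.6.1"           ~ Just (RTPackageIdentifier ...)
-- parseRawTarget ":stack-test"           ~ Just (RTComponent "stack-test")
-- parseRawTarget "stack:test:stack-test" ~ Just (RTPackageComponent ... (ResolvedComponent (CTest "stack-test")))
-- parseRawTarget "stack:stack-test"      ~ Just (RTPackageComponent ... (UnresolvedComponent "stack-test"))
-- parseRawTarget "./some/dir"            ~ Nothing   -- caller treats it as a directory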
--- | A view of a local package needed for resolving components
-data LocalPackageView = LocalPackageView
- { lpvVersion :: !Version
- , lpvRoot :: !(Path Abs Dir)
- , lpvCabalFP :: !(Path Abs File)
- , lpvComponents :: !(Set NamedComponent)
- , lpvExtraDep :: !TreatLikeExtraDep
- }
-
--- | Same as @parseRawTarget@, but also takes directories into account.
-parseRawTargetDirs :: (MonadIO m, MonadCatch m)
- => Path Abs Dir -- ^ current directory
- -> Map PackageName LocalPackageView
- -> Text
- -> m (Either Text [(RawInput, RawTarget 'HasIdents)])
-parseRawTargetDirs root locals t =
- case parseRawTarget t of
- Just rt -> return $ Right [(ri, rt)]
- Nothing -> do
- mdir <- forgivingAbsence (resolveDir root (T.unpack t))
- >>= rejectMissingDir
- case mdir of
- Nothing -> return $ Left $ "Directory not found: " `T.append` t
- Just dir ->
- case mapMaybe (childOf dir) $ Map.toList locals of
- [] -> return $ Left $
- "No local directories found as children of " `T.append`
- t
- names -> return $ Right $ map ((ri, ) . RTPackage) names
- where
- ri = RawInput t
-
- childOf dir (name, lpv) =
- if (dir == lpvRoot lpv || isParentOf dir (lpvRoot lpv)) && not (lpvExtraDep lpv)
- then Just name
- else Nothing
+---------------------------------------------------------------------------------
+-- Resolve the raw targets
+---------------------------------------------------------------------------------
-data SimpleTarget
- = STUnknown
- | STNonLocal
- | STLocalComps !(Set NamedComponent)
- | STLocalAll
- deriving (Show, Eq, Ord)
+data ResolveResult = ResolveResult
+ { rrName :: !PackageName
+ , rrRaw :: !RawInput
+ , rrComponent :: !(Maybe NamedComponent)
+ -- ^ Was a concrete component specified?
+ , rrAddedDep :: !(Maybe Version)
+ -- ^ Only if we're adding this as a dependency
+ , rrPackageType :: !PackageType
+ }
-resolveIdents :: Map PackageName Version -- ^ snapshot
- -> Map PackageName Version -- ^ extra deps
- -> Map PackageName LocalPackageView
- -> (RawInput, RawTarget 'HasIdents)
- -> Either Text ((RawInput, RawTarget 'NoIdents), Map PackageName Version)
-resolveIdents _ _ _ (ri, RTPackageComponent x y) = Right ((ri, RTPackageComponent x y), Map.empty)
-resolveIdents _ _ _ (ri, RTComponent x) = Right ((ri, RTComponent x), Map.empty)
-resolveIdents _ _ _ (ri, RTPackage x) = Right ((ri, RTPackage x), Map.empty)
-resolveIdents snap extras locals (ri, RTPackageIdentifier (PackageIdentifier name version)) =
- fmap ((ri, RTPackage name), ) newExtras
- where
- newExtras =
- case (Map.lookup name locals, mfound) of
- -- Error if it matches a local package, pkg idents not
- -- supported for local.
- (Just _, _) -> Left $ T.concat
- [ packageNameText name
- , " target has a specific version number, but it is a local package."
- , "\nTo avoid confusion, we will not install the specified version or build the local one."
- , "\nTo build the local package, specify the target without an explicit version."
- ]
- -- If the found version matches, no need for an extra-dep.
- (_, Just foundVersion) | foundVersion == version -> Right Map.empty
- -- Otherwise, if there is no specified version or a
- -- mismatch, add an extra-dep.
- _ -> Right $ Map.singleton name version
- mfound = asum (map (Map.lookup name) [extras, snap])
-
-resolveRawTarget :: Map PackageName Version -- ^ snapshot
- -> Map PackageName Version -- ^ extra deps
- -> Map PackageName LocalPackageView
- -> (RawInput, RawTarget 'NoIdents)
- -> Either Text (PackageName, (RawInput, SimpleTarget))
-resolveRawTarget snap extras locals (ri, rt) =
+-- | Convert a 'RawTarget' into a 'ResolveResult' (see description on
+-- the module).
+resolveRawTarget
+ :: forall env. HasConfig env
+ => Map PackageName (LoadedPackageInfo GhcPkgId) -- ^ globals
+ -> Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- ^ snapshot
+ -> Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath) -- ^ local deps
+ -> Map PackageName LocalPackageView -- ^ project packages
+ -> (RawInput, RawTarget)
+ -> RIO env (Either Text ResolveResult)
+resolveRawTarget globals snap deps locals (ri, rt) =
go rt
where
- go (RTPackageComponent name ucomp) =
+ -- Helper function: check if a 'NamedComponent' matches the given 'ComponentName'
+ isCompNamed :: ComponentName -> NamedComponent -> Bool
+ isCompNamed _ CLib = False
+ isCompNamed t1 (CExe t2) = t1 == t2
+ isCompNamed t1 (CTest t2) = t1 == t2
+ isCompNamed t1 (CBench t2) = t1 == t2
+
+ go (RTComponent cname) = return $
+ -- Associated list from component name to package that defines
+ -- it. We use an assoc list and not a Map so we can detect
+ -- duplicates.
+ let allPairs = concatMap
+ (\(name, lpv) -> map (name,) $ Set.toList $ lpvComponents lpv)
+ (Map.toList locals)
+ in case filter (isCompNamed cname . snd) allPairs of
+ [] -> Left $ cname `T.append` " doesn't seem to be a local target. Run 'stack ide targets' for a list of available targets"
+ [(name, comp)] -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Just comp
+ , rrAddedDep = Nothing
+ , rrPackageType = ProjectPackage
+ }
+ matches -> Left $ T.concat
+                    [ "Ambiguous component name "
+ , cname
+ , ", matches: "
+ , T.pack $ show matches
+ ]
+ go (RTPackageComponent name ucomp) = return $
case Map.lookup name locals of
Nothing -> Left $ T.pack $ "Unknown local package: " ++ packageNameString name
Just lpv ->
case ucomp of
ResolvedComponent comp
- | comp `Set.member` lpvComponents lpv ->
- Right (name, (ri, STLocalComps $ Set.singleton comp))
+ | comp `Set.member` lpvComponents lpv -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Just comp
+ , rrAddedDep = Nothing
+ , rrPackageType = ProjectPackage
+ }
| otherwise -> Left $ T.pack $ concat
[ "Component "
, show comp
@@ -206,7 +283,13 @@ resolveRawTarget snap extras locals (ri, rt) =
, " does not exist in package "
, T.pack $ packageNameString name
]
- [x] -> Right (name, (ri, STLocalComps $ Set.singleton x))
+ [x] -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Just x
+ , rrAddedDep = Nothing
+ , rrPackageType = ProjectPackage
+ }
matches -> Left $ T.concat
[ "Ambiguous component name "
, comp
@@ -215,109 +298,260 @@ resolveRawTarget snap extras locals (ri, rt) =
, ": "
, T.pack $ show matches
]
- go (RTComponent cname) =
- let allPairs = concatMap
- (\(name, lpv) -> map (name,) $ Set.toList $ lpvComponents lpv)
- (Map.toList locals)
- in case filter (isCompNamed cname . snd) allPairs of
- [] -> Left $ cname `T.append` " doesn't seem to be a local target. Run 'stack ide targets' for a list of available targets"
- [(name, comp)] ->
- Right (name, (ri, STLocalComps $ Set.singleton comp))
- matches -> Left $ T.concat
- [ "Ambiugous component name "
- , cname
- , ", matches: "
- , T.pack $ show matches
- ]
- go (RTPackage name) =
- case Map.lookup name locals of
- Just _lpv -> Right (name, (ri, STLocalAll))
- Nothing ->
- case Map.lookup name extras of
- Just _ -> Right (name, (ri, STNonLocal))
- Nothing ->
- case Map.lookup name snap of
- Just _ -> Right (name, (ri, STNonLocal))
- Nothing -> Right (name, (ri, STUnknown))
-
-isCompNamed :: Text -> NamedComponent -> Bool
-isCompNamed _ CLib = False
-isCompNamed t1 (CExe t2) = t1 == t2
-isCompNamed t1 (CTest t2) = t1 == t2
-isCompNamed t1 (CBench t2) = t1 == t2
-
-simplifyTargets :: [(PackageName, (RawInput, SimpleTarget))]
- -> ([Text], Map PackageName SimpleTarget)
-simplifyTargets =
- foldMap go . collect
+ go (RTPackage name)
+ | Map.member name locals = return $ Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep = Nothing
+ , rrPackageType = ProjectPackage
+ }
+ | Map.member name deps ||
+ Map.member name snap ||
+ Map.member name globals = return $ Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep = Nothing
+ , rrPackageType = Dependency
+ }
+ | otherwise = do
+ mversion <- getLatestVersion name
+ return $ case mversion of
+ -- This is actually an error case. We _could_ return a
+ -- Left value here, but it turns out to be better to defer
+ -- this until the ConstructPlan phase, and let it complain
+ -- about the missing package so that we get more errors
+ -- together, plus the fancy colored output from that
+ -- module.
+ Nothing -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep = Nothing
+ , rrPackageType = Dependency
+ }
+ Just version -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep = Just version
+ , rrPackageType = Dependency
+ }
+ where
+ getLatestVersion pn = do
+ vs <- getPackageVersions pn
+ return (fmap fst (Set.maxView vs))
+
+ go (RTPackageIdentifier ident@(PackageIdentifier name version))
+ | Map.member name locals = return $ Left $ T.concat
+ [ packageNameText name
+ , " target has a specific version number, but it is a local package."
+ , "\nTo avoid confusion, we will not install the specified version or build the local one."
+ , "\nTo build the local package, specify the target without an explicit version."
+ ]
+ | otherwise = return $
+ case Map.lookup name allLocs of
+ -- Installing it from the package index, so we're cool
+ -- with overriding it if necessary
+ Just (PLIndex (PackageIdentifierRevision (PackageIdentifier _name versionLoc) _mcfi)) -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep =
+ if version == versionLoc
+ -- But no need to override anyway, this is already the
+ -- version we have
+ then Nothing
+ -- OK, we'll override it
+ else Just version
+ , rrPackageType = Dependency
+ }
+ -- The package was coming from something besides the
+ -- index, so refuse to do the override
+ Just (PLOther loc') -> Left $ T.concat
+ [ "Package with identifier was targeted on the command line: "
+ , packageIdentifierText ident
+ , ", but it was specified from a non-index location: "
+ , T.pack $ show loc'
+ , ".\nRecommendation: add the correctly desired version to extra-deps."
+ ]
+ -- Not present at all, so add it
+ Nothing -> Right ResolveResult
+ { rrName = name
+ , rrRaw = ri
+ , rrComponent = Nothing
+ , rrAddedDep = Just version
+ , rrPackageType = Dependency
+ }
+
+ where
+ allLocs :: Map PackageName (PackageLocationIndex FilePath)
+ allLocs = Map.unions
+ [ Map.mapWithKey
+ (\name' lpi -> PLIndex $ PackageIdentifierRevision
+ (PackageIdentifier name' (lpiVersion lpi))
+ CFILatest)
+ globals
+ , Map.map lpiLocation snap
+ , Map.map snd deps
+ ]
+
+---------------------------------------------------------------------------------
+-- Combine the ResolveResults
+---------------------------------------------------------------------------------
+
+-- | How a package is intended to be built
+data Target
+ = TargetAll !PackageType
+ -- ^ Build all of the default components.
+ | TargetComps !(Set NamedComponent)
+ -- ^ Only build specific components
+
+data PackageType = ProjectPackage | Dependency
+ deriving (Eq, Show)
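For intuition, the mapping from the command-line forms described in the module header looks roughly like this (package and component names are made up):

-- "stack build foo"                where foo is a project package
--     ~ TargetAll ProjectPackage
-- "stack build foo:test:spec"
--     ~ TargetComps (Set.singleton (CTest "spec"))
-- "stack build acme-missiles-0.3"  only available from the index
--     ~ TargetAll Dependency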
+
+combineResolveResults
+ :: forall m. MonadLogger m
+ => [ResolveResult]
+ -> m ([Text], Map PackageName Target, Map PackageName (PackageLocationIndex FilePath))
+combineResolveResults results = do
+ addedDeps <- fmap Map.unions $ forM results $ \result ->
+ case rrAddedDep result of
+ Nothing -> return Map.empty
+ Just version -> do
+ let ident = PackageIdentifier (rrName result) version
+ return $ Map.singleton (rrName result) $ PLIndex $ PackageIdentifierRevision ident CFILatest
+
+ let m0 = Map.unionsWith (++) $ map (\rr -> Map.singleton (rrName rr) [rr]) results
+ (errs, ms) = partitionEithers $ flip map (Map.toList m0) $ \(name, rrs) ->
+ let mcomps = map rrComponent rrs in
+ -- Confirm that there is either exactly 1 with no component, or
+ -- that all rrs are components
+ case rrs of
+ [] -> assert False $ Left "Somehow got no rrComponent values, that can't happen"
+ [rr] | isNothing (rrComponent rr) -> Right $ Map.singleton name $ TargetAll $ rrPackageType rr
+ _
+ | all isJust mcomps -> Right $ Map.singleton name $ TargetComps $ Set.fromList $ catMaybes mcomps
+ | otherwise -> Left $ T.concat
+ [ "The package "
+ , packageNameText name
+ , " was specified in multiple, incompatible ways: "
+ , T.unwords $ map (unRawInput . rrRaw) rrs
+ ]
+
+ return (errs, Map.unions ms, addedDeps)
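A sketch of the combining rules (names are illustrative): per-package results are merged when all of them are component-level, and rejected when a whole-package target is mixed with component targets.

-- ["foo:exe:a", "foo:test:b"]
--     ~ Map.singleton "foo" (TargetComps (Set.fromList [CExe "a", CTest "b"]))
-- ["foo", "foo:exe:a"]
--     ~ error: "The package foo was specified in multiple, incompatible ways: foo foo:exe:a"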
+
+---------------------------------------------------------------------------------
+-- OK, let's do it!
+---------------------------------------------------------------------------------
+
+parseTargets
+ :: HasEnvConfig env
+ => NeedTargets
+ -> BuildOptsCLI
+ -> RIO env
+ ( LoadedSnapshot -- upgraded snapshot, with some packages possibly moved to local
+ , Map PackageName (LoadedPackageInfo (PackageLocationIndex FilePath)) -- all local deps
+ , Map PackageName Target
+ )
+parseTargets needTargets boptscli = do
+ logDebug "Parsing the targets"
+ bconfig <- view buildConfigL
+ ls0 <- view loadedSnapshotL
+ workingDir <- getCurrentDir
+ lp <- getLocalPackages
+ let locals = lpProject lp
+ deps = lpDependencies lp
+ globals = lsGlobals ls0
+ snap = lsPackages ls0
+ (textTargets', rawInput) = getRawInput boptscli locals
+
+ (errs1, concat -> rawTargets) <- fmap partitionEithers $ forM rawInput $
+ parseRawTargetDirs workingDir (lpProject lp)
+
+ (errs2, resolveResults) <- fmap partitionEithers $ forM rawTargets $
+ resolveRawTarget globals snap deps locals
+
+ (errs3, targets, addedDeps) <- combineResolveResults resolveResults
+
+ case concat [errs1, errs2, errs3] of
+ [] -> return ()
+ errs -> throwIO $ TargetParseException errs
+
+ case (Map.null targets, needTargets) of
+ (False, _) -> return ()
+ (True, AllowNoTargets) -> return ()
+ (True, NeedTargets)
+ | null textTargets' && bcImplicitGlobal bconfig -> throwIO $ TargetParseException
+ ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"]
+ | null textTargets' && Map.null locals -> throwIO $ TargetParseException
+ ["The project contains no local packages (packages not marked with 'extra-dep')"]
+ | otherwise -> throwIO $ TargetParseException
+ ["The specified targets matched no packages"]
+
+ root <- view projectRootL
+
+ let dropMaybeKey (Nothing, _) = Map.empty
+ dropMaybeKey (Just key, value) = Map.singleton key value
+ flags = Map.unionWith Map.union
+ (Map.unions (map dropMaybeKey (Map.toList (boptsCLIFlags boptscli))))
+ (bcFlags bconfig)
+ hides = Map.empty -- not supported to add hidden packages
+
+ -- We promote packages to the local database if the GHC options
+ -- are added to them by name. See:
+ -- https://github.com/commercialhaskell/stack/issues/849#issuecomment-320892095.
+ --
+ -- GHC options applied to all packages are handled by getGhcOptions.
+ options = configGhcOptionsByName (bcConfig bconfig)
+
+ drops = Set.empty -- not supported to add drops
+
+ (globals', snapshots, locals') <- withCabalLoader $ \loadFromIndex -> do
+ addedDeps' <- fmap Map.fromList $ forM (Map.toList addedDeps) $ \(name, loc) -> do
+ gpd <- parseSingleCabalFileIndex loadFromIndex root loc
+ return (name, (gpd, loc, Nothing))
+
+ -- Calculate a list of all of the locals, based on the project
+ -- packages, local dependencies, and added deps found from the
+ -- command line
+ let allLocals :: Map PackageName (GenericPackageDescription, PackageLocationIndex FilePath, Maybe LocalPackageView)
+ allLocals = Map.unions
+ [ -- project packages
+ Map.map
+ (\lpv -> (lpvGPD lpv, PLOther $ lpvLoc lpv, Just lpv))
+ (lpProject lp)
+                  , -- added deps take precedence over local deps
+ addedDeps'
+                  , -- local deps; the added deps above take precedence
+ Map.map
+ (\(gpd, loc) -> (gpd, loc, Nothing))
+ (lpDependencies lp)
+ ]
+
+ calculatePackagePromotion
+ loadFromIndex root ls0 (Map.elems allLocals)
+ flags hides options drops
+
+ let ls = LoadedSnapshot
+ { lsCompilerVersion = lsCompilerVersion ls0
+ , lsGlobals = globals'
+ , lsPackages = snapshots
+ }
+
+ localDeps = Map.fromList $ flip mapMaybe (Map.toList locals') $ \(name, lpi) ->
+ -- We want to ignore any project packages, but grab the local
+ -- deps and upgraded snapshot deps
+ case lpiLocation lpi of
+ (_, Just (Just _localPackageView)) -> Nothing -- project package
+ (loc, _) -> Just (name, lpi { lpiLocation = loc }) -- upgraded or local dep
+
+ return (ls, localDeps, targets)
+
+gpdVersion :: GenericPackageDescription -> Version
+gpdVersion gpd =
+ version
where
- go :: (PackageName, NonEmpty (RawInput, SimpleTarget))
- -> ([Text], Map PackageName SimpleTarget)
- go (name, (_, st) :| []) = ([], Map.singleton name st)
- go (name, pairs) =
- case partitionEithers $ map (getLocalComp . snd) (NonEmpty.toList pairs) of
- ([], comps) -> ([], Map.singleton name $ STLocalComps $ Set.unions comps)
- _ ->
- let err = T.pack $ concat
- [ "Overlapping targets provided for package "
- , packageNameString name
- , ": "
- , show $ map (unRawInput . fst) (NonEmpty.toList pairs)
- ]
- in ([err], Map.empty)
-
- collect :: Ord a => [(a, b)] -> [(a, NonEmpty b)]
- collect = map (second NonEmpty.fromList) . groupSort
-
- getLocalComp (STLocalComps comps) = Right comps
- getLocalComp _ = Left ()
-
--- | Need targets, e.g. `stack build` or allow none?
-data NeedTargets
- = NeedTargets
- | AllowNoTargets
-
-parseTargets :: (MonadCatch m, MonadIO m)
- => NeedTargets -- ^ need at least one target
- -> Bool -- ^ using implicit global project?
- -> Map PackageName Version -- ^ snapshot
- -> Map PackageName Version -- ^ extra deps
- -> Map PackageName LocalPackageView
- -> Path Abs Dir -- ^ current directory
- -> [Text] -- ^ command line targets
- -> m (Map PackageName Version, Map PackageName SimpleTarget)
-parseTargets needTargets implicitGlobal snap extras locals currDir textTargets' = do
- let nonExtraDeps = Map.keys $ Map.filter (not . lpvExtraDep) locals
- textTargets =
- if null textTargets'
- then map (T.pack . packageNameString) nonExtraDeps
- else textTargets'
- erawTargets <- mapM (parseRawTargetDirs currDir locals) textTargets
-
- let (errs1, rawTargets) = partitionEithers erawTargets
- -- When specific package identifiers are provided, treat these
- -- as extra-deps.
- (errs2, unzip -> (rawTargets', newExtras)) = partitionEithers $
- map (resolveIdents snap extras locals) $ concat rawTargets
- -- Find targets that specify components in the local packages,
- -- otherwise find package targets in snap and extra-deps.
- (errs3, targetTypes) = partitionEithers $
- map (resolveRawTarget snap extras locals) rawTargets'
- (errs4, targets) = simplifyTargets targetTypes
- errs = concat [errs1, errs2, errs3, errs4]
-
- if null errs
- then if Map.null targets
- then case needTargets of
- AllowNoTargets ->
- return (Map.empty, Map.empty)
- NeedTargets
- | null textTargets' && implicitGlobal -> throwM $ TargetParseException
- ["The specified targets matched no packages.\nPerhaps you need to run 'stack init'?"]
- | null textTargets' && null nonExtraDeps -> throwM $ TargetParseException
- ["The project contains no local packages (packages not marked with 'extra-dep')"]
- | otherwise -> throwM $ TargetParseException
- ["The specified targets matched no packages"]
- else return (Map.unions newExtras, targets)
- else throwM $ TargetParseException errs
+ PackageIdentifier _ version = fromCabalPackageIdentifier $ package $ packageDescription gpd
diff --git a/src/Stack/BuildPlan.hs b/src/Stack/BuildPlan.hs
index f84c666..5d6b6c0 100644
--- a/src/Stack/BuildPlan.hs
+++ b/src/Stack/BuildPlan.hs
@@ -1,12 +1,11 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
-{-# LANGUAGE DeriveGeneric #-}
-{-# LANGUAGE EmptyDataDecls #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
-{-# LANGUAGE TupleSections #-}
+{-# LANGUAGE ScopedTypeVariables #-}
-- | Resolving a build plan for a set of packages in a given Stackage
-- snapshot.
@@ -19,84 +18,42 @@ module Stack.BuildPlan
, DepErrors
, gpdPackageDeps
, gpdPackages
- , gpdPackageName
- , MiniBuildPlan(..)
- , MiniPackageInfo(..)
- , loadResolver
- , loadMiniBuildPlan
, removeSrcPkgDefaultFlags
- , resolveBuildPlan
, selectBestSnapshot
, getToolMap
- , shadowMiniBuildPlan
, showItems
- , showPackageFlags
- , parseCustomMiniBuildPlan
- , loadBuildPlan
) where
-import Control.Applicative
-import Control.Exception (assert)
-import Control.Monad (liftM, forM, unless)
-import Control.Monad.Catch
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Reader (MonadReader)
-import Control.Monad.State.Strict (State, execState, get, modify,
- put)
-import Crypto.Hash (hashWith, SHA256(..))
-import Data.Aeson.Extended (WithJSONWarnings(..), logJSONWarnings)
-import Data.Store.VersionTagged
-import qualified Data.ByteArray as Mem (convert)
-import qualified Data.ByteString as S
-import qualified Data.ByteString.Base64.URL as B64URL
-import qualified Data.ByteString.Char8 as S8
-import Data.Either (partitionEithers)
+import Stack.Prelude
import qualified Data.Foldable as F
import qualified Data.HashSet as HashSet
import Data.List (intercalate)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NonEmpty
-import Data.Map (Map)
import qualified Data.Map as Map
-import Data.Maybe (fromMaybe, mapMaybe, isNothing)
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (Text)
import qualified Data.Text as T
-import Data.Text.Encoding (encodeUtf8)
-import qualified Data.Traversable as Tr
-import Data.Typeable (Typeable)
-import Data.Yaml (decodeEither', decodeFileEither)
import qualified Distribution.Package as C
import Distribution.PackageDescription (GenericPackageDescription,
flagDefault, flagManual,
flagName, genPackageFlags,
- executables, exeName, library, libBuildInfo, buildable)
+ condExecutables)
import qualified Distribution.PackageDescription as C
+import qualified Distribution.Types.UnqualComponentName as C
import Distribution.System (Platform)
import Distribution.Text (display)
import qualified Distribution.Version as C
-import Network.HTTP.Download
-import Path
-import Path.IO
-import Prelude -- Fix AMP warning
import Stack.Constants
-import Stack.Fetch
import Stack.Package
-import Stack.PackageIndex
+import Stack.Snapshot
import Stack.Types.BuildPlan
import Stack.Types.FlagName
import Stack.Types.PackageIdentifier
-import Stack.Types.PackageIndex
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
-import Stack.Types.Urls
import Stack.Types.Compiler
import Stack.Types.Resolver
-import Stack.Types.StackT
data BuildPlanException
= UnknownPackages
@@ -104,7 +61,6 @@ data BuildPlanException
(Map PackageName (Maybe Version, Set PackageName)) -- truly unknown
(Map PackageName (Set PackageIdentifier)) -- shadowed
| SnapshotNotFound SnapName
- | FilepathInDownloadedSnapshot T.Text
| NeitherCompilerOrResolverSpecified T.Text
deriving (Typeable)
instance Exception BuildPlanException
@@ -182,246 +138,17 @@ instance Show BuildPlanException where
$ Set.toList
$ Set.unions
$ Map.elems shadowed
- show (FilepathInDownloadedSnapshot url) = unlines
- [ "Downloaded snapshot specified a 'resolver: { location: filepath }' "
- , "field, but filepaths are not allowed in downloaded snapshots.\n"
- , "Filepath specified: " ++ T.unpack url
- ]
show (NeitherCompilerOrResolverSpecified url) =
"Failed to load custom snapshot at " ++
T.unpack url ++
", because no 'compiler' or 'resolver' is specified."
--- | Determine the necessary packages to install to have the given set of
--- packages available.
---
--- This function will not provide test suite and benchmark dependencies.
---
--- This may fail if a target package is not present in the @BuildPlan@.
-resolveBuildPlan
- :: (StackMiniM env m, HasBuildConfig env)
- => MiniBuildPlan
- -> (PackageName -> Bool) -- ^ is it shadowed by a local package?
- -> Map PackageName (Set PackageName) -- ^ required packages, and users of it
- -> m ( Map PackageName (Version, Map FlagName Bool)
- , Map PackageName (Set PackageName)
- )
-resolveBuildPlan mbp isShadowed packages
- | Map.null (rsUnknown rs) && Map.null (rsShadowed rs) = return (rsToInstall rs, rsUsedBy rs)
- | otherwise = do
- bconfig <- view buildConfigL
- (caches, _gitShaCaches) <- getPackageCaches
- let maxVer =
- Map.fromListWith max $
- map toTuple $
- Map.keys caches
- unknown = flip Map.mapWithKey (rsUnknown rs) $ \ident x ->
- (Map.lookup ident maxVer, x)
- throwM $ UnknownPackages
- (bcStackYaml bconfig)
- unknown
- (rsShadowed rs)
- where
- rs = getDeps mbp isShadowed packages
-
-data ResolveState = ResolveState
- { rsVisited :: Map PackageName (Set PackageName) -- ^ set of shadowed dependencies
- , rsUnknown :: Map PackageName (Set PackageName)
- , rsShadowed :: Map PackageName (Set PackageIdentifier)
- , rsToInstall :: Map PackageName (Version, Map FlagName Bool)
- , rsUsedBy :: Map PackageName (Set PackageName)
- }
-
-toMiniBuildPlan
- :: (StackMiniM env m, HasConfig env)
- => CompilerVersion -- ^ Compiler version
- -> Map PackageName Version -- ^ cores
- -> Map PackageName (Version, Map FlagName Bool, [Text], Maybe GitSHA1) -- ^ non-core packages
- -> m MiniBuildPlan
-toMiniBuildPlan compilerVersion corePackages packages = do
- -- Determine the dependencies of all of the packages in the build plan. We
- -- handle core packages specially, because some of them will not be in the
- -- package index. For those, we allow missing packages to exist, and then
- -- remove those from the list of dependencies, since there's no way we'll
- -- ever reinstall them anyway.
- (cores, missingCores) <- addDeps True compilerVersion
- $ fmap (, Map.empty, [], Nothing) corePackages
-
- (extras, missing) <- addDeps False compilerVersion packages
-
- assert (Set.null missing) $ return MiniBuildPlan
- { mbpCompilerVersion = compilerVersion
- , mbpPackages = Map.unions
- [ fmap (removeMissingDeps (Map.keysSet cores)) cores
- , extras
- , Map.fromList $ map goCore $ Set.toList missingCores
- ]
- }
- where
- goCore (PackageIdentifier name version) = (name, MiniPackageInfo
- { mpiVersion = version
- , mpiFlags = Map.empty
- , mpiGhcOptions = []
- , mpiPackageDeps = Set.empty
- , mpiToolDeps = Set.empty
- , mpiExes = Set.empty
- , mpiHasLibrary = True
- , mpiGitSHA1 = Nothing
- })
-
- removeMissingDeps cores mpi = mpi
- { mpiPackageDeps = Set.intersection cores (mpiPackageDeps mpi)
- }
-
--- | Add in the resolved dependencies from the package index
-addDeps
- :: (StackMiniM env m, HasConfig env)
- => Bool -- ^ allow missing
- -> CompilerVersion -- ^ Compiler version
- -> Map PackageName (Version, Map FlagName Bool, [Text], Maybe GitSHA1)
- -> m (Map PackageName MiniPackageInfo, Set PackageIdentifier)
-addDeps allowMissing compilerVersion toCalc = do
- platform <- view platformL
- (resolvedMap, missingIdents) <-
- if allowMissing
- then do
- (missingNames, missingIdents, m) <-
- resolvePackagesAllowMissing Nothing shaMap Set.empty
- assert (Set.null missingNames)
- $ return (m, missingIdents)
- else do
- m <- resolvePackages Nothing shaMap Set.empty
- return (m, Set.empty)
- let byIndex = Map.fromListWith (++) $ flip map resolvedMap
- $ \rp ->
- let (cache, ghcOptions, sha) =
- case Map.lookup (packageIdentifierName (rpIdent rp)) toCalc of
- Nothing -> (Map.empty, [], Nothing)
- Just (_, x, y, z) -> (x, y, z)
- in (indexName $ rpIndex rp, [(rp, (cache, ghcOptions, sha))])
- res <- forM (Map.toList byIndex) $ \(indexName', pkgs) ->
- fmap Map.unions $ withCabalFiles indexName' pkgs
- $ \ident (flags, ghcOptions, mgitSha) cabalBS ->
- case readPackageUnresolvedBS (Right ident) cabalBS of
- Left e
- | allowedToSkip ident -> return Map.empty
- | otherwise -> throwM e
- Right (_warnings, gpd) -> do
- let packageConfig = PackageConfig
- { packageConfigEnableTests = False
- , packageConfigEnableBenchmarks = False
- , packageConfigFlags = flags
- , packageConfigGhcOptions = ghcOptions
- , packageConfigCompilerVersion = compilerVersion
- , packageConfigPlatform = platform
- }
- name = packageIdentifierName ident
- pd = resolvePackageDescription packageConfig gpd
- exes = Set.fromList $ map (ExeName . T.pack . exeName) $ executables pd
- notMe = Set.filter (/= name) . Map.keysSet
- return $ Map.singleton name MiniPackageInfo
- { mpiVersion = packageIdentifierVersion ident
- , mpiFlags = flags
- , mpiGhcOptions = ghcOptions
- , mpiPackageDeps = notMe $ packageDependencies pd
- , mpiToolDeps = Map.keysSet $ packageToolDependencies pd
- , mpiExes = exes
- , mpiHasLibrary = maybe
- False
- (buildable . libBuildInfo)
- (library pd)
- , mpiGitSHA1 = mgitSha
- }
- return (Map.unions res, missingIdents)
- where
- shaMap = Map.fromList
- $ map (\(n, (v, _f, _ghcOptions, gitsha)) -> (PackageIdentifier n v, gitsha))
- $ Map.toList toCalc
-
- -- Michael Snoyman, 2017-07-31:
- --
- -- This is a stop-gap measure to address a specific concern around
- -- the GHC 8.2.1 release. The current Stack version (1.5.0) will
- -- eagerly parse all cabal files mentioned in a snapshot,
- -- including global packages. Additionally, for the first time
- -- (AFAICT), GHC 8.2.1 is providing a package on Hackage with a
- -- ghc.cabal file, which requires the (not yet supported) Cabal
- -- 2.0 file format. To work around this, we're adding a special
- -- dispensation to ignore parse failures for this package.
- --
- -- Master already does better by simply ignoring global
- -- information and looking things up in the database. We may want
- -- to consider going a step further and simply ignoring _all_
- -- parse failures, or turning them into warnings, though I haven't
- -- considered the repercussions of that.
- allowedToSkip (PackageIdentifier name _) = name == $(mkPackageName "ghc")
-
--- | Resolve all packages necessary to install for the needed packages.
-getDeps :: MiniBuildPlan
- -> (PackageName -> Bool) -- ^ is it shadowed by a local package?
- -> Map PackageName (Set PackageName)
- -> ResolveState
-getDeps mbp isShadowed packages =
- execState (mapM_ (uncurry goName) $ Map.toList packages) ResolveState
- { rsVisited = Map.empty
- , rsUnknown = Map.empty
- , rsShadowed = Map.empty
- , rsToInstall = Map.empty
- , rsUsedBy = Map.empty
- }
- where
- toolMap = getToolMap mbp
-
- -- | Returns a set of shadowed packages we depend on.
- goName :: PackageName -> Set PackageName -> State ResolveState (Set PackageName)
- goName name users = do
- -- Even though we could check rsVisited first and short-circuit things
- -- earlier, lookup in mbpPackages first so that we can produce more
- -- usable error information on missing dependencies
- rs <- get
- put rs
- { rsUsedBy = Map.insertWith Set.union name users $ rsUsedBy rs
- }
- case Map.lookup name $ mbpPackages mbp of
- Nothing -> do
- modify $ \rs' -> rs'
- { rsUnknown = Map.insertWith Set.union name users $ rsUnknown rs'
- }
- return Set.empty
- Just mpi -> case Map.lookup name (rsVisited rs) of
- Just shadowed -> return shadowed
- Nothing -> do
- put rs { rsVisited = Map.insert name Set.empty $ rsVisited rs }
- let depsForTools = Set.unions $ mapMaybe (flip Map.lookup toolMap) (Set.toList $ mpiToolDeps mpi)
- let deps = Set.filter (/= name) (mpiPackageDeps mpi <> depsForTools)
- shadowed <- fmap F.fold $ Tr.forM (Set.toList deps) $ \dep ->
- if isShadowed dep
- then do
- modify $ \rs' -> rs'
- { rsShadowed = Map.insertWith
- Set.union
- dep
- (Set.singleton $ PackageIdentifier name (mpiVersion mpi))
- (rsShadowed rs')
- }
- return $ Set.singleton dep
- else do
- shadowed <- goName dep (Set.singleton name)
- let m = Map.fromSet (\_ -> Set.singleton $ PackageIdentifier name (mpiVersion mpi)) shadowed
- modify $ \rs' -> rs'
- { rsShadowed = Map.unionWith Set.union m $ rsShadowed rs'
- }
- return shadowed
- modify $ \rs' -> rs'
- { rsToInstall = Map.insert name (mpiVersion mpi, mpiFlags mpi) $ rsToInstall rs'
- , rsVisited = Map.insert name shadowed $ rsVisited rs'
- }
- return shadowed
-
--- | Map from tool name to package providing it
-getToolMap :: MiniBuildPlan -> Map Text (Set PackageName)
-getToolMap mbp =
- Map.unionsWith Set.union
+-- | Map from tool name to package providing it. This accounts for
+-- both snapshot and local packages (deps and project packages).
+getToolMap :: LoadedSnapshot
+ -> LocalPackages
+ -> Map ExeName (Set PackageName)
+getToolMap ls locals =
{- We no longer do this, following discussion at:
@@ -432,111 +159,30 @@ getToolMap mbp =
$ Map.fromList (map (packageNameByteString &&& Set.singleton) (Map.keys ps))
-}
- -- And then get all of the explicit executable names
- $ concatMap goPair (Map.toList ps)
+ Map.unionsWith Set.union $ concat
+ [ concatMap goSnap $ Map.toList $ lsPackages ls
+ , concatMap goLocalProj $ Map.toList $ lpProject locals
+ , concatMap goLocalDep $ Map.toList $ lpDependencies locals
+ ]
where
- ps = mbpPackages mbp
-
- goPair (pname, mpi) =
- map (flip Map.singleton (Set.singleton pname) . unExeName)
+ goSnap (pname, lpi) =
+ map (flip Map.singleton (Set.singleton pname))
$ Set.toList
- $ mpiExes mpi
-
-loadResolver
- :: (StackMiniM env m, HasConfig env, HasGHCVariant env)
- => Maybe (Path Abs File)
- -> Resolver
- -> m (MiniBuildPlan, LoadedResolver)
-loadResolver mconfigPath resolver =
- case resolver of
- ResolverSnapshot snap ->
- liftM (, ResolverSnapshot snap) $ loadMiniBuildPlan snap
- -- TODO(mgsloan): Not sure what this FIXME means
- -- FIXME instead of passing the stackYaml dir we should maintain
- -- the file URL in the custom resolver always relative to stackYaml.
- ResolverCustom name url -> do
- (mbp, hash) <- parseCustomMiniBuildPlan mconfigPath url
- return (mbp, ResolverCustomLoaded name url hash)
- ResolverCompiler compiler -> return
- ( MiniBuildPlan
- { mbpCompilerVersion = compiler
- , mbpPackages = mempty
- }
- , ResolverCompiler compiler
- )
-
--- | Load up a 'MiniBuildPlan', preferably from cache
-loadMiniBuildPlan
- :: (StackMiniM env m, HasConfig env, HasGHCVariant env)
- => SnapName -> m MiniBuildPlan
-loadMiniBuildPlan name = do
- path <- configMiniBuildPlanCache name
- $(versionedDecodeOrLoad miniBuildPlanVC) path $ liftM buildPlanFixes $ do
- bp <- loadBuildPlan name
- toMiniBuildPlan
- (siCompilerVersion $ bpSystemInfo bp)
- (siCorePackages $ bpSystemInfo bp)
- (goPP <$> bpPackages bp)
- where
- goPP pp =
- ( ppVersion pp
- , pcFlagOverrides $ ppConstraints pp
- -- TODO: store ghc options in BuildPlan?
- , []
- , ppCabalFileInfo pp
- >>= fmap (GitSHA1 . encodeUtf8)
- . Map.lookup "GitSHA1"
- . cfiHashes
- )
+ $ lpiProvidedExes lpi
--- | Some hard-coded fixes for build plans, hopefully to be irrelevant over
--- time.
-buildPlanFixes :: MiniBuildPlan -> MiniBuildPlan
-buildPlanFixes mbp = mbp
- { mbpPackages = Map.fromList $ map go $ Map.toList $ mbpPackages mbp
- }
- where
- go (name, mpi) =
- (name, mpi
- { mpiFlags = goF (packageNameString name) (mpiFlags mpi)
- })
+ goLocalProj (pname, lpv) =
+ map (flip Map.singleton (Set.singleton pname))
+ [ExeName t | CExe t <- Set.toList (lpvComponents lpv)]
- goF "persistent-sqlite" = Map.insert $(mkFlagName "systemlib") False
- goF "yaml" = Map.insert $(mkFlagName "system-libyaml") False
- goF _ = id
-
--- | Load the 'BuildPlan' for the given snapshot. Will load from a local copy
--- if available, otherwise downloading from Github.
-loadBuildPlan :: (StackMiniM env m, HasConfig env) => SnapName -> m BuildPlan
-loadBuildPlan name = do
- stackage <- view stackRootL
- file' <- parseRelFile $ T.unpack file
- let fp = buildPlanDir stackage </> file'
- $logDebug $ "Decoding build plan from: " <> T.pack (toFilePath fp)
- eres <- liftIO $ decodeFileEither $ toFilePath fp
- case eres of
- Right bp -> return bp
- Left e -> do
- $logDebug $ "Decoding build plan from file failed: " <> T.pack (show e)
- ensureDir (parent fp)
- url <- buildBuildPlanUrl name file
- req <- parseRequest $ T.unpack url
- $logSticky $ "Downloading " <> renderSnapName name <> " build plan ..."
- $logDebug $ "Downloading build plan from: " <> url
- _ <- redownload req fp
- $logStickyDone $ "Downloaded " <> renderSnapName name <> " build plan."
- liftIO (decodeFileEither $ toFilePath fp) >>= either throwM return
-
- where
- file = renderSnapName name <> ".yaml"
+ goLocalDep (pname, (gpd, _loc)) =
+ map (flip Map.singleton (Set.singleton pname))
+ $ gpdExes gpd
-buildBuildPlanUrl :: (MonadReader env m, HasConfig env) => SnapName -> Text -> m Text
-buildBuildPlanUrl name file = do
- urls <- view $ configL.to configUrls
- return $
- case name of
- LTS _ _ -> urlsLtsBuildPlans urls <> "/" <> file
- Nightly _ -> urlsNightlyBuildPlans urls <> "/" <> file
+ -- TODO consider doing buildable checking. Not a big deal though:
+    -- worst case scenario is we build an extra package that wasn't
+ -- strictly needed.
+ gpdExes :: GenericPackageDescription -> [ExeName]
+ gpdExes = map (ExeName . T.pack . C.unUnqualComponentName . fst) . condExecutables
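For example (a sketch; package names are made up): if the snapshot's alex package provides an executable alex and a project package my-tools defines one as well, the map contains an entry like

-- Map.lookup (ExeName "alex") (getToolMap ls locals)
--     ~ Just (Set.fromList ["alex", "my-tools"])   -- as PackageNames

so a build-tool dependency on alex can be satisfied by either provider.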
gpdPackages :: [GenericPackageDescription] -> Map PackageName Version
gpdPackages gpds = Map.fromList $
@@ -545,15 +191,9 @@ gpdPackages gpds = Map.fromList $
fromCabalIdent (C.PackageIdentifier name version) =
(fromCabalPackageName name, fromCabalVersion version)
-gpdPackageName :: GenericPackageDescription -> PackageName
-gpdPackageName = fromCabalPackageName
- . C.pkgName
- . C.package
- . C.packageDescription
-
gpdPackageDeps
:: GenericPackageDescription
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Platform
-> Map FlagName Bool
-> Map PackageName VersionRange
@@ -561,7 +201,9 @@ gpdPackageDeps gpd cv platform flags =
Map.filterWithKey (const . (/= name)) (packageDependencies pkgDesc)
where
name = gpdPackageName gpd
- pkgDesc = resolvePackageDescription pkgConfig gpd
+ -- Since tests and benchmarks are both enabled, doesn't matter
+ -- if we choose modified or unmodified
+ pkgDesc = pdpModifiedBuildable $ resolvePackageDescription pkgConfig gpd
pkgConfig = PackageConfig
{ packageConfigEnableTests = True
, packageConfigEnableBenchmarks = True
@@ -600,7 +242,7 @@ removeSrcPkgDefaultFlags gpds flags =
-- Returns the plan which produces least number of dep errors
selectPackageBuildPlan
:: Platform
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Map PackageName Version
-> GenericPackageDescription
-> (Map PackageName (Map FlagName Bool), DepErrors)
@@ -639,7 +281,7 @@ selectPackageBuildPlan platform compiler pool gpd =
-- constraints can be satisfied against a given build plan or pool of packages.
checkPackageBuildPlan
:: Platform
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Map PackageName Version
-> Map FlagName Bool
-> GenericPackageDescription
@@ -693,7 +335,7 @@ combineDepError (DepError a x) (DepError b y) =
-- will be chosen automatically.
checkBundleBuildPlan
:: Platform
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Map PackageName Version
-> Maybe (Map PackageName (Map FlagName Bool))
-> [GenericPackageDescription]
@@ -717,7 +359,7 @@ data BuildPlanCheck =
BuildPlanCheckOk (Map PackageName (Map FlagName Bool))
| BuildPlanCheckPartial (Map PackageName (Map FlagName Bool)) DepErrors
| BuildPlanCheckFail (Map PackageName (Map FlagName Bool)) DepErrors
- CompilerVersion
+ (CompilerVersion 'CVActual)
-- | Compare 'BuildPlanCheck', where GT means a better plan.
compareBuildPlanCheck :: BuildPlanCheck -> BuildPlanCheck -> Ordering
@@ -742,18 +384,22 @@ instance Show BuildPlanCheck where
-- given snapshot. Returns how well the snapshot satisfies the dependencies of
-- the packages.
checkSnapBuildPlan
- :: (StackM env m, HasConfig env, HasGHCVariant env)
- => [GenericPackageDescription]
+ :: (HasConfig env, HasGHCVariant env)
+ => Path Abs Dir -- ^ project root, used for checking out necessary files
+ -> [GenericPackageDescription]
-> Maybe (Map PackageName (Map FlagName Bool))
- -> SnapName
- -> m BuildPlanCheck
-checkSnapBuildPlan gpds flags snap = do
+ -> SnapshotDef
+ -> Maybe (CompilerVersion 'CVActual)
+ -> RIO env BuildPlanCheck
+checkSnapBuildPlan root gpds flags snapshotDef mactualCompiler = do
platform <- view platformL
- mbp <- loadMiniBuildPlan snap
+ rs <- loadSnapshot mactualCompiler root snapshotDef
let
- compiler = mbpCompilerVersion mbp
- snapPkgs = mpiVersion <$> mbpPackages mbp
+ compiler = lsCompilerVersion rs
+ snapPkgs = Map.union
+ (lpiVersion <$> lsGlobals rs)
+ (lpiVersion <$> lsPackages rs)
(f, errs) = checkBundleBuildPlan platform compiler snapPkgs flags gpds
cerrs = compilerErrors compiler errs
@@ -775,15 +421,16 @@ checkSnapBuildPlan gpds flags snap = do
-- | Find a snapshot and set of flags that is compatible with and matches as
-- best as possible with the given 'GenericPackageDescription's.
selectBestSnapshot
- :: (StackM env m, HasConfig env, HasGHCVariant env)
- => [GenericPackageDescription]
+ :: (HasConfig env, HasGHCVariant env)
+ => Path Abs Dir -- ^ project root, used for checking out necessary files
+ -> [GenericPackageDescription]
-> NonEmpty SnapName
- -> m (SnapName, BuildPlanCheck)
-selectBestSnapshot gpds snaps = do
- $logInfo $ "Selecting the best among "
+ -> RIO env (SnapshotDef, BuildPlanCheck)
+selectBestSnapshot root gpds snaps = do
+ logInfo $ "Selecting the best among "
<> T.pack (show (NonEmpty.length snaps))
<> " snapshots...\n"
- F.foldr1 go (NonEmpty.map getResult snaps)
+ F.foldr1 go (NonEmpty.map (getResult <=< loadResolver . ResolverSnapshot) snaps)
where
go mold mnew = do
old@(_snap, bpc) <- mold
@@ -792,7 +439,13 @@ selectBestSnapshot gpds snaps = do
_ -> fmap (betterSnap old) mnew
getResult snap = do
- result <- checkSnapBuildPlan gpds Nothing snap
+ result <- checkSnapBuildPlan root gpds Nothing snap
+ -- We know that we're only dealing with ResolverSnapshot
+ -- here, where we can rely on the global package hints.
+ -- Therefore, we don't use an actual compiler. For more
+ -- info, see comments on
+ -- Stack.Solver.checkSnapBuildPlanActual.
+ Nothing
reportResult result snap
return (snap, result)
@@ -801,16 +454,16 @@ selectBestSnapshot gpds snaps = do
| otherwise = (s2, r2)
reportResult BuildPlanCheckOk {} snap = do
- $logInfo $ "* Matches " <> renderSnapName snap
- $logInfo ""
+ logInfo $ "* Matches " <> sdResolverName snap
+ logInfo ""
reportResult r@BuildPlanCheckPartial {} snap = do
- $logWarn $ "* Partially matches " <> renderSnapName snap
- $logWarn $ indent $ T.pack $ show r
+ logWarn $ "* Partially matches " <> sdResolverName snap
+ logWarn $ indent $ T.pack $ show r
reportResult r@BuildPlanCheckFail {} snap = do
- $logWarn $ "* Rejected " <> renderSnapName snap
- $logWarn $ indent $ T.pack $ show r
+ logWarn $ "* Rejected " <> sdResolverName snap
+ logWarn $ indent $ T.pack $ show r
indent t = T.unlines $ fmap (" " <>) (T.lines t)
@@ -844,7 +497,7 @@ showMapPackages mp = showItems $ Map.keys mp
showCompilerErrors
:: Map PackageName (Map FlagName Bool)
-> DepErrors
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Text
showCompilerErrors flags errs compiler =
T.concat
@@ -890,236 +543,3 @@ showDepErrors flags errs =
flagVals = T.concat (map showFlags userPkgs)
userPkgs = Map.keys $ Map.unions (Map.elems (fmap deNeededBy errs))
showFlags pkg = maybe "" (showPackageFlags pkg) (Map.lookup pkg flags)
-
--- | Given a set of packages to shadow, this removes them, and any
--- packages that transitively depend on them, from the 'MiniBuildPlan'.
--- The 'Map' result yields all of the packages that were downstream of
--- the shadowed packages. It does not include the shadowed packages.
-shadowMiniBuildPlan :: MiniBuildPlan
- -> Set PackageName
- -> (MiniBuildPlan, Map PackageName MiniPackageInfo)
-shadowMiniBuildPlan (MiniBuildPlan cv pkgs0) shadowed =
- (MiniBuildPlan cv (Map.fromList met), Map.fromList unmet)
- where
- pkgs1 = Map.difference pkgs0 $ Map.fromSet (const ()) shadowed
-
- depsMet = flip execState Map.empty $ mapM_ (check Set.empty) (Map.keys pkgs1)
-
- check visited name
- | name `Set.member` visited =
- error $ "shadowMiniBuildPlan: cycle detected, your MiniBuildPlan is broken: " ++ show (visited, name)
- | otherwise = do
- m <- get
- case Map.lookup name m of
- Just x -> return x
- Nothing ->
- case Map.lookup name pkgs1 of
- Nothing
- | name `Set.member` shadowed -> return False
-
- -- In this case, we have to assume that we're
- -- constructing a build plan on a different OS or
- -- architecture, and therefore different packages
- -- are being chosen. The common example of this is
- -- the Win32 package.
- | otherwise -> return True
- Just mpi -> do
- let visited' = Set.insert name visited
- ress <- mapM (check visited') (Set.toList $ mpiPackageDeps mpi)
- let res = and ress
- modify $ \m' -> Map.insert name res m'
- return res
-
- (met, unmet) = partitionEithers $ map toEither $ Map.toList pkgs1
-
- toEither pair@(name, _) =
- wrapper pair
- where
- wrapper =
- case Map.lookup name depsMet of
- Just True -> Left
- Just False -> Right
- Nothing -> assert False Right
-
--- This works differently for snapshots fetched from URL and those
--- fetched from file:
---
--- 1) If downloading the snapshot from a URL, assume the fetched data is
--- immutable. Hash the URL in order to determine the location of the
--- cached download. The file contents of the snapshot determines the
--- hash for looking up cached MBP.
---
--- 2) If loading the snapshot from a file, load all of the involved
--- snapshot files. The hash used to determine the cached MBP is the hash
--- of the concatenation of the parent's hash with the snapshot contents.
---
--- Why this difference? We want to make it easy to simply edit snapshots
--- in the filesystem, but we want caching for remote snapshots. In order
--- to avoid reparsing / reloading all the yaml for remote snapshots, we
--- need a different hash system.
-
--- TODO: This could probably be more efficient if it first merged the
--- custom snapshots, and then applied them to the MBP. It is nice to
--- apply directly, because then we have the guarantee that it's
--- semantically identical to snapshot extension. If this optimization is
--- implemented, note that the direct Monoid for CustomSnapshot is not
--- correct. Crucially, if a package is present in the snapshot, its
--- flags and ghc-options are not based on settings from prior snapshots.
--- TODO: This semantics should be discussed / documented more.
-
--- TODO: allow a hash check in the resolver. This adds safety /
--- correctness, allowing you to ensure that you are indeed getting the
--- right custom snapshot.
-
--- TODO: Allow custom plan to specify a name.
-
-parseCustomMiniBuildPlan
- :: (StackMiniM env m, HasConfig env, HasGHCVariant env)
- => Maybe (Path Abs File) -- ^ Root directory for when url is a filepath
- -> T.Text
- -> m (MiniBuildPlan, SnapshotHash)
-parseCustomMiniBuildPlan mconfigPath0 url0 = do
- $logDebug $ "Loading " <> url0 <> " build plan"
- case parseUrlThrow $ T.unpack url0 of
- Just req -> downloadCustom url0 req
- Nothing ->
- case mconfigPath0 of
- Nothing -> throwM $ FilepathInDownloadedSnapshot url0
- Just configPath -> do
- (getMbp, hash) <- readCustom configPath url0
- mbp <- getMbp
- -- NOTE: We make the choice of only writing a cache
- -- file for the full MBP, not the intermediate ones.
- -- This isn't necessarily the best choice if we want
- -- to share work extended snapshots. I think only
- -- writing this one is more efficient for common
- -- cases.
- binaryPath <- getBinaryPath hash
- alreadyCached <- doesFileExist binaryPath
- unless alreadyCached $ $(versionedEncodeFile miniBuildPlanVC) binaryPath mbp
- return (mbp, hash)
- where
- downloadCustom url req = do
- let urlHash = S8.unpack $ trimmedSnapshotHash $ doHash $ encodeUtf8 url
- hashFP <- parseRelFile $ urlHash ++ ".yaml"
- customPlanDir <- getCustomPlanDir
- let cacheFP = customPlanDir </> $(mkRelDir "yaml") </> hashFP
- _ <- download req cacheFP
- yamlBS <- liftIO $ S.readFile $ toFilePath cacheFP
- let yamlHash = doHash yamlBS
- binaryPath <- getBinaryPath yamlHash
- liftM (, yamlHash) $ $(versionedDecodeOrLoad miniBuildPlanVC) binaryPath $ do
- (cs, mresolver) <- decodeYaml yamlBS
- parentMbp <- case (csCompilerVersion cs, mresolver) of
- (Nothing, Nothing) -> throwM (NeitherCompilerOrResolverSpecified url)
- (Just cv, Nothing) -> return (compilerBuildPlan cv)
- -- NOTE: ignoring the parent's hash, even though
- -- there could be one. URL snapshot's hash are
- -- determined just from their contents.
- (_, Just resolver) -> liftM fst (loadResolver Nothing resolver)
- applyCustomSnapshot cs parentMbp
- readCustom configPath path = do
- yamlFP <- resolveFile (parent configPath) (T.unpack $ fromMaybe path $
- T.stripPrefix "file://" path <|> T.stripPrefix "file:" path)
- yamlBS <- liftIO $ S.readFile $ toFilePath yamlFP
- (cs, mresolver) <- decodeYaml yamlBS
- (getMbp, hash) <- case mresolver of
- Just (ResolverCustom _ url ) ->
- case parseUrlThrow $ T.unpack url of
- Just req -> do
- let getMbp = do
- -- Ignore custom hash, under the
- -- assumption that the URL is sufficient
- -- for identity.
- (mbp, _) <- downloadCustom url req
- return mbp
- return (getMbp, doHash yamlBS)
- Nothing -> do
- (getMbp0, SnapshotHash hash0) <- readCustom yamlFP url
- let hash = doHash (hash0 <> yamlBS)
- getMbp = do
- binaryPath <- getBinaryPath hash
- -- Idea here is to not waste time
- -- writing out intermediate cache files,
- -- but check for them.
- exists <- doesFileExist binaryPath
- if exists
- then do
- eres <- $(versionedDecodeFile miniBuildPlanVC) binaryPath
- case eres of
- Just mbp -> return mbp
- -- Invalid format cache file, remove.
- Nothing -> do
- removeFile binaryPath
- getMbp0
- else getMbp0
- return (getMbp, hash)
- Just resolver -> do
- -- NOTE: in the cases where we don't have a hash, the
- -- normal resolver name is enough. Since this name is
- -- part of the yaml file, it ends up in our hash.
- let hash = doHash yamlBS
- getMbp = do
- (mbp, resolver') <- loadResolver (Just configPath) resolver
- let mhash = customResolverHash resolver'
- assert (isNothing mhash) (return mbp)
- return (getMbp, hash)
- Nothing -> do
- case csCompilerVersion cs of
- Nothing -> throwM (NeitherCompilerOrResolverSpecified path)
- Just cv -> do
- let hash = doHash yamlBS
- getMbp = return (compilerBuildPlan cv)
- return (getMbp, hash)
- return (applyCustomSnapshot cs =<< getMbp, hash)
- getBinaryPath hash = do
- binaryFilename <- parseRelFile $ S8.unpack (trimmedSnapshotHash hash) ++ ".bin"
- customPlanDir <- getCustomPlanDir
- return $ customPlanDir </> $(mkRelDir "bin") </> binaryFilename
- decodeYaml yamlBS = do
- WithJSONWarnings res warnings <-
- either (throwM . ParseCustomSnapshotException url0) return $
- decodeEither' yamlBS
- logJSONWarnings (T.unpack url0) warnings
- return res
- compilerBuildPlan cv = MiniBuildPlan
- { mbpCompilerVersion = cv
- , mbpPackages = mempty
- }
- getCustomPlanDir = do
- root <- view stackRootL
- return $ root </> $(mkRelDir "custom-plan")
- doHash = SnapshotHash . B64URL.encode . Mem.convert . hashWith SHA256
-
-applyCustomSnapshot
- :: (StackMiniM env m, HasConfig env)
- => CustomSnapshot
- -> MiniBuildPlan
- -> m MiniBuildPlan
-applyCustomSnapshot cs mbp0 = do
- let CustomSnapshot mcompilerVersion
- packages
- dropPackages
- (PackageFlags flags)
- ghcOptions
- = cs
- addFlagsAndOpts :: PackageIdentifier -> (PackageName, (Version, Map FlagName Bool, [Text], Maybe GitSHA1))
- addFlagsAndOpts (PackageIdentifier name ver) =
- ( name
- , ( ver
- , Map.findWithDefault Map.empty name flags
- -- NOTE: similar to 'allGhcOptions' in Stack.Types.Build
- , ghcOptionsFor name ghcOptions
- -- we add a Nothing since we don't yet collect Git SHAs for custom snapshots
- , Nothing
- )
- )
- packageMap = Map.fromList $ map addFlagsAndOpts $ Set.toList packages
- cv = fromMaybe (mbpCompilerVersion mbp0) mcompilerVersion
- packages0 =
- mbpPackages mbp0 `Map.difference` Map.fromSet (const ()) dropPackages
- mbp1 <- toMiniBuildPlan cv mempty packageMap
- return MiniBuildPlan
- { mbpCompilerVersion = cv
- , mbpPackages = Map.union (mbpPackages mbp1) packages0
- }
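The rewritten selectBestSnapshot above keeps the earlier fold-over-candidates shape: each snapshot is scored monadically and the fold stops consulting further candidates as soon as one matches fully. Below is a minimal standalone sketch of that pattern, with a plain Int score standing in for BuildPlanCheck (0 means a full match, lower is better); selectBest and the score function are illustrative names, not part of the diff.

    import qualified Data.Foldable as F
    import           Data.List.NonEmpty (NonEmpty)
    import qualified Data.List.NonEmpty as NonEmpty

    -- Score every candidate and keep the better of each adjacent pair,
    -- skipping the remaining candidates once a perfect score is seen.
    selectBest :: Monad m => (a -> m Int) -> NonEmpty a -> m (a, Int)
    selectBest score cands = F.foldr1 go (NonEmpty.map getResult cands)
      where
        getResult c = do
          s <- score c
          return (c, s)
        go mold mnew = do
          old@(_, s) <- mold
          if s == 0
            then return old      -- full match: don't evaluate the rest
            else do
              new@(_, s') <- mnew
              return (if s <= s' then old else new)

Because foldr1 builds go c1 (go c2 (... cN)), later candidates are only evaluated when an earlier one fails to match fully, which is what keeps a run against a well-matching snapshot short.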
diff --git a/src/Stack/Clean.hs b/src/Stack/Clean.hs
index 97fddb5..aa76d7b 100644
--- a/src/Stack/Clean.hs
+++ b/src/Stack/Clean.hs
@@ -1,6 +1,8 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
-- | Clean a project.
module Stack.Clean
@@ -9,53 +11,47 @@ module Stack.Clean
,StackCleanException(..)
) where
-import Control.Exception (Exception)
-import Control.Monad.Catch (throwM)
-import Data.Foldable (forM_)
+import Stack.Prelude
import Data.List ((\\),intercalate)
import qualified Data.Map.Strict as Map
-import Data.Maybe (mapMaybe)
-import Data.Typeable (Typeable)
-import Path (Path, Abs, Dir)
+import qualified Data.Text as T
import Path.IO (ignoringAbsence, removeDirRecur)
-import Stack.Build.Source (getLocalPackageViews)
-import Stack.Build.Target (LocalPackageView(..))
import Stack.Config (getLocalPackages)
-import Stack.Constants (distDirFromDir, workDirFromDir)
+import Stack.Constants.Config (distDirFromDir, workDirFromDir)
import Stack.Types.PackageName
import Stack.Types.Config
-import Stack.Types.StackT
+import System.Exit (exitFailure)
-- | Deletes build artifacts in the current project.
--
-- Throws 'StackCleanException'.
-clean
- :: (StackM env m, HasEnvConfig env)
- => CleanOpts
- -> m ()
+clean :: HasEnvConfig env => CleanOpts -> RIO env ()
clean cleanOpts = do
- dirs <- dirsToDelete cleanOpts
- forM_ dirs (ignoringAbsence . removeDirRecur)
+ failures <- mapM cleanDir =<< dirsToDelete cleanOpts
+ when (or failures) $ liftIO exitFailure
+ where
+ cleanDir dir =
+ liftIO (ignoringAbsence (removeDirRecur dir) >> return False) `catchAny` \ex -> do
+ logError $ "Exception while recursively deleting " <> T.pack (toFilePath dir) <> "\n" <> T.pack (show ex)
+ logError "Perhaps you do not have permission to delete these files or they are in use?"
+ return True
-dirsToDelete
- :: (StackM env m, HasEnvConfig env)
- => CleanOpts
- -> m [Path Abs Dir]
+dirsToDelete :: HasEnvConfig env => CleanOpts -> RIO env [Path Abs Dir]
dirsToDelete cleanOpts = do
packages <- getLocalPackages
case cleanOpts of
CleanShallow [] ->
-- Filter out packages listed as extra-deps
- mapM distDirFromDir . Map.keys . Map.filter (== False) $ packages
+ mapM (distDirFromDir . lpvRoot) $ Map.elems $ lpProject packages
CleanShallow targets -> do
- localPkgViews <- getLocalPackageViews
- let localPkgNames = Map.keys localPkgViews
- getPkgDir pkgName = fmap (lpvRoot . fst) (Map.lookup pkgName localPkgViews)
+ let localPkgViews = lpProject packages
+ localPkgNames = Map.keys localPkgViews
+ getPkgDir pkgName = fmap lpvRoot (Map.lookup pkgName localPkgViews)
case targets \\ localPkgNames of
[] -> mapM distDirFromDir (mapMaybe getPkgDir targets)
xs -> throwM (NonLocalPackages xs)
CleanFull -> do
- pkgWorkDirs <- mapM workDirFromDir (Map.keys packages)
+ pkgWorkDirs <- mapM (workDirFromDir . lpvRoot) $ Map.elems $ lpProject packages
projectWorkDir <- getProjectWorkDir
return (projectWorkDir : pkgWorkDirs)
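The reworked clean above no longer aborts on the first undeletable directory: every deletion is attempted under catchAny, failures are logged, and the process exits nonzero only after all directories have been tried. A self-contained sketch of that per-directory handling, using plain System.Directory in place of path-io's ignoringAbsence/removeDirRecur:

    import Control.Exception (SomeException, try)
    import Control.Monad (when)
    import System.Directory (removeDirectoryRecursive)
    import System.Exit (exitFailure)
    import System.IO (hPutStrLn, stderr)

    -- Try to delete every directory, remember which ones failed, and only
    -- exit with a failure code once all of them have been attempted.
    cleanDirs :: [FilePath] -> IO ()
    cleanDirs dirs = do
        failures <- mapM cleanOne dirs
        when (or failures) exitFailure
      where
        cleanOne dir = do
          res <- try (removeDirectoryRecursive dir) :: IO (Either SomeException ())
          case res of
            Right () -> return False
            Left ex  -> do
              hPutStrLn stderr ("Exception while recursively deleting " ++ dir ++ ": " ++ show ex)
              return True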
diff --git a/src/Stack/Config.hs b/src/Stack/Config.hs
index 2d53f03..40b023d 100644
--- a/src/Stack/Config.hs
+++ b/src/Stack/Config.hs
@@ -1,5 +1,7 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE CPP #-}
+{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
@@ -33,7 +35,6 @@ module Stack.Config
,loadConfigYaml
,packagesParser
,getLocalPackages
- ,resolvePackageEntry
,getImplicitGlobalProjectDir
,getStackYaml
,getSnapshots
@@ -44,43 +45,24 @@ module Stack.Config
,defaultConfigYaml
,getProjectConfig
,LocalConfigStatus(..)
- ,removePathFromPackageEntry
) where
-import qualified Codec.Archive.Tar as Tar
-import qualified Codec.Archive.Zip as Zip
-import qualified Codec.Compression.GZip as GZip
-import Control.Applicative
-import Control.Arrow ((***))
-import Control.Exception (assert)
-import Control.Monad (liftM, unless, when, filterM)
-import Control.Monad.Catch (MonadThrow, MonadCatch, catchAll, throwM, catch)
import Control.Monad.Extra (firstJustM)
-import Control.Monad.IO.Class
-import Control.Monad.Logger hiding (Loc)
-import Control.Monad.Reader (ask, runReaderT)
-import Crypto.Hash (hashWith, SHA256(..))
+import Stack.Prelude
import Data.Aeson.Extended
-import qualified Data.ByteArray as Mem (convert)
import qualified Data.ByteString as S
-import qualified Data.ByteString.Base64.URL as B64URL
-import qualified Data.ByteString.Lazy as L
-import Data.Foldable (forM_)
-import Data.IORef
import qualified Data.IntMap as IntMap
import qualified Data.Map as Map
-import Data.Maybe
-import Data.Monoid.Extra
import qualified Data.Text as T
-import Data.Text.Encoding (encodeUtf8, decodeUtf8)
+import Data.Text.Encoding (encodeUtf8)
import qualified Data.Yaml as Yaml
+import qualified Distribution.PackageDescription as C
import Distribution.System (OS (..), Platform (..), buildPlatform, Arch(OtherArch))
import qualified Distribution.Text
-import Distribution.Version (simplifyVersionRange)
+import Distribution.Version (simplifyVersionRange, mkVersion')
import GHC.Conc (getNumProcessors)
import Lens.Micro (lens)
import Network.HTTP.Client (parseUrlThrow)
-import Network.HTTP.Download (download)
import Network.HTTP.Simple (httpJSON, getResponseBody)
import Options.Applicative (Parser, strOption, long, help)
import Path
@@ -88,31 +70,31 @@ import Path.Extra (toFilePathNoTrailingSep)
import Path.Find (findInParents)
import Path.IO
import qualified Paths_stack as Meta
-import Stack.BuildPlan
import Stack.Config.Build
import Stack.Config.Docker
import Stack.Config.Nix
import Stack.Config.Urls
import Stack.Constants
+import Stack.Fetch
import qualified Stack.Image as Image
+import Stack.PackageLocation
+import Stack.Snapshot
import Stack.Types.BuildPlan
import Stack.Types.Compiler
import Stack.Types.Config
import Stack.Types.Docker
-import Stack.Types.Internal
import Stack.Types.Nix
+import Stack.Types.PackageName (PackageName)
+import Stack.Types.PackageIdentifier
import Stack.Types.PackageIndex (IndexType (ITHackageSecurity), HackageSecurity (..))
import Stack.Types.Resolver
-import Stack.Types.StackT
-import Stack.Types.StringError
+import Stack.Types.Runner
import Stack.Types.Urls
import Stack.Types.Version
import System.Environment
-import System.IO
import System.PosixCompat.Files (fileOwner, getFileStatus)
import System.PosixCompat.User (getEffectiveUserID)
import System.Process.Read
-import System.Process.Run
-- | If deprecated path exists, use it and print a warning.
-- Otherwise, return the new path.
@@ -134,7 +116,7 @@ tryDeprecatedPath mWarningDesc exists new old = do
case mWarningDesc of
Nothing -> return ()
Just desc ->
- $logWarn $ T.concat
+ logWarn $ T.concat
[ "Warning: Location of ", desc, " at '"
, T.pack (toFilePath old)
, "' is deprecated; rename it to '"
@@ -161,9 +143,7 @@ getImplicitGlobalProjectDir config =
-- | This is slightly more expensive than @'asks' ('bcStackYaml' '.' 'getBuildConfig')@
-- and should only be used when no 'BuildConfig' is at hand.
-getStackYaml
- :: (StackMiniM env m, HasConfig env)
- => m (Path Abs File)
+getStackYaml :: HasConfig env => RIO env (Path Abs File)
getStackYaml = do
config <- view configL
case configMaybeProject config of
@@ -171,28 +151,27 @@ getStackYaml = do
Nothing -> liftM (</> stackDotYaml) (getImplicitGlobalProjectDir config)
-- | Download the 'Snapshots' value from stackage.org.
-getSnapshots
- :: (StackMiniM env m, HasConfig env)
- => m Snapshots
+getSnapshots :: HasConfig env => RIO env Snapshots
getSnapshots = do
latestUrlText <- askLatestSnapshotUrl
latestUrl <- parseUrlThrow (T.unpack latestUrlText)
- $logDebug $ "Downloading snapshot versions file from " <> latestUrlText
+ logDebug $ "Downloading snapshot versions file from " <> latestUrlText
result <- httpJSON latestUrl
- $logDebug $ "Done downloading and parsing snapshot versions file"
+ logDebug "Done downloading and parsing snapshot versions file"
return $ getResponseBody result
-- | Turn an 'AbstractResolver' into a 'Resolver'.
makeConcreteResolver
- :: (StackMiniM env m, HasConfig env)
- => AbstractResolver
- -> m Resolver
-makeConcreteResolver (ARResolver r) = return r
-makeConcreteResolver ar = do
+ :: HasConfig env
+ => Maybe (Path Abs Dir) -- ^ root of project for resolving custom relative paths
+ -> AbstractResolver
+ -> RIO env Resolver
+makeConcreteResolver root (ARResolver r) = parseCustomLocation root r
+makeConcreteResolver root ar = do
snapshots <- getSnapshots
r <-
case ar of
- ARResolver r -> assert False $ return r
+ ARResolver r -> assert False $ makeConcreteResolver root $ ARResolver r
ARGlobal -> do
config <- view configL
implicitGlobalDir <- getImplicitGlobalProjectDir config
@@ -203,18 +182,18 @@ makeConcreteResolver ar = do
ARLatestNightly -> return $ ResolverSnapshot $ Nightly $ snapshotsNightly snapshots
ARLatestLTSMajor x ->
case IntMap.lookup x $ snapshotsLts snapshots of
- Nothing -> errorString $ "No LTS release found with major version " ++ show x
+ Nothing -> throwString $ "No LTS release found with major version " ++ show x
Just y -> return $ ResolverSnapshot $ LTS x y
ARLatestLTS
- | IntMap.null $ snapshotsLts snapshots -> errorString "No LTS releases found"
+ | IntMap.null $ snapshotsLts snapshots -> throwString "No LTS releases found"
| otherwise ->
let (x, y) = IntMap.findMax $ snapshotsLts snapshots
in return $ ResolverSnapshot $ LTS x y
- $logInfo $ "Selected resolver: " <> resolverName r
+ logInfo $ "Selected resolver: " <> resolverRawName r
return r
-- | Get the latest snapshot resolver available.
-getLatestResolver :: (StackMiniM env m, HasConfig env) => m Resolver
+getLatestResolver :: HasConfig env => RIO env (ResolverWith a)
getLatestResolver = do
snapshots <- getSnapshots
let mlts = do
@@ -226,14 +205,14 @@ getLatestResolver = do
-- | Create a 'Config' value when we're not using any local
-- configuration files (e.g., the script command)
configNoLocalConfig
- :: (MonadLogger m, MonadIO m, MonadCatch m)
+ :: (MonadLogger m, MonadUnliftIO m, MonadThrow m, MonadReader env m, HasRunner env)
=> Path Abs Dir -- ^ stack root
-> Maybe AbstractResolver
-> ConfigMonoid
-> m Config
-configNoLocalConfig _ Nothing _ = throwM NoResolverWhenUsingNoLocalConfig
+configNoLocalConfig _ Nothing _ = throwIO NoResolverWhenUsingNoLocalConfig
configNoLocalConfig stackRoot (Just resolver) configMonoid = do
- userConfigPath <- getFakeConfigPath stackRoot resolver
+ userConfigPath <- liftIO $ getFakeConfigPath stackRoot resolver
configFromConfigMonoid
stackRoot
userConfigPath
@@ -244,7 +223,7 @@ configNoLocalConfig stackRoot (Just resolver) configMonoid = do
-- Interprets ConfigMonoid options.
configFromConfigMonoid
- :: (MonadLogger m, MonadIO m, MonadCatch m)
+ :: (MonadLogger m, MonadUnliftIO m, MonadThrow m, MonadReader env m, HasRunner env)
=> Path Abs Dir -- ^ stack root, e.g. ~/.stack
-> Path Abs File -- ^ user config file path, e.g. ~/.stack/config.yaml
-> Bool -- ^ allow locals?
@@ -258,12 +237,12 @@ configFromConfigMonoid
-- If --stack-work is passed, prefer it. Otherwise, if STACK_WORK
-- is set, use that. If neither, use the default ".stack-work"
mstackWorkEnv <- liftIO $ lookupEnv stackWorkEnvVar
- configWorkDir0 <- maybe (return $(mkRelDir ".stack-work")) parseRelDir mstackWorkEnv
+ configWorkDir0 <- maybe (return $(mkRelDir ".stack-work")) (liftIO . parseRelDir) mstackWorkEnv
let configWorkDir = fromFirst configWorkDir0 configMonoidWorkDir
-- This code is to handle the deprecation of latest-snapshot-url
configUrls <- case (getFirst configMonoidLatestSnapshotUrl, getFirst (urlsMonoidLatestSnapshot configMonoidUrls)) of
(Just url, Nothing) -> do
- $logWarn "The latest-snapshot-url field is deprecated in favor of 'urls' configuration"
+ logWarn "The latest-snapshot-url field is deprecated in favor of 'urls' configuration"
return (urlsFromMonoid configMonoidUrls) { urlsLatestSnapshot = url }
_ -> return (urlsFromMonoid configMonoidUrls)
let configConnectionCount = fromFirst 8 configMonoidConnectionCount
@@ -293,14 +272,15 @@ configFromConfigMonoid
configGHCVariant0 = getFirst configMonoidGHCVariant
configGHCBuild = getFirst configMonoidGHCBuild
- configInstallGHC = fromFirst False configMonoidInstallGHC
+ configInstallGHC = fromFirst True configMonoidInstallGHC
configSkipGHCCheck = fromFirst False configMonoidSkipGHCCheck
configSkipMsys = fromFirst False configMonoidSkipMsys
configExtraIncludeDirs = configMonoidExtraIncludeDirs
configExtraLibDirs = configMonoidExtraLibDirs
configOverrideGccPath = getFirst configMonoidOverrideGccPath
-
+ configOverrideHpack = maybe HpackBundled HpackCommand $ getFirst configMonoidOverrideHpack
+
-- Only place in the codebase where platform is hard-coded. In theory
-- in the future, allow it to be configured.
(Platform defArch defOS) = buildPlatform
@@ -316,7 +296,7 @@ configFromConfigMonoid
configCompilerCheck = fromFirst MatchMinor configMonoidCompilerCheck
case arch of
- OtherArch unk -> $logWarn $ "Warning: Unknown value for architecture setting: " <> T.pack (show unk)
+ OtherArch unk -> logWarn $ "Warning: Unknown value for architecture setting: " <> T.pack (show unk)
_ -> return ()
configPlatformVariant <- liftIO $
@@ -366,8 +346,8 @@ configFromConfigMonoid
-- TODO: Either catch specific exceptions or add a
-- parseRelAsAbsDirMaybe utility and use it along with
-- resolveDirMaybe.
- `catchAll`
- const (throwM (NoSuchDirectory userPath))
+ `catchAny`
+ const (throwIO (NoSuchDirectory userPath))
configJobs <-
case getFirst configMonoidJobs of
@@ -377,7 +357,8 @@ configFromConfigMonoid
let configTemplateParams = configMonoidTemplateParameters
configScmInit = getFirst configMonoidScmInit
- configGhcOptions = configMonoidGhcOptions
+ configGhcOptionsByName = configMonoidGhcOptionsByName
+ configGhcOptionsByCat = configMonoidGhcOptionsByCat
configSetupInfoLocations = configMonoidSetupInfoLocations
configPvpBounds = fromFirst (PvpBounds PvpBoundsNone False) configMonoidPvpBounds
configModifyCodePage = fromFirst True configMonoidModifyCodePage
@@ -388,16 +369,19 @@ configFromConfigMonoid
configDefaultTemplate = getFirst configMonoidDefaultTemplate
configDumpLogs = fromFirst DumpWarningLogs configMonoidDumpLogs
configSaveHackageCreds = fromFirst True configMonoidSaveHackageCreds
+ configIgnoreRevisionMismatch = fromFirst False configMonoidIgnoreRevisionMismatch
configAllowDifferentUser <-
case getFirst configMonoidAllowDifferentUser of
Just True -> return True
_ -> getInContainer
- configPackageCaches <- liftIO $ newIORef Nothing
+ configPackageCache <- liftIO $ newIORef Nothing
let configMaybeProject = mproject
+ configRunner <- view runnerL
+
return Config {..}
-- | Get the default location of the local programs directory.
@@ -419,13 +403,13 @@ getDefaultLocalProgramsBase configStackRoot configPlatform override =
case Map.lookup "LOCALAPPDATA" $ unEnvOverride override of
Just t ->
case parseAbsDir $ T.unpack t of
- Nothing -> throwString ("Failed to parse LOCALAPPDATA environment variable (expected absolute directory): " ++ show t)
+ Nothing -> throwM $ stringException ("Failed to parse LOCALAPPDATA environment variable (expected absolute directory): " ++ show t)
Just lad -> return $ lad </> $(mkRelDir "Programs") </> $(mkRelDir stackProgName)
Nothing -> return defaultBase
_ -> return defaultBase
-- | An environment with a subset of BuildConfig used for setup.
-data MiniConfig = MiniConfig
+data MiniConfig = MiniConfig -- TODO do we really need a whole extra data type?
{ mcGHCVariant :: !GHCVariant
, mcConfig :: !Config
}
@@ -434,24 +418,32 @@ instance HasConfig MiniConfig where
instance HasPlatform MiniConfig
instance HasGHCVariant MiniConfig where
ghcVariantL = lens mcGHCVariant (\x y -> x { mcGHCVariant = y })
+instance HasRunner MiniConfig where
+ runnerL = configL.runnerL
+instance HasLogFunc MiniConfig where
+ logFuncL = configL.logFuncL
-- | Load the 'MiniConfig'.
loadMiniConfig :: Config -> MiniConfig
-loadMiniConfig config =
- let ghcVariant = fromMaybe GHCStandard (configGHCVariant0 config)
- in MiniConfig ghcVariant config
+loadMiniConfig config = MiniConfig
+ { mcGHCVariant = configGHCVariantDefault config
+ , mcConfig = config
+ }
+
+configGHCVariantDefault :: Config -> GHCVariant -- FIXME why not just use this as the HasGHCVariant instance for Config?
+configGHCVariantDefault = fromMaybe GHCStandard . configGHCVariant0
-- Load the configuration, using environment variables, and defaults as
-- necessary.
loadConfigMaybeProject
- :: StackM env m
+ :: HasRunner env
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe AbstractResolver
-- ^ Override resolver
-> LocalConfigStatus (Project, Path Abs File, ConfigMonoid)
-- ^ Project config to use, if any
- -> m (LoadConfig m)
+ -> RIO env LoadConfig
loadConfigMaybeProject configArgs mresolver mproject = do
(stackRoot, userOwnsStackRoot) <- determineStackRootAndOwnership configArgs
@@ -477,10 +469,10 @@ loadConfigMaybeProject configArgs mresolver mproject = do
config <-
case mproject of
- LCSNoConfig -> configNoLocalConfig stackRoot mresolver configArgs
+ LCSNoConfig _ -> configNoLocalConfig stackRoot mresolver configArgs
LCSProject project -> loadHelper $ Just project
LCSNoProject -> loadHelper Nothing
- unless (fromCabalVersion Meta.version `withinRange` configRequireStackVersion config)
+ unless (fromCabalVersion (mkVersion' Meta.version) `withinRange` configRequireStackVersion config)
(throwM (BadStackVersionException (configRequireStackVersion config)))
let mprojectRoot = fmap (\(_, fp, _) -> parent fp) mproject
@@ -492,48 +484,79 @@ loadConfigMaybeProject configArgs mresolver mproject = do
return LoadConfig
{ lcConfig = config
- , lcLoadBuildConfig = loadBuildConfig mproject config mresolver
+ , lcLoadBuildConfig = runRIO config . loadBuildConfig mproject mresolver
, lcProjectRoot =
case mprojectRoot of
LCSProject fp -> Just fp
LCSNoProject -> Nothing
- LCSNoConfig -> Nothing
+ LCSNoConfig _ -> Nothing
}
-- | Load the configuration, using current directory, environment variables,
-- and defaults as necessary. The passed @Maybe (Path Abs File)@ is an
-- override for the location of the project's stack.yaml.
-loadConfig :: StackM env m
+loadConfig :: HasRunner env
=> ConfigMonoid
-- ^ Config monoid from parsed command-line arguments
-> Maybe AbstractResolver
-- ^ Override resolver
-> StackYamlLoc (Path Abs File)
-- ^ Override stack.yaml
- -> m (LoadConfig m)
+ -> RIO env LoadConfig
loadConfig configArgs mresolver mstackYaml =
loadProjectConfig mstackYaml >>= loadConfigMaybeProject configArgs mresolver
-- | Load the build configuration, adding build-specific values to the config loaded by @loadConfig@.
-loadBuildConfig :: StackM env m
- => LocalConfigStatus (Project, Path Abs File, ConfigMonoid)
- -> Config
+loadBuildConfig :: LocalConfigStatus (Project, Path Abs File, ConfigMonoid)
-> Maybe AbstractResolver -- override resolver
- -> Maybe CompilerVersion -- override compiler
- -> m BuildConfig
-loadBuildConfig mproject config mresolver mcompiler = do
- env <- ask
+ -> Maybe (CompilerVersion 'CVWanted) -- override compiler
+ -> RIO Config BuildConfig
+loadBuildConfig mproject maresolver mcompiler = do
+ config <- ask
+
+ -- If provided, turn the AbstractResolver from the command line
+ -- into a Resolver that can be used below.
+
+ -- The maresolver and mcompiler are provided on the command
+ -- line. In order to properly deal with an AbstractResolver, we
+ -- need a base directory (to deal with custom snapshot relative
+ -- paths). We consider the current working directory to be the
+ -- correct base. Let's calculate the mresolver first.
+ mresolver <- forM maresolver $ \aresolver -> do
+ -- For display purposes only
+ let name =
+ case aresolver of
+ ARResolver resolver -> resolverRawName resolver
+ ARLatestNightly -> "nightly"
+ ARLatestLTS -> "lts"
+ ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
+ ARGlobal -> "global"
+ logDebug ("Using resolver: " <> name <> " specified on command line")
+
+ -- In order to resolve custom snapshots, we need a base
+ -- directory to deal with relative paths. For the case of
+ -- LCSNoConfig, we use the parent directory provided. This is
+ -- because, when running the script interpreter, we assume the
+ -- resolver is in fact coming from the file contents itself and
+ -- not the command line. For the project and non project cases,
+ -- however, we use the current directory.
+ base <-
+ case mproject of
+ LCSNoConfig parentDir -> return parentDir
+ LCSProject _ -> resolveDir' "."
+ LCSNoProject -> resolveDir' "."
+ makeConcreteResolver (Just base) aresolver
(project', stackYamlFP) <- case mproject of
LCSProject (project, fp, _) -> do
- forM_ (projectUserMsg project) ($logWarn . T.pack)
+ forM_ (projectUserMsg project) (logWarn . T.pack)
return (project, fp)
- LCSNoConfig -> do
- p <- getEmptyProject
+ LCSNoConfig _ -> do
+ p <- assert (isJust mresolver) (getEmptyProject mresolver)
return (p, configUserConfigPath config)
LCSNoProject -> do
- $logDebug "Run from outside a project, using implicit global project config"
+ logDebug "Run from outside a project, using implicit global project config"
destDir <- getImplicitGlobalProjectDir config
let dest :: Path Abs File
dest = destDir </> stackDotYaml
@@ -544,26 +567,17 @@ loadBuildConfig mproject config mresolver mcompiler = do
if exists
then do
ProjectAndConfigMonoid project _ <- loadConfigYaml (parseProjectAndConfigMonoid destDir) dest
- when (view terminalL env) $
- case mresolver of
+ when (view terminalL config) $
+ case maresolver of
Nothing ->
- $logDebug ("Using resolver: " <> resolverName (projectResolver project) <>
+ logDebug ("Using resolver: " <> resolverRawName (projectResolver project) <>
" from implicit global project's config file: " <> T.pack dest')
- Just aresolver -> do
- let name =
- case aresolver of
- ARResolver resolver -> resolverName resolver
- ARLatestNightly -> "nightly"
- ARLatestLTS -> "lts"
- ARLatestLTSMajor x -> T.pack $ "lts-" ++ show x
- ARGlobal -> "global"
- $logDebug ("Using resolver: " <> name <>
- " specified on command line")
+ Just _ -> return ()
return (project, dest)
else do
- $logInfo ("Writing implicit global project config file to: " <> T.pack dest')
- $logInfo "Note: You can change the snapshot via the resolver field there."
- p <- getEmptyProject
+ logInfo ("Writing implicit global project config file to: " <> T.pack dest')
+ logInfo "Note: You can change the snapshot via the resolver field there."
+ p <- getEmptyProject mresolver
liftIO $ do
S.writeFile dest' $ S.concat
[ "# This is the implicit global project's config file, which is only used when\n"
@@ -579,31 +593,22 @@ loadBuildConfig mproject config mresolver mcompiler = do
[ "This is the implicit global project, which is used only when 'stack' is run\n"
, "outside of a real project.\n" ]
return (p, dest)
- resolver <-
- case mresolver of
- Nothing -> return $ projectResolver project'
- Just aresolver ->
- runReaderT (makeConcreteResolver aresolver) miniConfig
let project = project'
- { projectResolver = resolver
- , projectCompiler = mcompiler <|> projectCompiler project'
+ { projectCompiler = mcompiler <|> projectCompiler project'
+ , projectResolver = fromMaybe (projectResolver project') mresolver
}
- (mbp0, loadedResolver) <- flip runReaderT miniConfig $
- loadResolver (Just stackYamlFP) (projectResolver project)
- let mbp = case projectCompiler project of
- Just compiler -> mbp0 { mbpCompilerVersion = compiler }
- Nothing -> mbp0
+ sd0 <- runRIO config $ loadResolver $ projectResolver project
+ let sd = maybe id setCompilerVersion (projectCompiler project) sd0
extraPackageDBs <- mapM resolveDir' (projectExtraPackageDBs project)
return BuildConfig
{ bcConfig = config
- , bcResolver = loadedResolver
- , bcWantedMiniBuildPlan = mbp
- , bcGHCVariant = view ghcVariantL miniConfig
- , bcPackageEntries = projectPackages project
- , bcExtraDeps = projectExtraDeps project
+ , bcSnapshotDef = sd
+ , bcGHCVariant = configGHCVariantDefault config
+ , bcPackages = projectPackages project
+ , bcDependencies = projectDependencies project
, bcExtraPackageDBs = extraPackageDBs
, bcStackYaml = stackYamlFP
, bcFlags = projectFlags project
@@ -611,25 +616,23 @@ loadBuildConfig mproject config mresolver mcompiler = do
case mproject of
LCSNoProject -> True
LCSProject _ -> False
- LCSNoConfig -> False
+ LCSNoConfig _ -> False
}
where
- miniConfig = loadMiniConfig config
-
- getEmptyProject = do
+ getEmptyProject :: Maybe Resolver -> RIO Config Project
+ getEmptyProject mresolver = do
r <- case mresolver of
- Just aresolver -> do
- r' <- runReaderT (makeConcreteResolver aresolver) miniConfig
- $logInfo ("Using resolver: " <> resolverName r' <> " specified on command line")
- return r'
+ Just resolver -> do
+ logInfo ("Using resolver: " <> resolverRawName resolver <> " specified on command line")
+ return resolver
Nothing -> do
- r'' <- runReaderT getLatestResolver miniConfig
- $logInfo ("Using latest snapshot resolver: " <> resolverName r'')
+ r'' <- getLatestResolver
+ logInfo ("Using latest snapshot resolver: " <> resolverRawName r'')
return r''
return Project
{ projectUserMsg = Nothing
- , projectPackages = mempty
- , projectExtraDeps = mempty
+ , projectPackages = []
+ , projectDependencies = []
, projectFlags = mempty
, projectResolver = r
, projectCompiler = Nothing
@@ -638,199 +641,64 @@ loadBuildConfig mproject config mresolver mcompiler = do
-- | Get packages from EnvConfig, downloading and cloning as necessary.
-- If the packages have already been downloaded, this uses a cached value.
-getLocalPackages
- :: (StackMiniM env m, HasEnvConfig env)
- => m (Map.Map (Path Abs Dir) TreatLikeExtraDep)
+getLocalPackages :: forall env. HasEnvConfig env => RIO env LocalPackages
getLocalPackages = do
cacheRef <- view $ envConfigL.to envConfigPackagesRef
mcached <- liftIO $ readIORef cacheRef
case mcached of
Just cached -> return cached
- Nothing -> do
- menv <- getMinimalEnvOverride
+ Nothing -> withCabalLoader $ \loadFromIndex -> do
root <- view projectRootL
- entries <- view $ buildConfigL.to bcPackageEntries
- liftM (Map.fromList . concat) $ mapM
- (resolvePackageEntry menv root)
- entries
-
--- | Resolve a PackageEntry into a list of paths, downloading and cloning as
--- necessary.
-resolvePackageEntry
- :: (StackMiniM env m, HasConfig env)
- => EnvOverride
- -> Path Abs Dir -- ^ project root
- -> PackageEntry
- -> m [(Path Abs Dir, TreatLikeExtraDep)]
-resolvePackageEntry menv projRoot pe = do
- entryRoot <- resolvePackageLocation menv projRoot (peLocation pe)
- paths <-
- case peSubdirs pe of
- [] -> return [entryRoot]
- subs -> mapM (resolveDir entryRoot) subs
- extraDep <-
- case peExtraDepMaybe pe of
- Just e -> return e
- Nothing ->
- case peLocation pe of
- PLFilePath _ ->
- -- we don't give a warning on missing explicit
- -- value here, user intent is almost always
- -- the default for a local directory
- return False
- PLRemote url _ -> do
- $logWarn $ mconcat
- [ "No extra-dep setting found for package at URL:\n\n"
- , url
- , "\n\n"
- , "This is usually a mistake, external packages "
- , "should typically\nbe treated as extra-deps to avoid "
- , "spurious test case failures."
- ]
- return False
- return $ map (, extraDep) paths
-
--- | Resolve a PackageLocation into a path, downloading and cloning as
--- necessary.
-resolvePackageLocation
- :: (StackMiniM env m, HasConfig env)
- => EnvOverride
- -> Path Abs Dir -- ^ project root
- -> PackageLocation
- -> m (Path Abs Dir)
-resolvePackageLocation _ projRoot (PLFilePath fp) = resolveDir projRoot fp
-resolvePackageLocation menv projRoot (PLRemote url remotePackageType) = do
- workDir <- view workDirL
- let nameBeforeHashing = case remotePackageType of
- RPTHttp{} -> url
- RPTGit commit -> T.unwords [url, commit]
- RPTHg commit -> T.unwords [url, commit, "hg"]
- -- TODO: dedupe with code for snapshot hash?
- name = T.unpack $ decodeUtf8 $ S.take 12 $ B64URL.encode $ Mem.convert $ hashWith SHA256 $ encodeUtf8 nameBeforeHashing
- root = projRoot </> workDir </> $(mkRelDir "downloaded")
- fileExtension' = case remotePackageType of
- RPTHttp -> ".http-archive"
- _ -> ".unused"
-
- fileRel <- parseRelFile $ name ++ fileExtension'
- dirRel <- parseRelDir name
- dirRelTmp <- parseRelDir $ name ++ ".tmp"
- let file = root </> fileRel
- dir = root </> dirRel
-
- exists <- doesDirExist dir
- unless exists $ do
- ignoringAbsence (removeDirRecur dir)
-
- let cloneAndExtract commandName cloneArgs resetCommand commit = do
- ensureDir root
- callProcessInheritStderrStdout Cmd
- { cmdDirectoryToRunIn = Just root
- , cmdCommandToRun = commandName
- , cmdEnvOverride = menv
- , cmdCommandLineArguments =
- "clone" :
- cloneArgs ++
- [ T.unpack url
- , toFilePathNoTrailingSep dir
- ]
- }
- created <- doesDirExist dir
- unless created $ throwM $ FailedToCloneRepo commandName
- readProcessNull (Just dir) menv commandName
- (resetCommand ++ [T.unpack commit, "--"])
- `catch` \case
- ex@ProcessFailed{} -> do
- $logInfo $ "Please ensure that commit " <> commit <> " exists within " <> url
- throwM ex
- ex -> throwM ex
-
- case remotePackageType of
- RPTHttp -> do
- let dirTmp = root </> dirRelTmp
- ignoringAbsence (removeDirRecur dirTmp)
-
- let fp = toFilePath file
- req <- parseUrlThrow $ T.unpack url
- _ <- download req file
-
- let tryTar = do
- $logDebug $ "Trying to untar " <> T.pack fp
- liftIO $ withBinaryFile fp ReadMode $ \h -> do
- lbs <- L.hGetContents h
- let entries = Tar.read $ GZip.decompress lbs
- Tar.unpack (toFilePath dirTmp) entries
- tryZip = do
- $logDebug $ "Trying to unzip " <> T.pack fp
- archive <- fmap Zip.toArchive $ liftIO $ L.readFile fp
- liftIO $ Zip.extractFilesFromArchive [Zip.OptDestination
- (toFilePath dirTmp)] archive
- err = throwM $ UnableToExtractArchive url file
-
- catchAllLog goodpath handler =
- catchAll goodpath $ \e -> do
- $logDebug $ "Got exception: " <> T.pack (show e)
- handler
-
- tryTar `catchAllLog` tryZip `catchAllLog` err
- renameDir dirTmp dir
-
- -- Passes in --git-dir to git and --repository to hg, in order
- -- to avoid the update commands being applied to the user's
- -- repo. See https://github.com/commercialhaskell/stack/issues/2748
- RPTGit commit -> cloneAndExtract "git" ["--recursive"] ["--git-dir=.git", "reset", "--hard"] commit
- RPTHg commit -> cloneAndExtract "hg" [] ["--repository", ".", "update", "-C"] commit
-
- case remotePackageType of
- RPTHttp -> do
- x <- listDir dir
- case x of
- ([dir'], []) -> return dir'
- (dirs, files) -> do
- ignoringAbsence (removeFile file)
- ignoringAbsence (removeDirRecur dir)
- throwM $ UnexpectedArchiveContents dirs files
- _ -> return dir
-
--- | Remove path from package entry. If the package entry contains subdirs, then it removes
--- the subdir. If the package entry points to the path to remove, this function returns
--- Nothing. If the package entry doesn't mention the path to remove, it is returned unchanged
-removePathFromPackageEntry
- :: (StackMiniM env m, HasConfig env)
- => EnvOverride
- -> Path Abs Dir -- ^ project root
- -> Path Abs Dir -- ^ path to remove
- -> PackageEntry
- -> m (Maybe PackageEntry)
- -- ^ Nothing if the whole package entry should be removed, otherwise
- -- it returns the updated PackageEntry
-removePathFromPackageEntry menv projectRoot pathToRemove packageEntry = do
- locationPath <- resolvePackageLocation menv projectRoot (peLocation packageEntry)
- case peSubdirs packageEntry of
- [] -> if locationPath == pathToRemove then return Nothing else return (Just packageEntry)
- subdirPaths -> do
- let shouldKeepSubdir path = do
- resolvedPath <- resolveDir locationPath path
- return (pathToRemove /= resolvedPath)
- filteredSubdirs <- filterM shouldKeepSubdir subdirPaths
- if null filteredSubdirs then return Nothing else return (Just packageEntry {peSubdirs = filteredSubdirs})
-
+ bc <- view buildConfigL
+
+ packages <- do
+ let withName lpv = (lpvName lpv, lpv)
+ map withName . concat <$> mapM (parseMultiCabalFiles root True) (bcPackages bc)
+
+ let wrapGPD (gpd, loc) =
+ let PackageIdentifier name _version =
+ fromCabalPackageIdentifier
+ $ C.package
+ $ C.packageDescription gpd
+ in (name, (gpd, loc))
+ deps <- (map wrapGPD . concat)
+ <$> mapM (parseMultiCabalFilesIndex loadFromIndex root) (bcDependencies bc)
+
+ checkDuplicateNames $
+ map (second (PLOther . lpvLoc)) packages ++
+ map (second snd) deps
+
+ return LocalPackages
+ { lpProject = Map.fromList packages
+ , lpDependencies = Map.fromList deps
+ }
+
+-- | Check if there are any duplicate package names and, if so, throw an
+-- exception.
+checkDuplicateNames :: MonadThrow m => [(PackageName, PackageLocationIndex FilePath)] -> m ()
+checkDuplicateNames locals =
+ case filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map (second return) locals of
+ [] -> return ()
+ x -> throwM $ DuplicateLocalPackageNames x
+ where
+ hasMultiples (_, _:_:_) = True
+ hasMultiples _ = False
-- | Get the stack root, e.g. @~/.stack@, and determine whether the user owns it.
--
-- On Windows, the second value is always 'True'.
determineStackRootAndOwnership
- :: (MonadIO m, MonadCatch m)
+ :: (MonadIO m)
=> ConfigMonoid
-- ^ Parsed command-line arguments
-> m (Path Abs Dir, Bool)
-determineStackRootAndOwnership clArgs = do
+determineStackRootAndOwnership clArgs = liftIO $ do
stackRoot <- do
case getFirst (configMonoidStackRoot clArgs) of
Just x -> return x
Nothing -> do
- mstackRoot <- liftIO $ lookupEnv stackRootEnvVar
+ mstackRoot <- lookupEnv stackRootEnvVar
case mstackRoot of
Nothing -> getAppUserDataDir stackProgName
Just x -> case parseAbsDir x of
@@ -841,12 +709,12 @@ determineStackRootAndOwnership clArgs = do
mdirAndOwnership <- findInParents getDirAndOwnership stackRoot
case mdirAndOwnership of
Just x -> return x
- Nothing -> throwM (BadStackRoot stackRoot)
+ Nothing -> throwIO (BadStackRoot stackRoot)
when (existingStackRootOrParentDir /= stackRoot) $
if userOwnsIt
- then liftIO $ ensureDir stackRoot
- else throwM $
+ then ensureDir stackRoot
+ else throwIO $
Won'tCreateStackRootInDirectoryOwnedByDifferentUser
stackRoot
existingStackRootOrParentDir
@@ -860,22 +728,22 @@ determineStackRootAndOwnership clArgs = do
-- If @dir@ doesn't exist, its parent directory is checked instead.
-- If the parent directory doesn't exist either, @'NoSuchDirectory' ('parent' dir)@
-- is thrown.
-checkOwnership :: (MonadIO m, MonadCatch m) => Path Abs Dir -> m ()
+checkOwnership :: (MonadIO m) => Path Abs Dir -> m ()
checkOwnership dir = do
mdirAndOwnership <- firstJustM getDirAndOwnership [dir, parent dir]
case mdirAndOwnership of
Just (_, True) -> return ()
- Just (dir', False) -> throwM (UserDoesn'tOwnDirectory dir')
+ Just (dir', False) -> throwIO (UserDoesn'tOwnDirectory dir')
Nothing ->
- (throwM . NoSuchDirectory) $ (toFilePathNoTrailingSep . parent) dir
+ (throwIO . NoSuchDirectory) $ (toFilePathNoTrailingSep . parent) dir
-- | @'getDirAndOwnership' dir@ returns @'Just' (dir, 'True')@ when @dir@
-- exists and the current user owns it in the sense of 'isOwnedByUser'.
getDirAndOwnership
- :: (MonadIO m, MonadCatch m)
+ :: (MonadIO m)
=> Path Abs Dir
-> m (Maybe (Path Abs Dir, Bool))
-getDirAndOwnership dir = forgivingAbsence $ do
+getDirAndOwnership dir = liftIO $ forgivingAbsence $ do
ownership <- isOwnedByUser dir
return (dir, ownership)
@@ -962,7 +830,7 @@ getProjectConfig SYLDefault = do
env <- liftIO getEnvironment
case lookup "STACK_YAML" env of
Just fp -> do
- $logInfo "Getting project config file from STACK_YAML environment"
+ logInfo "Getting project config file from STACK_YAML environment"
liftM LCSProject $ resolveFile' fp
Nothing -> do
currDir <- getCurrentDir
@@ -971,17 +839,18 @@ getProjectConfig SYLDefault = do
getStackDotYaml dir = do
let fp = dir </> stackDotYaml
fp' = toFilePath fp
- $logDebug $ "Checking for project config at: " <> T.pack fp'
+ logDebug $ "Checking for project config at: " <> T.pack fp'
exists <- doesFileExist fp
if exists
then return $ Just fp
else return Nothing
-getProjectConfig SYLNoConfig = return LCSNoConfig
+getProjectConfig (SYLNoConfig parentDir) = return (LCSNoConfig parentDir)
data LocalConfigStatus a
= LCSNoProject
| LCSProject a
- | LCSNoConfig
+ | LCSNoConfig !(Path Abs Dir)
+ -- ^ parent directory for making the resolver concrete
deriving (Show,Functor,Foldable,Traversable)
-- | Find the project config file location, respecting environment variables
@@ -996,15 +865,15 @@ loadProjectConfig mstackYaml = do
case mfp of
LCSProject fp -> do
currDir <- getCurrentDir
- $logDebug $ "Loading project config file " <>
- T.pack (maybe (toFilePath fp) toFilePath (stripDir currDir fp))
+ logDebug $ "Loading project config file " <>
+ T.pack (maybe (toFilePath fp) toFilePath (stripProperPrefix currDir fp))
LCSProject <$> load fp
LCSNoProject -> do
- $logDebug $ "No project config file found, using defaults."
+ logDebug "No project config file found, using defaults."
return LCSNoProject
- LCSNoConfig -> do
- $logDebug "Ignoring config files"
- return LCSNoConfig
+ LCSNoConfig mparentDir -> do
+ logDebug "Ignoring config files"
+ return (LCSNoConfig mparentDir)
where
load fp = do
ProjectAndConfigMonoid project config <- loadConfigYaml (parseProjectAndConfigMonoid (parent fp)) fp
@@ -1055,9 +924,15 @@ getFakeConfigPath
getFakeConfigPath stackRoot ar = do
asString <-
case ar of
- ARResolver r -> return $ T.unpack $ resolverName r
+ ARResolver r -> return $ T.unpack $ resolverRawName r
_ -> throwM $ InvalidResolverForNoLocalConfig $ show ar
- asDir <- parseRelDir asString
+ -- This takeWhile is an ugly hack. We don't actually need this
+ -- path for anything useful. But if we take the raw value for
+ -- a custom snapshot, it will be unparseable in a PATH.
+ -- Therefore, we add in this silly "strip up to :".
+ -- Better would be to defer figuring out this value until
+ -- after we have a fully loaded snapshot with a hash.
+ asDir <- parseRelDir $ takeWhile (/= ':') asString
let full = stackRoot </> $(mkRelDir "script") </> asDir </> $(mkRelFile "config.yaml")
ensureDir (parent full)
return full
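checkDuplicateNames, added earlier in this file, rejects configurations in which two project packages or dependencies share a package name. The detection is a plain group-by-name; here is a trimmed-down sketch with Strings standing in for PackageName and PackageLocationIndex:

    import qualified Data.Map.Strict as Map

    -- Group locations by package name and keep only the names that occur
    -- more than once.
    duplicates :: [(String, String)] -> [(String, [String])]
    duplicates locals =
        filter hasMultiples $ Map.toList $ Map.fromListWith (++) $ map (fmap (:[])) locals
      where
        hasMultiples (_, _:_:_) = True
        hasMultiples _          = False

For example, duplicates [("foo","./a"),("bar","./b"),("foo","./c")] evaluates to [("foo",["./c","./a"])], the same shape DuplicateLocalPackageNames reports.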
diff --git a/src/Stack/Config/Build.hs b/src/Stack/Config/Build.hs
index e0c80e2..45fbbca 100644
--- a/src/Stack/Config/Build.hs
+++ b/src/Stack/Config/Build.hs
@@ -1,10 +1,10 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
-- | Build configuration
module Stack.Config.Build where
-import Data.Maybe
-import Data.Monoid.Extra
+import Stack.Prelude
import Stack.Types.Config
-- | Interprets BuildOptsMonoid options.
@@ -43,6 +43,9 @@ buildOptsFromMonoid BuildOptsMonoid{..} = BuildOpts
, boptsInstallExes = fromFirst
(boptsInstallExes defaultBuildOpts)
buildMonoidInstallExes
+ , boptsInstallCompilerTool = fromFirst
+ (boptsInstallCompilerTool defaultBuildOpts)
+ buildMonoidInstallCompilerTool
, boptsPreFetch = fromFirst
(boptsPreFetch defaultBuildOpts)
buildMonoidPreFetch
@@ -67,6 +70,7 @@ buildOptsFromMonoid BuildOptsMonoid{..} = BuildOpts
, boptsSplitObjs = fromFirst
(boptsSplitObjs defaultBuildOpts)
buildMonoidSplitObjs
+ , boptsSkipComponents = buildMonoidSkipComponents
}
where
-- These options are not directly used in bopts, instead they
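Each field in buildOptsFromMonoid is resolved with the same fromFirst idiom: the monoid side collects settings as First values (command line before config files), and the concrete option falls back to the built-in default when nothing was set. A sketch, assuming fromFirst behaves like the helper formerly exported by Data.Monoid.Extra:

    import Data.Maybe (fromMaybe)
    import Data.Monoid (First(..))

    -- First's (<>) keeps the leftmost Just, so combining command-line
    -- settings before config-file settings gives the command line precedence.
    fromFirst :: a -> First a -> a
    fromFirst def = fromMaybe def . getFirst

    example :: Bool
    example = fromFirst False (First (Just True) <> First Nothing)   -- True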
diff --git a/src/Stack/Config/Docker.hs b/src/Stack/Config/Docker.hs
index b8e37a9..6778ff6 100644
--- a/src/Stack/Config/Docker.hs
+++ b/src/Stack/Config/Docker.hs
@@ -1,18 +1,14 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP, DeriveDataTypeable, RecordWildCards, TemplateHaskell #-}
-- | Docker configuration
module Stack.Config.Docker where
-import Control.Exception.Lifted
-import Control.Monad.Catch (MonadThrow)
+import Stack.Prelude
import Data.List (find)
-import Data.Maybe
-import Data.Monoid.Extra
import qualified Data.Text as T
-import Data.Typeable (Typeable)
import Distribution.Version (simplifyVersionRange)
import Path
-import Stack.Types.BuildPlan
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Docker
@@ -33,13 +29,13 @@ dockerOptsFromMonoid mproject stackRoot maresolver DockerOptsMonoid{..} = do
let mresolver =
case maresolver of
Just (ARResolver resolver) ->
- Just resolver
+ Just (void resolver)
Just aresolver ->
- throw
+ impureThrow
(ResolverNotSupportedException $
show aresolver)
Nothing ->
- fmap projectResolver mproject
+ fmap (void . projectResolver) mproject
defaultTag =
case mresolver of
Nothing -> ""
@@ -48,7 +44,7 @@ dockerOptsFromMonoid mproject stackRoot maresolver DockerOptsMonoid{..} = do
ResolverSnapshot n@(LTS _ _) ->
":" ++ T.unpack (renderSnapName n)
_ ->
- throw
+ impureThrow
(ResolverNotSupportedException $
show resolver)
in case getFirst dockerMonoidRepoOrImage of
@@ -78,6 +74,7 @@ dockerOptsFromMonoid mproject stackRoot maresolver DockerOptsMonoid{..} = do
simplifyVersionRange (getIntersectingVersionRange dockerMonoidRequireDockerVersion)
dockerDatabasePath = fromFirst (stackRoot </> $(mkRelFile "docker.db")) dockerMonoidDatabasePath
dockerStackExe = getFirst dockerMonoidStackExe
+
return DockerOpts{..}
where emptyToNothing Nothing = Nothing
emptyToNothing (Just s) | null s = Nothing
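dockerOptsFromMonoid only derives a default image tag when the resolver is an LTS snapshot; nightlies and custom resolvers raise ResolverNotSupportedException (now via impureThrow). A reduced sketch of that rule, using Either for the error case and a stand-in SnapName type:

    data SnapName = LTS Int Int | Nightly String
      deriving Show

    renderSnapName :: SnapName -> String
    renderSnapName (LTS x y)   = "lts-" ++ show x ++ "." ++ show y
    renderSnapName (Nightly d) = "nightly-" ++ d

    -- Only LTS resolvers map to a docker image tag; anything else is an error.
    defaultTag :: Maybe SnapName -> Either String String
    defaultTag Nothing            = Right ""
    defaultTag (Just n@(LTS _ _)) = Right (':' : renderSnapName n)
    defaultTag (Just other)       = Left ("Resolver not supported: " ++ show other)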
diff --git a/src/Stack/Config/Nix.hs b/src/Stack/Config/Nix.hs
index 1025bb6..164aafc 100644
--- a/src/Stack/Config/Nix.hs
+++ b/src/Stack/Config/Nix.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards, DeriveDataTypeable, OverloadedStrings #-}
-- | Nix configuration
@@ -7,23 +8,16 @@ module Stack.Config.Nix
,StackNixException(..)
) where
-import Control.Monad (when)
-import Data.Maybe
-import Data.Monoid.Extra
+import Stack.Prelude
import qualified Data.Text as T
-import Data.Typeable
import Distribution.System (OS (..))
import Stack.Types.Version
import Stack.Types.Nix
import Stack.Types.Compiler
-import Stack.Types.StringError
-import Control.Exception.Lifted
-import Control.Monad.Catch (throwM,MonadCatch)
-import Prelude
-- | Interprets NixOptsMonoid options.
nixOptsFromMonoid
- :: (Monad m, MonadCatch m)
+ :: MonadUnliftIO m
=> NixOptsMonoid
-> OS
-> m NixOpts
@@ -39,15 +33,16 @@ nixOptsFromMonoid NixOptsMonoid{..} os = do
++ prefixAll (T.pack "-I") (fromFirst [] nixMonoidPath)
nixAddGCRoots = fromFirst False nixMonoidAddGCRoots
when (not (null nixPackages) && isJust nixInitFile) $
- throwM NixCannotUseShellFileAndPackagesException
+ throwIO NixCannotUseShellFileAndPackagesException
return NixOpts{..}
where prefixAll p (x:xs) = p : x : prefixAll p xs
prefixAll _ _ = []
-nixCompiler :: CompilerVersion -> T.Text
+nixCompiler :: CompilerVersion a -> Either StringException T.Text
nixCompiler compilerVersion =
let -- These are the latest minor versions for each respective major version available in nixpkgs
- fixMinor "8.0" = "8.0.1"
+ fixMinor "8.2" = "8.2.1"
+ fixMinor "8.0" = "8.0.2"
fixMinor "7.10" = "7.10.3"
fixMinor "7.8" = "7.8.4"
fixMinor "7.6" = "7.6.3"
@@ -60,8 +55,8 @@ nixCompiler compilerVersion =
(T.filter (/= '.')
(fixMinor (versionText v)))
in case compilerVersion of
- GhcVersion v -> nixCompilerFromVersion v
- _ -> errorString "Only GHC is supported by stack --nix"
+ GhcVersion v -> Right $ nixCompilerFromVersion v
+ _ -> Left $ stringException "Only GHC is supported by stack --nix"
-- Exceptions thrown specifically by Stack.Nix
data StackNixException
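nixCompiler now returns an Either instead of calling errorString, and the pinned minor versions change (8.0 moves to 8.0.2 and an 8.2.1 pin is added). A standalone sketch of the version-to-attribute mapping; the "haskell.compiler.ghc" prefix is an assumption based on the surrounding code, which the hunk truncates:

    {-# LANGUAGE OverloadedStrings #-}
    import qualified Data.Text as T

    -- Pin a known minor release per major GHC line, then strip the dots to
    -- build a nixpkgs attribute such as "haskell.compiler.ghc802".
    nixGhcAttr :: T.Text -> T.Text
    nixGhcAttr ver =
        T.concat ["haskell.compiler.ghc", T.filter (/= '.') (fixMinor ver)]
      where
        fixMinor "8.2"  = "8.2.1"
        fixMinor "8.0"  = "8.0.2"
        fixMinor "7.10" = "7.10.3"
        fixMinor "7.8"  = "7.8.4"
        fixMinor "7.6"  = "7.6.3"
        fixMinor v      = v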
diff --git a/src/Stack/Config/Urls.hs b/src/Stack/Config/Urls.hs
index 6bce9c1..c7b7ec3 100644
--- a/src/Stack/Config/Urls.hs
+++ b/src/Stack/Config/Urls.hs
@@ -1,9 +1,10 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Config.Urls (urlsFromMonoid) where
import Stack.Types.Urls
-import Data.Monoid.Extra
+import Stack.Prelude
urlsFromMonoid :: UrlsMonoid -> Urls
urlsFromMonoid monoid =
diff --git a/src/Stack/ConfigCmd.hs b/src/Stack/ConfigCmd.hs
index c4c8094..25cb48e 100644
--- a/src/Stack/ConfigCmd.hs
+++ b/src/Stack/ConfigCmd.hs
@@ -1,8 +1,8 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
-{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE GADTs #-}
-- | Make changes to project or global configuration.
@@ -13,28 +13,20 @@ module Stack.ConfigCmd
,cfgCmdSetName
,cfgCmdName) where
-import Control.Applicative
-import Control.Monad
-import Control.Monad.Catch (throwM)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
import qualified Data.ByteString as S
import qualified Data.HashMap.Strict as HMap
-import Data.Monoid
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Yaml as Yaml
import qualified Options.Applicative as OA
import qualified Options.Applicative.Types as OA
import Path
import Path.IO
-import Prelude -- Silence redundant import warnings
-import Stack.BuildPlan
import Stack.Config (makeConcreteResolver, getProjectConfig, getImplicitGlobalProjectDir, LocalConfigStatus(..))
import Stack.Constants
+import Stack.Snapshot (loadResolver)
import Stack.Types.Config
import Stack.Types.Resolver
-import Stack.Types.StringError
data ConfigCmdSet
= ConfigCmdSetResolver AbstractResolver
@@ -56,51 +48,46 @@ configCmdSetScope (ConfigCmdSetSystemGhc scope _) = scope
configCmdSetScope (ConfigCmdSetInstallGhc scope _) = scope
cfgCmdSet
- :: (StackMiniM env m, HasConfig env, HasGHCVariant env)
- => GlobalOpts -> ConfigCmdSet -> m ()
+ :: (HasConfig env, HasGHCVariant env)
+ => GlobalOpts -> ConfigCmdSet -> RIO env ()
cfgCmdSet go cmd = do
conf <- view configL
configFilePath <-
- liftM
- toFilePath
- (case configCmdSetScope cmd of
+ case configCmdSetScope cmd of
CommandScopeProject -> do
mstackYamlOption <- forM (globalStackYaml go) resolveFile'
mstackYaml <- getProjectConfig mstackYamlOption
case mstackYaml of
LCSProject stackYaml -> return stackYaml
LCSNoProject -> liftM (</> stackDotYaml) (getImplicitGlobalProjectDir conf)
- LCSNoConfig -> errorString "config command used when no local configuration available"
- CommandScopeGlobal -> return (configUserConfigPath conf))
+ LCSNoConfig _ -> throwString "config command used when no local configuration available"
+ CommandScopeGlobal -> return (configUserConfigPath conf)
-- We don't need to worry about checking for a valid yaml here
(config :: Yaml.Object) <-
- liftIO (Yaml.decodeFileEither configFilePath) >>= either throwM return
- newValue <- cfgCmdSetValue cmd
+ liftIO (Yaml.decodeFileEither (toFilePath configFilePath)) >>= either throwM return
+ newValue <- cfgCmdSetValue (parent configFilePath) cmd
let cmdKey = cfgCmdSetOptionName cmd
config' = HMap.insert cmdKey newValue config
if config' == config
- then $logInfo
- (T.pack configFilePath <>
+ then logInfo
+ (T.pack (toFilePath configFilePath) <>
" already contained the intended configuration and remains unchanged.")
else do
- liftIO (S.writeFile configFilePath (Yaml.encode config'))
- $logInfo (T.pack configFilePath <> " has been updated.")
+ liftIO (S.writeFile (toFilePath configFilePath) (Yaml.encode config'))
+ logInfo (T.pack (toFilePath configFilePath) <> " has been updated.")
cfgCmdSetValue
- :: (StackMiniM env m, HasConfig env, HasGHCVariant env)
- => ConfigCmdSet -> m Yaml.Value
-cfgCmdSetValue (ConfigCmdSetResolver newResolver) = do
- concreteResolver <- makeConcreteResolver newResolver
- case concreteResolver of
- -- Check that the snapshot actually exists
- ResolverSnapshot snapName -> void $ loadMiniBuildPlan snapName
- ResolverCompiler _ -> return ()
- -- TODO: custom snapshot support? Would need a way to specify on CLI
- ResolverCustom _ _ -> errorString "'stack config set resolver' does not support custom resolvers"
- return (Yaml.String (resolverName concreteResolver))
-cfgCmdSetValue (ConfigCmdSetSystemGhc _ bool) =
+ :: (HasConfig env, HasGHCVariant env)
+ => Path Abs Dir -- ^ root directory of project
+ -> ConfigCmdSet -> RIO env Yaml.Value
+cfgCmdSetValue root (ConfigCmdSetResolver newResolver) = do
+ concreteResolver <- makeConcreteResolver (Just root) newResolver
+ -- Check that the snapshot actually exists
+ void $ loadResolver concreteResolver
+ return (Yaml.toJSON concreteResolver)
+cfgCmdSetValue _ (ConfigCmdSetSystemGhc _ bool) =
return (Yaml.Bool bool)
-cfgCmdSetValue (ConfigCmdSetInstallGhc _ bool) =
+cfgCmdSetValue _ (ConfigCmdSetInstallGhc _ bool) =
return (Yaml.Bool bool)
cfgCmdSetOptionName :: ConfigCmdSet -> Text
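cfgCmdSet now carries the config file as a Path Abs File and only rewrites it when inserting the key actually changes the document. A minimal sketch of that read-modify-write step against the yaml package (assuming a yaml version where Yaml.Object is a HashMap Text Value, as it was at the time of this release); setConfigKey and its arguments are illustrative:

    import qualified Data.ByteString as S
    import qualified Data.HashMap.Strict as HMap
    import           Data.Text (Text)
    import qualified Data.Yaml as Yaml

    -- Decode the file as a raw object, insert the key, and write it back
    -- only when the resulting object differs from what was already there.
    setConfigKey :: FilePath -> Text -> Yaml.Value -> IO ()
    setConfigKey fp key val = do
        config <- either (error . show) id <$> Yaml.decodeFileEither fp
        let config' = HMap.insert key val (config :: Yaml.Object)
        if config' == config
          then putStrLn (fp ++ " already contained the intended configuration")
          else do
            S.writeFile fp (Yaml.encode config')
            putStrLn (fp ++ " has been updated")

For instance, setConfigKey "stack.yaml" "resolver" (Yaml.String "lts-9.0") performs the same kind of update as a stack config set resolver invocation.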
diff --git a/src/Stack/Constants.hs b/src/Stack/Constants.hs
index 0f84946..a762eef 100644
--- a/src/Stack/Constants.hs
+++ b/src/Stack/Constants.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
@@ -5,12 +6,8 @@
module Stack.Constants
(buildPlanDir
- ,distDirFromDir
- ,workDirFromDir
- ,distRelativeDir
+ ,buildPlanCacheDir
,haskellModuleExts
- ,imageStagingDir
- ,projectDockerSandboxDir
,stackDotYaml
,stackWorkEnvVar
,stackRootEnvVar
@@ -18,11 +15,6 @@ module Stack.Constants
,deprecatedStackRootOptionName
,inContainerEnvVar
,inNixShellEnvVar
- ,configCacheFile
- ,configCabalMod
- ,buildCacheFile
- ,testSuccessFile
- ,testBuiltFile
,stackProgName
,stackProgNameUpper
,wiredInPackages
@@ -30,31 +22,24 @@ module Stack.Constants
,cabalPackageName
,implicitGlobalProjectDirDeprecated
,implicitGlobalProjectDir
- ,hpcRelativeDir
- ,hpcDirFromDir
- ,objectInterfaceDirL
- ,templatesDir
,defaultUserConfigPathDeprecated
,defaultUserConfigPath
,defaultGlobalConfigPathDeprecated
,defaultGlobalConfigPath
,platformVariantEnvVar
,compilerOptionsCabalFlag
+ ,ghcColorForceFlag
+ ,minTerminalWidth
+ ,maxTerminalWidth
+ ,defaultTerminalWidth
)
where
-import Control.Monad.Catch (MonadThrow)
-import Control.Monad.Reader
import Data.Char (toUpper)
-import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
-import Data.Text (Text)
-import Lens.Micro (Getting)
import Path as FL
-import Prelude
+import Stack.Prelude
import Stack.Types.Compiler
-import Stack.Types.Config
-import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-- | Extensions for anything that can be a Haskell module.
@@ -69,127 +54,6 @@ haskellFileExts = ["hs", "hsc", "lhs"]
haskellPreprocessorExts :: [Text]
haskellPreprocessorExts = ["gc", "chs", "hsc", "x", "y", "ly", "cpphs"]
--- | Output .o/.hi directory.
-objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir)
-objectInterfaceDirL = to $ \env -> -- FIXME is this idomatic lens code?
- let workDir = view workDirL env
- root = view projectRootL env
- in root </> workDir </> $(mkRelDir "odir/")
-
--- | The filename used for dirtiness check of source files.
-buildCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory.
- -> m (Path Abs File)
-buildCacheFile dir =
- liftM
- (</> $(mkRelFile "stack-build-cache"))
- (distDirFromDir dir)
-
--- | The filename used to mark tests as having succeeded
-testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory
- -> m (Path Abs File)
-testSuccessFile dir =
- liftM
- (</> $(mkRelFile "stack-test-success"))
- (distDirFromDir dir)
-
--- | The filename used to mark tests as having built
-testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory
- -> m (Path Abs File)
-testBuiltFile dir =
- liftM
- (</> $(mkRelFile "stack-test-built"))
- (distDirFromDir dir)
-
--- | The filename used for dirtiness check of config.
-configCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory.
- -> m (Path Abs File)
-configCacheFile dir =
- liftM
- (</> $(mkRelFile "stack-config-cache"))
- (distDirFromDir dir)
-
--- | The filename used for modification check of .cabal
-configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory.
- -> m (Path Abs File)
-configCabalMod dir =
- liftM
- (</> $(mkRelFile "stack-cabal-mod"))
- (distDirFromDir dir)
-
--- | Directory for HPC work.
-hpcDirFromDir
- :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir -- ^ Package directory.
- -> m (Path Abs Dir)
-hpcDirFromDir fp =
- liftM (fp </>) hpcRelativeDir
-
--- | Relative location of directory for HPC work.
-hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => m (Path Rel Dir)
-hpcRelativeDir =
- liftM (</> $(mkRelDir "hpc")) distRelativeDir
-
--- | Package's build artifacts directory.
-distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => Path Abs Dir
- -> m (Path Abs Dir)
-distDirFromDir fp =
- liftM (fp </>) distRelativeDir
-
--- | Package's working directory.
-workDirFromDir :: (MonadReader env m, HasEnvConfig env)
- => Path Abs Dir
- -> m (Path Abs Dir)
-workDirFromDir fp = view $ workDirL.to (fp </>)
-
--- | Directory for project templates.
-templatesDir :: Config -> Path Abs Dir
-templatesDir config = configStackRoot config </> $(mkRelDir "templates")
-
--- | Relative location of build artifacts.
-distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
- => m (Path Rel Dir)
-distRelativeDir = do
- cabalPkgVer <- view cabalVersionL
- platform <- platformGhcRelDir
- wc <- view $ actualCompilerVersionL.to whichCompiler
- -- Cabal version, suffixed with "_ghcjs" if we're using GHCJS.
- envDir <-
- parseRelDir $
- (if wc == Ghcjs then (++ "_ghcjs") else id) $
- packageIdentifierString $
- PackageIdentifier cabalPackageName cabalPkgVer
- platformAndCabal <- useShaPathOnWindows (platform </> envDir)
- workDir <- view workDirL
- return $
- workDir </>
- $(mkRelDir "dist") </>
- platformAndCabal
-
--- | Docker sandbox from project root.
-projectDockerSandboxDir :: (MonadReader env m, HasConfig env)
- => Path Abs Dir -- ^ Project root
- -> m (Path Abs Dir) -- ^ Docker sandbox
-projectDockerSandboxDir projectRoot = do
- workDir <- view workDirL
- return $ projectRoot </> workDir </> $(mkRelDir "docker/")
-
--- | Image staging dir from project root.
-imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m)
- => Path Abs Dir -- ^ Project root
- -> Int -- ^ Index of image
- -> m (Path Abs Dir) -- ^ Docker sandbox
-imageStagingDir projectRoot imageIdx = do
- workDir <- view workDirL
- idxRelDir <- parseRelDir (show imageIdx)
- return $ projectRoot </> workDir </> $(mkRelDir "image") </> idxRelDir
-
-- | Name of the 'stack' program, uppercased
stackProgNameUpper :: String
stackProgNameUpper = map toUpper stackProgName
@@ -231,7 +95,7 @@ inContainerEnvVar = stackProgNameUpper ++ "_IN_CONTAINER"
-- although we already have STACK_IN_NIX_EXTRA_ARGS that is set in the same conditions,
-- it can happen that STACK_IN_NIX_EXTRA_ARGS is set to empty.
inNixShellEnvVar :: String
-inNixShellEnvVar = map toUpper stackProgName ++ "_IN_NIXSHELL"
+inNixShellEnvVar = map toUpper stackProgName ++ "_IN_NIX_SHELL"
-- See https://downloads.haskell.org/~ghc/7.10.1/docs/html/libraries/ghc/src/Module.html#integerPackageKey
wiredInPackages :: HashSet PackageName
@@ -343,6 +207,12 @@ buildPlanDir :: Path Abs Dir -- ^ Stack root
-> Path Abs Dir
buildPlanDir = (</> $(mkRelDir "build-plan"))
+-- | Path where binary caches of the build plans are stored.
+buildPlanCacheDir
+ :: Path Abs Dir -- ^ Stack root
+ -> Path Abs Dir
+buildPlanCacheDir = (</> $(mkRelDir "build-plan-cache"))
+
-- | Environment variable that stores a variant to append to platform-specific directory
-- names. Used to ensure incompatible binaries aren't shared between Docker builds and host
platformVariantEnvVar :: String
@@ -353,3 +223,21 @@ platformVariantEnvVar = stackProgNameUpper ++ "_PLATFORM_VARIANT"
compilerOptionsCabalFlag :: WhichCompiler -> String
compilerOptionsCabalFlag Ghc = "--ghc-options"
compilerOptionsCabalFlag Ghcjs = "--ghcjs-options"
+
+-- | The flag to pass to GHC when we want to force its output to be
+-- colorized.
+ghcColorForceFlag :: String
+ghcColorForceFlag = "-fdiagnostics-color=always"
+
+-- | The minimum allowed terminal width. Used for pretty-printing.
+minTerminalWidth :: Int
+minTerminalWidth = 40
+
+-- | The maximum allowed terminal width. Used for pretty-printing.
+maxTerminalWidth :: Int
+maxTerminalWidth = 200
+
+-- | The default terminal width. Used for pretty-printing when we can't
+-- automatically detect it and when the user doesn't supply one.
+defaultTerminalWidth :: Int
+defaultTerminalWidth = 100
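
Note: this hunk only adds the width constants; how they combine is not shown here. A plausible sketch, assuming the pretty-printer clamps a detected width and falls back to the default (hypothetical helper name):

import Data.Maybe (fromMaybe)
import Stack.Constants (defaultTerminalWidth, maxTerminalWidth, minTerminalWidth)

-- Clamp a detected terminal width into [minTerminalWidth, maxTerminalWidth],
-- using the default when detection failed (hypothetical helper).
resolveTerminalWidth :: Maybe Int -> Int
resolveTerminalWidth mdetected =
  min maxTerminalWidth (max minTerminalWidth (fromMaybe defaultTerminalWidth mdetected))
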
diff --git a/src/Stack/Constants.hs-boot b/src/Stack/Constants.hs-boot
deleted file mode 100644
index b604dfd..0000000
--- a/src/Stack/Constants.hs-boot
+++ /dev/null
@@ -1,3 +0,0 @@
-module Stack.Constants where
-
-stackProgName :: String
diff --git a/src/Stack/Constants/Config.hs b/src/Stack/Constants/Config.hs
new file mode 100644
index 0000000..7882ff5
--- /dev/null
+++ b/src/Stack/Constants/Config.hs
@@ -0,0 +1,147 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE OverloadedStrings #-}
+module Stack.Constants.Config
+ ( distDirFromDir
+ , workDirFromDir
+ , distRelativeDir
+ , imageStagingDir
+ , projectDockerSandboxDir
+ , configCacheFile
+ , configCabalMod
+ , buildCacheFile
+ , testSuccessFile
+ , testBuiltFile
+ , hpcRelativeDir
+ , hpcDirFromDir
+ , objectInterfaceDirL
+ , templatesDir
+ ) where
+
+import Stack.Prelude
+import Stack.Constants
+import Stack.Types.Compiler
+import Stack.Types.Config
+import Stack.Types.PackageIdentifier
+import Path
+
+-- | Output .o/.hi directory.
+objectInterfaceDirL :: HasBuildConfig env => Getting r env (Path Abs Dir)
+objectInterfaceDirL = to $ \env -> -- FIXME is this idiomatic lens code?
+ let workDir = view workDirL env
+ root = view projectRootL env
+ in root </> workDir </> $(mkRelDir "odir/")
+
+-- | The filename used for dirtiness check of source files.
+buildCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory.
+ -> m (Path Abs File)
+buildCacheFile dir =
+ liftM
+ (</> $(mkRelFile "stack-build-cache"))
+ (distDirFromDir dir)
+
+-- | The filename used to mark tests as having succeeded
+testSuccessFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory
+ -> m (Path Abs File)
+testSuccessFile dir =
+ liftM
+ (</> $(mkRelFile "stack-test-success"))
+ (distDirFromDir dir)
+
+-- | The filename used to mark tests as having built
+testBuiltFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory
+ -> m (Path Abs File)
+testBuiltFile dir =
+ liftM
+ (</> $(mkRelFile "stack-test-built"))
+ (distDirFromDir dir)
+
+-- | The filename used for dirtiness check of config.
+configCacheFile :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory.
+ -> m (Path Abs File)
+configCacheFile dir =
+ liftM
+ (</> $(mkRelFile "stack-config-cache"))
+ (distDirFromDir dir)
+
+-- | The filename used for modification check of .cabal
+configCabalMod :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory.
+ -> m (Path Abs File)
+configCabalMod dir =
+ liftM
+ (</> $(mkRelFile "stack-cabal-mod"))
+ (distDirFromDir dir)
+
+-- | Directory for HPC work.
+hpcDirFromDir
+ :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir -- ^ Package directory.
+ -> m (Path Abs Dir)
+hpcDirFromDir fp =
+ liftM (fp </>) hpcRelativeDir
+
+-- | Relative location of directory for HPC work.
+hpcRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => m (Path Rel Dir)
+hpcRelativeDir =
+ liftM (</> $(mkRelDir "hpc")) distRelativeDir
+
+-- | Package's build artifacts directory.
+distDirFromDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir
+ -> m (Path Abs Dir)
+distDirFromDir fp =
+ liftM (fp </>) distRelativeDir
+
+-- | Package's working directory.
+workDirFromDir :: (MonadReader env m, HasEnvConfig env)
+ => Path Abs Dir
+ -> m (Path Abs Dir)
+workDirFromDir fp = view $ workDirL.to (fp </>)
+
+-- | Directory for project templates.
+templatesDir :: Config -> Path Abs Dir
+templatesDir config = configStackRoot config </> $(mkRelDir "templates")
+
+-- | Relative location of build artifacts.
+distRelativeDir :: (MonadThrow m, MonadReader env m, HasEnvConfig env)
+ => m (Path Rel Dir)
+distRelativeDir = do
+ cabalPkgVer <- view cabalVersionL
+ platform <- platformGhcRelDir
+ wc <- view $ actualCompilerVersionL.to whichCompiler
+ -- Cabal version, suffixed with "_ghcjs" if we're using GHCJS.
+ envDir <-
+ parseRelDir $
+ (if wc == Ghcjs then (++ "_ghcjs") else id) $
+ packageIdentifierString $
+ PackageIdentifier cabalPackageName cabalPkgVer
+ platformAndCabal <- useShaPathOnWindows (platform </> envDir)
+ workDir <- view workDirL
+ return $
+ workDir </>
+ $(mkRelDir "dist") </>
+ platformAndCabal
+
+-- | Docker sandbox from project root.
+projectDockerSandboxDir :: (MonadReader env m, HasConfig env)
+ => Path Abs Dir -- ^ Project root
+ -> m (Path Abs Dir) -- ^ Docker sandbox
+projectDockerSandboxDir projectRoot = do
+ workDir <- view workDirL
+ return $ projectRoot </> workDir </> $(mkRelDir "docker/")
+
+-- | Image staging dir from project root.
+imageStagingDir :: (MonadReader env m, HasConfig env, MonadThrow m)
+ => Path Abs Dir -- ^ Project root
+ -> Int -- ^ Index of image
+ -> m (Path Abs Dir) -- ^ Docker sandbox
+imageStagingDir projectRoot imageIdx = do
+ workDir <- view workDirL
+ idxRelDir <- parseRelDir (show imageIdx)
+ return $ projectRoot </> workDir </> $(mkRelDir "image") </> idxRelDir
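
Note: these helpers move here verbatim from Stack.Constants, and their MonadThrow/MonadReader constraints are satisfied by the RIO-based monad adopted elsewhere in this release. A minimal usage sketch (hypothetical function name):

import Path (Abs, Dir, Path, toFilePath)
import Stack.Constants.Config (buildCacheFile)
import Stack.Prelude
import Stack.Types.Config (HasEnvConfig)

-- Where the dirtiness-check cache for a package directory would live.
cacheLocation :: HasEnvConfig env => Path Abs Dir -> RIO env FilePath
cacheLocation pkgDir = toFilePath <$> buildCacheFile pkgDir
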
diff --git a/src/Stack/Coverage.hs b/src/Stack/Coverage.hs
index 0948d69..cda22ec 100644
--- a/src/Stack/Coverage.hs
+++ b/src/Stack/Coverage.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
@@ -16,35 +17,20 @@ module Stack.Coverage
, generateHpcMarkupIndex
) where
-import Control.Exception.Safe (handleIO)
-import Control.Exception.Lifted
-import Control.Monad (liftM, when, unless, void, (<=<))
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Trans.Resource
+import Stack.Prelude
import qualified Data.ByteString.Char8 as S8
-import Data.Foldable (forM_, asum, toList)
-import Data.Function
import Data.List
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Maybe.Extra (mapMaybeM)
-import Data.Monoid ((<>))
-import Data.String
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.IO as T
import qualified Data.Text.Lazy as LT
-import Data.Traversable (forM)
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
-import Prelude hiding (FilePath, writeFile)
-import Stack.Build.Source (parseTargetsFromBuildOpts)
import Stack.Build.Target
import Stack.Config (getLocalPackages)
-import Stack.Constants
+import Stack.Constants.Config
import Stack.Package
import Stack.PrettyPrint
import Stack.Types.Compiler
@@ -52,8 +38,7 @@ import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT (StackM)
-import Stack.Types.StringError
+import Stack.Types.Runner
import Stack.Types.Version
import System.FilePath (isPathSeparator)
import System.Process.Read
@@ -62,38 +47,35 @@ import Trace.Hpc.Tix
import Web.Browser (openBrowser)
-- | Invoked at the beginning of running with "--coverage"
-deleteHpcReports :: (StackM env m, HasEnvConfig env)
- => m ()
+deleteHpcReports :: HasEnvConfig env => RIO env ()
deleteHpcReports = do
hpcDir <- hpcReportDir
- ignoringAbsence (removeDirRecur hpcDir)
+ liftIO $ ignoringAbsence (removeDirRecur hpcDir)
-- | Move a tix file into a sub-directory of the hpc report directory. Deletes the old one if one is
-- present.
-updateTixFile :: (StackM env m, HasEnvConfig env)
- => PackageName -> Path Abs File -> String -> m ()
+updateTixFile :: HasEnvConfig env => PackageName -> Path Abs File -> String -> RIO env ()
updateTixFile pkgName tixSrc testName = do
exists <- doesFileExist tixSrc
when exists $ do
tixDest <- tixFilePath pkgName testName
- ignoringAbsence (removeFile tixDest)
+ liftIO $ ignoringAbsence (removeFile tixDest)
ensureDir (parent tixDest)
-- Remove exe modules because they are problematic. This could be revisited if there's a GHC
-- version that fixes https://ghc.haskell.org/trac/ghc/ticket/1853
mtix <- readTixOrLog tixSrc
case mtix of
- Nothing -> $logError $ "Failed to read " <> T.pack (toFilePath tixSrc)
+ Nothing -> logError $ "Failed to read " <> T.pack (toFilePath tixSrc)
Just tix -> do
liftIO $ writeTix (toFilePath tixDest) (removeExeModules tix)
-- TODO: ideally we'd do a file move, but IIRC this can
-- have problems. Something about moving between drives
-- on windows?
copyFile tixSrc =<< parseAbsFile (toFilePath tixDest ++ ".premunging")
- ignoringAbsence (removeFile tixSrc)
+ liftIO $ ignoringAbsence (removeFile tixSrc)
-- | Get the directory used for hpc reports for the given pkgId.
-hpcPkgPath :: (StackM env m, HasEnvConfig env)
- => PackageName -> m (Path Abs Dir)
+hpcPkgPath :: HasEnvConfig env => PackageName -> RIO env (Path Abs Dir)
hpcPkgPath pkgName = do
outputDir <- hpcReportDir
pkgNameRel <- parseRelDir (packageNameString pkgName)
@@ -101,16 +83,16 @@ hpcPkgPath pkgName = do
-- | Get the tix file location, given the name of the file (without extension), and the package
-- identifier string.
-tixFilePath :: (StackM env m, HasEnvConfig env)
- => PackageName -> String -> m (Path Abs File)
+tixFilePath :: HasEnvConfig env
+ => PackageName -> String -> RIO env (Path Abs File)
tixFilePath pkgName testName = do
pkgPath <- hpcPkgPath pkgName
tixRel <- parseRelFile (testName ++ "/" ++ testName ++ ".tix")
return (pkgPath </> tixRel)
-- | Generates the HTML coverage report and shows a textual coverage summary for a package.
-generateHpcReport :: (StackM env m, HasEnvConfig env)
- => Path Abs Dir -> Package -> [Text] -> m ()
+generateHpcReport :: HasEnvConfig env
+ => Path Abs Dir -> Package -> [Text] -> RIO env ()
generateHpcReport pkgDir package tests = do
compilerVersion <- view actualCompilerVersionL
-- If we're using > GHC 7.10, the hpc 'include' parameter must specify a ghc package key. See
@@ -118,11 +100,15 @@ generateHpcReport pkgDir package tests = do
let pkgName = packageNameText (packageName package)
pkgId = packageIdentifierString (packageIdentifier package)
ghcVersion = getGhcVersion compilerVersion
+ hasLibrary =
+ case packageLibraries package of
+ NoLibraries -> False
+ HasLibraries _ -> True
eincludeName <-
-- Pre-7.8 uses plain PKG-version in tix files.
if ghcVersion < $(mkVersion "7.10") then return $ Right $ Just pkgId
-- We don't expect to find a package key if there is no library.
- else if not (packageHasLibrary package) then return $ Right Nothing
+ else if not hasLibrary then return $ Right Nothing
-- Look in the inplace DB for the package key.
-- See https://github.com/commercialhaskell/stack/issues/1181#issuecomment-148968986
else do
@@ -132,7 +118,7 @@ generateHpcReport pkgDir package tests = do
eincludeName <- findPackageFieldForBuiltPackage pkgDir (packageIdentifier package) hpcNameField
case eincludeName of
Left err -> do
- $logError err
+ logError err
return $ Left err
Right includeName -> return $ Right $ Just $ T.unpack includeName
forM_ tests $ \testName -> do
@@ -148,16 +134,17 @@ generateHpcReport pkgDir package tests = do
Just includeName -> ["--include", includeName ++ ":"]
Nothing -> []
mreportPath <- generateHpcReportInternal tixSrc reportDir report extraArgs extraArgs
- forM_ mreportPath (displayReportPath report)
+ forM_ mreportPath (displayReportPath report . display)
-generateHpcReportInternal :: (StackM env m, HasEnvConfig env)
- => Path Abs File -> Path Abs Dir -> Text -> [String] -> [String] -> m (Maybe (Path Abs File))
+generateHpcReportInternal :: HasEnvConfig env
+ => Path Abs File -> Path Abs Dir -> Text -> [String] -> [String]
+ -> RIO env (Maybe (Path Abs File))
generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArgs = do
-- If a .tix file exists, move it to the HPC output directory and generate a report for it.
tixFileExists <- doesFileExist tixSrc
if not tixFileExists
then do
- $logError $ T.concat
+ logError $ T.concat
[ "Didn't find .tix for "
, report
, " - expected to find it at "
@@ -167,21 +154,21 @@ generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArg
return Nothing
else (`catch` \err -> do
let msg = show (err :: ReadProcessException)
- $logError (T.pack msg)
+ logError (T.pack msg)
generateHpcErrorReport reportDir $ sanitize msg
return Nothing) $
- (`onException` $logError ("Error occurred while producing " <> report)) $ do
+ (`onException` logError ("Error occurred while producing " <> report)) $ do
-- Directories for .mix files.
hpcRelDir <- hpcRelativeDir
-- Compute arguments used for both "hpc markup" and "hpc report".
- pkgDirs <- liftM Map.keys getLocalPackages
+ pkgDirs <- liftM (map lpvRoot . Map.elems . lpProject) getLocalPackages
let args =
-- Use index files from all packages (allows cross-package coverage results).
concatMap (\x -> ["--srcdir", toFilePathNoTrailingSep x]) pkgDirs ++
-- Look for index files in the correct dir (relative to each pkgdir).
["--hpcdir", toFilePathNoTrailingSep hpcRelDir, "--reset-hpcdirs"]
menv <- getMinimalEnvOverride
- $logInfo $ "Generating " <> report
+ logInfo $ "Generating " <> report
outputLines <- liftM (map (S8.filter (/= '\r')) . S8.lines) $
readProcessStdout Nothing menv "hpc"
( "report"
@@ -202,13 +189,13 @@ generateHpcReportInternal tixSrc reportDir report extraMarkupArgs extraReportArg
, " the hpc program. Please report this issue if you think"
, " your coverage report should have meaningful results."
]
- $logError (msg False)
+ logError (msg False)
generateHpcErrorReport reportDir (msg True)
return Nothing
else do
let reportPath = reportDir </> $(mkRelFile "hpc_index.html")
-- Print output, stripping @\r@ characters because Windows.
- forM_ outputLines ($logInfo . T.decodeUtf8)
+ forM_ outputLines (logInfo . T.decodeUtf8)
-- Generate the markup.
void $ readProcessStdout Nothing menv "hpc"
( "markup"
@@ -225,8 +212,8 @@ data HpcReportOpts = HpcReportOpts
, hroptsOpenBrowser :: Bool
} deriving (Show)
-generateHpcReportForTargets :: (StackM env m, HasEnvConfig env)
- => HpcReportOpts -> m ()
+generateHpcReportForTargets :: HasEnvConfig env
+ => HpcReportOpts -> RIO env ()
generateHpcReportForTargets opts = do
let (tixFiles, targetNames) = partition (".tix" `T.isSuffixOf`) (hroptsInputs opts)
targetTixFiles <-
@@ -236,20 +223,18 @@ generateHpcReportForTargets opts = do
then return []
else do
when (hroptsAll opts && not (null targetNames)) $
- $logWarn $ "Since --all is used, it is redundant to specify these targets: " <> T.pack (show targetNames)
- (_,_,targets) <- parseTargetsFromBuildOpts
+ logWarn $ "Since --all is used, it is redundant to specify these targets: " <> T.pack (show targetNames)
+ (_,_,targets) <- parseTargets
AllowNoTargets
defaultBuildOptsCLI
{ boptsCLITargets = if hroptsAll opts then [] else targetNames }
liftM concat $ forM (Map.toList targets) $ \(name, target) ->
case target of
- STUnknown -> throwString $
- "Error: " ++ packageNameString name ++ " isn't a known local page"
- STNonLocal -> throwString $
+ TargetAll Dependency -> throwString $
"Error: Expected a local package, but " ++
packageNameString name ++
" is either an extra-dep or in the snapshot."
- STLocalComps comps -> do
+ TargetComps comps -> do
pkgPath <- hpcPkgPath name
forM (toList comps) $ \nc ->
case nc of
@@ -259,7 +244,7 @@ generateHpcReportForTargets opts = do
"Can't specify anything except test-suites as hpc report targets (" ++
packageNameString name ++
" is used with a non test-suite target)"
- STLocalAll -> do
+ TargetAll ProjectPackage -> do
pkgPath <- hpcPkgPath name
exists <- doesDirExist pkgPath
if exists
@@ -284,12 +269,11 @@ generateHpcReportForTargets opts = do
forM_ mreportPath $ \reportPath ->
if hroptsOpenBrowser opts
then do
- $prettyInfo $ "Opening" <+> display reportPath <+> "in the browser."
+ prettyInfo $ "Opening" <+> display reportPath <+> "in the browser."
void $ liftIO $ openBrowser (toFilePath reportPath)
- else displayReportPath report reportPath
+ else displayReportPath report (display reportPath)
-generateHpcUnifiedReport :: (StackM env m, HasEnvConfig env)
- => m ()
+generateHpcUnifiedReport :: HasEnvConfig env => RIO env ()
generateHpcUnifiedReport = do
outputDir <- hpcReportDir
ensureDir outputDir
@@ -303,7 +287,7 @@ generateHpcUnifiedReport = do
let tixFiles = tixFiles0 ++ extraTixFiles
reportDir = outputDir </> $(mkRelDir "combined/all")
if length tixFiles < 2
- then $logInfo $ T.concat
+ then logInfo $ T.concat
[ if null tixFiles then "No tix files" else "Only one tix file"
, " found in "
, T.pack (toFilePath outputDir)
@@ -312,14 +296,15 @@ generateHpcUnifiedReport = do
else do
let report = "unified report"
mreportPath <- generateUnionReport report reportDir tixFiles
- forM_ mreportPath (displayReportPath report)
+ forM_ mreportPath (displayReportPath report . display)
-generateUnionReport :: (StackM env m, HasEnvConfig env)
- => Text -> Path Abs Dir -> [Path Abs File] -> m (Maybe (Path Abs File))
+generateUnionReport :: HasEnvConfig env
+ => Text -> Path Abs Dir -> [Path Abs File]
+ -> RIO env (Maybe (Path Abs File))
generateUnionReport report reportDir tixFiles = do
(errs, tix) <- fmap (unionTixes . map removeExeModules) (mapMaybeM readTixOrLog tixFiles)
- $logDebug $ "Using the following tix files: " <> T.pack (show tixFiles)
- unless (null errs) $ $logWarn $ T.concat $
+ logDebug $ "Using the following tix files: " <> T.pack (show tixFiles)
+ unless (null errs) $ logWarn $ T.concat $
"The following modules are left out of the " : report : " due to version mismatches: " :
intersperse ", " (map T.pack errs)
tixDest <- liftM (reportDir </>) $ parseRelFile (dirnameString reportDir ++ ".tix")
@@ -327,13 +312,13 @@ generateUnionReport report reportDir tixFiles = do
liftIO $ writeTix (toFilePath tixDest) tix
generateHpcReportInternal tixDest reportDir report [] []
-readTixOrLog :: (MonadLogger m, MonadIO m, MonadBaseControl IO m) => Path b File -> m (Maybe Tix)
+readTixOrLog :: (MonadLogger m, MonadUnliftIO m) => Path b File -> m (Maybe Tix)
readTixOrLog path = do
- mtix <- liftIO (readTix (toFilePath path)) `catch` \errorCall -> do
- $logError $ "Error while reading tix: " <> T.pack (show (errorCall :: ErrorCall))
+ mtix <- liftIO (readTix (toFilePath path)) `catchAny` \errorCall -> do
+ logError $ "Error while reading tix: " <> T.pack (show errorCall)
return Nothing
when (isNothing mtix) $
- $logError $ "Failed to read tix file " <> T.pack (toFilePath path)
+ logError $ "Failed to read tix file " <> T.pack (toFilePath path)
return mtix
-- | Module names which contain '/' have a package name, and so they weren't built into the
@@ -351,8 +336,7 @@ unionTixes tixes = (Map.keys errs, Tix (Map.elems outputs))
| hash1 == hash2 && len1 == len2 = Right (TixModule k hash1 len1 (zipWith (+) tix1 tix2))
merge _ _ = Left ()
-generateHpcMarkupIndex :: (StackM env m, HasEnvConfig env)
- => m ()
+generateHpcMarkupIndex :: HasEnvConfig env => RIO env ()
generateHpcMarkupIndex = do
outputDir <- hpcReportDir
let outputFile = outputDir </> $(mkRelFile "index.html")
@@ -364,7 +348,7 @@ generateHpcMarkupIndex = do
let indexPath = subdir </> $(mkRelFile "hpc_index.html")
exists' <- doesFileExist indexPath
if not exists' then return Nothing else do
- relPath <- stripDir outputDir indexPath
+ relPath <- stripProperPrefix outputDir indexPath
let package = dirname dir
testsuite = dirname subdir
return $ Just $ T.concat
@@ -402,7 +386,7 @@ generateHpcMarkupIndex = do
["</tbody></table>"]) ++
["</body></html>"]
unless (null rows) $
- $logInfo $ "\nAn index of the generated HTML coverage reports is available at " <>
+ logInfo $ "\nAn index of the generated HTML coverage reports is available at " <>
T.pack (toFilePath outputFile)
generateHpcErrorReport :: MonadIO m => Path Abs Dir -> Text -> m ()
@@ -427,8 +411,9 @@ dirnameString :: Path r Dir -> String
dirnameString = dropWhileEnd isPathSeparator . toFilePath . dirname
findPackageFieldForBuiltPackage
- :: (StackM env m, HasEnvConfig env)
- => Path Abs Dir -> PackageIdentifier -> Text -> m (Either Text Text)
+ :: HasEnvConfig env
+ => Path Abs Dir -> PackageIdentifier -> Text
+ -> RIO env (Either Text Text)
findPackageFieldForBuiltPackage pkgDir pkgId field = do
distDir <- distDirFromDir pkgDir
let inplaceDir = distDir </> $(mkRelDir "package.conf.inplace")
@@ -443,14 +428,14 @@ findPackageFieldForBuiltPackage pkgDir pkgId field = do
if cabalVer < $(mkVersion "1.24")
then do
path <- liftM (inplaceDir </>) $ parseRelFile (pkgIdStr ++ "-inplace.conf")
- $logDebug $ "Parsing config in Cabal < 1.24 location: " <> T.pack (toFilePath path)
+ logDebug $ "Parsing config in Cabal < 1.24 location: " <> T.pack (toFilePath path)
exists <- doesFileExist path
if exists then extractField path else notFoundErr
else do
-- With Cabal-1.24, it's in a different location.
- $logDebug $ "Scanning " <> T.pack (toFilePath inplaceDir) <> " for files matching " <> T.pack pkgIdStr
+ logDebug $ "Scanning " <> T.pack (toFilePath inplaceDir) <> " for files matching " <> T.pack pkgIdStr
(_, files) <- handleIO (const $ return ([], [])) $ listDir inplaceDir
- $logDebug $ T.pack (show files)
+ logDebug $ T.pack (show files)
case mapMaybe (\file -> fmap (const file) . (T.stripSuffix ".conf" <=< T.stripPrefix (T.pack (pkgIdStr ++ "-")))
. T.pack . toFilePath . filename $ file) files of
[] -> notFoundErr
@@ -458,12 +443,12 @@ findPackageFieldForBuiltPackage pkgDir pkgId field = do
_ -> return $ Left $ "Multiple files matching " <> T.pack (pkgIdStr ++ "-*.conf") <> " found in " <>
T.pack (toFilePath inplaceDir) <> ". Maybe try 'stack clean' on this package?"
-displayReportPath :: (StackM env m, HasAnsiAnn (Ann a), Display a)
- => Text -> a -> m ()
+displayReportPath :: (HasRunner env)
+ => Text -> AnsiDoc -> RIO env ()
displayReportPath report reportPath =
- $prettyInfo $ "The" <+> fromString (T.unpack report) <+> "is available at" <+> display reportPath
+ prettyInfo $ "The" <+> fromString (T.unpack report) <+> "is available at" <+> reportPath
-findExtraTixFiles :: (StackM env m , HasEnvConfig env) => m [Path Abs File]
+findExtraTixFiles :: HasEnvConfig env => RIO env [Path Abs File]
findExtraTixFiles = do
outputDir <- hpcReportDir
let dir = outputDir </> $(mkRelDir "extra-tix-files")
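
Note: much of this hunk is mechanical: TH logging splices become plain logError/logWarn calls, and lifted catch becomes UnliftIO's catchAny, as in readTixOrLog. A standalone sketch of that catch-and-log pattern (hypothetical helper; logErrorN is monad-logger's non-TH logger):

import Control.Monad.IO.Unlift (MonadUnliftIO, liftIO)
import Control.Monad.Logger (MonadLogger, logErrorN)
import qualified Data.Text as T
import UnliftIO.Exception (catchAny)

-- Try an IO action; on any synchronous exception, log it and return Nothing.
readFileOrLog :: (MonadUnliftIO m, MonadLogger m) => FilePath -> m (Maybe String)
readFileOrLog fp =
  (Just <$> liftIO (readFile fp)) `catchAny` \e -> do
    logErrorN (T.concat ["Error while reading ", T.pack fp, ": ", T.pack (show e)])
    return Nothing
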
diff --git a/src/Stack/Docker.hs b/src/Stack/Docker.hs
index 8a331e7..5051960 100644
--- a/src/Stack/Docker.hs
+++ b/src/Stack/Docker.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP, ConstraintKinds, DeriveDataTypeable, FlexibleContexts, MultiWayIf, NamedFieldPuns,
OverloadedStrings, PackageImports, RankNTypes, RecordWildCards, ScopedTypeVariables,
TemplateHaskell, TupleSections #-}
@@ -20,15 +21,7 @@ module Stack.Docker
,StackDockerException(..)
) where
-import Control.Applicative
-import Control.Concurrent.MVar.Lifted (MVar,modifyMVar_,newMVar)
-import Control.Exception.Lifted
-import Control.Monad
-import Control.Monad.Catch (MonadThrow,throwM,MonadCatch)
-import Control.Monad.IO.Class (MonadIO,liftIO)
-import Control.Monad.Logger (MonadLogger,logError,logInfo,logWarn)
-import Control.Monad.Reader (MonadReader,runReaderT)
-import Control.Monad.Trans.Control (MonadBaseControl)
+import Stack.Prelude
import Control.Monad.Writer (execWriter,runWriter,tell)
import qualified Crypto.Hash as Hash (Digest, MD5, hash)
import Data.Aeson.Extended (FromJSON(..),(.:),(.:?),(.!=),eitherDecode)
@@ -37,14 +30,11 @@ import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as LBS
import Data.Char (isSpace,toUpper,isAscii,isDigit)
import Data.Conduit.List (sinkNull)
-import Data.List (dropWhileEnd,intercalate,isPrefixOf,isInfixOf,foldl')
+import Data.List (dropWhileEnd,intercalate,isPrefixOf,isInfixOf)
import Data.List.Extra (trim, nubOrd)
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
-import Data.Maybe
import Data.Ord (Down(..))
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time (UTCTime,LocalTime(..),diffDays,utcToLocalTime,getZonedTime,ZonedTime(..))
@@ -54,16 +44,15 @@ import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO hiding (canonicalizePath)
import qualified Paths_stack as Meta
-import Prelude -- Fix redundant import warnings
import Stack.Config (getInContainer)
import Stack.Constants
+import Stack.Constants.Config
import Stack.Docker.GlobalDB
import Stack.Types.PackageIndex
+import Stack.Types.Runner
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Docker
-import Stack.Types.Internal
-import Stack.Types.StackT
import Stack.Setup (ensureDockerStackExe)
import System.Directory (canonicalizePath,getHomeDirectory)
import System.Environment (getEnv,getEnvironment,getProgName,getArgs,getExecutablePath)
@@ -82,7 +71,6 @@ import Text.Printf (printf)
#ifndef WINDOWS
import Control.Concurrent (threadDelay)
-import qualified Control.Monad.Trans.Control as Control
import System.Posix.Signals
import qualified System.Posix.User as PosixUser
#endif
@@ -91,17 +79,17 @@ import qualified System.Posix.User as PosixUser
-- Otherwise, runs the inner action.
--
-- This takes an optional release action which should be taken IFF control is
--- transfering away from the current process to the intra-container one. The main use
+-- transferring away from the current process to the intra-container one. The main use
-- for this is releasing a lock. After launching reexecution, the host process becomes
-- nothing but a manager for the call into docker and thus may not hold the lock.
reexecWithOptionalContainer
- :: (StackM env m, HasConfig env)
+ :: HasConfig env
=> Maybe (Path Abs Dir)
- -> Maybe (m ())
+ -> Maybe (RIO env ())
-> IO ()
- -> Maybe (m ())
- -> Maybe (m ())
- -> m ()
+ -> Maybe (RIO env ())
+ -> Maybe (RIO env ())
+ -> RIO env ()
reexecWithOptionalContainer mprojectRoot =
execWithOptionalContainer mprojectRoot getCmdArgs
where
@@ -129,7 +117,7 @@ reexecWithOptionalContainer mprojectRoot =
| configPlatform config == dockerContainerPlatform -> do
exePath <- liftIO getExecutablePath
cmdArgs args exePath
- | otherwise -> throwM UnsupportedStackExeHostPlatformException
+ | otherwise -> throwIO UnsupportedStackExeHostPlatformException
Just DockerStackExeImage -> do
progName <- liftIO getProgName
return (FP.takeBaseName progName, args, [], [])
@@ -197,20 +185,20 @@ reexecWithOptionalContainer mprojectRoot =
--
-- This takes an optional release action just like `reexecWithOptionalContainer`.
execWithOptionalContainer
- :: (StackM env m, HasConfig env)
+ :: HasConfig env
=> Maybe (Path Abs Dir)
- -> GetCmdArgs env m
- -> Maybe (m ())
+ -> GetCmdArgs env
+ -> Maybe (RIO env ())
-> IO ()
- -> Maybe (m ())
- -> Maybe (m ())
- -> m ()
+ -> Maybe (RIO env ())
+ -> Maybe (RIO env ())
+ -> RIO env ()
execWithOptionalContainer mprojectRoot getCmdArgs mbefore inner mafter mrelease =
do config <- view configL
inContainer <- getInContainer
isReExec <- view reExecL
if | inContainer && not isReExec && (isJust mbefore || isJust mafter) ->
- throwM OnlyOnHostException
+ throwIO OnlyOnHostException
| inContainer ->
liftIO (do inner
exitSuccess)
@@ -231,20 +219,21 @@ execWithOptionalContainer mprojectRoot getCmdArgs mbefore inner mafter mrelease
fromMaybeAction (Just hook) = hook
-- | Error if running in a container.
-preventInContainer :: (MonadIO m,MonadThrow m) => m () -> m ()
+preventInContainer :: MonadIO m => m () -> m ()
preventInContainer inner =
do inContainer <- getInContainer
if inContainer
- then throwM OnlyOnHostException
+ then throwIO OnlyOnHostException
else inner
-- | Run a command in a new Docker container, then exit the process.
-runContainerAndExit :: (StackM env m, HasConfig env)
- => GetCmdArgs env m
+runContainerAndExit
+ :: HasConfig env
+ => GetCmdArgs env
-> Maybe (Path Abs Dir) -- ^ Project root (maybe)
- -> m () -- ^ Action to run before
- -> m () -- ^ Action to run after
- -> m ()
+ -> RIO env () -- ^ Action to run before
+ -> RIO env () -- ^ Action to run after
+ -> RIO env ()
runContainerAndExit getCmdArgs
mprojectRoot
before
@@ -270,7 +259,7 @@ runContainerAndExit getCmdArgs
image = dockerImage docker
when (isRemoteDocker &&
maybe False (isInfixOf "boot2docker") dockerCertPath)
- ($logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.")
+ (logWarn "Warning: Using boot2docker is NOT supported, and not likely to perform well.")
maybeImageInfo <- inspect envOverride image
imageInfo@Inspect{..} <- case maybeImageInfo of
Just ii -> return ii
@@ -364,9 +353,9 @@ runContainerAndExit getCmdArgs
,args])
before
#ifndef WINDOWS
- runInBase <- Control.liftBaseWith $ \run -> return (void . run)
+ run <- askRunInIO
oldHandlers <- forM [sigINT,sigABRT,sigHUP,sigPIPE,sigTERM,sigUSR1,sigUSR2] $ \sig -> do
- let sigHandler = runInBase $ do
+ let sigHandler = run $ do
readProcessNull Nothing envOverride "docker"
["kill","--signal=" ++ show sig,containerID]
when (sig `elem` [sigTERM,sigABRT]) $ do
@@ -414,8 +403,7 @@ runContainerAndExit getCmdArgs
sshRelDir = $(mkRelDir ".ssh/")
-- | Clean-up old docker images and containers.
-cleanup :: (StackM env m, HasConfig env)
- => CleanupOpts -> m ()
+cleanup :: HasConfig env => CleanupOpts -> RIO env ()
cleanup opts =
do config <- view configL
let docker = configDocker config
@@ -474,16 +462,16 @@ cleanup opts =
[] -> return ()
(c:_):t:v:_ ->
do args <- if | toUpper c == 'R' && t == imageStr ->
- do $logInfo (concatT ["Removing image: '",v,"'"])
+ do logInfo (concatT ["Removing image: '",v,"'"])
return ["rmi",v]
| toUpper c == 'R' && t == containerStr ->
- do $logInfo (concatT ["Removing container: '",v,"'"])
+ do logInfo (concatT ["Removing container: '",v,"'"])
return ["rm","-f",v]
| otherwise -> throwM (InvalidCleanupCommandException line)
e <- try (readDockerProcess envOverride Nothing args)
case e of
Left ex@ProcessFailed{} ->
- $logError (concatT ["Could not remove: '",v,"': ", show ex])
+ logError (concatT ["Could not remove: '",v,"': ", show ex])
Left e' -> throwM e'
Right _ -> return ()
_ -> throwM (InvalidCleanupCommandException line)
@@ -495,12 +483,12 @@ cleanup opts =
| repo == "<none>" -> (hash,[])
| tag == "<none>" -> (hash,[repo])
| otherwise -> (hash,[repo ++ ":" ++ tag])
- _ -> throw (InvalidImagesOutputException line)
+ _ -> impureThrow (InvalidImagesOutputException line)
parseContainersOut = map parseContainer . drop 1 . lines . decodeUtf8
where parseContainer line =
case words line of
- hash:image:rest -> (hash,(image,last rest))
- _ -> throw (InvalidPSOutputException line)
+ hash:image:rest | last:_ <- reverse rest -> (hash,(image,last))
+ _ -> impureThrow (InvalidPSOutputException line)
buildPlan curTime
imagesLastUsed
imageRepos
@@ -641,17 +629,17 @@ cleanup opts =
containerStr = "container"
-- | Inspect Docker image or container.
-inspect :: (MonadIO m,MonadLogger m,MonadBaseControl IO m,MonadCatch m)
+inspect :: (MonadUnliftIO m,MonadLogger m)
=> EnvOverride -> String -> m (Maybe Inspect)
inspect envOverride image =
do results <- inspects envOverride [image]
case Map.toList results of
[] -> return Nothing
[(_,i)] -> return (Just i)
- _ -> throwM (InvalidInspectOutputException "expect a single result")
+ _ -> throwIO (InvalidInspectOutputException "expect a single result")
-- | Inspect multiple Docker images and/or containers.
-inspects :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+inspects :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> [String] -> m (Map String Inspect)
inspects _ [] = return Map.empty
inspects envOverride images =
@@ -661,14 +649,15 @@ inspects envOverride images =
Right inspectOut ->
-- filtering with 'isAscii' to workaround @docker inspect@ output containing invalid UTF-8
case eitherDecode (LBS.pack (filter isAscii (decodeUtf8 inspectOut))) of
- Left msg -> throwM (InvalidInspectOutputException msg)
+ Left msg -> throwIO (InvalidInspectOutputException msg)
Right results -> return (Map.fromList (map (\r -> (iiId r,r)) results))
Left (ProcessFailed _ _ _ err)
- | "Error: No such image" `LBS.isPrefixOf` err -> return Map.empty
- Left e -> throwM e
+ | any (`LBS.isPrefixOf` err) missingImagePrefixes -> return Map.empty
+ Left e -> throwIO e
+ where missingImagePrefixes = ["Error: No such image", "Error: No such object:"]
-- | Pull latest version of configured Docker image from registry.
-pull :: (StackM env m, HasConfig env) => m ()
+pull :: HasConfig env => RIO env ()
pull =
do config <- view configL
let docker = configDocker config
@@ -680,9 +669,9 @@ pull =
pullImage :: (MonadLogger m,MonadIO m,MonadThrow m)
=> EnvOverride -> DockerOpts -> String -> m ()
pullImage envOverride docker image =
- do $logInfo (concatT ["Pulling image from registry: '",image,"'"])
+ do logInfo (concatT ["Pulling image from registry: '",image,"'"])
when (dockerRegistryLogin docker)
- (do $logInfo "You may need to log in."
+ (do logInfo "You may need to log in."
callProcess $ Cmd
Nothing
"docker"
@@ -706,30 +695,30 @@ pullImage envOverride docker image =
ec <- liftIO (waitForProcess ph)
case ec of
ExitSuccess -> return ()
- ExitFailure _ -> throwM (PullFailedException image)
+ ExitFailure _ -> throwIO (PullFailedException image)
-- | Check docker version (throws exception if incorrect)
checkDockerVersion
- :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> DockerOpts -> m ()
checkDockerVersion envOverride docker =
do dockerExists <- doesExecutableExist envOverride "docker"
- unless dockerExists (throwM DockerNotInstalledException)
+ unless dockerExists (throwIO DockerNotInstalledException)
dockerVersionOut <- readDockerProcess envOverride Nothing ["--version"]
case words (decodeUtf8 dockerVersionOut) of
(_:_:v:_) ->
case parseVersionFromString (stripVersion v) of
Just v'
| v' < minimumDockerVersion ->
- throwM (DockerTooOldException minimumDockerVersion v')
+ throwIO (DockerTooOldException minimumDockerVersion v')
| v' `elem` prohibitedDockerVersions ->
- throwM (DockerVersionProhibitedException prohibitedDockerVersions v')
+ throwIO (DockerVersionProhibitedException prohibitedDockerVersions v')
| not (v' `withinRange` dockerRequireDockerVersion docker) ->
- throwM (BadDockerVersionException (dockerRequireDockerVersion docker) v')
+ throwIO (BadDockerVersionException (dockerRequireDockerVersion docker) v')
| otherwise ->
return ()
- _ -> throwM InvalidVersionOutputException
- _ -> throwM InvalidVersionOutputException
+ _ -> throwIO InvalidVersionOutputException
+ _ -> throwIO InvalidVersionOutputException
where minimumDockerVersion = $(mkVersion "1.6.0")
prohibitedDockerVersions = []
stripVersion v = takeWhile (/= '-') (dropWhileEnd (not . isDigit) v)
@@ -747,14 +736,14 @@ reset maybeProjectRoot keepHome = do
-- | The Docker container "entrypoint": special actions performed when first entering
-- a container, such as switching the UID/GID to the "outside-Docker" user's.
-entrypoint :: (MonadIO m, MonadBaseControl IO m, MonadCatch m, MonadLogger m)
+entrypoint :: (MonadUnliftIO m, MonadLogger m, MonadThrow m)
=> Config -> DockerEntrypoint -> m ()
entrypoint config@Config{..} DockerEntrypoint{..} =
modifyMVar_ entrypointMVar $ \alreadyRan -> do
-- Only run the entrypoint once
unless alreadyRan $ do
envOverride <- getEnvOverride configPlatform
- homeDir <- parseAbsDir =<< liftIO (getEnv "HOME")
+ homeDir <- liftIO $ parseAbsDir =<< getEnv "HOME"
-- Get the UserEntry for the 'stack' user in the image, if it exists
estackUserEntry0 <- liftIO $ tryJust (guard . isDoesNotExistError) $
User.getUserEntryForName stackUserName
@@ -768,7 +757,7 @@ entrypoint config@Config{..} DockerEntrypoint{..} =
Right ue -> do
-- If the 'stack' user exists in the image, copy any build plans and package indices from
-- its original home directory to the host's stack root, to avoid needing to download them
- origStackHomeDir <- parseAbsDir (User.homeDirectory ue)
+ origStackHomeDir <- liftIO $ parseAbsDir (User.homeDirectory ue)
let origStackRoot = origStackHomeDir </> $(mkRelDir ("." ++ stackProgName))
buildPlanDirExists <- doesDirExist (buildPlanDir origStackRoot)
when buildPlanDirExists $ do
@@ -863,9 +852,9 @@ removeDirectoryContents path excludeDirs excludeFiles =
-- | Produce a strict 'S.ByteString' from the stdout of a
-- process. Throws a 'ReadProcessException' exception if the
--- process fails. Logs process's stderr using @$logError@.
+-- process fails. Logs process's stderr using @logError@.
readDockerProcess
- :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> Maybe (Path Abs Dir) -> [String] -> m BS.ByteString
readDockerProcess envOverride mpwd = readProcessStdout mpwd envOverride "docker"
@@ -881,13 +870,13 @@ hostBinDir = "/opt/host/bin"
decodeUtf8 :: BS.ByteString -> String
decodeUtf8 bs = T.unpack (T.decodeUtf8 bs)
--- | Convenience function constructing message for @$log*@.
+-- | Convenience function constructing message for @log*@.
concatT :: [String] -> Text
concatT = T.pack . concat
-- | Fail with friendly error if project root not set.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
-fromMaybeProjectRoot = fromMaybe (throw CannotDetermineProjectRootException)
+fromMaybeProjectRoot = fromMaybe (impureThrow CannotDetermineProjectRootException)
-- | Environment variable that contained the old sandbox ID.
-- | Use of this variable is deprecated, and only used to detect old images.
@@ -942,10 +931,9 @@ instance FromJSON ImageConfig where
<*> fmap join (o .:? "Entrypoint") .!= []
-- | Function to get command and arguments to run in Docker container
-type GetCmdArgs env m
- = (StackM env m, HasConfig env)
- => DockerOpts
+type GetCmdArgs env
+ = DockerOpts
-> EnvOverride
-> Inspect
-> Bool
- -> m (FilePath,[String],[(String,String)],[Mount])
+ -> RIO env (FilePath,[String],[(String,String)],[Mount])
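
Note: the signal-handling change above swaps monad-control's liftBaseWith for unliftio's askRunInIO. A POSIX-only sketch of that pattern (hypothetical helper name):

import Control.Monad.IO.Unlift (MonadUnliftIO, askRunInIO, liftIO)
import System.Posix.Signals (Handler (Catch), Signal, installHandler)

-- Capture a run-in-IO function once, then use it inside the IO-only
-- signal callback; returns the previously installed handler.
installSignalAction :: MonadUnliftIO m => Signal -> m () -> m Handler
installSignalAction sig action = do
  run <- askRunInIO
  liftIO (installHandler sig (Catch (run action)) Nothing)
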
diff --git a/src/Stack/Docker/GlobalDB.hs b/src/Stack/Docker/GlobalDB.hs
index 25ad081..1964124 100644
--- a/src/Stack/Docker/GlobalDB.hs
+++ b/src/Stack/Docker/GlobalDB.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE QuasiQuotes, TemplateHaskell, TypeFamilies, OverloadedStrings,
GADTs, FlexibleContexts, MultiParamTypeClasses, GeneralizedNewtypeDeriving,
RankNTypes, NamedFieldPuns #-}
@@ -15,24 +16,21 @@ module Stack.Docker.GlobalDB
,DockerImageExeId)
where
-import Control.Exception (IOException,catch,throwIO)
-import Control.Monad (forM_, when)
import Control.Monad.Logger (NoLoggingT)
-import Control.Monad.Trans.Resource (ResourceT)
+import Stack.Prelude
import Data.List (sortBy, isInfixOf, stripPrefix)
import Data.List.Extra (stripSuffix)
import qualified Data.Map.Strict as Map
-import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.Time.Clock (UTCTime,getCurrentTime)
import Database.Persist
import Database.Persist.Sqlite
import Database.Persist.TH
-import Path (toFilePath, parent)
+import Path (parent, (<.>))
import Path.IO (ensureDir)
import Stack.Types.Config
import Stack.Types.Docker
-import Stack.Types.StringError
+import System.FileLock (withFileLock, SharedExclusive(Exclusive))
share [mkPersist sqlSettings, mkMigrate "migrateTables"] [persistLowerCase|
DockerImageProject
@@ -101,10 +99,11 @@ setDockerImageExe config imageId exePath exeTimestamp compatible =
withGlobalDB :: forall a. Config -> SqlPersistT (NoLoggingT (ResourceT IO)) a -> IO a
withGlobalDB config action =
do let db = dockerDatabasePath (configDocker config)
+ dbLock <- db <.> "lock"
ensureDir (parent db)
- runSqlite (T.pack (toFilePath db))
+ withFileLock (toFilePath dbLock) Exclusive (\_fl -> runSqlite (T.pack (toFilePath db))
(do _ <- runMigrationSilent migrateTables
- action)
+ action))
`catch` \ex -> do
let str = show ex
str' = fromMaybe str $ stripPrefix "user error (" $
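
Note: the withGlobalDB change serializes access to the SQLite database by taking an exclusive lock on a sibling .lock file before runSqlite touches it. A minimal sketch of that locking wrapper, assuming the filelock package (hypothetical helper name):

import System.FileLock (SharedExclusive (Exclusive), withFileLock)

-- Hold an exclusive lock on <dbPath>.lock for the duration of the action.
withLockedDb :: FilePath -> IO a -> IO a
withLockedDb dbPath action =
  withFileLock (dbPath ++ ".lock") Exclusive (\_lock -> action)
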
diff --git a/src/Stack/Dot.hs b/src/Stack/Dot.hs
index 1bf5aee..f58f1a8 100644
--- a/src/Stack/Dot.hs
+++ b/src/Stack/Dot.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -14,40 +15,32 @@ module Stack.Dot (dot
,pruneGraph
) where
-import Control.Applicative
-import Control.Arrow ((&&&))
-import Control.Monad (liftM, void)
-import Control.Monad.IO.Class
-import Control.Monad.Trans.Unlift (MonadBaseUnlift)
import qualified Data.Foldable as F
import qualified Data.HashSet as HashSet
-import Data.Map (Map)
import qualified Data.Map as Map
-import Data.Maybe
-import Data.Monoid ((<>))
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.Text (Text)
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Data.Traversable as T
+import Distribution.Text (display)
import Distribution.License (License(BSD3))
-import Prelude -- Fix redundant import warnings
import Stack.Build (withLoadPackage)
import Stack.Build.Installed (getInstalled, GetInstalledOpts(..))
import Stack.Build.Source
import Stack.Build.Target
+import Stack.Config (getLocalPackages)
import Stack.Constants
import Stack.Package
import Stack.PackageDump (DumpPackage(..))
+import Stack.Prelude
import Stack.Types.Build
+import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.FlagName
import Stack.Types.GhcPkgId
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
import Stack.Types.Version
-- | Options record for @stack dot@
@@ -80,9 +73,7 @@ data ListDepsOpts = ListDepsOpts
}
-- | Visualize the project's dependencies as a graphviz graph
-dot :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
- => DotOpts
- -> m ()
+dot :: HasEnvConfig env => DotOpts -> RIO env ()
dot dotOpts = do
(localNames, prunedGraph) <- createPrunedDependencyGraph dotOpts
printGraph dotOpts localNames prunedGraph
@@ -98,12 +89,13 @@ data DotPayload = DotPayload
-- | Create the dependency graph and also prune it as specified in the dot
-- options. Returns a set of local names and and a map from package names to
-- dependencies.
-createPrunedDependencyGraph :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
+createPrunedDependencyGraph :: HasEnvConfig env
=> DotOpts
- -> m (Set PackageName,
+ -> RIO env
+ (Set PackageName,
Map PackageName (Set PackageName, DotPayload))
createPrunedDependencyGraph dotOpts = do
- localNames <- liftM Map.keysSet getLocalPackageViews
+ localNames <- liftM (Map.keysSet . lpProject) getLocalPackages
resultGraph <- createDependencyGraph dotOpts
let pkgsToPrune = if dotIncludeBase dotOpts
then dotPrune dotOpts
@@ -115,11 +107,11 @@ createPrunedDependencyGraph dotOpts = do
-- name to a tuple of dependencies and payload if available. This
-- function mainly gathers the required arguments for
-- @resolveDependencies@.
-createDependencyGraph :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
+createDependencyGraph :: HasEnvConfig env
=> DotOpts
- -> m (Map PackageName (Set PackageName, DotPayload))
+ -> RIO env (Map PackageName (Set PackageName, DotPayload))
createDependencyGraph dotOpts = do
- (_, _, locals, _, _, sourceMap) <- loadSourceMapFull NeedTargets defaultBuildOptsCLI
+ (locals, sourceMap) <- loadSourceMap NeedTargets defaultBuildOptsCLI
{ boptsCLITargets = dotTargets dotOpts
, boptsCLIFlags = dotFlags dotOpts
}
@@ -134,27 +126,26 @@ createDependencyGraph dotOpts = do
globalIdMap = Map.fromList $ map (\dp -> (dpGhcPkgId dp, dpPackageIdent dp)) globalDump
withLoadPackage (\loader -> do
let depLoader = createDepLoader sourceMap installedMap globalDumpMap globalIdMap loadPackageDeps
- loadPackageDeps name version flags ghcOptions
+ loadPackageDeps name version loc flags ghcOptions
-- Skip packages that can't be loaded - see
-- https://github.com/commercialhaskell/stack/issues/2967
| name `elem` [$(mkPackageName "rts"), $(mkPackageName "ghc")] =
return (Set.empty, DotPayload (Just version) (Just BSD3))
- | otherwise = fmap (packageAllDeps &&& makePayload)
- (loader name version flags ghcOptions)
+ | otherwise = fmap (packageAllDeps &&& makePayload) (loader loc flags ghcOptions)
liftIO $ resolveDependencies (dotDependencyDepth dotOpts) graph depLoader)
where makePayload pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg)
-listDependencies :: (StackM env m, HasEnvConfig env, MonadBaseUnlift IO m)
+listDependencies :: HasEnvConfig env
=> ListDepsOpts
- -> m ()
+ -> RIO env ()
listDependencies opts = do
let dotOpts = listDepsDotOpts opts
(_, resultGraph) <- createPrunedDependencyGraph dotOpts
void (Map.traverseWithKey go (snd <$> resultGraph))
where go name payload =
let payloadText =
- if listDepsLicense opts
- then maybe "<unknown>" (Text.pack . show) (payloadLicense payload)
+ if listDepsLicense opts
+ then maybe "<unknown>" (Text.pack . display) (payloadLicense payload)
else maybe "<unknown>" (Text.pack . show) (payloadVersion payload)
line = packageNameText name <> listDepsSep opts <> payloadText
in liftIO $ Text.putStrLn line
@@ -215,17 +206,20 @@ createDepLoader :: Applicative m
-> Map PackageName (InstallLocation, Installed)
-> Map PackageName (DumpPackage () () ())
-> Map GhcPkgId PackageIdentifier
- -> (PackageName -> Version -> Map FlagName Bool -> [Text] -> m (Set PackageName, DotPayload))
+ -> (PackageName -> Version -> PackageLocationIndex FilePath ->
+ Map FlagName Bool -> [Text] -> m (Set PackageName, DotPayload))
-> PackageName
-> m (Set PackageName, DotPayload)
createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pkgName =
if not (pkgName `HashSet.member` wiredInPackages)
then case Map.lookup pkgName sourceMap of
- Just (PSLocal lp) -> pure (packageAllDeps pkg, payloadFromLocal pkg)
+ Just (PSFiles lp _) -> pure (packageAllDeps pkg, payloadFromLocal pkg)
where
pkg = localPackageToPackage lp
- Just (PSUpstream version _ flags ghcOptions _) ->
- loadPackageDeps pkgName version flags ghcOptions
+ Just (PSIndex _ flags ghcOptions loc) ->
+ -- FIXME pretty certain this could be cleaned up a lot by including more info in PackageSource
+ let PackageIdentifierRevision (PackageIdentifier name version) _ = loc
+ in assert (pkgName == name) (loadPackageDeps pkgName version (PLIndex loc) flags ghcOptions)
Nothing -> pure (Set.empty, payloadFromInstalled (Map.lookup pkgName installed))
-- For wired-in-packages, use information from ghc-pkg (see #3084)
else case Map.lookup pkgName globalDumpMap of
@@ -238,7 +232,10 @@ createDepLoader sourceMap installed globalDumpMap globalIdMap loadPackageDeps pk
(dpDepends dp)
where
payloadFromLocal pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg)
- payloadFromInstalled maybePkg = DotPayload (fmap (installedVersion . snd) maybePkg) Nothing
+ payloadFromInstalled maybePkg = DotPayload (fmap (installedVersion . snd) maybePkg) $
+ case maybePkg of
+ Just (_, Library _ _ mlicense) -> mlicense
+ _ -> Nothing
payloadFromDump dp = DotPayload (Just $ packageIdentifierVersion $ dpPackageIdent dp) (dpLicense dp)
-- | Resolve the direct (depth 0) external dependencies of the given local packages
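
Note: the listDependencies change renders licenses with Cabal's display instead of show, so licenses print in their conventional Cabal form. A small sketch of the rendering helper it implies (hypothetical name):

import qualified Data.Text as Text
import Distribution.License (License)
import Distribution.Text (display)

-- Render a possibly-unknown license the way `stack list-dependencies` does.
renderLicense :: Maybe License -> Text.Text
renderLicense = maybe (Text.pack "<unknown>") (Text.pack . display)
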
diff --git a/src/Stack/Exec.hs b/src/Stack/Exec.hs
index 7b95b9c..f937b00 100644
--- a/src/Stack/Exec.hs
+++ b/src/Stack/Exec.hs
@@ -1,7 +1,6 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE FlexibleContexts #-}
-{-# LANGUAGE TemplateHaskell #-}
-{-# LANGUAGE OverloadedStrings #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
@@ -12,13 +11,10 @@
module Stack.Exec where
-import Control.Monad.Reader
-import Control.Monad.Logger
-import Control.Monad.Trans.Control (MonadBaseControl)
+import Stack.Prelude
import Stack.Types.Config
import System.Process.Log
-import Control.Exception.Lifted
import Data.Streaming.Process (ProcessExitedUnsuccessfully(..))
import System.Exit
import System.Process.Run (callProcess, callProcessObserveStdout, Cmd(..))
@@ -29,22 +25,30 @@ import qualified System.Process.PID1 as PID1
import System.Process.Read (EnvOverride, envHelper, preProcess)
#endif
--- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH
+-- | Default @EnvSettings@ which includes locals and GHC_PACKAGE_PATH.
+--
+-- Note that this also passes through the GHCRTS environment variable.
+-- See https://github.com/commercialhaskell/stack/issues/3444
defaultEnvSettings :: EnvSettings
defaultEnvSettings = EnvSettings
{ esIncludeLocals = True
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
+ , esKeepGhcRts = True
}
-- | Environment settings which do not embellish the environment
+--
+-- Note that this also passes through the GHCRTS environment variable.
+-- See https://github.com/commercialhaskell/stack/issues/3444
plainEnvSettings :: EnvSettings
plainEnvSettings = EnvSettings
{ esIncludeLocals = False
, esIncludeGhcPackagePath = False
, esStackExe = False
, esLocaleUtf8 = False
+ , esKeepGhcRts = True
}
-- | Execute a process within the Stack configured environment.
@@ -55,14 +59,14 @@ plainEnvSettings = EnvSettings
-- sub-process. This allows signals to be propagated (#527)
--
-- 2) On windows, an 'ExitCode' exception will be thrown.
-exec :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
+exec :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> String -> [String] -> m b
#ifdef WINDOWS
exec = execSpawn
#else
exec menv cmd0 args = do
cmd <- preProcess Nothing menv cmd0
- $withProcessTimeLog cmd args $
+ withProcessTimeLog cmd args $
liftIO $ PID1.run cmd args (envHelper menv)
#endif
@@ -70,19 +74,19 @@ exec menv cmd0 args = do
-- is a sub-process, which is helpful in some cases (#1306)
--
-- This function only exits by throwing 'ExitCode'.
-execSpawn :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
+execSpawn :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> String -> [String] -> m b
execSpawn menv cmd0 args = do
- e <- $withProcessTimeLog cmd0 args $
+ e <- withProcessTimeLog cmd0 args $
try (callProcess (Cmd Nothing cmd0 menv args))
liftIO $ case e of
Left (ProcessExitedUnsuccessfully _ ec) -> exitWith ec
Right () -> exitSuccess
-execObserve :: (MonadIO m, MonadLogger m, MonadBaseControl IO m)
+execObserve :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> String -> [String] -> m String
execObserve menv cmd0 args = do
- e <- $withProcessTimeLog cmd0 args $
+ e <- withProcessTimeLog cmd0 args $
try (callProcessObserveStdout (Cmd Nothing cmd0 menv args))
case e of
Left (ProcessExitedUnsuccessfully _ ec) -> liftIO $ exitWith ec
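The esKeepGhcRts field added above only records a policy; the actual environment handling happens elsewhere in Stack. A minimal sketch of the behaviour the flag is meant to select, assuming a plain Map-based environment (applyKeepGhcRts is a hypothetical helper, not Stack's API):

    import qualified Data.Map.Strict as Map

    type Env = Map.Map String String

    -- When the flag is set, a user-supplied GHCRTS survives into the child
    -- process (see issue #3444); otherwise it is stripped.
    applyKeepGhcRts :: Bool -> Env -> Env
    applyKeepGhcRts keep env
      | keep      = env
      | otherwise = Map.delete "GHCRTS" env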
diff --git a/src/Stack/Fetch.hs b/src/Stack/Fetch.hs
index 1439d11..8714f49 100644
--- a/src/Stack/Fetch.hs
+++ b/src/Stack/Fetch.hs
@@ -1,4 +1,6 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
@@ -8,6 +10,7 @@
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
+{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE ViewPatterns #-}
@@ -15,6 +18,7 @@
module Stack.Fetch
( unpackPackages
+ , unpackPackageIdent
, unpackPackageIdents
, fetchPackages
, untar
@@ -29,66 +33,46 @@ import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Check as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import Codec.Compression.GZip (decompress)
-import Control.Applicative
-import Control.Concurrent.Async (Concurrently (..))
-import Control.Concurrent.MVar.Lifted (modifyMVar, newMVar)
import Control.Concurrent.STM
-import Control.Exception (assert)
-import Control.Monad (join, liftM, unless, void, when)
-import Control.Monad.Catch
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Reader (ask, runReaderT)
-import Control.Monad.Trans.Control
-import Control.Monad.Trans.Unlift (MonadBaseUnlift, askRunBase)
+import Stack.Prelude
import Crypto.Hash (SHA256 (..))
-import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import qualified Data.ByteString.Lazy as L
-import Data.Either (partitionEithers)
import qualified Data.Foldable as F
-import Data.Function (fix)
import qualified Data.HashMap.Strict as HashMap
-import Data.List (intercalate)
+import qualified Data.HashSet as HashSet
+import Data.List (intercalate, maximum)
import Data.List.NonEmpty (NonEmpty)
import qualified Data.List.NonEmpty as NE
-import Data.Map (Map)
import qualified Data.Map as Map
-import Data.Maybe (maybeToList, catMaybes, isJust)
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
-import Data.Text.Encoding (encodeUtf8, decodeUtf8)
+import Data.Text.Encoding (decodeUtf8)
import Data.Text.Metrics
-import Data.Typeable (Typeable)
-import Data.Word (Word64)
import Network.HTTP.Download
import Path
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
-import Prelude -- Fix AMP warning
import Stack.PackageIndex
import Stack.Types.BuildPlan
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Stack.Types.PackageIndex
import Stack.Types.PackageName
+import Stack.Types.Runner
import Stack.Types.Version
-import System.FilePath ((<.>))
import qualified System.FilePath as FP
-import System.IO
+import System.IO (hSeek, SeekMode (AbsoluteSeek))
import System.PosixCompat (setFileMode)
-type PackageCaches = Map PackageIdentifier (PackageIndex, PackageCache)
-
data FetchException
= Couldn'tReadIndexTarball FilePath Tar.FormatError
| Couldn'tReadPackageTarball FilePath SomeException
| UnpackDirectoryAlreadyExists (Set FilePath)
| CouldNotParsePackageSelectors [String]
| UnknownPackageNames (Set PackageName)
- | UnknownPackageIdentifiers (Set PackageIdentifier) String
+ | UnknownPackageIdentifiers (HashSet PackageIdentifierRevision) String
+ Bool -- Do we use any 00-index.tar.gz indices? Just used for more informative error messages
deriving Typeable
instance Exception FetchException
@@ -114,15 +98,14 @@ instance Show FetchException where
show (UnknownPackageNames names) =
"The following packages were not found in your indices: " ++
intercalate ", " (map packageNameString $ Set.toList names)
- show (UnknownPackageIdentifiers idents suggestions) =
+ show (UnknownPackageIdentifiers idents suggestions uses00Index) =
"The following package identifiers were not found in your indices: " ++
- intercalate ", " (map packageIdentifierString $ Set.toList idents) ++
- (if null suggestions then "" else "\n" ++ suggestions)
+ intercalate ", " (map packageIdentifierRevisionString $ HashSet.toList idents) ++
+ (if null suggestions then "" else "\n" ++ suggestions) ++
+ (if uses00Index then "\n\nYou seem to be using a legacy 00-index.tar.gz tarball.\nConsider changing your configuration to use a 01-index.tar.gz file.\nAlternatively, you can set the ignore-revision-mismatch setting to true.\nFor more information, see: https://github.com/commercialhaskell/stack/issues/3520" else "")
-- | Fetch packages into the cache without unpacking
-fetchPackages :: (StackMiniM env m, HasConfig env)
- => Set PackageIdentifier
- -> m ()
+fetchPackages :: HasConfig env => Set PackageIdentifier -> RIO env ()
fetchPackages idents' = do
resolved <- resolvePackages Nothing idents Set.empty
ToFetchResult toFetch alreadyUnpacked <- getToFetch Nothing resolved
@@ -131,28 +114,26 @@ fetchPackages idents' = do
assert (Map.null nowUnpacked) (return ())
where
-- Since we're just fetching tarballs and not unpacking cabal files, we can
- -- always provide a Nothing Git SHA
- idents = Map.fromList $ map (, Nothing) $ Set.toList idents'
+ -- always provide a CFILatest cabal file info
+ idents = map (flip PackageIdentifierRevision CFILatest) $ Set.toList idents'
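For illustration, the new 'idents' binding pairs every requested identifier with CFILatest, so no particular cabal-file revision is demanded when only tarballs are fetched. A small stand-alone equivalent, with simplified local types in place of Stack's:

    import qualified Data.Set as Set

    data PackageIdentifier = PackageIdentifier String String
      deriving (Eq, Ord, Show)
    data CabalFileInfo = CFILatest | CFIRevision Word deriving Show
    data PackageIdentifierRevision
      = PackageIdentifierRevision PackageIdentifier CabalFileInfo
      deriving Show

    -- Pair each identifier with CFILatest, as in the hunk above.
    toRevisions :: Set.Set PackageIdentifier -> [PackageIdentifierRevision]
    toRevisions = map (`PackageIdentifierRevision` CFILatest) . Set.toList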
-- | Intended to work for the command line command.
-unpackPackages :: (StackMiniM env m, HasConfig env)
- => Maybe MiniBuildPlan -- ^ when looking up by name, take from this build plan
+unpackPackages :: HasConfig env
+ => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan
-> FilePath -- ^ destination
-> [String] -- ^ names or identifiers
- -> m ()
-unpackPackages mMiniBuildPlan dest input = do
+ -> RIO env ()
+unpackPackages mSnapshotDef dest input = do
dest' <- resolveDir' dest
(names, idents) <- case partitionEithers $ map parse input of
([], x) -> return $ partitionEithers x
(errs, _) -> throwM $ CouldNotParsePackageSelectors errs
- resolved <- resolvePackages mMiniBuildPlan
- (Map.fromList idents)
- (Set.fromList names)
+ resolved <- resolvePackages mSnapshotDef idents (Set.fromList names)
ToFetchResult toFetch alreadyUnpacked <- getToFetch (Just dest') resolved
unless (Map.null alreadyUnpacked) $
throwM $ UnpackDirectoryAlreadyExists $ Set.fromList $ map toFilePath $ Map.elems alreadyUnpacked
unpacked <- fetchPackages' Nothing toFetch
- F.forM_ (Map.toList unpacked) $ \(ident, dest'') -> $logInfo $ T.pack $ concat
+ F.forM_ (Map.toList unpacked) $ \(ident, dest'') -> logInfo $ T.pack $ concat
[ "Unpacked "
, packageIdentifierString ident
, " to "
@@ -161,25 +142,40 @@ unpackPackages mMiniBuildPlan dest input = do
where
-- Possible future enhancement: parse names as name + version range
parse s =
- case parsePackageNameFromString s of
+ case parsePackageName t of
Right x -> Right $ Left x
Left _ ->
- case parsePackageIdentifierFromString s of
- Right x -> Right $ Right (x, Nothing)
- Left _ -> maybe (Left s) (Right . Right) $ do
- (identS, '@':revisionS) <- return $ break (== '@') s
- Right ident <- return $ parsePackageIdentifierFromString identS
- hash <- T.stripPrefix "gitsha1:" $ T.pack revisionS
- Just (ident, Just $ GitSHA1 $ encodeUtf8 hash)
+ case parsePackageIdentifierRevision t of
+ Right x -> Right $ Right x
+ Left _ -> Left s
+ where
+ t = T.pack s
+
+-- | Same as 'unpackPackageIdents', but for a single package.
+unpackPackageIdent
+ :: HasConfig env
+ => Path Abs Dir -- ^ unpack directory
+ -> Path Rel Dir -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157
+ -> PackageIdentifierRevision
+ -> RIO env (Path Abs Dir)
+unpackPackageIdent unpackDir distDir (PackageIdentifierRevision ident mcfi) = do
+ -- FIXME make this more direct in the future
+ m <- unpackPackageIdents unpackDir (Just distDir) [PackageIdentifierRevision ident mcfi]
+ case Map.toList m of
+ [(ident', dir)]
+ | ident /= ident' -> error "unpackPackageIdent: ident mismatch"
+ | otherwise -> return dir
+ [] -> error "unpackPackageIdent: empty list"
+ _ -> error "unpackPackageIdent: multiple results"
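unpackPackageIdent above is the "single result via a bulk action" pattern. A generic, self-contained sketch of the same idea (singleViaMany is a hypothetical helper, not part of Stack):

    import qualified Data.Map as Map

    -- Run a bulk action for one key and insist on exactly one matching result.
    singleViaMany :: (Ord k, Monad m)
                  => ([k] -> m (Map.Map k v)) -> k -> m v
    singleViaMany bulk k = do
      m <- bulk [k]
      case Map.toList m of
        [(k', v)]
          | k' == k   -> return v
          | otherwise -> error "singleViaMany: key mismatch"
        []            -> error "singleViaMany: empty result"
        _             -> error "singleViaMany: multiple results"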
-- | Ensure that all of the given package idents are unpacked into the build
-- unpack directory, and return the paths to all of the subdirectories.
unpackPackageIdents
- :: (StackMiniM env m, HasConfig env)
+ :: HasConfig env
=> Path Abs Dir -- ^ unpack directory
-> Maybe (Path Rel Dir) -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157
- -> Map PackageIdentifier (Maybe GitSHA1)
- -> m (Map PackageIdentifier (Path Abs Dir))
+ -> [PackageIdentifierRevision]
+ -> RIO env (Map PackageIdentifier (Path Abs Dir))
unpackPackageIdents unpackDir mdistDir idents = do
resolved <- resolvePackages Nothing idents Set.empty
ToFetchResult toFetch alreadyUnpacked <- getToFetch (Just unpackDir) resolved
@@ -188,18 +184,19 @@ unpackPackageIdents unpackDir mdistDir idents = do
data ResolvedPackage = ResolvedPackage
{ rpIdent :: !PackageIdentifier
- , rpCache :: !PackageCache
+ , rpDownload :: !(Maybe PackageDownload)
+ , rpOffsetSize :: !OffsetSize
, rpIndex :: !PackageIndex
}
deriving Show
-- | Resolve a set of package names and identifiers into @FetchPackage@ values.
-resolvePackages :: (StackMiniM env m, HasConfig env)
- => Maybe MiniBuildPlan -- ^ when looking up by name, take from this build plan
- -> Map PackageIdentifier (Maybe GitSHA1)
+resolvePackages :: HasConfig env
+ => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan
+ -> [PackageIdentifierRevision]
-> Set PackageName
- -> m [ResolvedPackage]
-resolvePackages mMiniBuildPlan idents0 names0 = do
+ -> RIO env [ResolvedPackage]
+resolvePackages mSnapshotDef idents0 names0 = do
eres <- go
case eres of
Left _ -> do
@@ -207,96 +204,106 @@ resolvePackages mMiniBuildPlan idents0 names0 = do
go >>= either throwM return
Right x -> return x
where
- go = r <$> resolvePackagesAllowMissing mMiniBuildPlan idents0 names0
- r (missingNames, missingIdents, idents)
+ go = r <$> getUses00Index <*> resolvePackagesAllowMissing mSnapshotDef idents0 names0
+ r uses00Index (missingNames, missingIdents, idents)
| not $ Set.null missingNames = Left $ UnknownPackageNames missingNames
- | not $ Set.null missingIdents = Left $ UnknownPackageIdentifiers missingIdents ""
+ | not $ HashSet.null missingIdents = Left $ UnknownPackageIdentifiers missingIdents "" uses00Index
| otherwise = Right idents
+-- | Does the configuration use a 00-index.tar.gz file for indices?
+-- See <https://github.com/commercialhaskell/stack/issues/3520>
+getUses00Index :: HasConfig env => RIO env Bool
+getUses00Index =
+ any is00 <$> view packageIndicesL
+ where
+ is00 :: PackageIndex -> Bool
+ is00 index = "00-index.tar.gz" `T.isInfixOf` indexLocation index
+
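The check in getUses00Index boils down to a substring test on the configured index locations. A stand-alone illustration, treating index locations as plain Text values (in Stack they come from indexLocation on PackageIndex):

    import qualified Data.Text as T

    uses00Index :: [T.Text] -> Bool
    uses00Index = any (T.isInfixOf (T.pack "00-index.tar.gz"))

    -- uses00Index [T.pack "https://hackage.haskell.org/01-index.tar.gz"] == False
    -- uses00Index [T.pack "https://example.com/00-index.tar.gz"]         == True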
+-- | Turn package identifiers and package names into a list of
+-- @ResolvedPackage@s. Returns any unresolved names and
+-- identifiers. These are considered unresolved even if the only
+-- mismatch is in the cabal file info (MSS 2017-07-17: old versions of
+-- this code had special handling to treat missing cabal file info as
+-- a warning, that's no longer necessary or desirable since all info
+-- should be present and checked).
resolvePackagesAllowMissing
- :: (StackMiniM env m, HasConfig env)
- => Maybe MiniBuildPlan -- ^ when looking up by name, take from this build plan
- -> Map PackageIdentifier (Maybe GitSHA1)
+ :: forall env. HasConfig env
+ => Maybe SnapshotDef -- ^ when looking up by name, take from this build plan
+ -> [PackageIdentifierRevision]
-> Set PackageName
- -> m (Set PackageName, Set PackageIdentifier, [ResolvedPackage])
-resolvePackagesAllowMissing mMiniBuildPlan idents0 names0 = do
- (res1, res2, resolved) <- inner
- if any (isJust . snd) resolved
- then do
- $logInfo "Missing some cabal revision files, updating indices"
- updateAllIndices
- (res1', res2', resolved') <- inner
-
- -- Print an error message if any SHAs are still missing.
- F.forM_ resolved' $ \(rp, missing) -> F.forM_ missing $ \(GitSHA1 sha) ->
- $logWarn $ mconcat
- [ "Did not find .cabal file for "
- , T.pack $ packageIdentifierString $ rpIdent rp
- , " with SHA of "
- , decodeUtf8 sha
- , " in tarball-based cache"
- ]
-
- return (res1', res2', map fst resolved')
- else return (res1, res2, map fst resolved)
- where
- inner = do
- (caches, shaCaches) <- getPackageCaches
-
- let versions = Map.fromListWith max $ map toTuple $ Map.keys caches
-
- getNamed :: PackageName -> Maybe (PackageIdentifier, Maybe GitSHA1)
- getNamed =
- case mMiniBuildPlan of
- Nothing -> getNamedFromIndex
- Just mbp -> getNamedFromBuildPlan mbp
-
- getNamedFromBuildPlan mbp name = do
- mpi <- Map.lookup name $ mbpPackages mbp
- Just (PackageIdentifier name (mpiVersion mpi), mpiGitSHA1 mpi)
- getNamedFromIndex name = fmap
- (\ver -> (PackageIdentifier name ver, Nothing))
- (Map.lookup name versions)
-
- (missingNames, idents1) = partitionEithers $ map
- (\name -> maybe (Left name) Right (getNamed name))
- (Set.toList names0)
- let (missingIdents, resolved) = partitionEithers $ map (goIdent caches shaCaches)
- $ Map.toList
- $ idents0 <> Map.fromList idents1
- return (Set.fromList missingNames, Set.fromList missingIdents, resolved)
-
- goIdent caches shaCaches (ident, mgitsha) =
- case Map.lookup ident caches of
- Nothing -> Left ident
- Just (index, cache) ->
- let (index', cache', missingGitSHA) =
- case mgitsha of
- Nothing -> (index, cache, mgitsha)
- Just gitsha ->
- case HashMap.lookup gitsha shaCaches of
- Just (index'', offsetSize) ->
- ( index''
- , cache { pcOffsetSize = offsetSize }
- -- we already got the info
- -- about this SHA, don't do
- -- any lookups later
- , Nothing
- )
- -- Index using HTTP, so we're missing the Git SHA
- Nothing -> (index, cache, mgitsha)
- in Right (ResolvedPackage
- { rpIdent = ident
- , rpCache = cache'
- , rpIndex = index'
- }, missingGitSHA)
+ -> RIO env (Set PackageName, HashSet PackageIdentifierRevision, [ResolvedPackage])
+resolvePackagesAllowMissing mSnapshotDef idents0 names0 = do
+ cache@(PackageCache cache') <- getPackageCaches
+
+ -- Find out the latest versions of all packages in the cache
+ let versions = fmap (maximum . HashMap.keys) cache'
+
+ -- Determines the identifier for a given name, either from
+ -- snapshot information or by taking the latest version
+ -- available
+ getNamed :: PackageName -> Maybe PackageIdentifierRevision
+ getNamed =
+ case mSnapshotDef of
+ Nothing -> getNamedFromIndex
+ Just sd -> getNamedFromSnapshotDef sd
+
+ -- Use whatever is specified in the snapshot. TODO this does not
+ -- handle the case where a snapshot defines a package outside of
+ -- the index, we'd need a LoadedSnapshot for that.
+ getNamedFromSnapshotDef sd name = do
+ loop $ sdLocations sd
+ where
+ loop [] = Nothing
+ loop (PLIndex ident@(PackageIdentifierRevision (PackageIdentifier name' _) _):rest)
+ | name == name' = Just ident
+ | otherwise = loop rest
+ loop (_:rest) = loop rest
+
+ -- Take latest version available, including latest cabal file information
+ getNamedFromIndex name = fmap
+ (\ver -> PackageIdentifierRevision (PackageIdentifier name ver) CFILatest)
+ (HashMap.lookup name versions)
+
+ (missingNames, idents1) = partitionEithers $ map
+ (\name -> maybe (Left name) Right (getNamed name))
+ (Set.toList names0)
+ config <- view configL
+ let (missingIdents, resolved) =
+ partitionEithers
+ $ map (\pir -> maybe (Left pir) Right (lookupResolvedPackage config pir cache))
+ $ idents0 <> idents1
+ return (Set.fromList missingNames, HashSet.fromList missingIdents, resolved)
+
+lookupResolvedPackage :: Config -> PackageIdentifierRevision -> PackageCache PackageIndex -> Maybe ResolvedPackage
+lookupResolvedPackage config (PackageIdentifierRevision ident@(PackageIdentifier name version) cfi) (PackageCache cache) = do
+ (index, mdownload, files) <- HashMap.lookup name cache >>= HashMap.lookup version
+ let moffsetSize =
+ case cfi of
+ CFILatest -> Just $ snd $ NE.last files
+ CFIHash _msize hash' -> -- TODO check size?
+ lookup hash'
+ $ concatMap (\(hashes, x) -> map (, x) hashes)
+ $ NE.toList files
+ CFIRevision rev -> fmap snd $ listToMaybe $ drop (fromIntegral rev) $ NE.toList files
+ offsetSize <-
+ case moffsetSize of
+ Just x -> Just x
+ Nothing
+ | configIgnoreRevisionMismatch config -> Just $ snd $ NE.last files
+ | otherwise -> Nothing
+ Just ResolvedPackage
+ { rpIdent = ident
+ , rpDownload = mdownload
+ , rpOffsetSize = offsetSize
+ , rpIndex = index
+ }
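The heart of lookupResolvedPackage is choosing a revision from the per-version list according to the CabalFileInfo, with configIgnoreRevisionMismatch acting as a last-resort fallback to the latest revision. A self-contained sketch of that selection, with hypothetical type parameters standing in for Stack's concrete hash and offset types:

    import Data.List.NonEmpty (NonEmpty)
    import qualified Data.List.NonEmpty as NE
    import Data.Maybe (listToMaybe)

    data CabalFileInfo hash
      = CFILatest
      | CFIHash hash
      | CFIRevision Word

    -- Revisions are kept oldest-first; each entry carries its hashes and a
    -- payload (the tarball offset/size in Stack).
    selectRevision
      :: Eq hash
      => Bool                      -- ^ ignore-revision-mismatch setting
      -> CabalFileInfo hash
      -> NonEmpty ([hash], payload)
      -> Maybe payload
    selectRevision ignoreMismatch cfi files =
      case found of
        Just x  -> Just x
        Nothing
          | ignoreMismatch -> Just $ snd $ NE.last files  -- fall back to latest
          | otherwise      -> Nothing
      where
        found =
          case cfi of
            CFILatest       -> Just $ snd $ NE.last files
            CFIHash h       -> lookup h
                             $ concatMap (\(hs, x) -> map (\h' -> (h', x)) hs)
                             $ NE.toList files
            CFIRevision rev -> fmap snd $ listToMaybe $ drop (fromIntegral rev) $ NE.toList files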
data ToFetch = ToFetch
{ tfTarball :: !(Path Abs File)
, tfDestDir :: !(Maybe (Path Abs Dir))
, tfUrl :: !T.Text
, tfSize :: !(Maybe Word64)
- , tfSHA256 :: !(Maybe ByteString)
+ , tfSHA256 :: !(Maybe StaticSHA256)
, tfCabal :: !ByteString
-- ^ Contents of the .cabal file
}
@@ -308,20 +315,18 @@ data ToFetchResult = ToFetchResult
-- | Add the cabal files to a list of idents with their caches.
withCabalFiles
- :: (StackMiniM env m, HasConfig env)
+ :: (MonadReader env m, MonadUnliftIO m, HasConfig env, MonadThrow m)
=> IndexName
-> [(ResolvedPackage, a)]
-> (PackageIdentifier -> a -> ByteString -> IO b)
-> m [b]
withCabalFiles name pkgs f = do
indexPath <- configPackageIndex name
- bracket
- (liftIO $ openBinaryFile (toFilePath indexPath) ReadMode)
- (liftIO . hClose) $ \h -> mapM (goPkg h) pkgs
+ withBinaryFile (toFilePath indexPath) ReadMode
+ $ \h -> mapM (goPkg h) pkgs
where
- goPkg h (ResolvedPackage ident pc _index, tf) = do
+ goPkg h (ResolvedPackage { rpIdent = ident, rpOffsetSize = OffsetSize offset size }, tf) = do
         -- The "did not find" warning for tarballs is handled above
- let OffsetSize offset size = pcOffsetSize pc
liftIO $ do
hSeek h AbsoluteSeek $ fromIntegral offset
cabalBS <- S.hGet h $ fromIntegral size
@@ -330,49 +335,47 @@ withCabalFiles name pkgs f = do
-- | Provide a function which will load up a cabal @ByteString@ from the
-- package indices.
withCabalLoader
- :: (StackMiniM env m, HasConfig env, MonadBaseUnlift IO m)
- => ((PackageIdentifier -> IO ByteString) -> m a)
- -> m a
+ :: HasConfig env
+ => ((PackageIdentifierRevision -> IO ByteString) -> RIO env a)
+ -> RIO env a
withCabalLoader inner = do
- env <- ask
-
-- Want to try updating the index once during a single run for missing
-- package identifiers. We also want to ensure we only update once at a
-- time
--
-- TODO: probably makes sense to move this concern into getPackageCaches
- updateRef <- liftIO $ newMVar True
+ updateRef <- newMVar True
- loadCaches <- getPackageCachesIO
- runInBase <- liftBaseWith $ \run -> return (void . run)
- unlift <- askRunBase
+ u <- askUnliftIO
-- TODO in the future, keep all of the necessary @Handle@s open
- let doLookup :: PackageIdentifier
+ let doLookup :: PackageIdentifierRevision
-> IO ByteString
doLookup ident = do
- (caches, _gitSHACaches) <- loadCaches
- eres <- unlift $ lookupPackageIdentifierExact ident env caches
+ bothCaches <- unliftIO u getPackageCaches
+ eres <- unliftIO u $ lookupPackageIdentifierExact ident bothCaches
case eres of
Just bs -> return bs
-- Update the cache and try again
Nothing -> do
- let fuzzy = fuzzyLookupCandidates ident caches
+ let fuzzy = fuzzyLookupCandidates ident bothCaches
suggestions = case fuzzy of
- Nothing ->
- case typoCorrectionCandidates ident caches of
- Nothing -> ""
- Just cs -> "Perhaps you meant " <>
- orSeparated cs <> "?"
- Just cs -> "Possible candidates: " <>
+ FRNameNotFound Nothing -> ""
+ FRNameNotFound (Just cs) ->
+ "Perhaps you meant " <> orSeparated cs <> "?"
+ FRVersionNotFound cs -> "Possible candidates: " <>
commaSeparated (NE.map packageIdentifierText cs)
<> "."
+ FRRevisionNotFound cs ->
+ "The specified revision was not found.\nPossible candidates: " <>
+ commaSeparated (NE.map (T.pack . packageIdentifierRevisionString) cs)
+ <> "."
join $ modifyMVar updateRef $ \toUpdate ->
if toUpdate then do
- runInBase $ do
- $logInfo $ T.concat
+ unliftIO u $ do
+ logInfo $ T.concat
[ "Didn't see "
- , T.pack $ packageIdentifierString ident
+ , T.pack $ packageIdentifierRevisionString ident
, " in your package indices.\n"
, "Updating and trying again."
]
@@ -380,68 +383,78 @@ withCabalLoader inner = do
_ <- getPackageCaches
return ()
return (False, doLookup ident)
- else return (toUpdate,
- throwM $ UnknownPackageIdentifiers
- (Set.singleton ident) (T.unpack suggestions))
+ else do
+ uses00Index <- unliftIO u getUses00Index
+ return (toUpdate, throwIO $ UnknownPackageIdentifiers
+ (HashSet.singleton ident) (T.unpack suggestions) uses00Index)
inner doLookup
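withCabalLoader's retry logic is a reusable idiom: a shared MVar records whether we are still allowed to refresh the caches and retry, and only the first miss triggers a refresh. A generic, self-contained sketch of the same pattern (lookupWithOneRefresh is hypothetical, not part of Stack):

    import Control.Concurrent.MVar (newMVar, modifyMVar)
    import Control.Monad (join)

    lookupWithOneRefresh
      :: IO ()                 -- ^ refresh action, e.g. update package indices
      -> (k -> IO (Maybe v))   -- ^ cache lookup
      -> IO (k -> IO v)
    lookupWithOneRefresh refresh look = do
      allowed <- newMVar True
      let go k = do
            mres <- look k
            case mres of
              Just v  -> return v
              Nothing -> join $ modifyMVar allowed $ \canRetry ->
                if canRetry
                  then refresh >> return (False, go k)
                  else return (False, fail "lookupWithOneRefresh: not found after refresh")
      return go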
lookupPackageIdentifierExact
- :: (StackMiniM env m, HasConfig env)
- => PackageIdentifier
- -> env
- -> PackageCaches
+ :: (MonadReader env m, MonadUnliftIO m, HasConfig env, MonadThrow m)
+ => PackageIdentifierRevision
+ -> PackageCache PackageIndex
-> m (Maybe ByteString)
-lookupPackageIdentifierExact ident env caches =
- case Map.lookup ident caches of
- Nothing -> return Nothing
- Just (index, cache) -> do
- [bs] <- flip runReaderT env
- $ withCabalFiles (indexName index)
- [(ResolvedPackage
- { rpIdent = ident
- , rpCache = cache
- , rpIndex = index
- }, ())]
- $ \_ _ bs -> return bs
- return $ Just bs
+lookupPackageIdentifierExact identRev cache = do
+ config <- view configL
+ forM (lookupResolvedPackage config identRev cache) $ \rp -> do
+ [bs] <- withCabalFiles (indexName (rpIndex rp)) [(rp, ())] $ \_ _ bs -> return bs
+ return bs
+
+data FuzzyResults
+ = FRNameNotFound !(Maybe (NonEmpty T.Text))
+ | FRVersionNotFound !(NonEmpty PackageIdentifier)
+ | FRRevisionNotFound !(NonEmpty PackageIdentifierRevision)
-- | Given package identifier and package caches, return list of packages
 -- with the same name and the same first two version number components found
-- in the caches.
fuzzyLookupCandidates
- :: PackageIdentifier
- -> PackageCaches
- -> Maybe (NonEmpty PackageIdentifier)
-fuzzyLookupCandidates (PackageIdentifier name ver) caches =
- let (_, zero, bigger) = Map.splitLookup zeroIdent caches
- zeroIdent = PackageIdentifier name $(mkVersion "0.0")
- sameName (PackageIdentifier n _) = n == name
- sameMajor (PackageIdentifier _ v) = toMajorVersion v == toMajorVersion ver
- in NE.nonEmpty . filter sameMajor $ maybe [] (pure . const zeroIdent) zero
- <> takeWhile sameName (Map.keys bigger)
+ :: PackageIdentifierRevision
+ -> PackageCache index
+ -> FuzzyResults
+fuzzyLookupCandidates (PackageIdentifierRevision (PackageIdentifier name ver) _rev) (PackageCache caches) =
+ case HashMap.lookup name caches of
+ Nothing -> FRNameNotFound $ typoCorrectionCandidates name (PackageCache caches)
+ Just m ->
+ case HashMap.lookup ver m of
+ Nothing ->
+ case NE.nonEmpty $ filter sameMajor $ HashMap.keys m of
+ Just vers -> FRVersionNotFound $ NE.map (PackageIdentifier name) vers
+ Nothing ->
+ case NE.nonEmpty $ HashMap.keys m of
+ Nothing -> error "fuzzyLookupCandidates: no versions"
+ Just vers -> FRVersionNotFound $ NE.map (PackageIdentifier name) vers
+ Just (_index, _mpd, revisions) ->
+ let hashes = concatMap fst $ NE.toList revisions
+ pirs = map (PackageIdentifierRevision (PackageIdentifier name ver) . CFIHash Nothing) hashes
+ in case NE.nonEmpty pirs of
+ Nothing -> error "fuzzyLookupCandidates: no revisions"
+ Just pirs' -> FRRevisionNotFound pirs'
+ where
+ sameMajor v = toMajorVersion v == toMajorVersion ver
-- | Try to come up with typo corrections for given package identifier using
-- package caches. This should be called before giving up, i.e. when
-- 'fuzzyLookupCandidates' cannot return anything.
typoCorrectionCandidates
- :: PackageIdentifier
- -> PackageCaches
+ :: PackageName
+ -> PackageCache index
-> Maybe (NonEmpty T.Text)
-typoCorrectionCandidates ident =
- let getName = packageNameText . packageIdentifierName
- name = getName ident
+typoCorrectionCandidates name' (PackageCache cache) =
+ let name = packageNameText name'
in NE.nonEmpty
. take 10
. map snd
. filter (\(distance, _) -> distance < 4)
- . map (\(k, _) -> (damerauLevenshtein name (getName k), getName k))
- . Map.toList
+ . map (\k -> (damerauLevenshtein name (packageNameText k), packageNameText k))
+ . HashMap.keys
+ $ cache
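typoCorrectionCandidates rates every known package name against the query using the Damerau-Levenshtein distance from the text-metrics package, keeping only near misses. A stand-alone version of that filter over plain Text names:

    import qualified Data.Text as T
    import Data.Text.Metrics (damerauLevenshtein)

    -- Keep at most ten names within edit distance 3 of the query, in the
    -- order they are supplied (the original does not sort by distance).
    typoCandidates :: T.Text -> [T.Text] -> [T.Text]
    typoCandidates query =
      take 10 . filter (\name -> damerauLevenshtein query name < 4)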
-- | Figure out where to fetch from.
-getToFetch :: (StackMiniM env m, HasConfig env)
+getToFetch :: HasConfig env
=> Maybe (Path Abs Dir) -- ^ directory to unpack into, @Nothing@ means no unpack
-> [ResolvedPackage]
- -> m ToFetchResult
+ -> RIO env ToFetchResult
getToFetch mdest resolvedAll = do
(toFetch0, unpacked) <- liftM partitionEithers $ mapM checkUnpacked resolvedAll
toFetch1 <- mapM goIndex $ Map.toList $ Map.fromListWith (++) toFetch0
@@ -464,7 +477,7 @@ getToFetch mdest resolvedAll = do
Just destDir -> return $ Right (ident, destDir)
Nothing -> do
let index = rpIndex resolved
- d = pcDownload $ rpCache resolved
+ d = rpDownload resolved
targz = T.pack $ packageIdentifierString ident ++ ".tar.gz"
tarball <- configPackageTarball (indexName index) ident
return $ Left (indexName index, [(resolved, ToFetch
@@ -497,40 +510,40 @@ getToFetch mdest resolvedAll = do
-- @
--
-- Since 0.1.0.0
-fetchPackages' :: (StackMiniM env m, HasConfig env)
+fetchPackages' :: HasConfig env
=> Maybe (Path Rel Dir) -- ^ the dist rename directory, see: https://github.com/fpco/stack/issues/157
-> Map PackageIdentifier ToFetch
- -> m (Map PackageIdentifier (Path Abs Dir))
+ -> RIO env (Map PackageIdentifier (Path Abs Dir))
fetchPackages' mdistDir toFetchAll = do
connCount <- view $ configL.to configConnectionCount
outputVar <- liftIO $ newTVarIO Map.empty
- runInBase <- liftBaseWith $ \run -> return (void . run)
+ run <- askRunInIO
parMapM_
connCount
- (go outputVar runInBase)
+ (go outputVar run)
(Map.toList toFetchAll)
liftIO $ readTVarIO outputVar
where
- go :: (MonadIO m,MonadThrow m,MonadLogger m)
+ go :: (MonadUnliftIO m,MonadThrow m,MonadLogger m,HasRunner env, MonadReader env m)
=> TVar (Map PackageIdentifier (Path Abs Dir))
-> (m () -> IO ())
-> (PackageIdentifier, ToFetch)
-> m ()
- go outputVar runInBase (ident, toFetch) = do
+ go outputVar run (ident, toFetch) = do
req <- parseUrlThrow $ T.unpack $ tfUrl toFetch
let destpath = tfTarball toFetch
let toHashCheck bs = HashCheck SHA256 (CheckHexDigestByteString bs)
let downloadReq = DownloadRequest
{ drRequest = req
- , drHashChecks = map toHashCheck $ maybeToList (tfSHA256 toFetch)
+ , drHashChecks = map (toHashCheck . staticSHA256ToBase16) $ maybeToList (tfSHA256 toFetch)
, drLengthCheck = fromIntegral <$> tfSize toFetch
, drRetryPolicy = drRetryPolicyDefault
}
let progressSink _ =
- liftIO $ runInBase $ $logInfo $ packageIdentifierText ident <> ": download"
+ liftIO $ run $ logInfo $ packageIdentifierText ident <> ": download"
_ <- verifiedDownload downloadReq destpath progressSink
identStrP <- parseRelDir $ packageIdentifierString ident
@@ -561,13 +574,13 @@ fetchPackages' mdistDir toFetchAll = do
let cabalFP =
innerDest FP.</>
packageNameString (packageIdentifierName ident)
- <.> "cabal"
+ FP.<.> "cabal"
S.writeFile cabalFP $ tfCabal toFetch
atomically $ modifyTVar outputVar $ Map.insert ident destDir
F.forM_ unexpectedEntries $ \(path, entryType) ->
- $logWarn $ "Unexpected entry type " <> entryType <> " for entry " <> T.pack path
+ logWarn $ "Unexpected entry type " <> entryType <> " for entry " <> T.pack path
-- | Internal function used to unpack tarball.
--
@@ -631,30 +644,24 @@ untar tarPath expectedTarFolder destDirParent = do
perm) filePerms
return unexpectedEntries
-parMapM_ :: (F.Foldable f,MonadIO m,MonadBaseControl IO m)
+parMapM_ :: (F.Foldable f,MonadUnliftIO m)
=> Int
-> (a -> m ())
-> f a
-> m ()
parMapM_ (max 1 -> 1) f xs = F.mapM_ f xs
-parMapM_ cnt f xs0 = do
- var <- liftIO (newTVarIO $ F.toList xs0)
-
- -- See comment on similar line in Stack.Build
- runInBase <- liftBaseWith $ \run -> return (void . run)
-
- let worker = fix $ \loop -> join $ atomically $ do
- xs <- readTVar var
- case xs of
- [] -> return $ return ()
- x:xs' -> do
- writeTVar var xs'
- return $ do
- runInBase $ f x
- loop
- workers 1 = Concurrently worker
- workers i = Concurrently worker *> workers (i - 1)
- liftIO $ runConcurrently $ workers cnt
+parMapM_ cnt f xs0 = withRunInIO $ \run -> do
+ var <- newTVarIO $ F.toList xs0
+
+ replicateConcurrently_ cnt $ fix $ \loop -> join $ atomically $ do
+ xs <- readTVar var
+ case xs of
+ [] -> return $ return ()
+ x:xs' -> do
+ writeTVar var xs'
+ return $ do
+ run $ f x
+ loop
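The new parMapM_ is a small worker pool: a TVar holds the remaining items and N workers pop one at a time until the list is empty, with replicateConcurrently_ replacing the hand-rolled Concurrently chain. An IO-specialised sketch of the same loop (the real function is lifted over MonadUnliftIO and special-cases a worker count of 1):

    import Control.Concurrent.Async (replicateConcurrently_)
    import Control.Concurrent.STM
    import Control.Monad (join)
    import Data.Function (fix)
    import qualified Data.Foldable as F

    parMapM_IO :: F.Foldable f => Int -> (a -> IO ()) -> f a -> IO ()
    parMapM_IO cnt f xs0 = do
      var <- newTVarIO (F.toList xs0)
      replicateConcurrently_ cnt $ fix $ \loop -> join $ atomically $ do
        xs <- readTVar var
        case xs of
          []    -> return (return ())     -- queue empty: this worker is done
          x:xs' -> do
            writeTVar var xs'
            return (f x >> loop)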
orSeparated :: NonEmpty T.Text -> T.Text
orSeparated xs
diff --git a/src/Stack/FileWatch.hs b/src/Stack/FileWatch.hs
index f0c7dc9..6905c42 100644
--- a/src/Stack/FileWatch.hs
+++ b/src/Stack/FileWatch.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Stack.FileWatch
@@ -8,24 +9,17 @@ module Stack.FileWatch
import Blaze.ByteString.Builder (toLazyByteString, copyByteString)
import Blaze.ByteString.Builder.Char.Utf8 (fromShow)
-import Control.Concurrent.Async (race_)
import Control.Concurrent.STM
-import Control.Exception (Exception, fromException, catch, throwIO)
-import Control.Exception.Safe (tryAny)
-import Control.Monad (forever, unless, when)
+import Stack.Prelude
import qualified Data.ByteString.Lazy as L
import qualified Data.Map.Strict as Map
-import Data.Monoid ((<>))
-import Data.Set (Set)
import qualified Data.Set as Set
-import Data.String (fromString)
-import Data.Traversable (forM)
import GHC.IO.Exception
import GHC.IO.Handle (hIsTerminalDevice)
import Path
import System.Console.ANSI
import System.FSNotify
-import System.IO (Handle, stdout, stderr, hPutStrLn)
+import System.IO (stdout, stderr, hPutStrLn, getLine)
-- | Print an exception to stderr
printExceptionStderr :: Exception e => e -> IO ()
diff --git a/src/Stack/GhcPkg.hs b/src/Stack/GhcPkg.hs
index 9d2afb4..b38a00f 100644
--- a/src/Stack/GhcPkg.hs
+++ b/src/Stack/GhcPkg.hs
@@ -1,4 +1,5 @@
--- FIXME See how much of this module can be deleted.
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
@@ -11,33 +12,23 @@
module Stack.GhcPkg
(getGlobalDB
- ,EnvOverride
- ,envHelper
,findGhcPkgField
,createDatabase
,unregisterGhcPkgId
,getCabalPkgVer
,ghcPkgExeName
+ ,ghcPkgPathEnvVar
,mkGhcPackagePath)
where
-import Control.Monad
-import Control.Monad.Catch
-import Control.Monad.IO.Class
-import Control.Monad.Logger
-import Control.Monad.Trans.Control
+import Stack.Prelude
import qualified Data.ByteString.Char8 as S8
-import Data.Either
import Data.List
-import Data.Maybe
-import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
-import Data.Text.Extra (stripCR)
-import Path (Path, Abs, Dir, toFilePath, parent, mkRelFile, (</>))
+import Path (parent, mkRelFile, (</>))
import Path.Extra (toFilePathNoTrailingSep)
import Path.IO
-import Prelude hiding (FilePath)
import Stack.Constants
import Stack.Types.Build
import Stack.Types.GhcPkgId
@@ -49,15 +40,15 @@ import System.FilePath (searchPathSeparator)
import System.Process.Read
-- | Get the global package database
-getGlobalDB :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+getGlobalDB :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> WhichCompiler -> m (Path Abs Dir)
getGlobalDB menv wc = do
- $logDebug "Getting global package database location"
+ logDebug "Getting global package database location"
-- This seems like a strange way to get the global package database
-- location, but I don't know of a better one
- bs <- ghcPkg menv wc [] ["list", "--global"] >>= either throwM return
+ bs <- ghcPkg menv wc [] ["list", "--global"] >>= either throwIO return
let fp = S8.unpack $ stripTrailingColon $ firstLine bs
- resolveDir' fp
+ liftIO $ resolveDir' fp
where
stripTrailingColon bs
| S8.null bs = bs
@@ -66,7 +57,7 @@ getGlobalDB menv wc = do
firstLine = S8.takeWhile (\c -> c /= '\r' && c /= '\n')
-- | Run the ghc-pkg executable
-ghcPkg :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+ghcPkg :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> [Path Abs Dir]
@@ -84,7 +75,7 @@ ghcPkg menv wc pkgDbs args = do
args' = packageDbFlags pkgDbs ++ args
-- | Create a package database in the given directory, if it doesn't exist.
-createDatabase :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+createDatabase :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> WhichCompiler -> Path Abs Dir -> m ()
createDatabase menv wc db = do
exists <- doesFileExist (db </> $(mkRelFile "package.cache"))
@@ -95,12 +86,12 @@ createDatabase menv wc db = do
dirExists <- doesDirExist db
args <- if dirExists
then do
- $logWarn $ T.pack $ concat
+ logWarn $ T.pack $ concat
[ "The package database located at "
, toFilePath db
, " is corrupted (missing its package.cache file)."
]
- $logWarn "Proceeding with a recache"
+ logWarn "Proceeding with a recache"
return ["--package-db", toFilePath db, "recache"]
else do
-- Creating the parent doesn't seem necessary, as ghc-pkg
@@ -111,8 +102,8 @@ createDatabase menv wc db = do
eres <- tryProcessStdout Nothing menv (ghcPkgExeName wc) args
case eres of
Left e -> do
- $logError $ T.pack $ "Unable to create package database at " ++ toFilePath db
- throwM e
+ logError $ T.pack $ "Unable to create package database at " ++ toFilePath db
+ throwIO e
Right _ -> return ()
-- | Get the name to use for "ghc-pkg", given the compiler version.
@@ -120,6 +111,11 @@ ghcPkgExeName :: WhichCompiler -> String
ghcPkgExeName Ghc = "ghc-pkg"
ghcPkgExeName Ghcjs = "ghcjs-pkg"
+-- | Get the environment variable to use for the package DB paths.
+ghcPkgPathEnvVar :: WhichCompiler -> Text
+ghcPkgPathEnvVar Ghc = "GHC_PACKAGE_PATH"
+ghcPkgPathEnvVar Ghcjs = "GHCJS_PACKAGE_PATH"
+
-- | Get the necessary ghc-pkg flags for setting up the given package database
packageDbFlags :: [Path Abs Dir] -> [String]
packageDbFlags pkgDbs =
@@ -128,7 +124,7 @@ packageDbFlags pkgDbs =
-- | Get the value of a field of the package.
findGhcPkgField
- :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+ :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> [Path Abs Dir] -- ^ package databases
@@ -149,7 +145,7 @@ findGhcPkgField menv wc pkgDbs name field = do
fmap (stripCR . T.decodeUtf8) $ listToMaybe $ S8.lines lbs
-- | Get the version of the package
-findGhcPkgVersion :: (MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+findGhcPkgVersion :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
-> [Path Abs Dir] -- ^ package databases
@@ -161,10 +157,10 @@ findGhcPkgVersion menv wc pkgDbs name = do
Just !v -> return (parseVersion v)
_ -> return Nothing
-unregisterGhcPkgId :: (MonadIO m, MonadLogger m, MonadCatch m, MonadBaseControl IO m)
+unregisterGhcPkgId :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride
-> WhichCompiler
- -> CompilerVersion
+ -> CompilerVersion 'CVActual
-> Path Abs Dir -- ^ package database
-> GhcPkgId
-> PackageIdentifier
@@ -172,7 +168,7 @@ unregisterGhcPkgId :: (MonadIO m, MonadLogger m, MonadCatch m, MonadBaseControl
unregisterGhcPkgId menv wc cv pkgDb gid ident = do
eres <- ghcPkg menv wc [pkgDb] args
case eres of
- Left e -> $logWarn $ T.pack $ show e
+ Left e -> logWarn $ T.pack $ show e
Right _ -> return ()
where
-- TODO ideally we'd tell ghc-pkg a GhcPkgId instead
@@ -183,16 +179,16 @@ unregisterGhcPkgId menv wc cv pkgDb gid ident = do
_ -> ["--ipid", ghcPkgIdString gid])
-- | Get the version of Cabal from the global package database.
-getCabalPkgVer :: (MonadThrow m, MonadIO m, MonadLogger m, MonadBaseControl IO m, MonadCatch m)
+getCabalPkgVer :: (MonadUnliftIO m, MonadLogger m)
=> EnvOverride -> WhichCompiler -> m Version
getCabalPkgVer menv wc = do
- $logDebug "Getting Cabal package version"
+ logDebug "Getting Cabal package version"
mres <- findGhcPkgVersion
menv
wc
[] -- global DB
cabalPackageName
- maybe (throwM $ Couldn'tFindPkgId cabalPackageName) return mres
+ maybe (throwIO $ Couldn'tFindPkgId cabalPackageName) return mres
-- | Get the value for GHC_PACKAGE_PATH
mkGhcPackagePath :: Bool -> Path Abs Dir -> Path Abs Dir -> [Path Abs Dir] -> Path Abs Dir -> Text
diff --git a/src/Stack/Ghci.hs b/src/Stack/Ghci.hs
index ca5ca70..7d97b41 100644
--- a/src/Stack/Ghci.hs
+++ b/src/Stack/Ghci.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
@@ -14,50 +15,27 @@ module Stack.Ghci
, GhciPkgInfo(..)
, GhciException(..)
, ghci
-
- -- TODO: Address what should and should not be exported.
- , renderScriptGhci
- , renderScriptIntero
) where
-import Control.Applicative
-import Control.Arrow (second)
-import Control.Exception.Safe (tryAny)
-import Control.Monad hiding (forM)
-import Control.Monad.Catch
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
import Control.Monad.State.Strict (State, execState, get, modify)
-import Control.Monad.Trans.Unlift (MonadBaseUnlift)
import qualified Data.ByteString.Char8 as S8
-import Data.Either
-import Data.Function
import Data.List
import Data.List.Extra (nubOrd)
-import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
-import Data.Maybe
-import Data.Maybe.Extra (forMaybeM)
-import Data.Monoid
-import Data.Set (Set)
import qualified Data.Set as S
-import Data.String
-import Data.Text (Text)
import qualified Data.Text as T
-import Data.Traversable (forM)
-import Data.Typeable (Typeable)
import qualified Distribution.PackageDescription as C
import qualified Distribution.Text as C
import Path
import Path.Extra (toFilePathNoTrailingSep)
-import Path.IO
-import Prelude
+import Path.IO hiding (withSystemTempDir)
import Stack.Build
import Stack.Build.Installed
import Stack.Build.Source
import Stack.Build.Target
import Stack.Config (getLocalPackages)
-import Stack.Constants
+import Stack.Constants.Config
import Stack.Exec
import Stack.Ghci.Script
import Stack.Package
@@ -69,8 +47,8 @@ import Stack.Types.FlagName
import Stack.Types.Package
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
-import Text.Read (readMaybe)
+import Stack.Types.Runner
+import System.IO (putStrLn, putStr, getLine)
#ifndef WINDOWS
import qualified System.Posix.Files as Posix
@@ -88,8 +66,9 @@ data GhciOpts = GhciOpts
, ghciMainIs :: !(Maybe Text)
, ghciLoadLocalDeps :: !Bool
, ghciSkipIntermediate :: !Bool
- , ghciHidePackages :: !Bool
+ , ghciHidePackages :: !(Maybe Bool)
, ghciNoBuild :: !Bool
+ , ghciOnlyMain :: !Bool
} deriving Show
-- | Necessary information to load a package or its components.
@@ -111,6 +90,7 @@ data GhciException
| MissingFileTarget String
| Can'tSpecifyFilesAndTargets
| Can'tSpecifyFilesAndMainIs
+ | GhciTargetParseException [Text]
deriving (Typeable)
instance Exception GhciException
@@ -125,14 +105,17 @@ instance Show GhciException where
show (MissingFileTarget name) =
"Cannot find file target " ++ name
show Can'tSpecifyFilesAndTargets =
- "Cannot use 'stack ghci' with both file targets and build targets"
+ "Cannot use 'stack ghci' with both file targets and package targets"
show Can'tSpecifyFilesAndMainIs =
"Cannot use 'stack ghci' with both file targets and --main-is flag"
+ show (GhciTargetParseException xs) =
+ show (TargetParseException xs) ++
+ "\nNote that to specify options to be passed to GHCi, use the --ghci-options flag"
-- | Launch a GHCi session for the given local package targets with the
-- given options and configure it with the load paths and extensions
-- of those targets.
-ghci :: (StackM r m, HasEnvConfig r, MonadBaseUnlift IO m) => GhciOpts -> m ()
+ghci :: HasEnvConfig env => GhciOpts -> RIO env ()
ghci opts@GhciOpts{..} = do
let buildOptsCLI = defaultBuildOptsCLI
{ boptsCLITargets = []
@@ -143,8 +126,9 @@ ghci opts@GhciOpts{..} = do
-- Parse --main-is argument.
mainIsTargets <- parseMainIsTargets buildOptsCLI ghciMainIs
-- Parse to either file targets or build targets
- etargets <- preprocessTargets ghciTargets
+ etargets <- preprocessTargets buildOptsCLI ghciTargets
(inputTargets, mfileTargets) <- case etargets of
+ Right packageTargets -> return (packageTargets, Nothing)
Left rawFileTargets -> do
case mainIsTargets of
Nothing -> return ()
@@ -152,51 +136,58 @@ ghci opts@GhciOpts{..} = do
-- Figure out targets based on filepath targets
(targetMap, fileInfo, extraFiles) <- findFileTargets locals rawFileTargets
return (targetMap, Just (fileInfo, extraFiles))
- Right rawTargets -> do
- (_,_,normalTargets) <- parseTargetsFromBuildOpts AllowNoTargets buildOptsCLI
- { boptsCLITargets = rawTargets }
- return (normalTargets, Nothing)
- -- Make sure the targets are known.
- checkTargets inputTargets
-- Get a list of all the local target packages.
localTargets <- getAllLocalTargets opts inputTargets mainIsTargets sourceMap
+ -- Get a list of all the non-local target packages.
+ nonLocalTargets <- getAllNonLocalTargets inputTargets
-- Check if additional package arguments are sensible.
addPkgs <- checkAdditionalPackages ghciAdditionalPackages
-- Build required dependencies and setup local packages.
+ stackYaml <- view stackYamlL
buildDepsAndInitialSteps opts (map (packageNameText . fst) localTargets)
+ targetWarnings stackYaml localTargets nonLocalTargets mfileTargets
-- Load the list of modules _after_ building, to catch changes in unlisted dependencies (#1180)
pkgs <- getGhciPkgInfos buildOptsCLI sourceMap addPkgs (fmap fst mfileTargets) localTargets
checkForIssues pkgs
-- Finally, do the invocation of ghci
- runGhci opts localTargets mainIsTargets pkgs (maybe [] snd mfileTargets)
+ runGhci opts localTargets mainIsTargets pkgs (maybe [] snd mfileTargets) (nonLocalTargets ++ addPkgs)
-preprocessTargets :: (StackM r m) => [Text] -> m (Either [Path Abs File] [Text])
-preprocessTargets rawTargets = do
- let (fileTargetsRaw, normalTargets) =
+preprocessTargets :: HasEnvConfig env => BuildOptsCLI -> [Text] -> RIO env (Either [Path Abs File] (Map PackageName Target))
+preprocessTargets buildOptsCLI rawTargets = do
+ let (fileTargetsRaw, normalTargetsRaw) =
partition (\t -> ".hs" `T.isSuffixOf` t || ".lhs" `T.isSuffixOf` t)
rawTargets
- fileTargets <- forM fileTargetsRaw $ \fp0 -> do
- let fp = T.unpack fp0
- mpath <- forgivingAbsence (resolveFile' fp)
- case mpath of
- Nothing -> throwM (MissingFileTarget fp)
- Just path -> return path
- case (null fileTargets, null normalTargets) of
- (False, False) -> throwM Can'tSpecifyFilesAndTargets
- (False, _) -> return (Left fileTargets)
- _ -> return (Right normalTargets)
-
-parseMainIsTargets :: (StackM r m, HasEnvConfig r) => BuildOptsCLI -> Maybe Text -> m (Maybe (Map PackageName SimpleTarget))
+ -- Only use file targets if we have no normal targets.
+ if not (null fileTargetsRaw) && null normalTargetsRaw
+ then do
+ fileTargets <- forM fileTargetsRaw $ \fp0 -> do
+ let fp = T.unpack fp0
+ mpath <- liftIO $ forgivingAbsence (resolveFile' fp)
+ case mpath of
+ Nothing -> throwM (MissingFileTarget fp)
+ Just path -> return path
+ return (Left fileTargets)
+ else do
+ -- Try parsing targets before checking if both file and
+ -- module targets are specified (see issue#3342).
+ (_,_,normalTargets) <- parseTargets AllowNoTargets buildOptsCLI { boptsCLITargets = normalTargetsRaw }
+ `catch` \ex -> case ex of
+ TargetParseException xs -> throwM (GhciTargetParseException xs)
+ _ -> throwM ex
+ unless (null fileTargetsRaw) $ throwM Can'tSpecifyFilesAndTargets
+ return (Right normalTargets)
+
+parseMainIsTargets :: HasEnvConfig env => BuildOptsCLI -> Maybe Text -> RIO env (Maybe (Map PackageName Target))
parseMainIsTargets buildOptsCLI mtarget = forM mtarget $ \target -> do
- (_,_,targets) <- parseTargetsFromBuildOpts AllowNoTargets buildOptsCLI
+ (_,_,targets) <- parseTargets AllowNoTargets buildOptsCLI
{ boptsCLITargets = [target] }
return targets
findFileTargets
- :: (StackM r m, HasEnvConfig r)
+ :: HasEnvConfig env
=> [LocalPackage]
-> [Path Abs File]
- -> m (Map PackageName SimpleTarget, Map PackageName (Set (Path Abs File)), [Path Abs File])
+ -> RIO env (Map PackageName Target, Map PackageName (Set (Path Abs File)), [Path Abs File])
findFileTargets locals fileTargets = do
filePackages <- forM locals $ \lp -> do
(_,compFiles,_,_) <- getPackageFiles (packageFiles (lpPackage lp)) (lpCabalFile lp)
@@ -211,18 +202,20 @@ findFileTargets locals fileTargets = do
results <- forM foundFileTargetComponents $ \(fp, xs) ->
case xs of
[] -> do
- $prettyWarn $
- "Couldn't find a component for file target" <+>
- display fp <>
- ". Attempting to load anyway."
+ prettyWarn $ vsep
+ [ "Couldn't find a component for file target" <+>
+ display fp <>
+ ". This means that the correct ghc options might not be used."
+ , "Attempting to load the file anyway."
+ ]
return $ Left fp
[x] -> do
- $prettyInfo $
+ prettyInfo $
"Using configuration for" <+> display x <+>
"to load" <+> display fp
return $ Right (fp, x)
(x:_) -> do
- $prettyWarn $
+ prettyWarn $
"Multiple components contain file target" <+>
display fp <> ":" <+>
mconcat (intersperse ", " (map display xs)) <> line <>
@@ -230,8 +223,8 @@ findFileTargets locals fileTargets = do
return $ Right (fp, x)
let (extraFiles, associatedFiles) = partitionEithers results
targetMap =
- foldl unionSimpleTargets M.empty $
- map (\(_, (name, comp)) -> M.singleton name (STLocalComps (S.singleton comp)))
+ foldl unionTargets M.empty $
+ map (\(_, (name, comp)) -> M.singleton name (TargetComps (S.singleton comp)))
associatedFiles
infoMap =
foldl (M.unionWith S.union) M.empty $
@@ -239,43 +232,28 @@ findFileTargets locals fileTargets = do
associatedFiles
return (targetMap, infoMap, extraFiles)
-checkTargets
- :: (StackM r m, HasEnvConfig r)
- => Map PackageName SimpleTarget
- -> m ()
-checkTargets mp = do
- let filtered = M.filter (== STUnknown) mp
- unless (M.null filtered) $ do
- bconfig <- view buildConfigL
- throwM $ UnknownTargets (M.keysSet filtered) M.empty (bcStackYaml bconfig)
-
getAllLocalTargets
- :: (StackM r m, HasEnvConfig r)
+ :: HasEnvConfig env
=> GhciOpts
- -> Map PackageName SimpleTarget
- -> Maybe (Map PackageName SimpleTarget)
+ -> Map PackageName Target
+ -> Maybe (Map PackageName Target)
-> SourceMap
- -> m [(PackageName, (Path Abs File, SimpleTarget))]
+ -> RIO env [(PackageName, (Path Abs File, Target))]
getAllLocalTargets GhciOpts{..} targets0 mainIsTargets sourceMap = do
-- Use the 'mainIsTargets' as normal targets, for CLI concision. See
-- #1845. This is a little subtle - we need to do the target parsing
-- independently in order to handle the case where no targets are
-- specified.
- let targets = maybe targets0 (unionSimpleTargets targets0) mainIsTargets
- packages <- getLocalPackages
+ let targets = maybe targets0 (unionTargets targets0) mainIsTargets
+ packages <- lpProject <$> getLocalPackages
-- Find all of the packages that are directly demanded by the
-- targets.
directlyWanted <-
forMaybeM (M.toList packages) $
- \(dir,treatLikeExtraDep) ->
- do cabalfp <- findOrGenerateCabalFile dir
- name <- parsePackageNameFromFilePath cabalfp
- if treatLikeExtraDep
- then return Nothing
- else case M.lookup name targets of
- Just simpleTargets ->
- return (Just (name, (cabalfp, simpleTargets)))
- Nothing -> return Nothing
+ \(name, lpv) ->
+ case M.lookup name targets of
+ Just simpleTargets -> return (Just (name, (lpvCabalFP lpv, simpleTargets)))
+ Nothing -> return Nothing
-- Figure out
let extraLoadDeps = getExtraLoadDeps ghciLoadLocalDeps sourceMap directlyWanted
if (ghciSkipIntermediate && not ghciLoadLocalDeps) || null extraLoadDeps
@@ -283,12 +261,12 @@ getAllLocalTargets GhciOpts{..} targets0 mainIsTargets sourceMap = do
else do
let extraList = T.intercalate ", " (map (packageNameText . fst) extraLoadDeps)
if ghciLoadLocalDeps
- then $logInfo $ T.concat
+ then logInfo $ T.concat
[ "The following libraries will also be loaded into GHCi because "
, "they are local dependencies of your targets, and you specified --load-local-deps:\n "
, extraList
]
- else $logInfo $ T.concat
+ else logInfo $ T.concat
[ "The following libraries will also be loaded into GHCi because "
, "they are intermediate dependencies of your targets:\n "
, extraList
@@ -296,7 +274,15 @@ getAllLocalTargets GhciOpts{..} targets0 mainIsTargets sourceMap = do
]
return (directlyWanted ++ extraLoadDeps)
-buildDepsAndInitialSteps :: (StackM r m, HasEnvConfig r, MonadBaseUnlift IO m) => GhciOpts -> [Text] -> m ()
+getAllNonLocalTargets
+ :: Map PackageName Target
+ -> RIO env [PackageName]
+getAllNonLocalTargets targets = do
+ let isNonLocal (TargetAll Dependency) = True
+ isNonLocal _ = False
+ return $ map fst $ filter (isNonLocal . snd) (M.toList targets)
+
+buildDepsAndInitialSteps :: HasEnvConfig env => GhciOpts -> [Text] -> RIO env ()
buildDepsAndInitialSteps GhciOpts{..} targets0 = do
let targets = targets0 ++ map T.pack ghciAdditionalPackages
     -- If necessary, do the build, for local package targets, only do
@@ -311,8 +297,8 @@ buildDepsAndInitialSteps GhciOpts{..} targets0 = do
case eres of
Right () -> return ()
Left err -> do
- $prettyError $ fromString (show err)
- $prettyWarn "Build failed, but optimistically launching GHCi anyway"
+ prettyError $ fromString (show err)
+ prettyWarn "Build failed, but trying to launch GHCi anyway"
checkAdditionalPackages :: MonadThrow m => [String] -> m [PackageName]
checkAdditionalPackages pkgs = forM pkgs $ \name -> do
@@ -321,39 +307,51 @@ checkAdditionalPackages pkgs = forM pkgs $ \name -> do
maybe (throwM $ InvalidPackageOption name) return mres
runGhci
- :: (StackM r m, HasEnvConfig r)
+ :: HasEnvConfig env
=> GhciOpts
- -> [(PackageName, (Path Abs File, SimpleTarget))]
- -> Maybe (Map PackageName SimpleTarget)
+ -> [(PackageName, (Path Abs File, Target))]
+ -> Maybe (Map PackageName Target)
-> [GhciPkgInfo]
-> [Path Abs File]
- -> m ()
-runGhci GhciOpts{..} targets mainIsTargets pkgs extraFiles = do
+ -> [PackageName]
+ -> RIO env ()
+runGhci GhciOpts{..} targets mainIsTargets pkgs extraFiles exposePackages = do
config <- view configL
wc <- view $ actualCompilerVersionL.whichCompilerL
- let pkgopts = hidePkgOpt ++ genOpts ++ ghcOpts
- hidePkgOpt = if null pkgs || not ghciHidePackages then [] else ["-hide-all-packages"]
+ let pkgopts = hidePkgOpts ++ genOpts ++ ghcOpts
+ shouldHidePackages =
+ fromMaybe (not (null pkgs && null exposePackages)) ghciHidePackages
+ hidePkgOpts =
+ if shouldHidePackages
+ then "-hide-all-packages" :
+ -- This is necessary, because current versions of ghci
+ -- will entirely fail to start if base isn't visible. This
+ -- is because it tries to use the interpreter to set
+ -- buffering options on standard IO.
+ "-package" : "base" :
+ concatMap (\n -> ["-package", packageNameString n]) exposePackages
+ else []
oneWordOpts bio
- | ghciHidePackages = bioOneWordOpts bio ++ bioPackageFlags bio
+ | shouldHidePackages = bioOneWordOpts bio ++ bioPackageFlags bio
| otherwise = bioOneWordOpts bio
genOpts = nubOrd (concatMap (concatMap (oneWordOpts . snd) . ghciPkgOpts) pkgs)
(omittedOpts, ghcOpts) = partition badForGhci $
- concatMap (concatMap (bioOpts . snd) . ghciPkgOpts) pkgs ++
- getUserOptions Nothing ++
- concatMap (getUserOptions . Just . ghciPkgName) pkgs
- getUserOptions mpkg =
- map T.unpack (M.findWithDefault [] mpkg (unGhcOptions (configGhcOptions config)))
+ concatMap (concatMap (bioOpts . snd) . ghciPkgOpts) pkgs ++ map T.unpack
+ ( fold (configGhcOptionsByCat config) -- include everything, locals, and targets
+ ++ concatMap (getUserOptions . ghciPkgName) pkgs
+ )
+ getUserOptions pkg = M.findWithDefault [] pkg (configGhcOptionsByName config)
badForGhci x =
isPrefixOf "-O" x || elem x (words "-debug -threaded -ticky -static -Werror")
unless (null omittedOpts) $
- $logWarn
+ logWarn
("The following GHC options are incompatible with GHCi and have not been passed to it: " <>
T.unwords (map T.pack (nubOrd omittedOpts)))
oiDir <- view objectInterfaceDirL
let odir =
[ "-odir=" <> toFilePathNoTrailingSep oiDir
, "-hidir=" <> toFilePathNoTrailingSep oiDir ]
- $logInfo
+ logInfo
("Configuring GHCi with the following packages: " <>
T.intercalate ", " (map (packageNameText . ghciPkgName) pkgs))
let execGhci extras = do
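To make the new package-visibility logic concrete: when hiding is enabled, GHCi gets -hide-all-packages plus an explicit -package flag for base (so the interpreter can still set up stdio buffering) and for every exposed target. A stand-alone sketch over plain String package names (hidePkgFlags is a hypothetical helper mirroring the hidePkgOpts binding above):

    hidePkgFlags :: Bool -> [String] -> [String]
    hidePkgFlags shouldHide exposePackages
      | shouldHide =
          "-hide-all-packages" :
          "-package" : "base" :
          concatMap (\n -> ["-package", n]) exposePackages
      | otherwise = []

    -- hidePkgFlags True ["lens"]
    --   == ["-hide-all-packages","-package","base","-package","lens"]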
@@ -365,73 +363,66 @@ runGhci GhciOpts{..} targets mainIsTargets pkgs extraFiles = do
-- not include CWD. If there aren't any packages, CWD
-- is included.
(if null pkgs then id else ("-i" : )) $
- odir <> pkgopts <> map T.unpack ghciGhcOptions <> ghciArgs <> extras)
- interrogateExeForRenderFunction = do
- menv <- liftIO $ configEnvOverride config defaultEnvSettings
- output <- execObserve menv (fromMaybe (compilerExeName wc) ghciGhcCommand) ["--version"]
- if "Intero" `isPrefixOf` output
- then return renderScriptIntero
- else return renderScriptGhci
+ odir <> pkgopts <> extras <> map T.unpack ghciGhcOptions <> ghciArgs)
+ -- TODO: Consider optimizing this check. Perhaps if no
+ -- "with-ghc" is specified, assume that it is not using intero.
+ checkIsIntero =
+ -- Optimization dependent on the behavior of renderScript -
+ -- it doesn't matter if it's intero or ghci when loading
+ -- multiple packages.
+ case pkgs of
+ [_] -> do
+ menv <- liftIO $ configEnvOverride config defaultEnvSettings
+ output <- execObserve menv (fromMaybe (compilerExeName wc) ghciGhcCommand) ["--version"]
+ return $ "Intero" `isPrefixOf` output
+ _ -> return False
withSystemTempDir "ghci" $ \tmpDirectory -> do
macrosOptions <- writeMacrosFile tmpDirectory pkgs
if ghciNoLoadModules
then execGhci macrosOptions
else do
checkForDuplicateModules pkgs
- renderFn <- interrogateExeForRenderFunction
+ isIntero <- checkIsIntero
bopts <- view buildOptsL
mainFile <- figureOutMainFile bopts mainIsTargets targets pkgs
- scriptPath <- writeGhciScript tmpDirectory (renderFn pkgs mainFile extraFiles)
+ scriptPath <- writeGhciScript tmpDirectory (renderScript isIntero pkgs mainFile ghciOnlyMain extraFiles)
execGhci (macrosOptions ++ ["-ghci-script=" <> toFilePath scriptPath])
writeMacrosFile :: (MonadIO m) => Path Abs Dir -> [GhciPkgInfo] -> m [String]
writeMacrosFile tmpDirectory packages = do
- preprocessCabalMacros packages macrosFile
+ preprocessCabalMacros packages macrosFile
where
macrosFile = tmpDirectory </> $(mkRelFile "cabal_macros.h")
writeGhciScript :: (MonadIO m) => Path Abs Dir -> GhciScript -> m (Path Abs File)
writeGhciScript tmpDirectory script = do
- liftIO $ scriptToFile scriptPath script
- setScriptPerms scriptFilePath
- return scriptPath
+ liftIO $ scriptToFile scriptPath script
+ setScriptPerms scriptFilePath
+ return scriptPath
where
scriptPath = tmpDirectory </> $(mkRelFile "ghci-script")
scriptFilePath = toFilePath scriptPath
-findOwningPackageForMain :: [GhciPkgInfo] -> Path Abs File -> Maybe GhciPkgInfo
-findOwningPackageForMain pkgs mainFile =
- find (\pkg -> toFilePath (ghciPkgDir pkg) `isPrefixOf` toFilePath mainFile) pkgs
-
-renderScriptGhci :: [GhciPkgInfo] -> Maybe (Path Abs File) -> [Path Abs File] -> GhciScript
-renderScriptGhci pkgs mainFile extraFiles =
- let addPhase = mconcat $ fmap renderPkg pkgs
- mainPhase = case mainFile of
- Just path -> cmdAddFile path
- Nothing -> mempty
- modulePhase = cmdModule $ foldl' S.union S.empty (fmap ghciPkgModules pkgs)
- in case getFileTargets pkgs <> extraFiles of
- [] -> addPhase <> mainPhase <> modulePhase
- fileTargets -> mconcat $ map cmdAddFile fileTargets
- where
- renderPkg pkg = cmdAdd (ghciPkgModules pkg)
-
-renderScriptIntero :: [GhciPkgInfo] -> Maybe (Path Abs File) -> [Path Abs File] -> GhciScript
-renderScriptIntero pkgs mainFile extraFiles =
- let addPhase = mconcat $ fmap renderPkg pkgs
- mainPhase = case mainFile of
- Just path ->
- case findOwningPackageForMain pkgs path of
- Just mainPkg -> cmdCdGhc (ghciPkgDir mainPkg) <> cmdAddFile path
- Nothing -> cmdAddFile path
- Nothing -> mempty
- modulePhase = cmdModule $ foldl' S.union S.empty (fmap ghciPkgModules pkgs)
- in case getFileTargets pkgs <> extraFiles of
- [] -> addPhase <> mainPhase <> modulePhase
- fileTargets -> mconcat $ map cmdAddFile fileTargets
- where
- renderPkg pkg = cmdCdGhc (ghciPkgDir pkg)
- <> cmdAdd (ghciPkgModules pkg)
+renderScript :: Bool -> [GhciPkgInfo] -> Maybe (Path Abs File) -> Bool -> [Path Abs File] -> GhciScript
+renderScript isIntero pkgs mainFile onlyMain extraFiles = do
+ let cdPhase = case (isIntero, pkgs) of
+ -- If only loading one package, set the cwd properly.
+ -- Otherwise don't try. See
+ -- https://github.com/commercialhaskell/stack/issues/3309
+ (True, [pkg]) -> cmdCdGhc (ghciPkgDir pkg)
+ _ -> mempty
+ addPhase = cmdAdd $ S.fromList (map Left allModules ++ addMain)
+ addMain = case mainFile of
+ Just path -> [Right path]
+ _ -> []
+ modulePhase = cmdModule $ S.fromList allModules
+ allModules = concatMap (S.toList . ghciPkgModules) pkgs
+ case getFileTargets pkgs <> extraFiles of
+ [] ->
+ if onlyMain
+ then cdPhase <> if isJust mainFile then cmdAdd (S.fromList addMain) else mempty
+ else cdPhase <> addPhase <> modulePhase
+ fileTargets -> cmdAdd (S.fromList (map Right fileTargets))
-- Hacky check if module / main phase should be omitted. This should be
-- improved if / when we have a better per-component load.
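The new checkIsIntero helper above decides between GHCi- and Intero-specific behavior by running the selected compiler command (the --with-ghc override, or the snapshot compiler) with --version and checking the output prefix. A minimal standalone sketch of that probe, not part of this patch, using readProcess from the process package in place of Stack's EnvOverride/execObserve machinery:

import Data.List (isPrefixOf)
import System.Process (readProcess)

-- Run the chosen GHC-like command and report whether it identifies
-- itself as Intero; anything else is treated as plain GHCi.
looksLikeIntero :: FilePath -> IO Bool
looksLikeIntero ghcCmd = do
  versionOutput <- readProcess ghcCmd ["--version"] ""
  return ("Intero" `isPrefixOf` versionOutput)

As in the patch, the probe only matters when a single package is loaded; renderScript emits the same script for Intero and GHCi once multiple packages are involved.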
@@ -442,30 +433,30 @@ getFileTargets = concatMap (concatMap S.toList . maybeToList . ghciPkgTargetFile
-- is none, sometimes it's unambiguous, sometimes it's
-- ambiguous. Warns and returns nothing if it's ambiguous.
figureOutMainFile
- :: (StackM r m)
+ :: HasRunner env
=> BuildOpts
- -> Maybe (Map PackageName SimpleTarget)
- -> [(PackageName, (Path Abs File, SimpleTarget))]
+ -> Maybe (Map PackageName Target)
+ -> [(PackageName, (Path Abs File, Target))]
-> [GhciPkgInfo]
- -> m (Maybe (Path Abs File))
+ -> RIO env (Maybe (Path Abs File))
figureOutMainFile bopts mainIsTargets targets0 packages = do
case candidates of
[] -> return Nothing
- [c@(_,_,fp)] -> do $logInfo ("Using main module: " <> renderCandidate c)
+ [c@(_,_,fp)] -> do logInfo ("Using main module: " <> renderCandidate c)
return (Just fp)
candidate:_ -> do
borderedWarning $ do
- $logWarn "The main module to load is ambiguous. Candidates are: "
- forM_ (map renderCandidate candidates) $logWarn
- $logWarn
+ logWarn "The main module to load is ambiguous. Candidates are: "
+ forM_ (map renderCandidate candidates) logWarn
+ logWarn
"You can specify which one to pick by: "
- $logWarn
+ logWarn
(" * Specifying targets to stack ghci e.g. stack ghci " <>
sampleTargetArg candidate)
- $logWarn
+ logWarn
(" * Specifying what the main is e.g. stack ghci " <>
sampleMainIsArg candidate)
- $logWarn
+ logWarn
(" * Choosing from the candidate above [1.." <>
T.pack (show $ length candidates) <> "]")
liftIO userOption
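This hunk also introduces the signature pattern repeated through the rest of the diff: mtl-style constraints such as (StackM r m) become concrete RIO env computations with Has* constraints, and the Template Haskell $logWarn/$logInfo splices become plain logWarn/logInfo functions. A small self-contained example of that reader-plus-logging style, not part of this patch; it is written against the standalone rio package, so the names below are rio's rather than the ones Stack.Prelude exports here:

{-# LANGUAGE OverloadedStrings #-}
import RIO

-- A minimal application environment carrying a log function and one
-- piece of configuration.
data App = App { appLogFunc :: !LogFunc, appName :: !Utf8Builder }

instance HasLogFunc App where
  logFuncL = lens appLogFunc (\app lf -> app { appLogFunc = lf })

-- Computations are written against capabilities of the environment
-- (Has* classes), not against a transformer stack.
greet :: RIO App ()
greet = do
  name <- asks appName
  logInfo ("Hello, " <> name)

main :: IO ()
main = do
  logOptions <- logOptionsHandle stderr False
  withLogFunc logOptions $ \lf -> runRIO (App lf "stack ghci") greet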
@@ -527,13 +518,13 @@ figureOutMainFile bopts mainIsTargets targets0 packages = do
"--main-is " <> packageNameText pkg <> ":" <> renderComp comp
getGhciPkgInfos
- :: (StackM r m, HasEnvConfig r)
+ :: HasEnvConfig env
=> BuildOptsCLI
-> SourceMap
-> [PackageName]
-> Maybe (Map PackageName (Set (Path Abs File)))
- -> [(PackageName, (Path Abs File, SimpleTarget))]
- -> m [GhciPkgInfo]
+ -> [(PackageName, (Path Abs File, Target))]
+ -> RIO env [GhciPkgInfo]
getGhciPkgInfos buildOptsCLI sourceMap addPkgs mfileTargets localTargets = do
menv <- getMinimalEnvOverride
(installedMap, _, _, _) <- getInstalled
@@ -550,7 +541,7 @@ getGhciPkgInfos buildOptsCLI sourceMap addPkgs mfileTargets localTargets = do
-- | Make information necessary to load the given package in GHCi.
makeGhciPkgInfo
- :: (StackM r m, HasEnvConfig r)
+ :: HasEnvConfig env
=> BuildOptsCLI
-> SourceMap
-> InstalledMap
@@ -559,8 +550,8 @@ makeGhciPkgInfo
-> Maybe (Map PackageName (Set (Path Abs File)))
-> PackageName
-> Path Abs File
- -> SimpleTarget
- -> m GhciPkgInfo
+ -> Target
+ -> RIO env GhciPkgInfo
makeGhciPkgInfo buildOptsCLI sourceMap installedMap locals addPkgs mfileTargets name cabalfp target = do
bopts <- view buildOptsL
econfig <- view envConfigL
@@ -575,7 +566,11 @@ makeGhciPkgInfo buildOptsCLI sourceMap installedMap locals addPkgs mfileTargets
, packageConfigCompilerVersion = compilerVersion
, packageConfigPlatform = view platformL econfig
}
- (warnings,gpkgdesc) <- readPackageUnresolved cabalfp
+ -- TODO we've already parsed this information, otherwise we
+ -- wouldn't have figured out the cabalfp already. In the future:
+ -- retain that GenericPackageDescription in the relevant data
+ -- structures to avoid reparsing.
+ (gpkgdesc, _cabalfp) <- readPackageUnresolvedDir (parent cabalfp) True
-- Source the package's *.buildinfo file created by configure if any. See
-- https://www.haskell.org/cabal/users-guide/developing-packages.html#system-dependent-parameters
@@ -585,12 +580,18 @@ makeGhciPkgInfo buildOptsCLI sourceMap installedMap locals addPkgs mfileTargets
| hasDotBuildinfo = Just (parent cabalfp </> buildinfofp)
| otherwise = Nothing
mbuildinfo <- forM mbuildinfofp readDotBuildinfo
- let pkg =
+ let pdp = resolvePackageDescription config gpkgdesc
+ pkg =
packageFromPackageDescription config (C.genPackageFlags gpkgdesc) $
- maybe id C.updatePackageDescription mbuildinfo $
- resolvePackageDescription config gpkgdesc
+ maybe
+ pdp
+ (\bi ->
+ let PackageDescriptionPair x y = pdp
+ in PackageDescriptionPair
+ (C.updatePackageDescription bi x)
+ (C.updatePackageDescription bi y))
+ mbuildinfo
- mapM_ (printCabalFileWarning cabalfp) warnings
(mods,files,opts) <- getPackageOpts (packageOpts pkg) sourceMap installedMap locals addPkgs cabalfp
let filteredOpts = filterWanted opts
filterWanted = M.filterWithKey (\k _ -> k `S.member` allWanted)
@@ -612,10 +613,12 @@ makeGhciPkgInfo buildOptsCLI sourceMap installedMap locals addPkgs mfileTargets
-- NOTE: this should make the same choices as the components code in
-- 'loadLocalPackage'. Unfortunately for now we reiterate this logic
-- (differently).
-wantedPackageComponents :: BuildOpts -> SimpleTarget -> Package -> Set NamedComponent
-wantedPackageComponents _ (STLocalComps cs) _ = cs
-wantedPackageComponents bopts STLocalAll pkg = S.fromList $
- (if packageHasLibrary pkg then [CLib] else []) ++
+wantedPackageComponents :: BuildOpts -> Target -> Package -> Set NamedComponent
+wantedPackageComponents _ (TargetComps cs) _ = cs
+wantedPackageComponents bopts (TargetAll ProjectPackage) pkg = S.fromList $
+ (case packageLibraries pkg of
+ NoLibraries -> []
+ HasLibraries _names -> [CLib]) ++ -- FIXME. This ignores sub libraries and foreign libraries. Is that OK?
map CExe (S.toList (packageExes pkg)) <>
(if boptsTests bopts then map CTest (M.keys (packageTests pkg)) else []) <>
(if boptsBenchmarks bopts then map CBench (S.toList (packageBenchmarks pkg)) else [])
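wantedPackageComponents now matches on the new Target type and on packageLibraries, but the selection rule is the same: the library (if present) and every executable are always loaded, while test suites and benchmarks are included only when the corresponding build options are enabled. A pure sketch of that rule with simplified stand-in types, not part of this patch:

import qualified Data.Set as Set

data Component = CompLib | CompExe String | CompTest String | CompBench String
  deriving (Eq, Ord, Show)

-- Which components to load for a project package.
wantedComponents
  :: Bool          -- has a library?
  -> Bool          -- building tests?
  -> Bool          -- building benchmarks?
  -> [String]      -- executable names
  -> [String]      -- test suite names
  -> [String]      -- benchmark names
  -> Set.Set Component
wantedComponents hasLib withTests withBenches exes tests benches = Set.fromList $
     (if hasLib then [CompLib] else [])
  ++ map CompExe exes
  ++ (if withTests then map CompTest tests else [])
  ++ (if withBenches then map CompBench benches else [])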
@@ -624,15 +627,15 @@ wantedPackageComponents _ _ _ = S.empty
checkForIssues :: (MonadThrow m, MonadLogger m) => [GhciPkgInfo] -> m ()
checkForIssues pkgs = do
unless (null issues) $ borderedWarning $ do
- $logWarn "Warning: There are cabal settings for this project which may prevent GHCi from loading your code properly."
- $logWarn "In some cases it can also load some projects which would otherwise fail to build."
- $logWarn ""
- mapM_ $logWarn $ intercalate [""] issues
- $logWarn ""
- $logWarn "To resolve, remove the flag(s) from the cabal file(s) and instead put them at the top of the haskell files."
- $logWarn ""
- $logWarn "It isn't yet possible to load multiple packages into GHCi in all cases - see"
- $logWarn "https://ghc.haskell.org/trac/ghc/ticket/10827"
+ logWarn "Warning: There are cabal settings for this project which may prevent GHCi from loading your code properly."
+ logWarn "In some cases it can also load some projects which would otherwise fail to build."
+ logWarn ""
+ mapM_ logWarn $ intercalate [""] issues
+ logWarn ""
+ logWarn "To resolve, remove the flag(s) from the cabal file(s) and instead put them at the top of the haskell files."
+ logWarn ""
+ logWarn "It isn't yet possible to load multiple packages into GHCi in all cases - see"
+ logWarn "https://ghc.haskell.org/trac/ghc/ticket/10827"
where
issues = concat
[ mixedFlag "-XNoImplicitPrelude"
@@ -686,20 +689,20 @@ checkForIssues pkgs = do
borderedWarning :: MonadLogger m => m a -> m a
borderedWarning f = do
- $logWarn ""
- $logWarn "* * * * * * * *"
+ logWarn ""
+ logWarn "* * * * * * * *"
x <- f
- $logWarn "* * * * * * * *"
- $logWarn ""
+ logWarn "* * * * * * * *"
+ logWarn ""
return x
checkForDuplicateModules :: (MonadThrow m, MonadLogger m) => [GhciPkgInfo] -> m ()
checkForDuplicateModules pkgs = do
unless (null duplicates) $ do
borderedWarning $ do
- $logWarn "The following modules are present in multiple packages:"
+ logWarn "The following modules are present in multiple packages:"
forM_ duplicates $ \(mn, pns) -> do
- $logWarn (" * " <> T.pack mn <> " (in " <> T.intercalate ", " (map packageNameText pns) <> ")")
+ logWarn (" * " <> T.pack mn <> " (in " <> T.intercalate ", " (map packageNameText pns) <> ")")
throwM LoadingDuplicateModules
where
duplicates, allModules :: [(String, [PackageName])]
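checkForDuplicateModules groups module names by the packages that expose them (Map.fromListWith, visible in the context just below) and reports every module claimed by more than one package. A standalone sketch of that grouping, not part of this patch, using plain Strings instead of Stack's module and package name types:

import qualified Data.Map as Map

-- Given (module, package) pairs, return the modules that appear in
-- more than one package together with the offending package names.
duplicateModules :: [(String, String)] -> [(String, [String])]
duplicateModules pairs =
  filter ((> 1) . length . snd) $
  Map.toList $ Map.fromListWith (++) [ (m, [p]) | (m, p) <- pairs ]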
@@ -708,6 +711,40 @@ checkForDuplicateModules pkgs = do
M.toList $ M.fromListWith (++) $
concatMap (\pkg -> map ((, [ghciPkgName pkg]) . C.display) (S.toList (ghciPkgModules pkg))) pkgs
+targetWarnings
+ :: HasRunner env
+ => Path Abs File
+ -> [(PackageName, (Path Abs File, Target))]
+ -> [PackageName]
+ -> Maybe (Map PackageName (Set (Path Abs File)), [Path Abs File])
+ -> RIO env ()
+targetWarnings stackYaml localTargets nonLocalTargets mfileTargets = do
+ unless (null nonLocalTargets) $
+ prettyWarnL
+ [ flow "Some targets"
+ , parens $ fillSep $ punctuate "," $ map (styleGood . display) nonLocalTargets
+ , flow "are not local packages, and so cannot be directly loaded."
+ , flow "In future versions of stack, this might be supported - see"
+ , styleUrl "https://github.com/commercialhaskell/stack/issues/1441"
+ , "."
+ , flow "It can still be useful to specify these, as they will be passed to ghci via -package flags."
+ ]
+ when (null localTargets && isNothing mfileTargets) $
+ prettyWarn $ vsep
+ [ flow "No local targets specified, so ghci will not use any options from your package.yaml / *.cabal files."
+ , ""
+ , flow "Potential ways to resolve this:"
+ , bulletedList
+ [ fillSep
+ [ flow "If you want to use the package.yaml / *.cabal package in the current directory, use"
+ , styleShell "stack init"
+ , flow "to create a new stack.yaml."
+ ]
+ , flow "Add to the 'packages' field of" <+> display stackYaml
+ ]
+ , ""
+ ]
+
-- Adds in intermediate dependencies between ghci targets. Note that it
-- will return a Lib component for these intermediate dependencies even
-- if they don't have a library (but that's fine for the usage within
@@ -718,8 +755,8 @@ checkForDuplicateModules pkgs = do
getExtraLoadDeps
:: Bool
-> SourceMap
- -> [(PackageName, (Path Abs File, SimpleTarget))]
- -> [(PackageName, (Path Abs File, SimpleTarget))]
+ -> [(PackageName, (Path Abs File, Target))]
+ -> [(PackageName, (Path Abs File, Target))]
getExtraLoadDeps loadAllDeps sourceMap targets =
M.toList $
(\mp -> foldl' (flip M.delete) mp (map fst targets)) $
@@ -730,25 +767,25 @@ getExtraLoadDeps loadAllDeps sourceMap targets =
getDeps :: PackageName -> [PackageName]
getDeps name =
case M.lookup name sourceMap of
- Just (PSLocal lp) -> M.keys (packageDeps (lpPackage lp))
+ Just (PSFiles lp _) -> M.keys (packageDeps (lpPackage lp)) -- FIXME just Local?
_ -> []
- go :: PackageName -> State (Map PackageName (Maybe (Path Abs File, SimpleTarget))) Bool
+ go :: PackageName -> State (Map PackageName (Maybe (Path Abs File, Target))) Bool
go name = do
cache <- get
case (M.lookup name cache, M.lookup name sourceMap) of
(Just (Just _), _) -> return True
(Just Nothing, _) | not loadAllDeps -> return False
- (_, Just (PSLocal lp)) -> do
+ (_, Just (PSFiles lp _)) -> do
let deps = M.keys (packageDeps (lpPackage lp))
shouldLoad <- liftM or $ mapM go deps
if shouldLoad
then do
- modify (M.insert name (Just (lpCabalFile lp, STLocalComps (S.singleton CLib))))
+ modify (M.insert name (Just (lpCabalFile lp, TargetComps (S.singleton CLib))))
return True
else do
modify (M.insert name Nothing)
return False
- (_, Just PSUpstream{}) -> return loadAllDeps
+ (_, Just PSIndex{}) -> return loadAllDeps
(_, _) -> return False
preprocessCabalMacros :: MonadIO m => [GhciPkgInfo] -> Path Abs File -> m [String]
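getExtraLoadDeps above walks the dependency graph with a memoizing State traversal: a local package is pulled into the GHCi load plan when some chain of its dependencies leads back to one of the requested targets. A simplified, self-contained version of that traversal, not part of this patch; it uses mtl's State, drops the loadAllDeps and PSIndex cases, and assumes an acyclic dependency map:

import Control.Monad.State (State, get, modify)
import qualified Data.Map as Map

-- deps maps each package to its direct dependencies; targets are the
-- packages already requested. Results are cached in the State map.
shouldLoad :: Ord k => Map.Map k [k] -> [k] -> k -> State (Map.Map k Bool) Bool
shouldLoad deps targets name = do
  cache <- get
  case Map.lookup name cache of
    Just answer -> return answer
    Nothing
      | name `elem` targets -> record True
      | otherwise -> do
          results <- mapM (shouldLoad deps targets) (Map.findWithDefault [] name deps)
          record (or results)
  where
    record answer = modify (Map.insert name answer) >> return answer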
@@ -773,21 +810,20 @@ setScriptPerms fp = do
]
#endif
-unionSimpleTargets :: Ord k => Map k SimpleTarget -> Map k SimpleTarget -> Map k SimpleTarget
-unionSimpleTargets = M.unionWith $ \l r ->
+unionTargets :: Ord k => Map k Target -> Map k Target -> Map k Target
+unionTargets = M.unionWith $ \l r ->
case (l, r) of
- (STUnknown, _) -> r
- (STNonLocal, _) -> r
- (STLocalComps sl, STLocalComps sr) -> STLocalComps (S.union sl sr)
- (STLocalComps _, STLocalAll) -> STLocalAll
- (STLocalComps _, _) -> l
- (STLocalAll, _) -> STLocalAll
-
-hasLocalComp :: (NamedComponent -> Bool) -> SimpleTarget -> Bool
+ (TargetAll Dependency, _) -> r
+ (TargetComps sl, TargetComps sr) -> TargetComps (S.union sl sr)
+ (TargetComps _, TargetAll ProjectPackage) -> TargetAll ProjectPackage
+ (TargetComps _, _) -> l
+ (TargetAll ProjectPackage, _) -> TargetAll ProjectPackage
+
+hasLocalComp :: (NamedComponent -> Bool) -> Target -> Bool
hasLocalComp p t =
case t of
- STLocalComps s -> any p (S.toList s)
- STLocalAll -> True
+ TargetComps s -> any p (S.toList s)
+ TargetAll ProjectPackage -> True
_ -> False
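unionTargets merges per-package target requests: a request for all of a project package's components wins, a dependency-only request yields, and two explicit component sets are unioned. A simplified, self-contained version of that merge, not part of this patch, with a two-constructor stand-in for Target that drops the ProjectPackage/Dependency distinction:

import qualified Data.Map as Map
import qualified Data.Set as Set

data SimpleTarget = AllComps | SomeComps (Set.Set String)
  deriving (Eq, Show)

-- Point-wise union of two target maps: "everything" dominates,
-- otherwise the requested component sets are combined.
unionSimple :: Ord k => Map.Map k SimpleTarget -> Map.Map k SimpleTarget -> Map.Map k SimpleTarget
unionSimple = Map.unionWith merge
  where
    merge AllComps _ = AllComps
    merge _ AllComps = AllComps
    merge (SomeComps a) (SomeComps b) = SomeComps (Set.union a b)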
@@ -816,7 +852,7 @@ getPackageOptsAndTargetFiles pwd pkg = do
-- FIXME: use compilerOptionsCabalFlag
map ("--ghc-option=" ++) (concatMap (ghcOptions . snd) (ghciPkgOpts pkg))
, mapMaybe
- (fmap toFilePath . stripDir pwd)
+ (fmap toFilePath . stripProperPrefix pwd)
(S.toList (ghciPkgCFiles pkg) <> S.toList (ghciPkgModFiles pkg) <>
[paths_foo | paths_foo_exists]))
diff --git a/src/Stack/Ghci/Script.hs b/src/Stack/Ghci/Script.hs
index 2cfdfec..41a37a1 100644
--- a/src/Stack/Ghci/Script.hs
+++ b/src/Stack/Ghci/Script.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Ghci.Script
@@ -5,7 +6,6 @@ module Stack.Ghci.Script
, ModuleName
, cmdAdd
- , cmdAddFile
, cmdCdGhc
, cmdModule
@@ -14,18 +14,14 @@ module Stack.Ghci.Script
, scriptToFile
) where
-import Control.Applicative
import Data.ByteString.Lazy (ByteString)
import Data.ByteString.Builder
-import Data.Monoid
import Data.List
-import Data.Set (Set)
import qualified Data.Set as S
-import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8Builder)
import Path
-import Prelude -- Fix redundant imports warnings
-import System.IO
+import Stack.Prelude hiding (ByteString)
+import System.IO (hSetBuffering, BufferMode (..), hSetBinaryMode)
import Distribution.ModuleName hiding (toFilePath)
@@ -36,18 +32,14 @@ instance Monoid GhciScript where
(GhciScript xs) `mappend` (GhciScript ys) = GhciScript (ys <> xs)
data GhciCommand
- = Add (Set ModuleName)
- | AddFile (Path Abs File)
+ = Add (Set (Either ModuleName (Path Abs File)))
| CdGhc (Path Abs Dir)
| Module (Set ModuleName)
deriving (Show)
-cmdAdd :: Set ModuleName -> GhciScript
+cmdAdd :: Set (Either ModuleName (Path Abs File)) -> GhciScript
cmdAdd = GhciScript . (:[]) . Add
-cmdAddFile :: Path Abs File -> GhciScript
-cmdAddFile = GhciScript . (:[]) . AddFile
-
cmdCdGhc :: Path Abs Dir -> GhciScript
cmdCdGhc = GhciScript . (:[]) . CdGhc
@@ -82,13 +74,11 @@ commandToBuilder (Add modules)
| S.null modules = mempty
| otherwise =
fromText ":add "
- <> mconcat (intersperse (fromText " ")
- $ (stringUtf8 . quoteFileName . mconcat . intersperse "." . components) <$> S.toAscList modules)
+ <> mconcat (intersperse (fromText " ") $
+ fmap (stringUtf8 . quoteFileName . either (mconcat . intersperse "." . components) toFilePath)
+ (S.toAscList modules))
<> fromText "\n"
-commandToBuilder (AddFile path) =
- fromText ":add " <> stringUtf8 (quoteFileName (toFilePath path)) <> fromText "\n"
-
commandToBuilder (CdGhc path) =
fromText ":cd-ghc " <> stringUtf8 (quoteFileName (toFilePath path)) <> fromText "\n"
diff --git a/src/Stack/Hoogle.hs b/src/Stack/Hoogle.hs
index 435c38d..db7215a 100644
--- a/src/Stack/Hoogle.hs
+++ b/src/Stack/Hoogle.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
@@ -7,74 +8,57 @@ module Stack.Hoogle
( hoogleCmd
) where
-import Control.Exception
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
import qualified Data.ByteString.Char8 as S8
+import Data.Char (isSpace)
import Data.List (find)
-import Data.Monoid
import qualified Data.Set as Set
+import qualified Data.Text as T
import Lens.Micro
-import Path
-import Path.IO
+import Path.IO hiding (findExecutable)
import qualified Stack.Build
import Stack.Fetch
import Stack.Runners
import Stack.Types.Config
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
-import Stack.Types.StackT
import Stack.Types.Version
import System.Exit
-import System.Process.Read (resetExeCache, tryProcessStdout)
+import System.Process.Read (resetExeCache, tryProcessStdout, findExecutable)
import System.Process.Run
-- | Hoogle command.
hoogleCmd :: ([String],Bool,Bool) -> GlobalOpts -> IO ()
-hoogleCmd (args,setup,rebuild) go = withBuildConfig go pathToHaddocks
+hoogleCmd (args,setup,rebuild) go = withBuildConfig go $ do
+ hooglePath <- ensureHoogleInPath
+ generateDbIfNeeded hooglePath
+ runHoogle hooglePath args
where
- pathToHaddocks :: StackT EnvConfig IO ()
- pathToHaddocks = do
- hoogleIsInPath <- checkHoogleInPath
- if hoogleIsInPath
- then haddocksToDb
- else do
- if setup
- then do
- $logWarn
- "Hoogle isn't installed or is too old. Automatically installing (use --no-setup to disable) ..."
- installHoogle
- haddocksToDb
- else do
- $logError
- "Hoogle isn't installed or is too old. Not installing it due to --no-setup."
- bail
- haddocksToDb :: StackT EnvConfig IO ()
- haddocksToDb = do
+ generateDbIfNeeded :: Path Abs File -> RIO EnvConfig ()
+ generateDbIfNeeded hooglePath = do
databaseExists <- checkDatabaseExists
if databaseExists && not rebuild
- then runHoogle args
+ then return ()
else if setup || rebuild
then do
- $logWarn
+ logWarn
(if rebuild
then "Rebuilding database ..."
else "No Hoogle database yet. Automatically building haddocks and hoogle database (use --no-setup to disable) ...")
buildHaddocks
- $logInfo "Built docs."
- generateDb
- $logInfo "Generated DB."
- runHoogle args
+ logInfo "Built docs."
+ generateDb hooglePath
+ logInfo "Generated DB."
else do
- $logError
+ logError
"No Hoogle database. Not building one due to --no-setup"
bail
- generateDb :: StackT EnvConfig IO ()
- generateDb = do
+ generateDb :: Path Abs File -> RIO EnvConfig ()
+ generateDb hooglePath = do
do dir <- hoogleRoot
createDirIfMissing True dir
- runHoogle ["generate", "--local"]
- buildHaddocks :: StackT EnvConfig IO ()
+ runHoogle hooglePath ["generate", "--local"]
+ buildHaddocks :: RIO EnvConfig ()
buildHaddocks =
liftIO
(catch
@@ -90,12 +74,12 @@ hoogleCmd (args,setup,rebuild) go = withBuildConfig go pathToHaddocks
defaultBuildOptsCLI))
(\(_ :: ExitCode) ->
return ()))
- installHoogle :: StackT EnvConfig IO ()
+ hooglePackageName = $(mkPackageName "hoogle")
+ hoogleMinVersion = $(mkVersion "5.0")
+ hoogleMinIdent =
+ PackageIdentifier hooglePackageName hoogleMinVersion
+ installHoogle :: RIO EnvConfig ()
installHoogle = do
- let hooglePackageName = $(mkPackageName "hoogle")
- hoogleMinVersion = $(mkVersion "5.0")
- hoogleMinIdent =
- PackageIdentifier hooglePackageName hoogleMinVersion
hooglePackageIdentifier <-
do (_,_,resolved) <-
resolvePackagesAllowMissing
@@ -121,11 +105,11 @@ hoogleCmd (args,setup,rebuild) go = withBuildConfig go pathToHaddocks
_ -> Left hoogleMinIdent)
case hooglePackageIdentifier of
Left{} ->
- $logInfo
+ logInfo
("Minimum " <> packageIdentifierText hoogleMinIdent <>
" is not in your index. Installing the minimum version.")
Right ident ->
- $logInfo
+ logInfo
("Minimum version is " <> packageIdentifierText hoogleMinIdent <>
". Found acceptable " <>
packageIdentifierText ident <>
@@ -151,8 +135,8 @@ hoogleCmd (args,setup,rebuild) go = withBuildConfig go pathToHaddocks
case e of
ExitSuccess -> resetExeCache menv
_ -> throwIO e))
- runHoogle :: [String] -> StackT EnvConfig IO ()
- runHoogle hoogleArgs = do
+ runHoogle :: Path Abs File -> [String] -> RIO EnvConfig ()
+ runHoogle hooglePath hoogleArgs = do
config <- view configL
menv <- liftIO $ configEnvOverride config envSettings
dbpath <- hoogleDatabasePath
@@ -160,27 +144,64 @@ hoogleCmd (args,setup,rebuild) go = withBuildConfig go pathToHaddocks
runCmd
Cmd
{ cmdDirectoryToRunIn = Nothing
- , cmdCommandToRun = "hoogle"
+ , cmdCommandToRun = toFilePath hooglePath
, cmdEnvOverride = menv
, cmdCommandLineArguments = hoogleArgs ++ databaseArg
}
Nothing
- bail :: StackT EnvConfig IO ()
+ bail :: RIO EnvConfig a
bail = liftIO (exitWith (ExitFailure (-1)))
checkDatabaseExists = do
path <- hoogleDatabasePath
liftIO (doesFileExist path)
- checkHoogleInPath = do
+ ensureHoogleInPath :: RIO EnvConfig (Path Abs File)
+ ensureHoogleInPath = do
config <- view configL
menv <- liftIO $ configEnvOverride config envSettings
- result <- tryProcessStdout Nothing menv "hoogle" ["--numeric-version"]
- case fmap (reads . S8.unpack) result of
- Right [(ver :: Double,_)] -> return (ver >= 5.0)
- _ -> return False
+ mhooglePath <- findExecutable menv "hoogle"
+ eres <- case mhooglePath of
+ Nothing -> return $ Left "Hoogle isn't installed."
+ Just hooglePath -> do
+ result <- tryProcessStdout Nothing menv (toFilePath hooglePath) ["--numeric-version"]
+ let unexpectedResult got = Left $ T.concat
+ [ "'"
+ , T.pack (toFilePath hooglePath)
+ , " --numeric-version' did not respond with expected value. Got: "
+ , got
+ ]
+ return $ case result of
+ Left err -> unexpectedResult $ T.pack (show err)
+ Right bs -> case parseVersionFromString (takeWhile (not . isSpace) (S8.unpack bs)) of
+ Nothing -> unexpectedResult $ T.pack (S8.unpack bs)
+ Just ver
+ | ver >= hoogleMinVersion -> Right hooglePath
+ | otherwise -> Left $ T.concat
+ [ "Installed Hoogle is too old, "
+ , T.pack (toFilePath hooglePath)
+ , " is version "
+ , versionText ver
+ , " but >= 5.0 is required."
+ ]
+ case eres of
+ Right hooglePath -> return hooglePath
+ Left err
+ | setup -> do
+ logWarn $ err <> " Automatically installing (use --no-setup to disable) ..."
+ installHoogle
+ mhooglePath' <- findExecutable menv "hoogle"
+ case mhooglePath' of
+ Just hooglePath -> return hooglePath
+ Nothing -> do
+ logWarn "Couldn't find hoogle in path after installing. This shouldn't happen, may be a bug."
+ bail
+ | otherwise -> do
+ logWarn $ err <> " Not installing it due to --no-setup."
+ bail
envSettings =
EnvSettings
{ esIncludeLocals = True
, esIncludeGhcPackagePath = True
, esStackExe = True
, esLocaleUtf8 = False
+ , esKeepGhcRts = False
}
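ensureHoogleInPath replaces the old reads-to-Double check with a real executable lookup plus a proper version parse: hoogle must be on the PATH and report at least version 5.0, otherwise it is installed (unless --no-setup was given). A standalone sketch of the version gate, not part of this patch, using Data.Version and readProcess instead of Stack's parseVersionFromString and process helpers:

import Data.Char (isSpace)
import Data.Version (Version, makeVersion, parseVersion)
import System.Process (readProcess)
import Text.ParserCombinators.ReadP (readP_to_S)

-- Run "hoogle --numeric-version", parse the leading version number,
-- and require at least 5.0.
hoogleAtLeast5 :: FilePath -> IO (Either String Version)
hoogleAtLeast5 hooglePath = do
  out <- readProcess hooglePath ["--numeric-version"] ""
  let str = takeWhile (not . isSpace) out
  case [ v | (v, "") <- readP_to_S parseVersion str ] of
    (v:_)
      | v >= makeVersion [5, 0] -> return (Right v)
      | otherwise -> return (Left ("hoogle " ++ str ++ " is too old, >= 5.0 is required"))
    _ -> return (Left ("could not parse a version from: " ++ out))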
diff --git a/src/Stack/IDE.hs b/src/Stack/IDE.hs
index 9457791..1763a7b 100644
--- a/src/Stack/IDE.hs
+++ b/src/Stack/IDE.hs
@@ -1,7 +1,7 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
-- | Functions for IDEs.
@@ -10,44 +10,39 @@ module Stack.IDE
, listTargets
) where
-import Control.Monad.Logger
-import Control.Monad.Reader
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as T
-import Stack.Build.Source (getLocalPackageViews)
-import Stack.Build.Target (LocalPackageView(..))
import Stack.Config (getLocalPackages)
-import Stack.Package (findOrGenerateCabalFile)
+import Stack.Package (readPackageUnresolvedDir, gpdPackageName)
+import Stack.Prelude
import Stack.Types.Config
import Stack.Types.Package
import Stack.Types.PackageName
-import Stack.Types.StackT
-- | List the packages inside the current project.
-listPackages :: (StackM env m, HasEnvConfig env) => m ()
+listPackages :: HasEnvConfig env => RIO env ()
listPackages = do
-- TODO: Instead of setting up an entire EnvConfig only to look up the package directories,
-- make do with a Config (and the Project inside) and use resolvePackageEntry to get
-- the directory.
- packageDirs <- liftM Map.keys getLocalPackages
+ packageDirs <- liftM (map lpvRoot . Map.elems . lpProject) getLocalPackages
forM_ packageDirs $ \dir -> do
- cabalfp <- findOrGenerateCabalFile dir
- pkgName <- parsePackageNameFromFilePath cabalfp
- ($logInfo . packageNameText) pkgName
+ (gpd, _) <- readPackageUnresolvedDir dir False
+ (logInfo . packageNameText) (gpdPackageName gpd)
-- | List the targets in the current project.
-listTargets :: (StackM env m, HasEnvConfig env) => m ()
+listTargets :: HasEnvConfig env => RIO env ()
listTargets =
- do rawLocals <- getLocalPackageViews
- $logInfo
+ do rawLocals <- lpProject <$> getLocalPackages
+ logInfo
(T.intercalate
"\n"
(map
renderPkgComponent
(concatMap
toNameAndComponent
- (Map.toList (Map.map fst rawLocals)))))
+ (Map.toList rawLocals))))
where
toNameAndComponent (pkgName,view') =
map (pkgName, ) (Set.toList (lpvComponents view'))
diff --git a/src/Stack/Image.hs b/src/Stack/Image.hs
index 65c7d0d..9e89204 100644
--- a/src/Stack/Image.hs
+++ b/src/Stack/Image.hs
@@ -1,3 +1,4 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
@@ -11,32 +12,26 @@ module Stack.Image
imgCmdName, imgDockerCmdName, imgOptsFromMonoid)
where
-import Control.Exception.Lifted hiding (finally)
-import Control.Monad
-import Control.Monad.Catch hiding (bracket)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
+import qualified Data.ByteString as B
import Data.Char (toLower)
import qualified Data.Map.Strict as Map
-import Data.Maybe
-import Data.Typeable
-import Data.Text (Text)
+import Data.Text.Encoding (encodeUtf8)
import qualified Data.Text as T
import Path
import Path.Extra
import Path.IO
-import Stack.Constants
+import Stack.Constants.Config
import Stack.PrettyPrint
import Stack.Types.Config
import Stack.Types.Image
-import Stack.Types.StackT
import System.Process.Run
-- | Stages the executables & additional content in a staging
-- directory under '.stack-work'
stageContainerImageArtifacts
- :: (StackM env m, HasEnvConfig env)
- => Maybe (Path Abs Dir) -> [Text] -> m ()
+ :: HasEnvConfig env
+ => Maybe (Path Abs Dir) -> [Text] -> RIO env ()
stageContainerImageArtifacts mProjectRoot imageNames = do
config <- view configL
forM_
@@ -48,7 +43,7 @@ stageContainerImageArtifacts mProjectRoot imageNames = do
(\(idx,opts) ->
do imageDir <-
imageStagingDir (fromMaybeProjectRoot mProjectRoot) idx
- ignoringAbsence (removeDirRecur imageDir)
+ liftIO (ignoringAbsence (removeDirRecur imageDir))
ensureDir imageDir
stageExesInDir opts imageDir
syncAddContentToDir opts imageDir)
@@ -58,8 +53,8 @@ stageContainerImageArtifacts mProjectRoot imageNames = do
-- extended with an ENTRYPOINT specified for each `entrypoint` listed
-- in the config file.
createContainerImageFromStage
- :: (StackM env m, HasConfig env)
- => Maybe (Path Abs Dir) -> [Text] -> m ()
+ :: HasConfig env
+ => Maybe (Path Abs Dir) -> [Text] -> RIO env ()
createContainerImageFromStage mProjectRoot imageNames = do
config <- view configL
forM_
@@ -84,22 +79,22 @@ filterImages names = filter (imageNameFound . imgDockerImageName)
-- | Stage all the Package executables in the usr/local/bin
-- subdirectory of a temp directory.
stageExesInDir
- :: (StackM env m, HasEnvConfig env)
- => ImageDockerOpts -> Path Abs Dir -> m ()
+ :: HasEnvConfig env
+ => ImageDockerOpts -> Path Abs Dir -> RIO env ()
stageExesInDir opts dir = do
srcBinPath <- fmap (</> $(mkRelDir "bin")) installationRootLocal
let destBinPath = dir </> $(mkRelDir "usr/local/bin")
ensureDir destBinPath
case imgDockerExecutables opts of
Nothing -> do
- $logInfo ""
- $logInfo "Note: 'executables' not specified for a image container, so every executable in the project's local bin dir will be used."
- mcontents <- forgivingAbsence $ listDir srcBinPath
+ logInfo ""
+            logInfo "Note: 'executables' not specified for an image container, so every executable in the project's local bin dir will be used."
+ mcontents <- liftIO $ forgivingAbsence $ listDir srcBinPath
case mcontents of
Just (files, dirs)
- | not (null files) || not (null dirs) -> copyDirRecur srcBinPath destBinPath
- _ -> $prettyWarn "The project's local bin dir contains no files, so no executables will be added to the docker image."
- $logInfo ""
+ | not (null files) || not (null dirs) -> liftIO $ copyDirRecur srcBinPath destBinPath
+ _ -> prettyWarn "The project's local bin dir contains no files, so no executables will be added to the docker image."
+ logInfo ""
Just exes ->
forM_
@@ -112,8 +107,8 @@ stageExesInDir opts dir = do
-- | Add any additional files into the temp directory, respecting the
-- (Source, Destination) mapping.
syncAddContentToDir
- :: (StackM env m, HasEnvConfig env)
- => ImageDockerOpts -> Path Abs Dir -> m ()
+ :: HasEnvConfig env
+ => ImageDockerOpts -> Path Abs Dir -> RIO env ()
syncAddContentToDir opts dir = do
root <- view projectRootL
let imgAdd = imgDockerAdd opts
@@ -123,7 +118,7 @@ syncAddContentToDir opts dir = do
do sourcePath <- resolveDir root source
let destFullPath = dir </> dropRoot destPath
ensureDir destFullPath
- copyDirRecur sourcePath destFullPath)
+ liftIO $ copyDirRecur sourcePath destFullPath)
-- | Derive an image name from the project directory.
imageName
@@ -133,17 +128,17 @@ imageName = map toLower . toFilePathNoTrailingSep . dirname
-- | Create a general purpose docker image from the temporary
-- directory of executables & static content.
createDockerImage
- :: (StackM env m, HasConfig env)
- => ImageDockerOpts -> Path Abs Dir -> m ()
+ :: HasConfig env
+ => ImageDockerOpts -> Path Abs Dir -> RIO env ()
createDockerImage dockerConfig dir = do
menv <- getMinimalEnvOverride
case imgDockerBase dockerConfig of
Nothing -> throwM StackImageDockerBaseUnspecifiedException
Just base -> do
liftIO
- (writeFile
+ (B.writeFile
(toFilePath (dir </> $(mkRelFile "Dockerfile")))
- (unlines ["FROM " ++ base, "ADD ./ /"]))
+ (encodeUtf8 (T.pack (unlines ["FROM " ++ base, "ADD ./ /"]))))
let args =
[ "build"
, "-t"
@@ -155,8 +150,8 @@ createDockerImage dockerConfig dir = do
-- | Extend the general purpose docker image with entrypoints (if specified).
extendDockerImageWithEntrypoint
- :: (StackM env m, HasConfig env)
- => ImageDockerOpts -> Path Abs Dir -> m ()
+ :: HasConfig env
+ => ImageDockerOpts -> Path Abs Dir -> RIO env ()
extendDockerImageWithEntrypoint dockerConfig dir = do
menv <- getMinimalEnvOverride
let dockerImageName =
@@ -171,14 +166,14 @@ extendDockerImageWithEntrypoint dockerConfig dir = do
eps
(\ep ->
do liftIO
- (writeFile
+ (B.writeFile
(toFilePath
(dir </> $(mkRelFile "Dockerfile")))
- (unlines
+ (encodeUtf8 (T.pack (unlines
[ "FROM " ++ dockerImageName
, "ENTRYPOINT [\"/usr/local/bin/" ++
ep ++ "\"]"
- , "CMD []"]))
+ , "CMD []"]))))
callProcess
(Cmd
Nothing
@@ -192,7 +187,7 @@ extendDockerImageWithEntrypoint dockerConfig dir = do
-- | Fail with friendly error if project root not set.
fromMaybeProjectRoot :: Maybe (Path Abs Dir) -> Path Abs Dir
fromMaybeProjectRoot =
- fromMaybe (throw StackImageCannotDetermineProjectRootException)
+ fromMaybe (impureThrow StackImageCannotDetermineProjectRootException)
-- | The command name for dealing with images.
imgCmdName
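createDockerImage and extendDockerImageWithEntrypoint now write the generated Dockerfiles as UTF-8 encoded ByteStrings (B.writeFile plus encodeUtf8) instead of using the Prelude's writeFile. The file contents are unchanged; for reference, a sketch of what gets written, not part of this patch, using plain Strings:

-- The base image simply copies the staged directory into the root
-- of the container.
baseDockerfile :: String -> String
baseDockerfile base = unlines ["FROM " ++ base, "ADD ./ /"]

-- Each configured entrypoint extends that image with an ENTRYPOINT
-- pointing at the staged executable.
entrypointDockerfile :: String -> String -> String
entrypointDockerfile imageName exe = unlines
  [ "FROM " ++ imageName
  , "ENTRYPOINT [\"/usr/local/bin/" ++ exe ++ "\"]"
  , "CMD []"
  ]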
diff --git a/src/Stack/Init.hs b/src/Stack/Init.hs
index fdd89a2..76835dc 100644
--- a/src/Stack/Init.hs
+++ b/src/Stack/Init.hs
@@ -1,34 +1,26 @@
+{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
-{-# LANGUAGE TemplateHaskell #-}
module Stack.Init
( initProject
, InitOpts (..)
) where
-import Control.Exception (assert)
-import Control.Exception.Safe (catchAny)
-import Control.Monad
-import Control.Monad.Catch (throwM)
-import Control.Monad.IO.Class
-import Control.Monad.Logger
+import Stack.Prelude
import qualified Data.ByteString.Builder as B
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Lazy as L
import qualified Data.Foldable as F
-import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import qualified Data.IntMap as IntMap
import Data.List (intercalate, intersect,
maximumBy)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NonEmpty
-import